Chromium Code Reviews

Side by Side Diff: content/common/gpu/media/vaapi_h264_decoder.cc

Issue 14914009: VAVDA: Redesign stage 1. (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src
Patch Set: Add CONTENT_EXPORT to VaapiWrapper (created 7 years, 6 months ago)
OLD | NEW
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include <dlfcn.h>
6
7 #include <algorithm> 5 #include <algorithm>
8 #include <limits> 6 #include <limits>
9 7
10 #include "base/bind.h" 8 #include "base/bind.h"
11 #include "base/bind_helpers.h" 9 #include "base/bind_helpers.h"
12 #include "base/metrics/histogram.h"
13 #include "base/stl_util.h" 10 #include "base/stl_util.h"
14 #include "content/common/gpu/media/vaapi_h264_decoder.h" 11 #include "content/common/gpu/media/vaapi_h264_decoder.h"
15 #include "third_party/libva/va/va.h"
16 #include "third_party/libva/va/va_x11.h"
17 #include "ui/gl/gl_bindings.h"
18 #include "ui/gl/scoped_binders.h"
19
20 namespace {
21
22 enum VAVDAH264DecoderFailure {
23 FRAME_MBS_ONLY_FLAG_NOT_ONE = 0,
24 GAPS_IN_FRAME_NUM = 1,
25 MID_STREAM_RESOLUTION_CHANGE = 2,
26 INTERLACED_STREAM = 3,
27 VAAPI_ERROR = 4,
28 VAVDA_H264_DECODER_FAILURES_MAX,
29 };
30
31 static void ReportToUMA(VAVDAH264DecoderFailure failure) {
32 UMA_HISTOGRAM_ENUMERATION("Media.VAVDAH264.DecoderFailure",
33 failure,
34 VAVDA_H264_DECODER_FAILURES_MAX);
35 }
36
37 } // namespace
38
39 #define LOG_VA_ERROR_AND_RECORD_UMA(va_res, err_msg) \
40 do { \
41 DVLOG(1) << err_msg \
42 << " VA error: " << VAAPI_ErrorStr(va_res); \
43 ReportToUMA(VAAPI_ERROR); \
44 } while (0)
45
46 #define VA_LOG_ON_ERROR(va_res, err_msg) \
47 do { \
48 if ((va_res) != VA_STATUS_SUCCESS) { \
49 LOG_VA_ERROR_AND_RECORD_UMA(va_res, err_msg); \
50 } \
51 } while (0)
52
53 #define VA_SUCCESS_OR_RETURN(va_res, err_msg, ret) \
54 do { \
55 if ((va_res) != VA_STATUS_SUCCESS) { \
56 LOG_VA_ERROR_AND_RECORD_UMA(va_res, err_msg); \
57 return (ret); \
58 } \
59 } while (0)
60 12
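For context on the block being deleted above: these macros were the old decoder's check-log-and-bail idiom around raw libva calls, a job the redesign hands off to VaapiWrapper (not shown in this file). A minimal, self-contained sketch of the same idiom, using stand-in types instead of real libva (VAStatus here is a plain int, and FakeVaInitialize is purely illustrative):

#include <iostream>

typedef int VAStatus;                        // stand-in for libva's status type
static const VAStatus VA_STATUS_SUCCESS = 0; // stand-in for libva's success code

#define VA_SUCCESS_OR_RETURN(va_res, err_msg, ret)                      \
  do {                                                                  \
    if ((va_res) != VA_STATUS_SUCCESS) {                                \
      std::cerr << (err_msg) << " VA error: " << (va_res) << std::endl; \
      return (ret);                                                     \
    }                                                                   \
  } while (0)

// Hypothetical call standing in for a real libva entry point such as
// vaInitialize(); it exists only to make this sketch compile.
static VAStatus FakeVaInitialize() { return VA_STATUS_SUCCESS; }

bool InitializeLikeTheOldCodeDid() {
  VAStatus va_res = FakeVaInitialize();
  VA_SUCCESS_OR_RETURN(va_res, "vaInitialize failed", false);
  return true;
}

int main() {
  return InitializeLikeTheOldCodeDid() ? 0 : 1;
}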
61 namespace content { 13 namespace content {
62 14
63 void *vaapi_handle = NULL; 15 // Decode surface, used for decoding and reference. input_id comes from the client
64 void *vaapi_x11_handle = NULL; 16 // and is associated with the surface that was produced as the result
65 17 // of decoding a bitstream buffer with that id.
66 typedef VADisplay (*VaapiGetDisplay)(Display *dpy);
67 typedef int (*VaapiDisplayIsValid)(VADisplay dpy);
68 typedef VAStatus (*VaapiInitialize)(VADisplay dpy,
69 int *major_version,
70 int *minor_version);
71 typedef VAStatus (*VaapiTerminate)(VADisplay dpy);
72 typedef VAStatus (*VaapiGetConfigAttributes)(VADisplay dpy,
73 VAProfile profile,
74 VAEntrypoint entrypoint,
75 VAConfigAttrib *attrib_list,
76 int num_attribs);
77 typedef VAStatus (*VaapiCreateConfig)(VADisplay dpy,
78 VAProfile profile,
79 VAEntrypoint entrypoint,
80 VAConfigAttrib *attrib_list,
81 int num_attribs,
82 VAConfigID *config_id);
83 typedef VAStatus (*VaapiDestroyConfig)(VADisplay dpy, VAConfigID config_id);
84 typedef VAStatus (*VaapiCreateSurfaces)(VADisplay dpy,
85 int width,
86 int height,
87 int format,
88 int num_surfaces,
89 VASurfaceID *surfaces);
90 typedef VAStatus (*VaapiDestroySurfaces)(VADisplay dpy,
91 VASurfaceID *surfaces,
92 int num_surfaces);
93 typedef VAStatus (*VaapiCreateContext)(VADisplay dpy,
94 VAConfigID config_id,
95 int picture_width,
96 int picture_height,
97 int flag,
98 VASurfaceID *render_targets,
99 int num_render_targets,
100 VAContextID *context);
101 typedef VAStatus (*VaapiDestroyContext)(VADisplay dpy, VAContextID context);
102 typedef VAStatus (*VaapiPutSurface)(VADisplay dpy,
103 VASurfaceID surface,
104 Drawable draw,
105 short srcx,
106 short srcy,
107 unsigned short srcw,
108 unsigned short srch,
109 short destx,
110 short desty,
111 unsigned short destw,
112 unsigned short desth,
113 VARectangle *cliprects,
114 unsigned int number_cliprects,
115 unsigned int flags);
116 typedef VAStatus (*VaapiSyncSurface)(VADisplay dpy, VASurfaceID render_target);
117 typedef VAStatus (*VaapiBeginPicture)(VADisplay dpy,
118 VAContextID context,
119 VASurfaceID render_target);
120 typedef VAStatus (*VaapiRenderPicture)(VADisplay dpy,
121 VAContextID context,
122 VABufferID *buffers,
123 int num_buffers);
124 typedef VAStatus (*VaapiEndPicture)(VADisplay dpy, VAContextID context);
125 typedef VAStatus (*VaapiCreateBuffer)(VADisplay dpy,
126 VAContextID context,
127 VABufferType type,
128 unsigned int size,
129 unsigned int num_elements,
130 void *data,
131 VABufferID *buf_id);
132 typedef VAStatus (*VaapiDestroyBuffer)(VADisplay dpy, VABufferID buffer_id);
133 typedef const char* (*VaapiErrorStr)(VAStatus error_status);
134
135 #define VAAPI_SYM(name, handle) Vaapi##name VAAPI_##name = NULL
136
137 VAAPI_SYM(GetDisplay, vaapi_x11_handle);
138 VAAPI_SYM(DisplayIsValid, vaapi_handle);
139 VAAPI_SYM(Initialize, vaapi_handle);
140 VAAPI_SYM(Terminate, vaapi_handle);
141 VAAPI_SYM(GetConfigAttributes, vaapi_handle);
142 VAAPI_SYM(CreateConfig, vaapi_handle);
143 VAAPI_SYM(DestroyConfig, vaapi_handle);
144 VAAPI_SYM(CreateSurfaces, vaapi_handle);
145 VAAPI_SYM(DestroySurfaces, vaapi_handle);
146 VAAPI_SYM(CreateContext, vaapi_handle);
147 VAAPI_SYM(DestroyContext, vaapi_handle);
148 VAAPI_SYM(PutSurface, vaapi_x11_handle);
149 VAAPI_SYM(SyncSurface, vaapi_x11_handle);
150 VAAPI_SYM(BeginPicture, vaapi_handle);
151 VAAPI_SYM(RenderPicture, vaapi_handle);
152 VAAPI_SYM(EndPicture, vaapi_handle);
153 VAAPI_SYM(CreateBuffer, vaapi_handle);
154 VAAPI_SYM(DestroyBuffer, vaapi_handle);
155 VAAPI_SYM(ErrorStr, vaapi_handle);
156
157 // static
158 bool VaapiH264Decoder::pre_sandbox_init_done_ = false;
159
160 class VaapiH264Decoder::DecodeSurface { 18 class VaapiH264Decoder::DecodeSurface {
161 public: 19 public:
162 DecodeSurface(const GLXFBConfig& fb_config, 20 DecodeSurface(int poc,
163 Display* x_display, 21 int32 input_id,
164 VADisplay va_display, 22 const scoped_refptr<VASurface>& va_surface);
165 const base::Callback<bool(void)>& make_context_current, 23 DecodeSurface(int poc, const scoped_refptr<DecodeSurface>& dec_surface);
166 VASurfaceID va_surface_id,
167 int32 picture_buffer_id,
168 uint32 texture_id,
169 int width, int height);
170 ~DecodeSurface(); 24 ~DecodeSurface();
171 25
172 VASurfaceID va_surface_id() { 26 int poc() {
173 return va_surface_id_; 27 return poc_;
174 } 28 }
175 29
176 int32 picture_buffer_id() { 30 scoped_refptr<VASurface> va_surface() {
177 return picture_buffer_id_; 31 return va_surface_;
178 }
179
180 uint32 texture_id() {
181 return texture_id_;
182 }
183
184 bool available() {
185 return available_;
186 }
187
188 bool used() {
189 return used_;
190 }
191
192 void set_used(bool used) {
193 DCHECK(!available_);
194 used_ = used;
195 }
196
197 bool at_client() {
198 return at_client_;
199 }
200
201 void set_at_client(bool at_client) {
202 DCHECK(!available_);
203 at_client_ = at_client;
204 } 32 }
205 33
206 int32 input_id() { 34 int32 input_id() {
207 return input_id_; 35 return input_id_;
208 } 36 }
209 37
210 int poc() {
211 return poc_;
212 }
213
214 // Associate the surface with |input_id| and |poc|, and make it unavailable
215 // (in use).
216 void Acquire(int32 input_id, int poc);
217
218 // Make this surface available, ready to be reused.
219 void Release();
220
221 // Has to be called before output to sync texture contents.
222 // Returns true if successful.
223 bool Sync();
224
225 private: 38 private:
226 Display* x_display_; 39 int poc_;
227 VADisplay va_display_;
228 base::Callback<bool(void)> make_context_current_;
229 VASurfaceID va_surface_id_;
230
231 // Client-provided ids.
232 int32 input_id_; 40 int32 input_id_;
233 int32 picture_buffer_id_; 41 scoped_refptr<VASurface> va_surface_;
234 uint32 texture_id_;
235
236 int width_;
237 int height_;
238
239 // Available for decoding (data no longer used for reference or displaying).
240 // TODO(posciak): this is almost surely not needed anymore. Rethink and
241 // remove if possible.
242 bool available_;
243 // Used for decoding.
244 bool used_;
245 // Whether the surface has been sent to client for display.
246 bool at_client_;
247
248 // PicOrderCount
249 int poc_;
250
251 // Pixmaps bound to this texture.
252 Pixmap x_pixmap_;
253 GLXPixmap glx_pixmap_;
254
255 DISALLOW_COPY_AND_ASSIGN(DecodeSurface);
256 }; 42 };
257 43
258 VaapiH264Decoder::DecodeSurface::DecodeSurface( 44 VaapiH264Decoder::DecodeSurface::DecodeSurface(
259 const GLXFBConfig& fb_config, 45 int poc,
260 Display* x_display, 46 int32 input_id,
261 VADisplay va_display, 47 const scoped_refptr<VASurface>& va_surface)
262 const base::Callback<bool(void)>& make_context_current, 48 : poc_(poc),
263 VASurfaceID va_surface_id, 49 input_id_(input_id),
264 int32 picture_buffer_id, 50 va_surface_(va_surface) {
265 uint32 texture_id, 51 DCHECK(va_surface_.get());
266 int width, int height)
267 : x_display_(x_display),
268 va_display_(va_display),
269 make_context_current_(make_context_current),
270 va_surface_id_(va_surface_id),
271 input_id_(0),
272 picture_buffer_id_(picture_buffer_id),
273 texture_id_(texture_id),
274 width_(width),
275 height_(height),
276 available_(false),
277 used_(false),
278 at_client_(false),
279 poc_(0),
280 x_pixmap_(0),
281 glx_pixmap_(0) {
282 // Bind the surface to a texture of the given width and height,
283 // allocating pixmaps as needed.
284 if (!make_context_current_.Run())
285 return;
286
287 gfx::ScopedTextureBinder texture_binder(GL_TEXTURE_2D, texture_id_);
288 glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
289 glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
290
291 XWindowAttributes win_attr;
292 int screen = DefaultScreen(x_display_);
293 XGetWindowAttributes(x_display_, RootWindow(x_display_, screen), &win_attr);
294 //TODO(posciak): pass the depth required by libva, not the RootWindow's depth
295 x_pixmap_ = XCreatePixmap(x_display_, RootWindow(x_display_, screen),
296 width_, height_, win_attr.depth);
297 if (!x_pixmap_) {
298 DVLOG(1) << "Failed creating an X Pixmap for TFP";
299 return;
300 }
301
302 static const int pixmap_attr[] = {
303 GLX_TEXTURE_TARGET_EXT, GLX_TEXTURE_2D_EXT,
304 GLX_TEXTURE_FORMAT_EXT, GLX_TEXTURE_FORMAT_RGB_EXT,
305 GL_NONE,
306 };
307
308 glx_pixmap_ = glXCreatePixmap(x_display_, fb_config, x_pixmap_, pixmap_attr);
309 if (!glx_pixmap_) {
310 // x_pixmap_ will be freed in the destructor.
311 DVLOG(1) << "Failed creating a GLX Pixmap for TFP";
312 return;
313 }
314
315 glXBindTexImageEXT(x_display_, glx_pixmap_, GLX_FRONT_LEFT_EXT, NULL);
316
317 available_ = true;
318 } 52 }
319 53
320 VaapiH264Decoder::DecodeSurface::~DecodeSurface() { 54 VaapiH264Decoder::DecodeSurface::~DecodeSurface() {
321 // Unbind surface from texture and deallocate resources.
322 if (glx_pixmap_ && make_context_current_.Run()) {
323 glXReleaseTexImageEXT(x_display_, glx_pixmap_, GLX_FRONT_LEFT_EXT);
324 glXDestroyPixmap(x_display_, glx_pixmap_);
325 }
326
327 if (x_pixmap_)
328 XFreePixmap(x_display_, x_pixmap_);
329 XSync(x_display_, False); // Needed to work around buggy vdpau-driver.
330 } 55 }
331 56
332 void VaapiH264Decoder::DecodeSurface::Acquire(int32 input_id, int poc) { 57 VaapiH264Decoder::VaapiH264Decoder(
333 DCHECK_EQ(available_, true); 58 VaapiWrapper* vaapi_wrapper,
334 available_ = false; 59 const OutputPicCB& output_pic_cb,
335 at_client_ = false; 60 const ReportErrorToUmaCB& report_error_to_uma_cb)
336 used_ = true; 61 : max_pic_order_cnt_lsb_(0),
337 input_id_ = input_id; 62 max_frame_num_(0),
338 poc_ = poc; 63 max_pic_num_(0),
339 } 64 max_long_term_frame_idx_(0),
340 65 curr_sps_id_(-1),
341 void VaapiH264Decoder::DecodeSurface::Release() { 66 curr_pps_id_(-1),
342 DCHECK_EQ(available_, false); 67 vaapi_wrapper_(vaapi_wrapper),
343 available_ = true; 68 output_pic_cb_(output_pic_cb),
344 used_ = false; 69 report_error_to_uma_cb_(report_error_to_uma_cb) {
345 at_client_ = false;
346 }
347
348 bool VaapiH264Decoder::DecodeSurface::Sync() {
349 if (!make_context_current_.Run())
350 return false;
351
352 // Wait for the data to be put into the buffer so it's ready for output.
353 VAStatus va_res = VAAPI_SyncSurface(va_display_, va_surface_id_);
354 VA_SUCCESS_OR_RETURN(va_res, "Failed syncing decoded picture", false);
355
356 // Put the decoded data into XPixmap bound to the texture.
357 va_res = VAAPI_PutSurface(va_display_,
358 va_surface_id_, x_pixmap_,
359 0, 0, width_, height_,
360 0, 0, width_, height_,
361 NULL, 0, 0);
362 VA_SUCCESS_OR_RETURN(va_res, "Failed putting decoded picture to texture",
363 false);
364
365 return true;
366 }
367
368 VaapiH264Decoder::VaapiH264Decoder() {
369 Reset(); 70 Reset();
370 curr_input_id_ = -1;
371 x_display_ = NULL;
372 fb_config_ = NULL;
373 va_display_ = NULL;
374 curr_sps_id_ = -1;
375 curr_pps_id_ = -1;
376 pic_width_ = -1;
377 pic_height_ = -1;
378 max_frame_num_ = 0;
379 max_pic_num_ = 0;
380 max_long_term_frame_idx_ = 0;
381 max_pic_order_cnt_lsb_ = 0;
382 state_ = kUninitialized;
383 num_available_decode_surfaces_ = 0;
384 va_context_created_ = false;
385 last_output_poc_ = 0;
386 } 71 }
387 72
388 VaapiH264Decoder::~VaapiH264Decoder() { 73 VaapiH264Decoder::~VaapiH264Decoder() {
389 Destroy();
390 } 74 }
391 75
392 // This puts the decoder in a state where it keeps stream data and is ready
393 // to resume playback from a random location in the stream, but drops all
394 // inputs and outputs and makes all surfaces available for use.
395 void VaapiH264Decoder::Reset() { 76 void VaapiH264Decoder::Reset() {
396 frame_ready_at_hw_ = false;
397
398 curr_pic_.reset(); 77 curr_pic_.reset();
399 78
79 curr_input_id_ = -1;
400 frame_num_ = 0; 80 frame_num_ = 0;
401 prev_frame_num_ = -1; 81 prev_frame_num_ = -1;
402 prev_frame_num_offset_ = -1; 82 prev_frame_num_offset_ = -1;
403 83
404 prev_ref_has_memmgmnt5_ = false; 84 prev_ref_has_memmgmnt5_ = false;
405 prev_ref_top_field_order_cnt_ = -1; 85 prev_ref_top_field_order_cnt_ = -1;
406 prev_ref_pic_order_cnt_msb_ = -1; 86 prev_ref_pic_order_cnt_msb_ = -1;
407 prev_ref_pic_order_cnt_lsb_ = -1; 87 prev_ref_pic_order_cnt_lsb_ = -1;
408 prev_ref_field_ = H264Picture::FIELD_NONE; 88 prev_ref_field_ = H264Picture::FIELD_NONE;
409 89
410 // When called from the constructor, although va_display_ is invalid, 90 vaapi_wrapper_->DestroyPendingBuffers();
411 // |pending_slice_bufs_| and |pending_va_bufs_| are empty.
412 DestroyPendingBuffers();
413
414 pending_slice_bufs_ = std::queue<VABufferID>();
415 pending_va_bufs_ = std::queue<VABufferID>();
416 91
417 ref_pic_list0_.clear(); 92 ref_pic_list0_.clear();
418 ref_pic_list1_.clear(); 93 ref_pic_list1_.clear();
419 94
420 for (POCToDecodeSurfaces::iterator it = poc_to_decode_surfaces_.begin(); 95 for (DecSurfacesInUse::iterator it = decode_surfaces_in_use_.begin();
421 it != poc_to_decode_surfaces_.end(); ) { 96 it != decode_surfaces_in_use_.end(); ) {
422 int poc = it->second->poc(); 97 int poc = it->second->poc();
423 // Must be incremented before UnassignSurfaceFromPoC as this call 98 // Must be incremented before UnassignSurfaceFromPoC as this call
424 // invalidates |it|. 99 // invalidates |it|.
425 ++it; 100 ++it;
426 UnassignSurfaceFromPoC(poc); 101 UnassignSurfaceFromPoC(poc);
427 } 102 }
428 DCHECK(poc_to_decode_surfaces_.empty()); 103 DCHECK(decode_surfaces_in_use_.empty());
429 104
430 dpb_.Clear(); 105 dpb_.Clear();
431 parser_.Reset(); 106 parser_.Reset();
432 last_output_poc_ = 0; 107 last_output_poc_ = 0;
433 108
434 // Still initialized and ready to decode, unless called from constructor, 109 state_ = kIdle;
435 // which will change it back.
436 state_ = kAfterReset;
437 } 110 }
438 111
439 void VaapiH264Decoder::Destroy() { 112 void VaapiH264Decoder::ReuseSurface(
440 if (state_ == kUninitialized) 113 const scoped_refptr<VASurface>& va_surface) {
441 return; 114 available_va_surfaces_.push_back(va_surface);
442
443 VAStatus va_res;
444 bool destroy_surfaces = false;
445 switch (state_) {
446 case kDecoding:
447 case kAfterReset:
448 case kError:
449 destroy_surfaces = true;
450 // fallthrough
451 case kInitialized:
452 if (!make_context_current_.Run())
453 break;
454 if (destroy_surfaces)
455 DestroyVASurfaces();
456 DestroyPendingBuffers();
457 va_res = VAAPI_DestroyConfig(va_display_, va_config_id_);
458 VA_LOG_ON_ERROR(va_res, "vaDestroyConfig failed");
459 va_res = VAAPI_Terminate(va_display_);
460 VA_LOG_ON_ERROR(va_res, "vaTerminate failed");
461 // fallthrough
462 case kUninitialized:
463 break;
464 }
465
466 state_ = kUninitialized;
467 }
468
469 // Maps Profile enum values to VaProfile values.
470 bool VaapiH264Decoder::SetProfile(media::VideoCodecProfile profile) {
471 switch (profile) {
472 case media::H264PROFILE_BASELINE:
473 profile_ = VAProfileH264Baseline;
474 break;
475 case media::H264PROFILE_MAIN:
476 profile_ = VAProfileH264Main;
477 break;
478 case media::H264PROFILE_HIGH:
479 profile_ = VAProfileH264High;
480 break;
481 default:
482 return false;
483 }
484 return true;
485 }
486
487 class ScopedPtrXFree {
488 public:
489 void operator()(void* x) const {
490 ::XFree(x);
491 }
492 };
493
494 bool VaapiH264Decoder::InitializeFBConfig() {
495 const int fbconfig_attr[] = {
496 GLX_DRAWABLE_TYPE, GLX_PIXMAP_BIT,
497 GLX_BIND_TO_TEXTURE_TARGETS_EXT, GLX_TEXTURE_2D_BIT_EXT,
498 GLX_BIND_TO_TEXTURE_RGB_EXT, GL_TRUE,
499 GLX_Y_INVERTED_EXT, GL_TRUE,
500 GL_NONE,
501 };
502
503 int num_fbconfigs;
504 scoped_ptr_malloc<GLXFBConfig, ScopedPtrXFree> glx_fb_configs(
505 glXChooseFBConfig(x_display_, DefaultScreen(x_display_), fbconfig_attr,
506 &num_fbconfigs));
507 if (!glx_fb_configs)
508 return false;
509 if (!num_fbconfigs)
510 return false;
511
512 fb_config_ = glx_fb_configs.get()[0];
513 return true;
514 }
515
516 bool VaapiH264Decoder::Initialize(
517 media::VideoCodecProfile profile,
518 Display* x_display,
519 GLXContext glx_context,
520 const base::Callback<bool(void)>& make_context_current,
521 const OutputPicCB& output_pic_cb,
522 const SubmitDecodeCB& submit_decode_cb) {
523 DCHECK_EQ(state_, kUninitialized);
524
525 output_pic_cb_ = output_pic_cb;
526 submit_decode_cb_ = submit_decode_cb;
527
528 x_display_ = x_display;
529 make_context_current_ = make_context_current;
530
531 if (!make_context_current_.Run())
532 return false;
533
534 if (!SetProfile(profile)) {
535 DVLOG(1) << "Unsupported profile";
536 return false;
537 }
538
539 if (!InitializeFBConfig()) {
540 DVLOG(1) << "Could not get a usable FBConfig";
541 return false;
542 }
543
544 va_display_ = VAAPI_GetDisplay(x_display_);
545 if (!VAAPI_DisplayIsValid(va_display_)) {
546 DVLOG(1) << "Could not get a valid VA display";
547 return false;
548 }
549
550 int major_version, minor_version;
551 VAStatus va_res;
552 va_res = VAAPI_Initialize(va_display_, &major_version, &minor_version);
553 VA_SUCCESS_OR_RETURN(va_res, "vaInitialize failed", false);
554 DVLOG(1) << "VAAPI version: " << major_version << "." << minor_version;
555
556 VAConfigAttrib attrib;
557 attrib.type = VAConfigAttribRTFormat;
558
559 VAEntrypoint entrypoint = VAEntrypointVLD;
560 va_res = VAAPI_GetConfigAttributes(va_display_, profile_, entrypoint,
561 &attrib, 1);
562 VA_SUCCESS_OR_RETURN(va_res, "vaGetConfigAttributes failed", false);
563
564 if (!(attrib.value & VA_RT_FORMAT_YUV420)) {
565 DVLOG(1) << "YUV420 not supported";
566 return false;
567 }
568
569 va_res = VAAPI_CreateConfig(va_display_, profile_, entrypoint,
570 &attrib, 1, &va_config_id_);
571 VA_SUCCESS_OR_RETURN(va_res, "vaCreateConfig failed", false);
572
573 state_ = kInitialized;
574 return true;
575 }
576
577 void VaapiH264Decoder::ReusePictureBuffer(int32 picture_buffer_id) {
578 DecodeSurfaces::iterator it = decode_surfaces_.find(picture_buffer_id);
579 if (it == decode_surfaces_.end()) {
580 DVLOG(1) << "Asked to reuse an invalid surface "
581 << picture_buffer_id;
582 return;
583 }
584 if (it->second->available()) {
585 DVLOG(1) << "Asked to reuse an already available surface "
586 << picture_buffer_id;
587 return;
588 }
589
590 it->second->set_at_client(false);
591 if (!it->second->used()) {
592 it->second->Release();
593 ++num_available_decode_surfaces_;
594 }
595 }
596
597 bool VaapiH264Decoder::AssignPictureBuffer(int32 picture_buffer_id,
598 uint32 texture_id) {
599 DCHECK_EQ(state_, kDecoding);
600
601 if (decode_surfaces_.size() >= GetRequiredNumOfPictures()) {
602 DVLOG(1) << "Got more surfaces than required";
603 return false;
604 }
605
606 // This will not work if we start using VDA.DismissPicture()
607 linked_ptr<DecodeSurface> dec_surface(new DecodeSurface(
608 fb_config_, x_display_, va_display_, make_context_current_,
609 va_surface_ids_[decode_surfaces_.size()], picture_buffer_id, texture_id,
610 pic_width_, pic_height_));
611 if (!dec_surface->available()) {
612 DVLOG(1) << "Error creating a decoding surface (binding to texture?)";
613 return false;
614 }
615
616 DVLOG(2) << "New picture assigned, texture id: " << dec_surface->texture_id()
617 << " pic buf id: " << dec_surface->picture_buffer_id()
618 << " will use va surface " << dec_surface->va_surface_id();
619
620 bool inserted = decode_surfaces_.insert(std::make_pair(picture_buffer_id,
621 dec_surface)).second;
622 DCHECK(inserted);
623 ++num_available_decode_surfaces_;
624
625 return true;
626 }
627
628 bool VaapiH264Decoder::CreateVASurfaces() {
629 DCHECK_NE(pic_width_, -1);
630 DCHECK_NE(pic_height_, -1);
631 if (state_ == kAfterReset)
632 return true;
633 DCHECK_EQ(state_, kInitialized);
634
635 // Allocate VASurfaces in driver.
636 VAStatus va_res = VAAPI_CreateSurfaces(va_display_, pic_width_,
637 pic_height_, VA_RT_FORMAT_YUV420,
638 GetRequiredNumOfPictures(),
639 va_surface_ids_);
640 VA_SUCCESS_OR_RETURN(va_res, "vaCreateSurfaces failed", false);
641
642 DCHECK(decode_surfaces_.empty());
643 // And create a context associated with them.
644 va_res = VAAPI_CreateContext(va_display_, va_config_id_,
645 pic_width_, pic_height_, VA_PROGRESSIVE,
646 va_surface_ids_, GetRequiredNumOfPictures(),
647 &va_context_id_);
648
649 if (va_res != VA_STATUS_SUCCESS) {
650 DVLOG(1) << "Error creating a decoding surface (binding to texture?)";
651 VAAPI_DestroySurfaces(va_display_, va_surface_ids_,
652 GetRequiredNumOfPictures());
653 return false;
654 }
655
656 va_context_created_ = true;
657
658 return true;
659 }
660
661 void VaapiH264Decoder::DestroyVASurfaces() {
662 DCHECK(state_ == kDecoding || state_ == kError || state_ == kAfterReset);
663 decode_surfaces_.clear();
664
665 // This can happen if we fail during DecodeInitial.
666 if (!va_context_created_)
667 return;
668
669 VAStatus va_res = VAAPI_DestroyContext(va_display_, va_context_id_);
670 VA_LOG_ON_ERROR(va_res, "vaDestroyContext failed");
671
672 va_res = VAAPI_DestroySurfaces(va_display_, va_surface_ids_,
673 GetRequiredNumOfPictures());
674 VA_LOG_ON_ERROR(va_res, "vaDestroySurfaces failed");
675
676 va_context_created_ = false;
677 }
678
679 void VaapiH264Decoder::DestroyPendingBuffers() {
680 while (!pending_slice_bufs_.empty()) {
681 VABufferID buffer = pending_slice_bufs_.front();
682 VAStatus va_res = VAAPI_DestroyBuffer(va_display_, buffer);
683 VA_LOG_ON_ERROR(va_res, "vaDestroyBuffer failed");
684 pending_slice_bufs_.pop();
685 }
686 while (!pending_va_bufs_.empty()) {
687 VABufferID buffer = pending_va_bufs_.front();
688 VAStatus va_res = VAAPI_DestroyBuffer(va_display_, buffer);
689 VA_LOG_ON_ERROR(va_res, "vaDestroyBuffer failed");
690 pending_va_bufs_.pop();
691 }
692 } 115 }
693 116
694 // Fill |va_pic| with default/neutral values. 117 // Fill |va_pic| with default/neutral values.
695 static void InitVAPicture(VAPictureH264* va_pic) { 118 static void InitVAPicture(VAPictureH264* va_pic) {
696 memset(va_pic, 0, sizeof(*va_pic)); 119 memset(va_pic, 0, sizeof(*va_pic));
697 va_pic->picture_id = VA_INVALID_ID; 120 va_pic->picture_id = VA_INVALID_ID;
698 va_pic->flags = VA_PICTURE_H264_INVALID; 121 va_pic->flags = VA_PICTURE_H264_INVALID;
699 } 122 }
700 123
701 void VaapiH264Decoder::FillVAPicture(VAPictureH264 *va_pic, H264Picture* pic) { 124 void VaapiH264Decoder::FillVAPicture(VAPictureH264 *va_pic, H264Picture* pic) {
702 DCHECK(pic); 125 DCHECK(pic);
703 POCToDecodeSurfaces::iterator iter = poc_to_decode_surfaces_.find( 126
704 pic->pic_order_cnt); 127 DecodeSurface* dec_surface = DecodeSurfaceByPoC(pic->pic_order_cnt);
705 if (iter == poc_to_decode_surfaces_.end()) { 128 if (!dec_surface) {
706 DVLOG(1) << "Could not find surface with POC: " << pic->pic_order_cnt;
707 // Cannot provide a ref picture, will corrupt output, but may be able 129 // Cannot provide a ref picture, will corrupt output, but may be able
708 // to recover. 130 // to recover.
709 InitVAPicture(va_pic); 131 InitVAPicture(va_pic);
710 return; 132 return;
711 } 133 }
712 134
713 va_pic->picture_id = iter->second->va_surface_id(); 135 va_pic->picture_id = dec_surface->va_surface()->id();
714 va_pic->frame_idx = pic->frame_num; 136 va_pic->frame_idx = pic->frame_num;
715 va_pic->flags = 0; 137 va_pic->flags = 0;
716 138
717 switch (pic->field) { 139 switch (pic->field) {
718 case H264Picture::FIELD_NONE: 140 case H264Picture::FIELD_NONE:
719 break; 141 break;
720 case H264Picture::FIELD_TOP: 142 case H264Picture::FIELD_TOP:
721 va_pic->flags |= VA_PICTURE_H264_TOP_FIELD; 143 va_pic->flags |= VA_PICTURE_H264_TOP_FIELD;
722 break; 144 break;
723 case H264Picture::FIELD_BOTTOM: 145 case H264Picture::FIELD_BOTTOM:
(...skipping 19 matching lines...)
743 // Libva does not document this, but other implementations (e.g. mplayer) 165 // Libva does not document this, but other implementations (e.g. mplayer)
744 // do it this way as well. 166 // do it this way as well.
745 for (rit = dpb_.rbegin(), i = 0; rit != dpb_.rend() && i < num_pics; ++rit) { 167 for (rit = dpb_.rbegin(), i = 0; rit != dpb_.rend() && i < num_pics; ++rit) {
746 if ((*rit)->ref) 168 if ((*rit)->ref)
747 FillVAPicture(&va_pics[i++], *rit); 169 FillVAPicture(&va_pics[i++], *rit);
748 } 170 }
749 171
750 return i; 172 return i;
751 } 173 }
752 174
753 // Can only be called when all surfaces are already bound 175 VaapiH264Decoder::DecodeSurface* VaapiH264Decoder::DecodeSurfaceByPoC(int poc) {
754 // to textures (cannot be run at the same time as AssignPictureBuffer). 176 DecSurfacesInUse::iterator iter = decode_surfaces_in_use_.find(poc);
755 bool VaapiH264Decoder::AssignSurfaceToPoC(int poc) { 177 if (iter == decode_surfaces_in_use_.end()) {
756 DCHECK_GT(num_available_decode_surfaces_, 0) << decode_surfaces_.size(); 178 DVLOG(1) << "Could not find surface assigned to POC: " << poc;
757 179 return NULL;
758 // Find a surface not currently holding data used for reference and/or
759 // to be displayed and mark it as used.
760 DecodeSurfaces::iterator iter = decode_surfaces_.begin();
761 for (; iter != decode_surfaces_.end(); ++iter) {
762 if (!iter->second->available())
763 continue;
764
765 --num_available_decode_surfaces_;
766 DCHECK_GE(num_available_decode_surfaces_, 0);
767
768 // Associate with input id and poc and mark as unavailable.
769 iter->second->Acquire(curr_input_id_, poc);
770 DVLOG(4) << "Will use surface " << iter->second->va_surface_id()
771 << " for POC " << iter->second->poc()
772 << " input ID: " << iter->second->input_id();
773 bool inserted = poc_to_decode_surfaces_.insert(std::make_pair(
774 poc, iter->second.get())).second;
775 DCHECK(inserted);
776 return true;
777 } 180 }
778 181
779 // Could not find an available surface. 182 return iter->second.get();
780 return false;
781 } 183 }
782 184
783 // Can only be called when all surfaces are already bound 185 bool VaapiH264Decoder::AssignSurfaceToPoC(int32 input_id, int poc) {
784 // to textures (cannot be run at the same time as AssignPictureBuffer). 186 if (available_va_surfaces_.empty()) {
187 DVLOG(1) << "No VA Surfaces available";
188 return false;
189 }
190
191 linked_ptr<DecodeSurface> dec_surface(new DecodeSurface(
192 poc, input_id, available_va_surfaces_.back()));
193 available_va_surfaces_.pop_back();
194
195 DVLOG(4) << "POC " << poc
196 << " will use surface " << dec_surface->va_surface()->id();
197
198 bool inserted = decode_surfaces_in_use_.insert(
199 std::make_pair(poc, dec_surface)).second;
200 DCHECK(inserted);
201
202 return true;
203 }
204
785 void VaapiH264Decoder::UnassignSurfaceFromPoC(int poc) { 205 void VaapiH264Decoder::UnassignSurfaceFromPoC(int poc) {
786 DecodeSurface* dec_surface; 206 DecSurfacesInUse::iterator it = decode_surfaces_in_use_.find(poc);
787 POCToDecodeSurfaces::iterator it = poc_to_decode_surfaces_.find(poc); 207 if (it == decode_surfaces_in_use_.end()) {
788 if (it == poc_to_decode_surfaces_.end()) {
789 DVLOG(1) << "Asked to unassign an unassigned POC " << poc; 208 DVLOG(1) << "Asked to unassign an unassigned POC " << poc;
790 return; 209 return;
791 } 210 }
792 dec_surface = it->second;
793 DVLOG(4) << "POC " << poc << " no longer using surface "
794 << dec_surface->va_surface_id();
795 poc_to_decode_surfaces_.erase(it);
796 211
797 dec_surface->set_used(false); 212 DVLOG(4) << "POC " << poc << " no longer using VA surface "
798 if (!dec_surface->at_client()) { 213 << it->second->va_surface()->id();
799 dec_surface->Release(); 214
800 ++num_available_decode_surfaces_; 215 decode_surfaces_in_use_.erase(it);
801 }
802 } 216 }
803 217
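A quick mental model for the three helpers above in the new design: AssignSurfaceToPoC() takes a free surface from available_va_surfaces_, wraps it in a DecodeSurface together with the picture's input_id, and keys it by POC in decode_surfaces_in_use_; DecodeSurfaceByPoC() is a plain lookup used when filling reference frames; UnassignSurfaceFromPoC() erases the entry once the picture is no longer needed for reference or output. A simplified standalone sketch with standard containers (FakeSurface, FakeDecodeSurface and std::shared_ptr are illustrative stand-ins, not the patch's types):

#include <iostream>
#include <map>
#include <memory>
#include <vector>

struct FakeSurface { int id; };          // stands in for a refcounted VASurface

struct FakeDecodeSurface {               // stands in for DecodeSurface
  int poc;
  int input_id;
  std::shared_ptr<FakeSurface> surface;
};

typedef std::map<int, std::shared_ptr<FakeDecodeSurface> > SurfacesInUse;

int main() {
  std::vector<std::shared_ptr<FakeSurface> > available;
  available.push_back(std::make_shared<FakeSurface>());
  available.back()->id = 42;

  SurfacesInUse in_use;

  // AssignSurfaceToPoC(input_id, poc): pop a free surface, key it by POC.
  int poc = 4, input_id = 7;
  std::shared_ptr<FakeDecodeSurface> dec(new FakeDecodeSurface());
  dec->poc = poc;
  dec->input_id = input_id;
  dec->surface = available.back();
  available.pop_back();
  in_use[poc] = dec;

  // DecodeSurfaceByPoC(poc): plain map lookup.
  SurfacesInUse::iterator it = in_use.find(poc);
  if (it != in_use.end())
    std::cout << "POC " << poc << " uses surface " << it->second->surface->id
              << "\n";

  // UnassignSurfaceFromPoC(poc): erase the entry; the underlying surface can
  // then be recycled (in the real code, handed back via ReuseSurface()).
  in_use.erase(poc);
  return 0;
}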
804 // Fill a VAPictureParameterBufferH264 to be later sent to the HW decoder.
805 bool VaapiH264Decoder::SendPPS() { 218 bool VaapiH264Decoder::SendPPS() {
806 const H264PPS* pps = parser_.GetPPS(curr_pps_id_); 219 const H264PPS* pps = parser_.GetPPS(curr_pps_id_);
807 DCHECK(pps); 220 DCHECK(pps);
808 221
809 const H264SPS* sps = parser_.GetSPS(pps->seq_parameter_set_id); 222 const H264SPS* sps = parser_.GetSPS(pps->seq_parameter_set_id);
810 DCHECK(sps); 223 DCHECK(sps);
811 224
812 DCHECK(curr_pic_.get()); 225 DCHECK(curr_pic_.get());
813 226
814 VAPictureParameterBufferH264 pic_param; 227 VAPictureParameterBufferH264 pic_param;
(...skipping 62 matching lines...)
877 // Init reference pictures' array. 290 // Init reference pictures' array.
878 for (int i = 0; i < 16; ++i) 291 for (int i = 0; i < 16; ++i)
879 InitVAPicture(&pic_param.ReferenceFrames[i]); 292 InitVAPicture(&pic_param.ReferenceFrames[i]);
880 293
881 // And fill it with picture info from DPB. 294 // And fill it with picture info from DPB.
882 FillVARefFramesFromDPB(pic_param.ReferenceFrames, 295 FillVARefFramesFromDPB(pic_param.ReferenceFrames,
883 arraysize(pic_param.ReferenceFrames)); 296 arraysize(pic_param.ReferenceFrames));
884 297
885 pic_param.num_ref_frames = sps->max_num_ref_frames; 298 pic_param.num_ref_frames = sps->max_num_ref_frames;
886 299
887 // Allocate a buffer in driver for this parameter buffer and upload data. 300 return vaapi_wrapper_->SubmitBuffer(VAPictureParameterBufferType,
888 VABufferID pic_param_buf_id; 301 sizeof(VAPictureParameterBufferH264),
889 VAStatus va_res = VAAPI_CreateBuffer(va_display_, va_context_id_, 302 &pic_param);
890 VAPictureParameterBufferType,
891 sizeof(VAPictureParameterBufferH264),
892 1, &pic_param, &pic_param_buf_id);
893 VA_SUCCESS_OR_RETURN(va_res, "Failed to create a buffer for PPS", false);
894
895 // Queue its VA buffer ID to be committed on HW decode run.
896 pending_va_bufs_.push(pic_param_buf_id);
897
898 return true;
899 } 303 }
900 304
901 // Fill a VAIQMatrixBufferH264 to be later sent to the HW decoder.
902 bool VaapiH264Decoder::SendIQMatrix() { 305 bool VaapiH264Decoder::SendIQMatrix() {
903 const H264PPS* pps = parser_.GetPPS(curr_pps_id_); 306 const H264PPS* pps = parser_.GetPPS(curr_pps_id_);
904 DCHECK(pps); 307 DCHECK(pps);
905 308
906 VAIQMatrixBufferH264 iq_matrix_buf; 309 VAIQMatrixBufferH264 iq_matrix_buf;
907 memset(&iq_matrix_buf, 0, sizeof(VAIQMatrixBufferH264)); 310 memset(&iq_matrix_buf, 0, sizeof(VAIQMatrixBufferH264));
908 311
909 if (pps->pic_scaling_matrix_present_flag) { 312 if (pps->pic_scaling_matrix_present_flag) {
910 for (int i = 0; i < 6; ++i) { 313 for (int i = 0; i < 6; ++i) {
911 for (int j = 0; j < 16; ++j) 314 for (int j = 0; j < 16; ++j)
(...skipping 11 matching lines...)
923 for (int j = 0; j < 16; ++j) 326 for (int j = 0; j < 16; ++j)
924 iq_matrix_buf.ScalingList4x4[i][j] = sps->scaling_list4x4[i][j]; 327 iq_matrix_buf.ScalingList4x4[i][j] = sps->scaling_list4x4[i][j];
925 } 328 }
926 329
927 for (int i = 0; i < 2; ++i) { 330 for (int i = 0; i < 2; ++i) {
928 for (int j = 0; j < 64; ++j) 331 for (int j = 0; j < 64; ++j)
929 iq_matrix_buf.ScalingList8x8[i][j] = sps->scaling_list8x8[i][j]; 332 iq_matrix_buf.ScalingList8x8[i][j] = sps->scaling_list8x8[i][j];
930 } 333 }
931 } 334 }
932 335
933 // Allocate a buffer in driver for this parameter buffer and upload data. 336 return vaapi_wrapper_->SubmitBuffer(VAIQMatrixBufferType,
934 VABufferID iq_matrix_buf_id; 337 sizeof(VAIQMatrixBufferH264),
935 VAStatus va_res = VAAPI_CreateBuffer(va_display_, va_context_id_, 338 &iq_matrix_buf);
936 VAIQMatrixBufferType,
937 sizeof(VAIQMatrixBufferH264), 1,
938 &iq_matrix_buf, &iq_matrix_buf_id);
939 VA_SUCCESS_OR_RETURN(va_res, "Failed to create a buffer for IQMatrix",
940 false);
941
942 // Queue its VA buffer ID to be committed on HW decode run.
943 pending_va_bufs_.push(iq_matrix_buf_id);
944
945 return true;
946 } 339 }
947 340
948 bool VaapiH264Decoder::SendVASliceParam(H264SliceHeader* slice_hdr) { 341 bool VaapiH264Decoder::SendVASliceParam(H264SliceHeader* slice_hdr) {
949 const H264PPS* pps = parser_.GetPPS(slice_hdr->pic_parameter_set_id); 342 const H264PPS* pps = parser_.GetPPS(slice_hdr->pic_parameter_set_id);
950 DCHECK(pps); 343 DCHECK(pps);
951 344
952 const H264SPS* sps = parser_.GetSPS(pps->seq_parameter_set_id); 345 const H264SPS* sps = parser_.GetSPS(pps->seq_parameter_set_id);
953 DCHECK(sps); 346 DCHECK(sps);
954 347
955 VASliceParameterBufferH264 slice_param; 348 VASliceParameterBufferH264 slice_param;
(...skipping 69 matching lines...)
1025 418
1026 int i; 419 int i;
1027 H264Picture::PtrVector::iterator it; 420 H264Picture::PtrVector::iterator it;
1028 for (it = ref_pic_list0_.begin(), i = 0; it != ref_pic_list0_.end() && *it; 421 for (it = ref_pic_list0_.begin(), i = 0; it != ref_pic_list0_.end() && *it;
1029 ++it, ++i) 422 ++it, ++i)
1030 FillVAPicture(&slice_param.RefPicList0[i], *it); 423 FillVAPicture(&slice_param.RefPicList0[i], *it);
1031 for (it = ref_pic_list1_.begin(), i = 0; it != ref_pic_list1_.end() && *it; 424 for (it = ref_pic_list1_.begin(), i = 0; it != ref_pic_list1_.end() && *it;
1032 ++it, ++i) 425 ++it, ++i)
1033 FillVAPicture(&slice_param.RefPicList1[i], *it); 426 FillVAPicture(&slice_param.RefPicList1[i], *it);
1034 427
1035 // Allocate a buffer in driver for this parameter buffer and upload data. 428 return vaapi_wrapper_->SubmitBuffer(VASliceParameterBufferType,
1036 VABufferID slice_param_buf_id; 429 sizeof(VASliceParameterBufferH264),
1037 VAStatus va_res = VAAPI_CreateBuffer(va_display_, va_context_id_, 430 &slice_param);
1038 VASliceParameterBufferType,
1039 sizeof(VASliceParameterBufferH264),
1040 1, &slice_param, &slice_param_buf_id);
1041 VA_SUCCESS_OR_RETURN(va_res, "Failed creating a buffer for slice param",
1042 false);
1043
1044 // Queue its VA buffer ID to be committed on HW decode run.
1045 pending_slice_bufs_.push(slice_param_buf_id);
1046
1047 return true;
1048 } 431 }
1049 432
1050 bool VaapiH264Decoder::SendSliceData(const uint8* ptr, size_t size) { 433 bool VaapiH264Decoder::SendSliceData(const uint8* ptr, size_t size) {
1051 // Can't help it, blame libva... 434 // Can't help it, blame libva...
1052 void* non_const_ptr = const_cast<uint8*>(ptr); 435 void* non_const_ptr = const_cast<uint8*>(ptr);
1053 436 return vaapi_wrapper_->SubmitBuffer(VASliceDataBufferType, size,
1054 VABufferID slice_data_buf_id; 437 non_const_ptr);
1055 VAStatus va_res = VAAPI_CreateBuffer(va_display_, va_context_id_,
1056 VASliceDataBufferType, size, 1,
1057 non_const_ptr, &slice_data_buf_id);
1058 VA_SUCCESS_OR_RETURN(va_res, "Failed creating a buffer for slice data",
1059 false);
1060
1061 pending_slice_bufs_.push(slice_data_buf_id);
1062 return true;
1063 } 438 }
1064 439
1065 bool VaapiH264Decoder::QueueSlice(H264SliceHeader* slice_hdr) { 440 bool VaapiH264Decoder::QueueSlice(H264SliceHeader* slice_hdr) {
1066 DCHECK(curr_pic_.get()); 441 DCHECK(curr_pic_.get());
1067 442
1068 if (!SendVASliceParam(slice_hdr)) 443 if (!SendVASliceParam(slice_hdr))
1069 return false; 444 return false;
1070 445
1071 if (!SendSliceData(slice_hdr->nalu_data, slice_hdr->nalu_size)) 446 if (!SendSliceData(slice_hdr->nalu_data, slice_hdr->nalu_size))
1072 return false; 447 return false;
1073 448
1074 return true; 449 return true;
1075 } 450 }
1076 451
452 // TODO(posciak) start using vaMapBuffer instead of vaCreateBuffer wherever
453 // possible.
1077 bool VaapiH264Decoder::DecodePicture() { 454 bool VaapiH264Decoder::DecodePicture() {
1078 DCHECK(!frame_ready_at_hw_);
1079 DCHECK(curr_pic_.get()); 455 DCHECK(curr_pic_.get());
1080 456
1081 // Find the surface associated with the picture to be decoded. 457 DVLOG(4) << "Decoding POC " << curr_pic_->pic_order_cnt;
1082 DecodeSurface* dec_surface = 458 DecodeSurface* dec_surface = DecodeSurfaceByPoC(curr_pic_->pic_order_cnt);
1083 poc_to_decode_surfaces_[curr_pic_->pic_order_cnt]; 459 if (!dec_surface) {
1084 DVLOG(4) << "Decoding POC " << curr_pic_->pic_order_cnt 460 DVLOG(1) << "Asked to decode an invalid POC " << curr_pic_->pic_order_cnt;
1085 << " into surface " << dec_surface->va_surface_id(); 461 return false;
462 }
1086 463
1087 DVLOG(4) << "Pending VA bufs to commit: " << pending_va_bufs_.size(); 464 if (!vaapi_wrapper_->DecodeAndDestroyPendingBuffers(
1088 DVLOG(4) << "Pending slice bufs to commit: " << pending_slice_bufs_.size(); 465 dec_surface->va_surface()->id())) {
1089 466 DVLOG(1) << "Failed decoding picture";
1090 DCHECK(pending_slice_bufs_.size()); 467 return false;
1091 scoped_ptr<std::queue<VABufferID> > va_bufs(new std::queue<VABufferID>()); 468 }
1092 std::swap(*va_bufs, pending_va_bufs_);
1093 scoped_ptr<std::queue<VABufferID> > slice_bufs(new std::queue<VABufferID>());
1094 std::swap(*slice_bufs, pending_slice_bufs_);
1095
1096 // Fire up a parallel job on the GPU on the ChildThread to decode and put
1097 // the decoded/converted/scaled picture into the pixmap.
1098 // Callee will take care of freeing the buffer queues.
1099 submit_decode_cb_.Run(
1100 dec_surface->picture_buffer_id(), va_bufs.Pass(), slice_bufs.Pass());
1101
1102 // Used to notify clients that we had sufficient data to start decoding
1103 // a new frame.
1104 frame_ready_at_hw_ = true;
1105 469
1106 return true; 470 return true;
1107 } 471 }
1108 472
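The net effect on the per-picture decode path: parameter and slice buffers are queued through VaapiWrapper::SubmitBuffer() (see SendPPS, SendIQMatrix and QueueSlice above) and then committed in one call to DecodeAndDestroyPendingBuffers() with the target surface id, replacing the removed BeginPicture/RenderPicture/EndPicture sequence below. A minimal mock of that call order, assuming nothing about the real VaapiWrapper beyond the two method names used in this file (MockVaapiWrapper and FakeBufferType are illustrative):

#include <cstddef>
#include <iostream>

typedef unsigned int VASurfaceID;  // stand-in for the libva surface id type
enum FakeBufferType { kPicParam, kIQMatrix, kSliceParam, kSliceData };

class MockVaapiWrapper {
 public:
  bool SubmitBuffer(FakeBufferType type, size_t size, void* data) {
    (void)data;  // a real implementation would copy |data| into a VA buffer
    std::cout << "queued buffer type " << type << " size " << size << "\n";
    ++pending_;
    return true;
  }
  bool DecodeAndDestroyPendingBuffers(VASurfaceID surface_id) {
    std::cout << "decoding " << pending_ << " buffers into surface "
              << surface_id << "\n";
    pending_ = 0;  // queued buffers are released once the decode is kicked off
    return true;
  }
 private:
  int pending_ = 0;
};

int main() {
  MockVaapiWrapper wrapper;
  int pic_param = 0, iq_matrix = 0, slice_param = 0;
  char slice_data[16] = {0};
  // Same order as SendPPS / SendIQMatrix / QueueSlice / DecodePicture above.
  wrapper.SubmitBuffer(kPicParam, sizeof(pic_param), &pic_param);
  wrapper.SubmitBuffer(kIQMatrix, sizeof(iq_matrix), &iq_matrix);
  wrapper.SubmitBuffer(kSliceParam, sizeof(slice_param), &slice_param);
  wrapper.SubmitBuffer(kSliceData, sizeof(slice_data), slice_data);
  wrapper.DecodeAndDestroyPendingBuffers(1u);
  return 0;
}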
1109 void VaapiH264Decoder::DestroyBuffers(size_t num_va_buffers,
1110 const VABufferID* va_buffers) {
1111 for (size_t i = 0; i < num_va_buffers; ++i) {
1112 VAStatus va_res = VAAPI_DestroyBuffer(va_display_, va_buffers[i]);
1113 VA_LOG_ON_ERROR(va_res, "vaDestroyBuffer failed");
1114 }
1115 }
1116 473
1117 // TODO(posciak) start using vaMapBuffer instead of vaCreateBuffer wherever
1118 // possible.
1119 bool VaapiH264Decoder::SubmitDecode(
1120 int32 picture_buffer_id,
1121 scoped_ptr<std::queue<VABufferID> > va_bufs,
1122 scoped_ptr<std::queue<VABufferID> > slice_bufs) {
1123
1124 static const size_t kMaxVABuffers = 32;
1125 DCHECK_LE(va_bufs->size(), kMaxVABuffers);
1126 DCHECK_LE(slice_bufs->size(), kMaxVABuffers);
1127
1128 DecodeSurfaces::iterator it = decode_surfaces_.find(picture_buffer_id);
1129 if (it == decode_surfaces_.end()) {
1130 DVLOG(1) << "Asked to put an invalid buffer";
1131 return false;
1132 }
1133
1134 // Get ready to decode into surface.
1135 VAStatus va_res = VAAPI_BeginPicture(va_display_, va_context_id_,
1136 it->second->va_surface_id());
1137 VA_SUCCESS_OR_RETURN(va_res, "vaBeginPicture failed", false);
1138
1139 // Put buffer IDs for pending parameter buffers into va_buffers[].
1140 VABufferID va_buffers[kMaxVABuffers];
1141 size_t num_va_buffers = va_bufs->size();
1142 for (size_t i = 0; i < num_va_buffers && i < kMaxVABuffers; ++i) {
1143 va_buffers[i] = va_bufs->front();
1144 va_bufs->pop();
1145 }
1146 base::Closure va_buffers_callback =
1147 base::Bind(&VaapiH264Decoder::DestroyBuffers, base::Unretained(this),
1148 num_va_buffers, va_buffers);
1149 base::ScopedClosureRunner va_buffers_deleter(va_buffers_callback);
1150
1151 // And send them to the HW decoder.
1152 va_res = VAAPI_RenderPicture(va_display_, va_context_id_, va_buffers,
1153 num_va_buffers);
1154 VA_SUCCESS_OR_RETURN(va_res, "vaRenderPicture for va_bufs failed", false);
1155
1156 DVLOG(4) << "Committed " << num_va_buffers << " VA buffers";
1157
1158 // Put buffer IDs for pending slice data buffers into slice_buffers[].
1159 VABufferID slice_buffers[kMaxVABuffers];
1160 size_t num_slice_buffers = slice_bufs->size();
1161 for (size_t i = 0; i < num_slice_buffers && i < kMaxVABuffers; ++i) {
1162 slice_buffers[i] = slice_bufs->front();
1163 slice_bufs->pop();
1164 }
1165 base::Closure va_slices_callback =
1166 base::Bind(&VaapiH264Decoder::DestroyBuffers, base::Unretained(this),
1167 num_slice_buffers, slice_buffers);
1168 base::ScopedClosureRunner slice_buffers_deleter(va_slices_callback);
1169
1170 // And send them to the HW decoder.
1171 va_res = VAAPI_RenderPicture(va_display_, va_context_id_, slice_buffers,
1172 num_slice_buffers);
1173 VA_SUCCESS_OR_RETURN(va_res, "vaRenderPicture for slices failed", false);
1174
1175 DVLOG(4) << "Committed " << num_slice_buffers << " slice buffers";
1176
1177 // Instruct HW decoder to start processing committed buffers (decode this
1178 // picture). This does not block until the end of decode.
1179 va_res = VAAPI_EndPicture(va_display_, va_context_id_);
1180 VA_SUCCESS_OR_RETURN(va_res, "vaEndPicture failed", false);
1181
1182 DVLOG(3) << "Will output from VASurface " << it->second->va_surface_id()
1183 << " to texture id " << it->second->texture_id();
1184
1185 return it->second->Sync();
1186 }
1187
1188
1189 bool VaapiH264Decoder::InitCurrPicture(H264SliceHeader* slice_hdr) { 474 bool VaapiH264Decoder::InitCurrPicture(H264SliceHeader* slice_hdr) {
1190 DCHECK(curr_pic_.get()); 475 DCHECK(curr_pic_.get());
1191 476
1192 memset(curr_pic_.get(), 0, sizeof(H264Picture)); 477 memset(curr_pic_.get(), 0, sizeof(H264Picture));
1193 478
1194 curr_pic_->idr = slice_hdr->idr_pic_flag; 479 curr_pic_->idr = slice_hdr->idr_pic_flag;
1195 480
1196 if (slice_hdr->field_pic_flag) { 481 if (slice_hdr->field_pic_flag) {
1197 curr_pic_->field = slice_hdr->bottom_field_flag ? H264Picture::FIELD_BOTTOM 482 curr_pic_->field = slice_hdr->bottom_field_flag ? H264Picture::FIELD_BOTTOM
1198 : H264Picture::FIELD_TOP; 483 : H264Picture::FIELD_TOP;
1199 } else { 484 } else {
1200 curr_pic_->field = H264Picture::FIELD_NONE; 485 curr_pic_->field = H264Picture::FIELD_NONE;
1201 } 486 }
1202 487
1203 curr_pic_->ref = slice_hdr->nal_ref_idc != 0; 488 curr_pic_->ref = slice_hdr->nal_ref_idc != 0;
1204 // This assumes a non-interlaced stream. 489 // This assumes a non-interlaced stream.
1205 curr_pic_->frame_num = curr_pic_->pic_num = slice_hdr->frame_num; 490 curr_pic_->frame_num = curr_pic_->pic_num = slice_hdr->frame_num;
1206 491
1207 if (!CalculatePicOrderCounts(slice_hdr)) 492 if (!CalculatePicOrderCounts(slice_hdr))
1208 return false; 493 return false;
1209 494
1210 // Try to get an empty surface to decode this picture to. 495 // Try to get an empty surface to decode this picture to.
1211 if (!AssignSurfaceToPoC(curr_pic_->pic_order_cnt)) { 496 if (!AssignSurfaceToPoC(curr_input_id_, curr_pic_->pic_order_cnt)) {
1212 DVLOG(1) << "Failed getting a free surface for a picture"; 497 DVLOG(1) << "Failed getting a free surface for a picture";
1213 return false; 498 return false;
1214 } 499 }
1215 500
1216 curr_pic_->long_term_reference_flag = slice_hdr->long_term_reference_flag; 501 curr_pic_->long_term_reference_flag = slice_hdr->long_term_reference_flag;
1217 curr_pic_->adaptive_ref_pic_marking_mode_flag = 502 curr_pic_->adaptive_ref_pic_marking_mode_flag =
1218 slice_hdr->adaptive_ref_pic_marking_mode_flag; 503 slice_hdr->adaptive_ref_pic_marking_mode_flag;
1219 504
1220 // If the slice header indicates we will have to perform reference marking 505 // If the slice header indicates we will have to perform reference marking
1221 // process after this picture is decoded, store required data for that 506 // process after this picture is decoded, store required data for that
(...skipping 481 matching lines...)
1703 // temporarily made one element longer than the required final list. 988 // temporarily made one element longer than the required final list.
1704 // Resize the list back to its required size. 989 // Resize the list back to its required size.
1705 ref_pic_listx->resize(num_ref_idx_lX_active_minus1 + 1); 990 ref_pic_listx->resize(num_ref_idx_lX_active_minus1 + 1);
1706 991
1707 return true; 992 return true;
1708 } 993 }
1709 994
1710 bool VaapiH264Decoder::OutputPic(H264Picture* pic) { 995 bool VaapiH264Decoder::OutputPic(H264Picture* pic) {
1711 DCHECK(!pic->outputted); 996 DCHECK(!pic->outputted);
1712 pic->outputted = true; 997 pic->outputted = true;
1713 POCToDecodeSurfaces::iterator iter = poc_to_decode_surfaces_.find( 998 last_output_poc_ = pic->pic_order_cnt;
1714 pic->pic_order_cnt); 999
1715 if (iter == poc_to_decode_surfaces_.end()) 1000 DecodeSurface* dec_surface = DecodeSurfaceByPoC(pic->pic_order_cnt);
1001 if (!dec_surface)
1716 return false; 1002 return false;
1717 DecodeSurface* dec_surface = iter->second;
1718 1003
1719 dec_surface->set_at_client(true); 1004 DCHECK_GE(dec_surface->input_id(), 0);
1720 last_output_poc_ = pic->pic_order_cnt;
1721 // Notify the client that a picture can be output.
1722 DVLOG(4) << "Posting output task for POC: " << pic->pic_order_cnt 1005 DVLOG(4) << "Posting output task for POC: " << pic->pic_order_cnt
1723 << " input_id: " << dec_surface->input_id() 1006 << " input_id: " << dec_surface->input_id();
1724 << "output_id: " << dec_surface->picture_buffer_id(); 1007 output_pic_cb_.Run(dec_surface->input_id(), dec_surface->va_surface());
1725 output_pic_cb_.Run(dec_surface->input_id(),
1726 dec_surface->picture_buffer_id());
1727 1008
1728 return true; 1009 return true;
1729 } 1010 }
1730 1011
1731 bool VaapiH264Decoder::Flush() { 1012 void VaapiH264Decoder::ClearDPB() {
1013 // Clear DPB contents, marking the pictures as unused first.
1014 for (H264DPB::Pictures::iterator it = dpb_.begin(); it != dpb_.end(); ++it)
1015 UnassignSurfaceFromPoC((*it)->pic_order_cnt);
1016
1017 dpb_.Clear();
1018 last_output_poc_ = 0;
1019 }
1020
1021 bool VaapiH264Decoder::OutputAllRemainingPics() {
1732 // Output all pictures that are waiting to be outputted. 1022 // Output all pictures that are waiting to be outputted.
1733 FinishPrevFrameIfPresent(); 1023 FinishPrevFrameIfPresent();
1734 H264Picture::PtrVector to_output; 1024 H264Picture::PtrVector to_output;
1735 dpb_.GetNotOutputtedPicsAppending(to_output); 1025 dpb_.GetNotOutputtedPicsAppending(to_output);
1736 // Sort them by ascending POC to output in order. 1026 // Sort them by ascending POC to output in order.
1737 std::sort(to_output.begin(), to_output.end(), POCAscCompare()); 1027 std::sort(to_output.begin(), to_output.end(), POCAscCompare());
1738 1028
1739 H264Picture::PtrVector::iterator it; 1029 H264Picture::PtrVector::iterator it;
1740 for (it = to_output.begin(); it != to_output.end(); ++it) { 1030 for (it = to_output.begin(); it != to_output.end(); ++it) {
1741 if (!OutputPic(*it)) { 1031 if (!OutputPic(*it)) {
1742 DVLOG(1) << "Failed to output pic POC: " << (*it)->pic_order_cnt; 1032 DVLOG(1) << "Failed to output pic POC: " << (*it)->pic_order_cnt;
1743 return false; 1033 return false;
1744 } 1034 }
1745 } 1035 }
1746 1036
1747 // And clear DPB contents, marking the pictures as unused first.
1748 // The surfaces will be released after they have been displayed and returned.
1749 for (H264DPB::Pictures::iterator it = dpb_.begin(); it != dpb_.end(); ++it) {
1750 UnassignSurfaceFromPoC((*it)->pic_order_cnt);
1751 }
1752 dpb_.Clear();
1753 last_output_poc_ = 0;
1754
1755 return true; 1037 return true;
1756 } 1038 }
1757 1039
1040 bool VaapiH264Decoder::Flush() {
1041 if (!OutputAllRemainingPics())
1042 return false;
1043
1044 ClearDPB();
1045
1046 DCHECK(decode_surfaces_in_use_.empty());
1047 return true;
1048 }
1049
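The reworked flush path above splits into OutputAllRemainingPics(), which sorts the not-yet-outputted DPB pictures by ascending POC before outputting them, and ClearDPB(). A tiny standalone illustration of that ordering step (the Pic struct is illustrative; the real comparator is POCAscCompare over H264Picture pointers):

#include <algorithm>
#include <iostream>
#include <vector>

struct Pic { int pic_order_cnt; };

struct POCAscCompare {
  bool operator()(const Pic& a, const Pic& b) const {
    return a.pic_order_cnt < b.pic_order_cnt;
  }
};

int main() {
  // Pictures still waiting in the DPB, listed in decode order.
  std::vector<Pic> to_output = { {8}, {2}, {6}, {4} };
  std::sort(to_output.begin(), to_output.end(), POCAscCompare());
  for (size_t i = 0; i < to_output.size(); ++i)
    std::cout << "output POC " << to_output[i].pic_order_cnt << "\n";
  return 0;
}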
1758 bool VaapiH264Decoder::StartNewFrame(H264SliceHeader* slice_hdr) { 1050 bool VaapiH264Decoder::StartNewFrame(H264SliceHeader* slice_hdr) {
1759 // TODO(posciak): add handling of max_num_ref_frames per spec. 1051 // TODO(posciak): add handling of max_num_ref_frames per spec.
1760 1052
1761 // If the new frame is an IDR, output what's left to output and clear DPB 1053 // If the new frame is an IDR, output what's left to output and clear DPB
1762 if (slice_hdr->idr_pic_flag) { 1054 if (slice_hdr->idr_pic_flag) {
1763 // (unless we are explicitly instructed not to do so). 1055 // (unless we are explicitly instructed not to do so).
1764 if (!slice_hdr->no_output_of_prior_pics_flag) { 1056 if (!slice_hdr->no_output_of_prior_pics_flag) {
1765 // Output DPB contents. 1057 // Output DPB contents.
1766 if (!Flush()) 1058 if (!Flush())
1767 return false; 1059 return false;
(...skipping 190 matching lines...)
1958 H264Picture* to_unmark = dpb_.GetLowestFrameNumWrapShortRefPic(); 1250 H264Picture* to_unmark = dpb_.GetLowestFrameNumWrapShortRefPic();
1959 if (to_unmark == NULL) { 1251 if (to_unmark == NULL) {
1960 DVLOG(1) << "Couldn't find a short ref picture to unmark"; 1252 DVLOG(1) << "Couldn't find a short ref picture to unmark";
1961 return; 1253 return;
1962 } 1254 }
1963 to_unmark->ref = false; 1255 to_unmark->ref = false;
1964 } 1256 }
1965 } else { 1257 } else {
1966 // Shouldn't get here. 1258 // Shouldn't get here.
1967 DVLOG(1) << "Interlaced video not supported."; 1259 DVLOG(1) << "Interlaced video not supported.";
1968 ReportToUMA(INTERLACED_STREAM); 1260 report_error_to_uma_cb_.Run(INTERLACED_STREAM);
1969 } 1261 }
1970 } else { 1262 } else {
1971 // Stream has instructions on how to discard pictures from the DPB and how 1263 // Stream has instructions on how to discard pictures from the DPB and how
1972 // to mark/unmark existing reference pictures. Do it. 1264 // to mark/unmark existing reference pictures. Do it.
1973 // Spec 8.2.5.4. 1265 // Spec 8.2.5.4.
1974 if (curr_pic_->field == H264Picture::FIELD_NONE) { 1266 if (curr_pic_->field == H264Picture::FIELD_NONE) {
1975 HandleMemoryManagementOps(); 1267 HandleMemoryManagementOps();
1976 } else { 1268 } else {
1977 // Shouldn't get here. 1269 // Shouldn't get here.
1978 DVLOG(1) << "Interlaced video not supported."; 1270 DVLOG(1) << "Interlaced video not supported.";
1979 ReportToUMA(INTERLACED_STREAM); 1271 report_error_to_uma_cb_.Run(INTERLACED_STREAM);
1980 } 1272 }
1981 } 1273 }
1982 } 1274 }
1983 } 1275 }
1984 1276
1985 bool VaapiH264Decoder::FinishPicture() { 1277 bool VaapiH264Decoder::FinishPicture() {
1986 DCHECK(curr_pic_.get()); 1278 DCHECK(curr_pic_.get());
1987 1279
1988 // Finish processing previous picture. 1280 // Finish processing previous picture.
1989 // Start by storing previous reference picture data for later use, 1281 // Start by storing previous reference picture data for later use,
1990 // if the picture being finished is a reference picture. 1282 // if the picture being finished is a reference picture.
1991 if (curr_pic_->ref) { 1283 if (curr_pic_->ref) {
1992 ReferencePictureMarking(); 1284 ReferencePictureMarking();
1993 prev_ref_has_memmgmnt5_ = curr_pic_->mem_mgmt_5; 1285 prev_ref_has_memmgmnt5_ = curr_pic_->mem_mgmt_5;
1994 prev_ref_top_field_order_cnt_ = curr_pic_->top_field_order_cnt; 1286 prev_ref_top_field_order_cnt_ = curr_pic_->top_field_order_cnt;
1995 prev_ref_pic_order_cnt_msb_ = curr_pic_->pic_order_cnt_msb; 1287 prev_ref_pic_order_cnt_msb_ = curr_pic_->pic_order_cnt_msb;
1996 prev_ref_pic_order_cnt_lsb_ = curr_pic_->pic_order_cnt_lsb; 1288 prev_ref_pic_order_cnt_lsb_ = curr_pic_->pic_order_cnt_lsb;
1997 prev_ref_field_ = curr_pic_->field; 1289 prev_ref_field_ = curr_pic_->field;
1998 } 1290 }
1999 prev_has_memmgmnt5_ = curr_pic_->mem_mgmt_5; 1291 prev_has_memmgmnt5_ = curr_pic_->mem_mgmt_5;
2000 prev_frame_num_offset_ = curr_pic_->frame_num_offset; 1292 prev_frame_num_offset_ = curr_pic_->frame_num_offset;
2001 1293
2002 // Remove unused (for reference or later output) pictures from DPB, marking 1294 // Remove unused (for reference or later output) pictures from DPB, marking
2003 // them as such. 1295 // them as such.
2004 for (H264DPB::Pictures::iterator it = dpb_.begin(); it != dpb_.end(); ++it) { 1296 for (H264DPB::Pictures::iterator it = dpb_.begin(); it != dpb_.end(); ++it) {
2005 if ((*it)->outputted && !(*it)->ref) 1297 if ((*it)->outputted && !(*it)->ref)
2006 UnassignSurfaceFromPoC((*it)->pic_order_cnt); 1298 UnassignSurfaceFromPoC((*it)->pic_order_cnt);
2007 } 1299 }
2008 dpb_.RemoveUnused(); 1300 dpb_.DeleteUnused();
2009 1301
2010 DVLOG(4) << "Finishing picture, DPB entries: " << dpb_.size() 1302 DVLOG(4) << "Finishing picture, entries in DPB: " << dpb_.size();
2011 << " Num available dec surfaces: "
2012 << num_available_decode_surfaces_;
2013 1303
2014 // Whatever happens below, curr_pic_ will stop managing the pointer to the 1304 // Whatever happens below, curr_pic_ will stop managing the pointer to the
2015 // picture after this function returns. The ownership will either be 1305 // picture after this function returns. The ownership will either be
2016 // transferred to DPB, if the image is still needed (for output and/or 1306 // transferred to DPB, if the image is still needed (for output and/or
2017 // reference), or the memory will be released if we manage to output it here 1307 // reference), or the memory will be released if we manage to output it here
2018 // without having to store it for future reference. 1308 // without having to store it for future reference.
2019 scoped_ptr<H264Picture> pic(curr_pic_.release()); 1309 scoped_ptr<H264Picture> pic(curr_pic_.release());
2020 1310
2021 // Get all pictures that haven't been outputted yet. 1311 // Get all pictures that haven't been outputted yet.
2022 H264Picture::PtrVector not_outputted; 1312 H264Picture::PtrVector not_outputted;
(...skipping 16 matching lines...)
2039 (*output_candidate)->pic_order_cnt <= last_output_poc_ + 2; 1329 (*output_candidate)->pic_order_cnt <= last_output_poc_ + 2;
2040 ++output_candidate) { 1330 ++output_candidate) {
2041 DCHECK_GE((*output_candidate)->pic_order_cnt, last_output_poc_); 1331 DCHECK_GE((*output_candidate)->pic_order_cnt, last_output_poc_);
2042 if (!OutputPic(*output_candidate)) 1332 if (!OutputPic(*output_candidate))
2043 return false; 1333 return false;
2044 1334
2045 if (!(*output_candidate)->ref) { 1335 if (!(*output_candidate)->ref) {
2046 // Current picture hasn't been inserted into DPB yet, so don't remove it 1336 // Current picture hasn't been inserted into DPB yet, so don't remove it
2047 // if we managed to output it immediately. 1337 // if we managed to output it immediately.
2048 if (*output_candidate != pic) 1338 if (*output_candidate != pic)
2049 dpb_.RemoveByPOC((*output_candidate)->pic_order_cnt); 1339 dpb_.DeleteByPOC((*output_candidate)->pic_order_cnt);
2050 // Mark as unused. 1340 // Mark as unused.
2051 UnassignSurfaceFromPoC((*output_candidate)->pic_order_cnt); 1341 UnassignSurfaceFromPoC((*output_candidate)->pic_order_cnt);
2052 } 1342 }
2053 } 1343 }
2054 1344
2055 // If we haven't managed to output the picture that we just decoded, or if 1345 // If we haven't managed to output the picture that we just decoded, or if
2056 // it's a reference picture, we have to store it in DPB. 1346 // it's a reference picture, we have to store it in DPB.
2057 if (!pic->outputted || pic->ref) { 1347 if (!pic->outputted || pic->ref) {
2058 if (dpb_.IsFull()) { 1348 if (dpb_.IsFull()) {
2059 // If we haven't managed to output anything to free up space in DPB 1349 // If we haven't managed to output anything to free up space in DPB
(...skipping 32 matching lines...)
2092 return 0; 1382 return 0;
2093 } 1383 }
2094 } 1384 }
2095 1385
2096 bool VaapiH264Decoder::ProcessSPS(int sps_id) { 1386 bool VaapiH264Decoder::ProcessSPS(int sps_id) {
2097 const H264SPS* sps = parser_.GetSPS(sps_id); 1387 const H264SPS* sps = parser_.GetSPS(sps_id);
2098 DCHECK(sps); 1388 DCHECK(sps);
2099 1389
2100 if (sps->frame_mbs_only_flag == 0) { 1390 if (sps->frame_mbs_only_flag == 0) {
2101 DVLOG(1) << "frame_mbs_only_flag != 1 not supported"; 1391 DVLOG(1) << "frame_mbs_only_flag != 1 not supported";
2102 ReportToUMA(FRAME_MBS_ONLY_FLAG_NOT_ONE); 1392 report_error_to_uma_cb_.Run(FRAME_MBS_ONLY_FLAG_NOT_ONE);
2103 return false; 1393 return false;
2104 } 1394 }
2105 1395
2106 if (sps->gaps_in_frame_num_value_allowed_flag) { 1396 if (sps->gaps_in_frame_num_value_allowed_flag) {
2107 DVLOG(1) << "Gaps in frame numbers not supported"; 1397 DVLOG(1) << "Gaps in frame numbers not supported";
2108 ReportToUMA(GAPS_IN_FRAME_NUM); 1398 report_error_to_uma_cb_.Run(GAPS_IN_FRAME_NUM);
2109 return false; 1399 return false;
2110 } 1400 }
2111 1401
2112 curr_sps_id_ = sps->seq_parameter_set_id; 1402 curr_sps_id_ = sps->seq_parameter_set_id;
2113 1403
2114 // Calculate picture height/width in macroblocks and pixels 1404 // Calculate picture height/width in macroblocks and pixels
2115 // (spec 7.4.2.1.1, 7.4.3). 1405 // (spec 7.4.2.1.1, 7.4.3).
2116 int width_mb = sps->pic_width_in_mbs_minus1 + 1; 1406 int width_mb = sps->pic_width_in_mbs_minus1 + 1;
2117 int height_mb = (2 - sps->frame_mbs_only_flag) * 1407 int height_mb = (2 - sps->frame_mbs_only_flag) *
2118 (sps->pic_height_in_map_units_minus1 + 1); 1408 (sps->pic_height_in_map_units_minus1 + 1);
2119 1409
2120 int width = 16 * width_mb; 1410 int width = 16 * width_mb;
2121 int height = 16 * height_mb; 1411 int height = 16 * height_mb;
2122 1412
2123 DVLOG(1) << "New picture size: " << width << "x" << height; 1413 DVLOG(1) << "New picture size: " << width << "x" << height;
2124 if (width == 0 || height == 0) { 1414 if (width == 0 || height == 0) {
2125 DVLOG(1) << "Invalid picture size!"; 1415 DVLOG(1) << "Invalid picture size!";
2126 return false; 1416 return false;
2127 } 1417 }
2128 1418
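As a worked example of the size arithmetic above (values chosen for illustration, not taken from any particular stream), a progressive 1080p SPS typically carries pic_width_in_mbs_minus1 = 119, pic_height_in_map_units_minus1 = 67 and frame_mbs_only_flag = 1:

    width_mb  = 119 + 1            = 120
    height_mb = (2 - 1) * (67 + 1) = 68
    width     = 16 * 120           = 1920
    height    = 16 * 68            = 1088

The coded size is 1920x1088; the visible 1920x1080 area comes from the SPS frame cropping fields, which the code shown here does not use.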
2129 if ((pic_width_ != -1 || pic_height_ != -1) && 1419 if (!pic_size_.IsEmpty() &&
2130 (width != pic_width_ || height != pic_height_)) { 1420 (width != pic_size_.width() || height != pic_size_.height())) {
2131 DVLOG(1) << "Picture size changed mid-stream"; 1421 DVLOG(1) << "Picture size changed mid-stream";
2132 ReportToUMA(MID_STREAM_RESOLUTION_CHANGE); 1422 report_error_to_uma_cb_.Run(MID_STREAM_RESOLUTION_CHANGE);
2133 return false; 1423 return false;
2134 } 1424 }
2135 1425
2136 pic_width_ = width; 1426 pic_size_.SetSize(width, height);
2137 pic_height_ = height;
2138 1427
2139 max_pic_order_cnt_lsb_ = 1 << (sps->log2_max_pic_order_cnt_lsb_minus4 + 4); 1428 max_pic_order_cnt_lsb_ = 1 << (sps->log2_max_pic_order_cnt_lsb_minus4 + 4);
2140 max_frame_num_ = 1 << (sps->log2_max_frame_num_minus4 + 4); 1429 max_frame_num_ = 1 << (sps->log2_max_frame_num_minus4 + 4);
2141 1430
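To make the two wrap limits above concrete (example exponents only): with log2_max_pic_order_cnt_lsb_minus4 = 2 and log2_max_frame_num_minus4 = 0,

    max_pic_order_cnt_lsb_ = 1 << (2 + 4) = 64
    max_frame_num_         = 1 << (0 + 4) = 16

so pic_order_cnt_lsb and frame_num in slice headers wrap modulo 64 and 16, respectively.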
2142 int level = sps->level_idc; 1431 int level = sps->level_idc;
2143 int max_dpb_mbs = LevelToMaxDpbMbs(level); 1432 int max_dpb_mbs = LevelToMaxDpbMbs(level);
2144 if (max_dpb_mbs == 0) 1433 if (max_dpb_mbs == 0)
2145 return false; 1434 return false;
2146 1435
2147 size_t max_dpb_size = std::min(max_dpb_mbs / (width_mb * height_mb), 1436 size_t max_dpb_size = std::min(max_dpb_mbs / (width_mb * height_mb),
(...skipping 24 matching lines...)
2172 1461
2173 return true; 1462 return true;
2174 } 1463 }
2175 1464
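The DPB sizing that starts above (its tail is collapsed in this view) follows the level limits in the spec. A hedged worked example, assuming the elided second argument of std::min is the H.264 ceiling of 16 frames, and reusing the 1920x1088 numbers from earlier:

    LevelToMaxDpbMbs(41)  = 32768                  (Table A-1, level 4.1)
    width_mb * height_mb  = 120 * 68 = 8160
    max_dpb_size          = min(32768 / 8160, 16) = min(4, 16) = 4 frames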
2176 bool VaapiH264Decoder::ProcessSlice(H264SliceHeader* slice_hdr) { 1465 bool VaapiH264Decoder::ProcessSlice(H264SliceHeader* slice_hdr) {
2177 prev_frame_num_ = frame_num_; 1466 prev_frame_num_ = frame_num_;
2178 frame_num_ = slice_hdr->frame_num; 1467 frame_num_ = slice_hdr->frame_num;
2179 1468
2180 if (prev_frame_num_ > 0 && prev_frame_num_ < frame_num_ - 1) { 1469 if (prev_frame_num_ > 0 && prev_frame_num_ < frame_num_ - 1) {
2181 DVLOG(1) << "Gap in frame_num!"; 1470 DVLOG(1) << "Gap in frame_num!";
2182 ReportToUMA(GAPS_IN_FRAME_NUM); 1471 report_error_to_uma_cb_.Run(GAPS_IN_FRAME_NUM);
2183 return false; 1472 return false;
2184 } 1473 }
2185 1474
2186 if (slice_hdr->field_pic_flag == 0) 1475 if (slice_hdr->field_pic_flag == 0)
2187 max_pic_num_ = max_frame_num_; 1476 max_pic_num_ = max_frame_num_;
2188 else 1477 else
2189 max_pic_num_ = 2 * max_frame_num_; 1478 max_pic_num_ = 2 * max_frame_num_;
2190 1479
2191 // TODO(posciak): switch to new picture detection per 7.4.1.2.4. 1480 // TODO(posciak): switch to new picture detection per 7.4.1.2.4.
2192 if (curr_pic_ != NULL && slice_hdr->first_mb_in_slice != 0) { 1481 if (curr_pic_ != NULL && slice_hdr->first_mb_in_slice != 0) {
(...skipping 17 matching lines...)
2210 state_ = kError; \ 1499 state_ = kError; \
2211 return VaapiH264Decoder::kDecodeError; \ 1500 return VaapiH264Decoder::kDecodeError; \
2212 } while (0) 1501 } while (0)
2213 1502
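SET_ERROR_AND_RETURN, whose opening lines are collapsed above, ends with the usual do { ... } while (0) wrapper. A generic sketch of why multi-statement macros need it (the macro names here are illustrative, not from this file):

    #define FAIL_BAD()  state_ = kError; return kDecodeError
    #define FAIL_GOOD() do { state_ = kError; return kDecodeError; } while (0)

    // if (error)
    //   FAIL_BAD();    // Only "state_ = kError" is guarded by the if; the
    //                  // return is unconditional, and a following else
    //                  // would not compile.
    // if (error)
    //   FAIL_GOOD();   // Expands to a single statement, so it composes with
    //                  // if/else and the trailing semicolon as expected.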
2214 VaapiH264Decoder::DecResult VaapiH264Decoder::DecodeInitial(int32 input_id) { 1503 VaapiH264Decoder::DecResult VaapiH264Decoder::DecodeInitial(int32 input_id) {
2215 // Decode enough to get required picture size (i.e. until we find an SPS), 1504 // Decode enough to get required picture size (i.e. until we find an SPS),
2216 // if we get any slice data, we are missing the beginning of the stream. 1505 // if we get any slice data, we are missing the beginning of the stream.
2217 H264NALU nalu; 1506 H264NALU nalu;
2218 H264Parser::Result res; 1507 H264Parser::Result res;
2219 1508
2220 DCHECK_NE(state_, kUninitialized); 1509 if (state_ == kDecoding)
1510 return kReadyToDecode;
2221 1511
2222 curr_input_id_ = input_id; 1512 curr_input_id_ = input_id;
2223 1513
2224 while (1) { 1514 while (1) {
2225 if (state_ == kAfterReset && num_available_decode_surfaces_ == 0) { 1515 // If we've already decoded some of the stream (after reset), we may be able
1516 // to go into decoding state not only starting at/resuming from an SPS, but
1517 // also from other resume points, such as IDRs. In such a case we need an
1518 // output surface in case we end up decoding a frame. Otherwise we just look
1519 // for an SPS and don't need any outputs.
1520 if (curr_sps_id_ != -1 && available_va_surfaces_.empty()) {
2226 DVLOG(4) << "No output surfaces available"; 1521 DVLOG(4) << "No output surfaces available";
2227 return kNoOutputAvailable; 1522 return kNoOutputAvailable;
2228 } 1523 }
2229 1524
2230 // Get next NALU looking for SPS or IDR if after reset. 1525 // Get next NALU looking for SPS or IDR if after reset.
2231 res = parser_.AdvanceToNextNALU(&nalu); 1526 res = parser_.AdvanceToNextNALU(&nalu);
2232 if (res == H264Parser::kEOStream) { 1527 if (res == H264Parser::kEOStream) {
2233 DVLOG(1) << "Could not find SPS before EOS"; 1528 DVLOG(1) << "Could not find SPS before EOS";
2234 return kNeedMoreStreamData; 1529 return kNeedMoreStreamData;
2235 } else if (res != H264Parser::kOk) { 1530 } else if (res != H264Parser::kOk) {
2236 SET_ERROR_AND_RETURN(); 1531 SET_ERROR_AND_RETURN();
2237 } 1532 }
2238 1533
2239 DVLOG(4) << " NALU found: " << static_cast<int>(nalu.nal_unit_type); 1534 DVLOG(4) << " NALU found: " << static_cast<int>(nalu.nal_unit_type);
2240 1535
2241 switch (nalu.nal_unit_type) { 1536 switch (nalu.nal_unit_type) {
2242 case H264NALU::kSPS: 1537 case H264NALU::kSPS:
2243 res = parser_.ParseSPS(&curr_sps_id_); 1538 res = parser_.ParseSPS(&curr_sps_id_);
2244 if (res != H264Parser::kOk) 1539 if (res != H264Parser::kOk)
2245 SET_ERROR_AND_RETURN(); 1540 SET_ERROR_AND_RETURN();
2246 1541
2247 if (!ProcessSPS(curr_sps_id_)) 1542 if (!ProcessSPS(curr_sps_id_))
2248 SET_ERROR_AND_RETURN(); 1543 SET_ERROR_AND_RETURN();
2249 1544
2250 // Just got information about the video size from SPS, so we can
2251 // now allocate surfaces and let the client know we are ready to
2252 // accept output buffers and decode.
2253 if (!CreateVASurfaces())
2254 SET_ERROR_AND_RETURN();
2255
2256 state_ = kDecoding; 1545 state_ = kDecoding;
2257 return kReadyToDecode; 1546 return kReadyToDecode;
2258 1547
2259 case H264NALU::kIDRSlice: 1548 case H264NALU::kIDRSlice:
2260 // If after reset, should be able to recover from an IDR. 1549 // If after reset, should be able to recover from an IDR.
2261 if (state_ == kAfterReset) { 1550 // TODO(posciak): the IDR may require an SPS that we don't have
1551 // available. For now we'd fail if that happens, but ideally we'd like
1552 // to keep going until the next SPS in the stream.
1553 if (curr_sps_id_ != -1) {
2262 H264SliceHeader slice_hdr; 1554 H264SliceHeader slice_hdr;
2263 1555
2264 res = parser_.ParseSliceHeader(nalu, &slice_hdr); 1556 res = parser_.ParseSliceHeader(nalu, &slice_hdr);
2265 if (res != H264Parser::kOk) 1557 if (res != H264Parser::kOk)
2266 SET_ERROR_AND_RETURN(); 1558 SET_ERROR_AND_RETURN();
2267 1559
2268 if (!ProcessSlice(&slice_hdr)) 1560 if (!ProcessSlice(&slice_hdr))
2269 SET_ERROR_AND_RETURN(); 1561 SET_ERROR_AND_RETURN();
2270 1562
2271 state_ = kDecoding; 1563 state_ = kDecoding;
(...skipping 34 matching lines...)
2306 DVLOG(1) << "Decoder not ready: error in stream or not initialized"; 1598 DVLOG(1) << "Decoder not ready: error in stream or not initialized";
2307 return kDecodeError; 1599 return kDecodeError;
2308 } 1600 }
2309 1601
2310 // All of the actions below might result in decoding a picture from 1602 // All of the actions below might result in decoding a picture from
2311 // previously parsed data, but we still have to handle/parse current input 1603 // previously parsed data, but we still have to handle/parse current input
2312 // first. 1604 // first.
2313 // Note: this may drop some already decoded frames if there are errors 1605 // Note: this may drop some already decoded frames if there are errors
2314 // further in the stream, but we are OK with that. 1606 // further in the stream, but we are OK with that.
2315 while (1) { 1607 while (1) {
2316 if (num_available_decode_surfaces_ == 0) { 1608 if (available_va_surfaces_.empty()) {
2317 DVLOG(4) << "No output surfaces available"; 1609 DVLOG(4) << "No output surfaces available";
2318 return kNoOutputAvailable; 1610 return kNoOutputAvailable;
2319 } 1611 }
1612
2320 par_res = parser_.AdvanceToNextNALU(&nalu); 1613 par_res = parser_.AdvanceToNextNALU(&nalu);
2321 if (par_res == H264Parser::kEOStream) 1614 if (par_res == H264Parser::kEOStream)
2322 return kNeedMoreStreamData; 1615 return kNeedMoreStreamData;
2323 else if (par_res != H264Parser::kOk) 1616 else if (par_res != H264Parser::kOk)
2324 SET_ERROR_AND_RETURN(); 1617 SET_ERROR_AND_RETURN();
2325 1618
2326 DVLOG(4) << "NALU found: " << static_cast<int>(nalu.nal_unit_type); 1619 DVLOG(4) << "NALU found: " << static_cast<int>(nalu.nal_unit_type);
2327 1620
2328 switch (nalu.nal_unit_type) { 1621 switch (nalu.nal_unit_type) {
2329 case H264NALU::kNonIDRSlice: 1622 case H264NALU::kNonIDRSlice:
(...skipping 34 matching lines...)
2364 SET_ERROR_AND_RETURN(); 1657 SET_ERROR_AND_RETURN();
2365 1658
2366 if (!ProcessPPS(pps_id)) 1659 if (!ProcessPPS(pps_id))
2367 SET_ERROR_AND_RETURN(); 1660 SET_ERROR_AND_RETURN();
2368 break; 1661 break;
2369 1662
2370 default: 1663 default:
2371 // skip NALU 1664 // skip NALU
2372 break; 1665 break;
2373 } 1666 }
2374
2375 // If the last action resulted in decoding a frame, possibly from older
2376 // data, return. Otherwise keep reading the stream.
2377 if (frame_ready_at_hw_) {
2378 frame_ready_at_hw_ = false;
2379 return kDecodedFrame;
2380 }
2381 } 1667 }
2382 } 1668 }
2383 1669
2384 size_t VaapiH264Decoder::GetRequiredNumOfPictures() { 1670 size_t VaapiH264Decoder::GetRequiredNumOfPictures() {
2385 return dpb_.max_num_pics() + kPicsInPipeline; 1671 return dpb_.max_num_pics() + kPicsInPipeline;
2386 } 1672 }
2387 1673
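A hedged sketch of how a caller might drive these entry points, based only on the return codes visible in this file; FeedMoreStream and ProvidePictures are assumed helpers for illustration, not part of the decoder's verified interface:

    // Hypothetical caller loop (helper names are illustrative assumptions).
    VaapiH264Decoder::DecResult res = decoder->DecodeInitial(input_id);
    while (res == VaapiH264Decoder::kNeedMoreStreamData) {
      FeedMoreStream(decoder);                       // assumed: supply the next buffer
      res = decoder->DecodeInitial(next_input_id);
    }
    if (res == VaapiH264Decoder::kReadyToDecode) {
      // The picture size is now known; allocate enough output pictures
      // before continuing with the main decode call (elided above), which
      // reports codes such as kNeedMoreStreamData, kNoOutputAvailable or
      // kDecodeError back to the caller.
      ProvidePictures(decoder, decoder->GetRequiredNumOfPictures());
    }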
2388 // static
2389 void VaapiH264Decoder::PreSandboxInitialization() {
2390 DCHECK(!pre_sandbox_init_done_);
2391 vaapi_handle = dlopen("libva.so", RTLD_NOW);
2392 vaapi_x11_handle = dlopen("libva-x11.so", RTLD_NOW);
2393 pre_sandbox_init_done_ = vaapi_handle && vaapi_x11_handle;
2394 }
2395
2396 // static
2397 bool VaapiH264Decoder::PostSandboxInitialization() {
2398 if (!pre_sandbox_init_done_)
2399 return false;
2400 #define VAAPI_DLSYM(name, handle) \
2401 VAAPI_##name = reinterpret_cast<Vaapi##name>(dlsym((handle), "va"#name)) \
2402
2403 VAAPI_DLSYM(GetDisplay, vaapi_x11_handle);
2404 VAAPI_DLSYM(DisplayIsValid, vaapi_handle);
2405 VAAPI_DLSYM(Initialize, vaapi_handle);
2406 VAAPI_DLSYM(Terminate, vaapi_handle);
2407 VAAPI_DLSYM(GetConfigAttributes, vaapi_handle);
2408 VAAPI_DLSYM(CreateConfig, vaapi_handle);
2409 VAAPI_DLSYM(DestroyConfig, vaapi_handle);
2410 VAAPI_DLSYM(CreateSurfaces, vaapi_handle);
2411 VAAPI_DLSYM(DestroySurfaces, vaapi_handle);
2412 VAAPI_DLSYM(CreateContext, vaapi_handle);
2413 VAAPI_DLSYM(DestroyContext, vaapi_handle);
2414 VAAPI_DLSYM(PutSurface, vaapi_x11_handle);
2415 VAAPI_DLSYM(SyncSurface, vaapi_x11_handle);
2416 VAAPI_DLSYM(BeginPicture, vaapi_handle);
2417 VAAPI_DLSYM(RenderPicture, vaapi_handle);
2418 VAAPI_DLSYM(EndPicture, vaapi_handle);
2419 VAAPI_DLSYM(CreateBuffer, vaapi_handle);
2420 VAAPI_DLSYM(DestroyBuffer, vaapi_handle);
2421 VAAPI_DLSYM(ErrorStr, vaapi_handle);
2422 #undef VAAPI_DLSYM
2423
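For reference, the first invocation above expands mechanically to the following (the "va" prefix is pasted onto the stringized name by the macro):

    VAAPI_GetDisplay =
        reinterpret_cast<VaapiGetDisplay>(dlsym(vaapi_x11_handle, "vaGetDisplay"));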
2424 return VAAPI_GetDisplay &&
2425 VAAPI_DisplayIsValid &&
2426 VAAPI_Initialize &&
2427 VAAPI_Terminate &&
2428 VAAPI_GetConfigAttributes &&
2429 VAAPI_CreateConfig &&
2430 VAAPI_DestroyConfig &&
2431 VAAPI_CreateSurfaces &&
2432 VAAPI_DestroySurfaces &&
2433 VAAPI_CreateContext &&
2434 VAAPI_DestroyContext &&
2435 VAAPI_PutSurface &&
2436 VAAPI_SyncSurface &&
2437 VAAPI_BeginPicture &&
2438 VAAPI_RenderPicture &&
2439 VAAPI_EndPicture &&
2440 VAAPI_CreateBuffer &&
2441 VAAPI_DestroyBuffer &&
2442 VAAPI_ErrorStr;
2443 }
2444
2445 } // namespace content 1674 } // namespace content