Chromium Code Reviews

Side by Side Diff: content/common/gpu/media/vaapi_h264_decoder.cc

Issue 14914009: VAVDA: Redesign stage 1. (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src
Patch Set: Created 7 years, 7 months ago
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include <dlfcn.h>
6
7 #include <algorithm> 5 #include <algorithm>
8 #include <limits> 6 #include <limits>
9 7
10 #include "base/bind.h" 8 #include "base/bind.h"
11 #include "base/bind_helpers.h" 9 #include "base/bind_helpers.h"
12 #include "base/metrics/histogram.h"
13 #include "base/stl_util.h" 10 #include "base/stl_util.h"
14 #include "content/common/gpu/media/vaapi_h264_decoder.h" 11 #include "content/common/gpu/media/vaapi_h264_decoder.h"
15 #include "third_party/libva/va/va.h"
16 #include "third_party/libva/va/va_x11.h"
17 #include "ui/gl/gl_bindings.h"
18 #include "ui/gl/scoped_binders.h"
19
20 namespace {
21
22 enum VAVDAH264DecoderFailure {
23 FRAME_MBS_ONLY_FLAG_NOT_ONE = 0,
24 GAPS_IN_FRAME_NUM = 1,
25 MID_STREAM_RESOLUTION_CHANGE = 2,
26 INTERLACED_STREAM = 3,
27 VAAPI_ERROR = 4,
28 VAVDA_H264_DECODER_FAILURES_MAX,
29 };
30
31 static void ReportToUMA(VAVDAH264DecoderFailure failure) {
32 UMA_HISTOGRAM_ENUMERATION("Media.VAVDAH264.DecoderFailure",
33 failure,
34 VAVDA_H264_DECODER_FAILURES_MAX);
35 }
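// Note (Chromium UMA convention, not part of this change): the last argument
// to UMA_HISTOGRAM_ENUMERATION is an exclusive upper bound, which is what
// VAVDA_H264_DECODER_FAILURES_MAX provides above; new failure codes should be
// appended before it and existing values never reordered.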
36
37 } // namespace
38
39 #define LOG_VA_ERROR_AND_RECORD_UMA(va_res, err_msg) \
40 do { \
41 DVLOG(1) << err_msg \
42 << " VA error: " << VAAPI_ErrorStr(va_res); \
43 ReportToUMA(VAAPI_ERROR); \
44 } while (0)
45
46 #define VA_LOG_ON_ERROR(va_res, err_msg) \
47 do { \
48 if ((va_res) != VA_STATUS_SUCCESS) { \
49 LOG_VA_ERROR_AND_RECORD_UMA(va_res, err_msg); \
50 } \
51 } while (0)
52
53 #define VA_SUCCESS_OR_RETURN(va_res, err_msg, ret) \
54 do { \
55 if ((va_res) != VA_STATUS_SUCCESS) { \
56 LOG_VA_ERROR_AND_RECORD_UMA(va_res, err_msg); \
57 return (ret); \
58 } \
59 } while (0)
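The do { ... } while (0) wrappers make each macro expand to a single statement, so it composes safely with unbraced if/else branches. A minimal usage sketch, mirroring the call sites later in this file:

  VAStatus va_res = VAAPI_Initialize(va_display_, &major_version, &minor_version);
  // On failure: logs the message plus vaErrorStr() output, reports VAAPI_ERROR
  // to UMA, and returns false from the calling function.
  VA_SUCCESS_OR_RETURN(va_res, "vaInitialize failed", false);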
60 12
61 namespace content { 13 namespace content {
62 14
63 void *vaapi_handle = NULL; 15 // Decode surface, used for decoding and reference. input_id comes from client
64 void *vaapi_x11_handle = NULL; 16 // and is associated with the surface that was produced as the result
65 17 // of decoding a bitstream buffer with that id.
66 typedef VADisplay (*VaapiGetDisplay)(Display *dpy);
67 typedef int (*VaapiDisplayIsValid)(VADisplay dpy);
68 typedef VAStatus (*VaapiInitialize)(VADisplay dpy,
69 int *major_version,
70 int *minor_version);
71 typedef VAStatus (*VaapiTerminate)(VADisplay dpy);
72 typedef VAStatus (*VaapiGetConfigAttributes)(VADisplay dpy,
73 VAProfile profile,
74 VAEntrypoint entrypoint,
75 VAConfigAttrib *attrib_list,
76 int num_attribs);
77 typedef VAStatus (*VaapiCreateConfig)(VADisplay dpy,
78 VAProfile profile,
79 VAEntrypoint entrypoint,
80 VAConfigAttrib *attrib_list,
81 int num_attribs,
82 VAConfigID *config_id);
83 typedef VAStatus (*VaapiDestroyConfig)(VADisplay dpy, VAConfigID config_id);
84 typedef VAStatus (*VaapiCreateSurfaces)(VADisplay dpy,
85 int width,
86 int height,
87 int format,
88 int num_surfaces,
89 VASurfaceID *surfaces);
90 typedef VAStatus (*VaapiDestroySurfaces)(VADisplay dpy,
91 VASurfaceID *surfaces,
92 int num_surfaces);
93 typedef VAStatus (*VaapiCreateContext)(VADisplay dpy,
94 VAConfigID config_id,
95 int picture_width,
96 int picture_height,
97 int flag,
98 VASurfaceID *render_targets,
99 int num_render_targets,
100 VAContextID *context);
101 typedef VAStatus (*VaapiDestroyContext)(VADisplay dpy, VAContextID context);
102 typedef VAStatus (*VaapiPutSurface)(VADisplay dpy,
103 VASurfaceID surface,
104 Drawable draw,
105 short srcx,
106 short srcy,
107 unsigned short srcw,
108 unsigned short srch,
109 short destx,
110 short desty,
111 unsigned short destw,
112 unsigned short desth,
113 VARectangle *cliprects,
114 unsigned int number_cliprects,
115 unsigned int flags);
116 typedef VAStatus (*VaapiSyncSurface)(VADisplay dpy, VASurfaceID render_target);
117 typedef VAStatus (*VaapiBeginPicture)(VADisplay dpy,
118 VAContextID context,
119 VASurfaceID render_target);
120 typedef VAStatus (*VaapiRenderPicture)(VADisplay dpy,
121 VAContextID context,
122 VABufferID *buffers,
123 int num_buffers);
124 typedef VAStatus (*VaapiEndPicture)(VADisplay dpy, VAContextID context);
125 typedef VAStatus (*VaapiCreateBuffer)(VADisplay dpy,
126 VAContextID context,
127 VABufferType type,
128 unsigned int size,
129 unsigned int num_elements,
130 void *data,
131 VABufferID *buf_id);
132 typedef VAStatus (*VaapiDestroyBuffer)(VADisplay dpy, VABufferID buffer_id);
133 typedef const char* (*VaapiErrorStr)(VAStatus error_status);
134
135 #define VAAPI_SYM(name, handle) Vaapi##name VAAPI_##name = NULL
136
137 VAAPI_SYM(GetDisplay, vaapi_x11_handle);
138 VAAPI_SYM(DisplayIsValid, vaapi_handle);
139 VAAPI_SYM(Initialize, vaapi_handle);
140 VAAPI_SYM(Terminate, vaapi_handle);
141 VAAPI_SYM(GetConfigAttributes, vaapi_handle);
142 VAAPI_SYM(CreateConfig, vaapi_handle);
143 VAAPI_SYM(DestroyConfig, vaapi_handle);
144 VAAPI_SYM(CreateSurfaces, vaapi_handle);
145 VAAPI_SYM(DestroySurfaces, vaapi_handle);
146 VAAPI_SYM(CreateContext, vaapi_handle);
147 VAAPI_SYM(DestroyContext, vaapi_handle);
148 VAAPI_SYM(PutSurface, vaapi_x11_handle);
149 VAAPI_SYM(SyncSurface, vaapi_x11_handle);
150 VAAPI_SYM(BeginPicture, vaapi_handle);
151 VAAPI_SYM(RenderPicture, vaapi_handle);
152 VAAPI_SYM(EndPicture, vaapi_handle);
153 VAAPI_SYM(CreateBuffer, vaapi_handle);
154 VAAPI_SYM(DestroyBuffer, vaapi_handle);
155 VAAPI_SYM(ErrorStr, vaapi_handle);
156
157 // static
158 bool VaapiH264Decoder::pre_sandbox_init_done_ = false;
159
160 class VaapiH264Decoder::DecodeSurface { 18 class VaapiH264Decoder::DecodeSurface {
161 public: 19 public:
162 DecodeSurface(const GLXFBConfig& fb_config, 20 DecodeSurface(int poc,
163 Display* x_display, 21 int32 input_id,
164 VADisplay va_display, 22 const scoped_refptr<VASurface>& va_surface);
165 const base::Callback<bool(void)>& make_context_current, 23 DecodeSurface(int poc, const scoped_refptr<DecodeSurface>& dec_surface);
166 VASurfaceID va_surface_id,
167 int32 picture_buffer_id,
168 uint32 texture_id,
169 int width, int height);
170 ~DecodeSurface(); 24 ~DecodeSurface();
171 25
172 VASurfaceID va_surface_id() { 26 int poc() {
173 return va_surface_id_; 27 return poc_;
174 } 28 }
175 29
176 int32 picture_buffer_id() { 30 VASurfaceID va_surface_id() {
177 return picture_buffer_id_; 31 return va_surface_->id();
178 } 32 }
179 33
180 uint32 texture_id() { 34 scoped_refptr<VASurface> va_surface() {
181 return texture_id_; 35 return va_surface_;
182 }
183
184 bool available() {
185 return available_;
186 }
187
188 bool used() {
189 return used_;
190 }
191
192 void set_used(bool used) {
193 DCHECK(!available_);
194 used_ = used;
195 }
196
197 bool at_client() {
198 return at_client_;
199 }
200
201 void set_at_client(bool at_client) {
202 DCHECK(!available_);
203 at_client_ = at_client;
204 } 36 }
205 37
206 int32 input_id() { 38 int32 input_id() {
207 return input_id_; 39 return input_id_;
208 } 40 }
209 41
210 int poc() {
211 return poc_;
212 }
213
214 // Associate the surface with |input_id| and |poc|, and make it unavailable
215 // (in use).
216 void Acquire(int32 input_id, int poc);
217
218 // Make this surface available, ready to be reused.
219 void Release();
220
221 // Has to be called before output to sync texture contents.
222 // Returns true if successful.
223 bool Sync();
224
225 private: 42 private:
226 Display* x_display_; 43 int poc_;
227 VADisplay va_display_;
228 base::Callback<bool(void)> make_context_current_;
229 VASurfaceID va_surface_id_;
230
231 // Client-provided ids.
232 int32 input_id_; 44 int32 input_id_;
233 int32 picture_buffer_id_; 45 scoped_refptr<VASurface> va_surface_;
234 uint32 texture_id_;
235
236 int width_;
237 int height_;
238
239 // Available for decoding (data no longer used for reference or displaying).
240 // TODO(posciak): this is almost surely not needed anymore. Rethink and
241 // remove if possible.
242 bool available_;
243 // Used for decoding.
244 bool used_;
245 // Whether the surface has been sent to client for display.
246 bool at_client_;
247
248 // PicOrderCount
249 int poc_;
250
251 // Pixmaps bound to this texture.
252 Pixmap x_pixmap_;
253 GLXPixmap glx_pixmap_;
254
255 DISALLOW_COPY_AND_ASSIGN(DecodeSurface);
256 }; 46 };
257 47
258 VaapiH264Decoder::DecodeSurface::DecodeSurface( 48 VaapiH264Decoder::DecodeSurface::DecodeSurface(
259 const GLXFBConfig& fb_config, 49 int poc,
260 Display* x_display, 50 int32 input_id,
261 VADisplay va_display, 51 const scoped_refptr<VASurface>& va_surface)
262 const base::Callback<bool(void)>& make_context_current, 52 : poc_(poc),
263 VASurfaceID va_surface_id, 53 input_id_(input_id),
264 int32 picture_buffer_id, 54 va_surface_(va_surface) {
265 uint32 texture_id, 55 DCHECK(va_surface_.get());
266 int width, int height)
267 : x_display_(x_display),
268 va_display_(va_display),
269 make_context_current_(make_context_current),
270 va_surface_id_(va_surface_id),
271 input_id_(0),
272 picture_buffer_id_(picture_buffer_id),
273 texture_id_(texture_id),
274 width_(width),
275 height_(height),
276 available_(false),
277 used_(false),
278 at_client_(false),
279 poc_(0),
280 x_pixmap_(0),
281 glx_pixmap_(0) {
282 // Bind the surface to a texture of the given width and height,
283 // allocating pixmaps as needed.
284 if (!make_context_current_.Run())
285 return;
286
287 gfx::ScopedTextureBinder texture_binder(GL_TEXTURE_2D, texture_id_);
288 glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
289 glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
290
291 XWindowAttributes win_attr;
292 int screen = DefaultScreen(x_display_);
293 XGetWindowAttributes(x_display_, RootWindow(x_display_, screen), &win_attr);
 294 // TODO(posciak): pass the depth required by libva, not the RootWindow's depth
295 x_pixmap_ = XCreatePixmap(x_display_, RootWindow(x_display_, screen),
296 width_, height_, win_attr.depth);
297 if (!x_pixmap_) {
298 DVLOG(1) << "Failed creating an X Pixmap for TFP";
299 return;
300 }
301
302 static const int pixmap_attr[] = {
303 GLX_TEXTURE_TARGET_EXT, GLX_TEXTURE_2D_EXT,
304 GLX_TEXTURE_FORMAT_EXT, GLX_TEXTURE_FORMAT_RGB_EXT,
305 GL_NONE,
306 };
307
308 glx_pixmap_ = glXCreatePixmap(x_display_, fb_config, x_pixmap_, pixmap_attr);
309 if (!glx_pixmap_) {
310 // x_pixmap_ will be freed in the destructor.
311 DVLOG(1) << "Failed creating a GLX Pixmap for TFP";
312 return;
313 }
314
315 glXBindTexImageEXT(x_display_, glx_pixmap_, GLX_FRONT_LEFT_EXT, NULL);
316
317 available_ = true;
318 } 56 }
319 57
320 VaapiH264Decoder::DecodeSurface::~DecodeSurface() { 58 VaapiH264Decoder::DecodeSurface::~DecodeSurface() {
321 // Unbind surface from texture and deallocate resources.
322 if (glx_pixmap_ && make_context_current_.Run()) {
323 glXReleaseTexImageEXT(x_display_, glx_pixmap_, GLX_FRONT_LEFT_EXT);
324 glXDestroyPixmap(x_display_, glx_pixmap_);
325 }
326
327 if (x_pixmap_)
328 XFreePixmap(x_display_, x_pixmap_);
329 XSync(x_display_, False); // Needed to work around buggy vdpau-driver.
330 } 59 }
331 60
332 void VaapiH264Decoder::DecodeSurface::Acquire(int32 input_id, int poc) { 61 VaapiH264Decoder::VaapiH264Decoder(
333 DCHECK_EQ(available_, true); 62 const scoped_refptr<VaapiDelegate>& vaapi_delegate,
334 available_ = false; 63 const OutputPicCB& output_pic_cb,
335 at_client_ = false; 64 const ReportErrorToUmaCB& report_error_to_uma_cb)
336 used_ = true; 65 : max_pic_order_cnt_lsb_(0),
337 input_id_ = input_id; 66 max_frame_num_(0),
338 poc_ = poc; 67 max_pic_num_(0),
339 } 68 max_long_term_frame_idx_(0),
340 69 curr_sps_id_(-1),
341 void VaapiH264Decoder::DecodeSurface::Release() { 70 curr_pps_id_(-1),
342 DCHECK_EQ(available_, false); 71 vaapi_delegate_(vaapi_delegate),
343 available_ = true; 72 output_pic_cb_(output_pic_cb),
344 used_ = false; 73 report_error_to_uma_cb_(report_error_to_uma_cb) {
345 at_client_ = false;
346 }
347
348 bool VaapiH264Decoder::DecodeSurface::Sync() {
349 if (!make_context_current_.Run())
350 return false;
351
 352 // Wait for the data to be put into the buffer so it's ready for output.
353 VAStatus va_res = VAAPI_SyncSurface(va_display_, va_surface_id_);
354 VA_SUCCESS_OR_RETURN(va_res, "Failed syncing decoded picture", false);
355
356 // Put the decoded data into XPixmap bound to the texture.
357 va_res = VAAPI_PutSurface(va_display_,
358 va_surface_id_, x_pixmap_,
359 0, 0, width_, height_,
360 0, 0, width_, height_,
361 NULL, 0, 0);
362 VA_SUCCESS_OR_RETURN(va_res, "Failed putting decoded picture to texture",
363 false);
364
365 return true;
366 }
367
368 VaapiH264Decoder::VaapiH264Decoder() {
369 Reset(); 74 Reset();
370 curr_input_id_ = -1;
371 x_display_ = NULL;
372 fb_config_ = NULL;
373 va_display_ = NULL;
374 curr_sps_id_ = -1;
375 curr_pps_id_ = -1;
376 pic_width_ = -1;
377 pic_height_ = -1;
378 max_frame_num_ = 0;
379 max_pic_num_ = 0;
380 max_long_term_frame_idx_ = 0;
381 max_pic_order_cnt_lsb_ = 0;
382 state_ = kUninitialized;
383 num_available_decode_surfaces_ = 0;
384 va_context_created_ = false;
385 last_output_poc_ = 0;
386 } 75 }
387 76
388 VaapiH264Decoder::~VaapiH264Decoder() { 77 VaapiH264Decoder::~VaapiH264Decoder() {
389 Destroy();
390 } 78 }
391 79
 392 // This puts the decoder in a state where it keeps stream data and is ready
393 // to resume playback from a random location in the stream, but drops all
394 // inputs and outputs and makes all surfaces available for use.
395 void VaapiH264Decoder::Reset() { 80 void VaapiH264Decoder::Reset() {
396 frame_ready_at_hw_ = false;
397
398 curr_pic_.reset(); 81 curr_pic_.reset();
399 82
83 curr_input_id_ = -1;
400 frame_num_ = 0; 84 frame_num_ = 0;
401 prev_frame_num_ = -1; 85 prev_frame_num_ = -1;
402 prev_frame_num_offset_ = -1; 86 prev_frame_num_offset_ = -1;
403 87
404 prev_ref_has_memmgmnt5_ = false; 88 prev_ref_has_memmgmnt5_ = false;
405 prev_ref_top_field_order_cnt_ = -1; 89 prev_ref_top_field_order_cnt_ = -1;
406 prev_ref_pic_order_cnt_msb_ = -1; 90 prev_ref_pic_order_cnt_msb_ = -1;
407 prev_ref_pic_order_cnt_lsb_ = -1; 91 prev_ref_pic_order_cnt_lsb_ = -1;
408 prev_ref_field_ = H264Picture::FIELD_NONE; 92 prev_ref_field_ = H264Picture::FIELD_NONE;
409 93
410 // When called from the constructor, although va_display_ is invalid, 94 vaapi_delegate_->DestroyPendingBuffers();
411 // |pending_slice_bufs_| and |pending_va_bufs_| are empty.
412 DestroyPendingBuffers();
413
414 pending_slice_bufs_ = std::queue<VABufferID>();
415 pending_va_bufs_ = std::queue<VABufferID>();
416 95
417 ref_pic_list0_.clear(); 96 ref_pic_list0_.clear();
418 ref_pic_list1_.clear(); 97 ref_pic_list1_.clear();
419 98
420 for (POCToDecodeSurfaces::iterator it = poc_to_decode_surfaces_.begin(); 99 for (DecSurfacesInUse::iterator it = decode_surfaces_in_use_.begin();
421 it != poc_to_decode_surfaces_.end(); ) { 100 it != decode_surfaces_in_use_.end(); ) {
422 int poc = it->second->poc(); 101 int poc = it->second->poc();
423 // Must be incremented before UnassignSurfaceFromPoC as this call 102 // Must be incremented before UnassignSurfaceFromPoC as this call
424 // invalidates |it|. 103 // invalidates |it|.
425 ++it; 104 ++it;
426 UnassignSurfaceFromPoC(poc); 105 UnassignSurfaceFromPoC(poc);
427 } 106 }
428 DCHECK(poc_to_decode_surfaces_.empty()); 107 DCHECK(decode_surfaces_in_use_.empty());
429 108
430 dpb_.Clear(); 109 dpb_.Clear();
431 parser_.Reset(); 110 parser_.Reset();
432 last_output_poc_ = 0; 111 last_output_poc_ = 0;
433 112
434 // Still initialized and ready to decode, unless called from constructor, 113 state_ = kIdle;
435 // which will change it back.
436 state_ = kAfterReset;
437 } 114 }
438 115
439 void VaapiH264Decoder::Destroy() { 116 void VaapiH264Decoder::ReuseSurface(
440 if (state_ == kUninitialized) 117 const scoped_refptr<VASurface>& va_surface) {
441 return; 118 available_va_surfaces_.push_back(va_surface);
442
443 VAStatus va_res;
444 bool destroy_surfaces = false;
445 switch (state_) {
446 case kDecoding:
447 case kAfterReset:
448 case kError:
449 destroy_surfaces = true;
450 // fallthrough
451 case kInitialized:
452 if (!make_context_current_.Run())
453 break;
454 if (destroy_surfaces)
455 DestroyVASurfaces();
456 DestroyPendingBuffers();
457 va_res = VAAPI_DestroyConfig(va_display_, va_config_id_);
458 VA_LOG_ON_ERROR(va_res, "vaDestroyConfig failed");
459 va_res = VAAPI_Terminate(va_display_);
460 VA_LOG_ON_ERROR(va_res, "vaTerminate failed");
461 // fallthrough
462 case kUninitialized:
463 break;
464 }
465
466 state_ = kUninitialized;
467 }
468
469 // Maps Profile enum values to VaProfile values.
470 bool VaapiH264Decoder::SetProfile(media::VideoCodecProfile profile) {
471 switch (profile) {
472 case media::H264PROFILE_BASELINE:
473 profile_ = VAProfileH264Baseline;
474 break;
475 case media::H264PROFILE_MAIN:
476 profile_ = VAProfileH264Main;
477 break;
478 case media::H264PROFILE_HIGH:
479 profile_ = VAProfileH264High;
480 break;
481 default:
482 return false;
483 }
484 return true;
485 }
486
487 class ScopedPtrXFree {
488 public:
489 void operator()(void* x) const {
490 ::XFree(x);
491 }
492 };
493
494 bool VaapiH264Decoder::InitializeFBConfig() {
495 const int fbconfig_attr[] = {
496 GLX_DRAWABLE_TYPE, GLX_PIXMAP_BIT,
497 GLX_BIND_TO_TEXTURE_TARGETS_EXT, GLX_TEXTURE_2D_BIT_EXT,
498 GLX_BIND_TO_TEXTURE_RGB_EXT, GL_TRUE,
499 GLX_Y_INVERTED_EXT, GL_TRUE,
500 GL_NONE,
501 };
502
503 int num_fbconfigs;
504 scoped_ptr_malloc<GLXFBConfig, ScopedPtrXFree> glx_fb_configs(
505 glXChooseFBConfig(x_display_, DefaultScreen(x_display_), fbconfig_attr,
506 &num_fbconfigs));
507 if (!glx_fb_configs)
508 return false;
509 if (!num_fbconfigs)
510 return false;
511
512 fb_config_ = glx_fb_configs.get()[0];
513 return true;
514 }
515
516 bool VaapiH264Decoder::Initialize(
517 media::VideoCodecProfile profile,
518 Display* x_display,
519 GLXContext glx_context,
520 const base::Callback<bool(void)>& make_context_current,
521 const OutputPicCB& output_pic_cb,
522 const SubmitDecodeCB& submit_decode_cb) {
523 DCHECK_EQ(state_, kUninitialized);
524
525 output_pic_cb_ = output_pic_cb;
526 submit_decode_cb_ = submit_decode_cb;
527
528 x_display_ = x_display;
529 make_context_current_ = make_context_current;
530
531 if (!make_context_current_.Run())
532 return false;
533
534 if (!SetProfile(profile)) {
535 DVLOG(1) << "Unsupported profile";
536 return false;
537 }
538
539 if (!InitializeFBConfig()) {
540 DVLOG(1) << "Could not get a usable FBConfig";
541 return false;
542 }
543
544 va_display_ = VAAPI_GetDisplay(x_display_);
545 if (!VAAPI_DisplayIsValid(va_display_)) {
546 DVLOG(1) << "Could not get a valid VA display";
547 return false;
548 }
549
550 int major_version, minor_version;
551 VAStatus va_res;
552 va_res = VAAPI_Initialize(va_display_, &major_version, &minor_version);
553 VA_SUCCESS_OR_RETURN(va_res, "vaInitialize failed", false);
554 DVLOG(1) << "VAAPI version: " << major_version << "." << minor_version;
555
556 VAConfigAttrib attrib;
557 attrib.type = VAConfigAttribRTFormat;
558
559 VAEntrypoint entrypoint = VAEntrypointVLD;
560 va_res = VAAPI_GetConfigAttributes(va_display_, profile_, entrypoint,
561 &attrib, 1);
562 VA_SUCCESS_OR_RETURN(va_res, "vaGetConfigAttributes failed", false);
563
564 if (!(attrib.value & VA_RT_FORMAT_YUV420)) {
565 DVLOG(1) << "YUV420 not supported";
566 return false;
567 }
568
569 va_res = VAAPI_CreateConfig(va_display_, profile_, entrypoint,
570 &attrib, 1, &va_config_id_);
571 VA_SUCCESS_OR_RETURN(va_res, "vaCreateConfig failed", false);
572
573 state_ = kInitialized;
574 return true;
575 }
576
577 void VaapiH264Decoder::ReusePictureBuffer(int32 picture_buffer_id) {
578 DecodeSurfaces::iterator it = decode_surfaces_.find(picture_buffer_id);
579 if (it == decode_surfaces_.end()) {
580 DVLOG(1) << "Asked to reuse an invalid surface "
581 << picture_buffer_id;
582 return;
583 }
584 if (it->second->available()) {
585 DVLOG(1) << "Asked to reuse an already available surface "
586 << picture_buffer_id;
587 return;
588 }
589
590 it->second->set_at_client(false);
591 if (!it->second->used()) {
592 it->second->Release();
593 ++num_available_decode_surfaces_;
594 }
595 }
596
597 bool VaapiH264Decoder::AssignPictureBuffer(int32 picture_buffer_id,
598 uint32 texture_id) {
599 DCHECK_EQ(state_, kDecoding);
600
601 if (decode_surfaces_.size() >= GetRequiredNumOfPictures()) {
602 DVLOG(1) << "Got more surfaces than required";
603 return false;
604 }
605
606 // This will not work if we start using VDA.DismissPicture()
607 linked_ptr<DecodeSurface> dec_surface(new DecodeSurface(
608 fb_config_, x_display_, va_display_, make_context_current_,
609 va_surface_ids_[decode_surfaces_.size()], picture_buffer_id, texture_id,
610 pic_width_, pic_height_));
611 if (!dec_surface->available()) {
612 DVLOG(1) << "Error creating a decoding surface (binding to texture?)";
613 return false;
614 }
615
616 DVLOG(2) << "New picture assigned, texture id: " << dec_surface->texture_id()
617 << " pic buf id: " << dec_surface->picture_buffer_id()
618 << " will use va surface " << dec_surface->va_surface_id();
619
620 bool inserted = decode_surfaces_.insert(std::make_pair(picture_buffer_id,
621 dec_surface)).second;
622 DCHECK(inserted);
623 ++num_available_decode_surfaces_;
624
625 return true;
626 }
627
628 bool VaapiH264Decoder::CreateVASurfaces() {
629 DCHECK_NE(pic_width_, -1);
630 DCHECK_NE(pic_height_, -1);
631 if (state_ == kAfterReset)
632 return true;
633 DCHECK_EQ(state_, kInitialized);
634
635 // Allocate VASurfaces in driver.
636 VAStatus va_res = VAAPI_CreateSurfaces(va_display_, pic_width_,
637 pic_height_, VA_RT_FORMAT_YUV420,
638 GetRequiredNumOfPictures(),
639 va_surface_ids_);
640 VA_SUCCESS_OR_RETURN(va_res, "vaCreateSurfaces failed", false);
641
642 DCHECK(decode_surfaces_.empty());
643 // And create a context associated with them.
644 va_res = VAAPI_CreateContext(va_display_, va_config_id_,
645 pic_width_, pic_height_, VA_PROGRESSIVE,
646 va_surface_ids_, GetRequiredNumOfPictures(),
647 &va_context_id_);
648
649 if (va_res != VA_STATUS_SUCCESS) {
650 DVLOG(1) << "Error creating a decoding surface (binding to texture?)";
651 VAAPI_DestroySurfaces(va_display_, va_surface_ids_,
652 GetRequiredNumOfPictures());
653 return false;
654 }
655
656 va_context_created_ = true;
657
658 return true;
659 }
660
661 void VaapiH264Decoder::DestroyVASurfaces() {
662 DCHECK(state_ == kDecoding || state_ == kError || state_ == kAfterReset);
663 decode_surfaces_.clear();
664
665 // This can happen if we fail during DecodeInitial.
666 if (!va_context_created_)
667 return;
668
669 VAStatus va_res = VAAPI_DestroyContext(va_display_, va_context_id_);
670 VA_LOG_ON_ERROR(va_res, "vaDestroyContext failed");
671
672 va_res = VAAPI_DestroySurfaces(va_display_, va_surface_ids_,
673 GetRequiredNumOfPictures());
674 VA_LOG_ON_ERROR(va_res, "vaDestroySurfaces failed");
675
676 va_context_created_ = false;
677 }
678
679 void VaapiH264Decoder::DestroyPendingBuffers() {
680 while (!pending_slice_bufs_.empty()) {
681 VABufferID buffer = pending_slice_bufs_.front();
682 VAStatus va_res = VAAPI_DestroyBuffer(va_display_, buffer);
683 VA_LOG_ON_ERROR(va_res, "vaDestroyBuffer failed");
684 pending_slice_bufs_.pop();
685 }
686 while (!pending_va_bufs_.empty()) {
687 VABufferID buffer = pending_va_bufs_.front();
688 VAStatus va_res = VAAPI_DestroyBuffer(va_display_, buffer);
689 VA_LOG_ON_ERROR(va_res, "vaDestroyBuffer failed");
690 pending_va_bufs_.pop();
691 }
692 } 119 }
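On the new side, callback-driven surface recycling replaces the old available_/used_/at_client_ bookkeeping. A sketch of the intended round trip, pieced together from this hunk (the call sites shown are illustrative, not part of the patch):

  // 1. AssignSurfaceToPoC(input_id, poc) takes a free VASurface from
  //    available_va_surfaces_ and keys it by POC in decode_surfaces_in_use_.
  // 2. When the picture is ready, output_pic_cb_.Run(input_id, va_surface)
  //    hands the surface reference to the client.
  // 3. Once the client is done with it, the surface comes back via
  //    ReuseSurface(va_surface) and is available for decoding again.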
693 120
694 // Fill |va_pic| with default/neutral values. 121 // Fill |va_pic| with default/neutral values.
695 static void InitVAPicture(VAPictureH264* va_pic) { 122 static void InitVAPicture(VAPictureH264* va_pic) {
696 memset(va_pic, 0, sizeof(*va_pic)); 123 memset(va_pic, 0, sizeof(*va_pic));
697 va_pic->picture_id = VA_INVALID_ID; 124 va_pic->picture_id = VA_INVALID_ID;
698 va_pic->flags = VA_PICTURE_H264_INVALID; 125 va_pic->flags = VA_PICTURE_H264_INVALID;
699 } 126 }
700 127
701 void VaapiH264Decoder::FillVAPicture(VAPictureH264 *va_pic, H264Picture* pic) { 128 void VaapiH264Decoder::FillVAPicture(VAPictureH264 *va_pic, H264Picture* pic) {
702 DCHECK(pic); 129 DCHECK(pic);
703 POCToDecodeSurfaces::iterator iter = poc_to_decode_surfaces_.find( 130
704 pic->pic_order_cnt); 131 DecodeSurface* dec_surface = DecodeSurfaceByPoC(pic->pic_order_cnt);
705 if (iter == poc_to_decode_surfaces_.end()) { 132 if (!dec_surface) {
706 DVLOG(1) << "Could not find surface with POC: " << pic->pic_order_cnt;
707 // Cannot provide a ref picture, will corrupt output, but may be able 133 // Cannot provide a ref picture, will corrupt output, but may be able
708 // to recover. 134 // to recover.
709 InitVAPicture(va_pic); 135 InitVAPicture(va_pic);
710 return; 136 return;
711 } 137 }
712 138
713 va_pic->picture_id = iter->second->va_surface_id(); 139 va_pic->picture_id = dec_surface->va_surface_id();
714 va_pic->frame_idx = pic->frame_num; 140 va_pic->frame_idx = pic->frame_num;
715 va_pic->flags = 0; 141 va_pic->flags = 0;
716 142
717 switch (pic->field) { 143 switch (pic->field) {
718 case H264Picture::FIELD_NONE: 144 case H264Picture::FIELD_NONE:
719 break; 145 break;
720 case H264Picture::FIELD_TOP: 146 case H264Picture::FIELD_TOP:
721 va_pic->flags |= VA_PICTURE_H264_TOP_FIELD; 147 va_pic->flags |= VA_PICTURE_H264_TOP_FIELD;
722 break; 148 break;
723 case H264Picture::FIELD_BOTTOM: 149 case H264Picture::FIELD_BOTTOM:
(...skipping 19 matching lines...)
743 // Libva does not document this, but other implementations (e.g. mplayer) 169 // Libva does not document this, but other implementations (e.g. mplayer)
744 // do it this way as well. 170 // do it this way as well.
745 for (rit = dpb_.rbegin(), i = 0; rit != dpb_.rend() && i < num_pics; ++rit) { 171 for (rit = dpb_.rbegin(), i = 0; rit != dpb_.rend() && i < num_pics; ++rit) {
746 if ((*rit)->ref) 172 if ((*rit)->ref)
747 FillVAPicture(&va_pics[i++], *rit); 173 FillVAPicture(&va_pics[i++], *rit);
748 } 174 }
749 175
750 return i; 176 return i;
751 } 177 }
752 178
753 // Can only be called when all surfaces are already bound 179 VaapiH264Decoder::DecodeSurface* VaapiH264Decoder::DecodeSurfaceByPoC(int poc) {
754 // to textures (cannot be run at the same time as AssignPictureBuffer). 180 DecSurfacesInUse::iterator iter = decode_surfaces_in_use_.find(poc);
755 bool VaapiH264Decoder::AssignSurfaceToPoC(int poc) { 181 if (iter == decode_surfaces_in_use_.end()) {
756 DCHECK_GT(num_available_decode_surfaces_, 0) << decode_surfaces_.size(); 182 DVLOG(1) << "Could not find surface assigned to POC: " << poc;
757 183 return NULL;
758 // Find a surface not currently holding data used for reference and/or
759 // to be displayed and mark it as used.
760 DecodeSurfaces::iterator iter = decode_surfaces_.begin();
761 for (; iter != decode_surfaces_.end(); ++iter) {
762 if (!iter->second->available())
763 continue;
764
765 --num_available_decode_surfaces_;
766 DCHECK_GE(num_available_decode_surfaces_, 0);
767
768 // Associate with input id and poc and mark as unavailable.
769 iter->second->Acquire(curr_input_id_, poc);
770 DVLOG(4) << "Will use surface " << iter->second->va_surface_id()
771 << " for POC " << iter->second->poc()
772 << " input ID: " << iter->second->input_id();
773 bool inserted = poc_to_decode_surfaces_.insert(std::make_pair(
774 poc, iter->second.get())).second;
775 DCHECK(inserted);
776 return true;
777 } 184 }
778 185
779 // Could not find an available surface. 186 return iter->second.get();
780 return false;
781 } 187 }
782 188
783 // Can only be called when all surfaces are already bound 189 bool VaapiH264Decoder::AssignSurfaceToPoC(int32 input_id, int poc) {
784 // to textures (cannot be run at the same time as AssignPictureBuffer). 190 if (available_va_surfaces_.empty()) {
191 DVLOG(1) << "No VA Surfaces available";
192 return false;
193 }
194
195 linked_ptr<DecodeSurface> dec_surface(new DecodeSurface(
196 poc, input_id, available_va_surfaces_.back()));
197 available_va_surfaces_.pop_back();
198
199 DVLOG(4) << "POC " << poc
200 << " will use surface " << dec_surface->va_surface_id();
201
202 bool inserted = decode_surfaces_in_use_.insert(
203 std::make_pair(poc, dec_surface)).second;
204 DCHECK(inserted);
205
206 return true;
207 }
208
785 void VaapiH264Decoder::UnassignSurfaceFromPoC(int poc) { 209 void VaapiH264Decoder::UnassignSurfaceFromPoC(int poc) {
786 DecodeSurface* dec_surface; 210 DecSurfacesInUse::iterator it = decode_surfaces_in_use_.find(poc);
787 POCToDecodeSurfaces::iterator it = poc_to_decode_surfaces_.find(poc); 211 if (it == decode_surfaces_in_use_.end()) {
788 if (it == poc_to_decode_surfaces_.end()) {
789 DVLOG(1) << "Asked to unassign an unassigned POC " << poc; 212 DVLOG(1) << "Asked to unassign an unassigned POC " << poc;
790 return; 213 return;
791 } 214 }
792 dec_surface = it->second;
793 DVLOG(4) << "POC " << poc << " no longer using surface "
794 << dec_surface->va_surface_id();
795 poc_to_decode_surfaces_.erase(it);
796 215
797 dec_surface->set_used(false); 216 DVLOG(4) << "POC " << poc << " no longer using VA surface "
798 if (!dec_surface->at_client()) { 217 << it->second->va_surface_id();
799 dec_surface->Release(); 218
800 ++num_available_decode_surfaces_; 219 decode_surfaces_in_use_.erase(it);
801 }
802 } 220 }
803 221
804 // Fill a VAPictureParameterBufferH264 to be later sent to the HW decoder.
805 bool VaapiH264Decoder::SendPPS() { 222 bool VaapiH264Decoder::SendPPS() {
806 const H264PPS* pps = parser_.GetPPS(curr_pps_id_); 223 const H264PPS* pps = parser_.GetPPS(curr_pps_id_);
807 DCHECK(pps); 224 DCHECK(pps);
808 225
809 const H264SPS* sps = parser_.GetSPS(pps->seq_parameter_set_id); 226 const H264SPS* sps = parser_.GetSPS(pps->seq_parameter_set_id);
810 DCHECK(sps); 227 DCHECK(sps);
811 228
812 DCHECK(curr_pic_.get()); 229 DCHECK(curr_pic_.get());
813 230
814 VAPictureParameterBufferH264 pic_param; 231 VAPictureParameterBufferH264 pic_param;
(...skipping 62 matching lines...)
877 // Init reference pictures' array. 294 // Init reference pictures' array.
878 for (int i = 0; i < 16; ++i) 295 for (int i = 0; i < 16; ++i)
879 InitVAPicture(&pic_param.ReferenceFrames[i]); 296 InitVAPicture(&pic_param.ReferenceFrames[i]);
880 297
881 // And fill it with picture info from DPB. 298 // And fill it with picture info from DPB.
882 FillVARefFramesFromDPB(pic_param.ReferenceFrames, 299 FillVARefFramesFromDPB(pic_param.ReferenceFrames,
883 arraysize(pic_param.ReferenceFrames)); 300 arraysize(pic_param.ReferenceFrames));
884 301
885 pic_param.num_ref_frames = sps->max_num_ref_frames; 302 pic_param.num_ref_frames = sps->max_num_ref_frames;
886 303
887 // Allocate a buffer in driver for this parameter buffer and upload data. 304 return vaapi_delegate_->SubmitBuffer(VAPictureParameterBufferType,
888 VABufferID pic_param_buf_id;
889 VAStatus va_res = VAAPI_CreateBuffer(va_display_, va_context_id_,
890 VAPictureParameterBufferType,
891 sizeof(VAPictureParameterBufferH264), 305 sizeof(VAPictureParameterBufferH264),
892 1, &pic_param, &pic_param_buf_id); 306 &pic_param);
893 VA_SUCCESS_OR_RETURN(va_res, "Failed to create a buffer for PPS", false);
894
895 // Queue its VA buffer ID to be committed on HW decode run.
896 pending_va_bufs_.push(pic_param_buf_id);
897
898 return true;
899 } 307 }
900 308
901 // Fill a VAIQMatrixBufferH264 to be later sent to the HW decoder.
902 bool VaapiH264Decoder::SendIQMatrix() { 309 bool VaapiH264Decoder::SendIQMatrix() {
903 const H264PPS* pps = parser_.GetPPS(curr_pps_id_); 310 const H264PPS* pps = parser_.GetPPS(curr_pps_id_);
904 DCHECK(pps); 311 DCHECK(pps);
905 312
906 VAIQMatrixBufferH264 iq_matrix_buf; 313 VAIQMatrixBufferH264 iq_matrix_buf;
907 memset(&iq_matrix_buf, 0, sizeof(VAIQMatrixBufferH264)); 314 memset(&iq_matrix_buf, 0, sizeof(VAIQMatrixBufferH264));
908 315
909 if (pps->pic_scaling_matrix_present_flag) { 316 if (pps->pic_scaling_matrix_present_flag) {
910 for (int i = 0; i < 6; ++i) { 317 for (int i = 0; i < 6; ++i) {
911 for (int j = 0; j < 16; ++j) 318 for (int j = 0; j < 16; ++j)
(...skipping 11 matching lines...)
923 for (int j = 0; j < 16; ++j) 330 for (int j = 0; j < 16; ++j)
924 iq_matrix_buf.ScalingList4x4[i][j] = sps->scaling_list4x4[i][j]; 331 iq_matrix_buf.ScalingList4x4[i][j] = sps->scaling_list4x4[i][j];
925 } 332 }
926 333
927 for (int i = 0; i < 2; ++i) { 334 for (int i = 0; i < 2; ++i) {
928 for (int j = 0; j < 64; ++j) 335 for (int j = 0; j < 64; ++j)
929 iq_matrix_buf.ScalingList8x8[i][j] = sps->scaling_list8x8[i][j]; 336 iq_matrix_buf.ScalingList8x8[i][j] = sps->scaling_list8x8[i][j];
930 } 337 }
931 } 338 }
932 339
933 // Allocate a buffer in driver for this parameter buffer and upload data. 340 return vaapi_delegate_->SubmitBuffer(VAIQMatrixBufferType,
934 VABufferID iq_matrix_buf_id; 341 sizeof(VAIQMatrixBufferH264),
935 VAStatus va_res = VAAPI_CreateBuffer(va_display_, va_context_id_, 342 &iq_matrix_buf);
936 VAIQMatrixBufferType,
937 sizeof(VAIQMatrixBufferH264), 1,
938 &iq_matrix_buf, &iq_matrix_buf_id);
939 VA_SUCCESS_OR_RETURN(va_res, "Failed to create a buffer for IQMatrix",
940 false);
941
942 // Queue its VA buffer ID to be committed on HW decode run.
943 pending_va_bufs_.push(iq_matrix_buf_id);
944
945 return true;
946 } 343 }
947 344
948 bool VaapiH264Decoder::SendVASliceParam(H264SliceHeader* slice_hdr) { 345 bool VaapiH264Decoder::SendVASliceParam(H264SliceHeader* slice_hdr) {
949 const H264PPS* pps = parser_.GetPPS(slice_hdr->pic_parameter_set_id); 346 const H264PPS* pps = parser_.GetPPS(slice_hdr->pic_parameter_set_id);
950 DCHECK(pps); 347 DCHECK(pps);
951 348
952 const H264SPS* sps = parser_.GetSPS(pps->seq_parameter_set_id); 349 const H264SPS* sps = parser_.GetSPS(pps->seq_parameter_set_id);
953 DCHECK(sps); 350 DCHECK(sps);
954 351
955 VASliceParameterBufferH264 slice_param; 352 VASliceParameterBufferH264 slice_param;
(...skipping 69 matching lines...)
1025 422
1026 int i; 423 int i;
1027 H264Picture::PtrVector::iterator it; 424 H264Picture::PtrVector::iterator it;
1028 for (it = ref_pic_list0_.begin(), i = 0; it != ref_pic_list0_.end() && *it; 425 for (it = ref_pic_list0_.begin(), i = 0; it != ref_pic_list0_.end() && *it;
1029 ++it, ++i) 426 ++it, ++i)
1030 FillVAPicture(&slice_param.RefPicList0[i], *it); 427 FillVAPicture(&slice_param.RefPicList0[i], *it);
1031 for (it = ref_pic_list1_.begin(), i = 0; it != ref_pic_list1_.end() && *it; 428 for (it = ref_pic_list1_.begin(), i = 0; it != ref_pic_list1_.end() && *it;
1032 ++it, ++i) 429 ++it, ++i)
1033 FillVAPicture(&slice_param.RefPicList1[i], *it); 430 FillVAPicture(&slice_param.RefPicList1[i], *it);
1034 431
1035 // Allocate a buffer in driver for this parameter buffer and upload data. 432 return vaapi_delegate_->SubmitBuffer(VASliceParameterBufferType,
1036 VABufferID slice_param_buf_id;
1037 VAStatus va_res = VAAPI_CreateBuffer(va_display_, va_context_id_,
1038 VASliceParameterBufferType,
1039 sizeof(VASliceParameterBufferH264), 433 sizeof(VASliceParameterBufferH264),
1040 1, &slice_param, &slice_param_buf_id); 434 &slice_param);
1041 VA_SUCCESS_OR_RETURN(va_res, "Failed creating a buffer for slice param",
1042 false);
1043
1044 // Queue its VA buffer ID to be committed on HW decode run.
1045 pending_slice_bufs_.push(slice_param_buf_id);
1046
1047 return true;
1048 } 435 }
1049 436
1050 bool VaapiH264Decoder::SendSliceData(const uint8* ptr, size_t size) { 437 bool VaapiH264Decoder::SendSliceData(const uint8* ptr, size_t size) {
1051 // Can't help it, blame libva... 438 // Can't help it, blame libva...
1052 void* non_const_ptr = const_cast<uint8*>(ptr); 439 void* non_const_ptr = const_cast<uint8*>(ptr);
1053 440 return vaapi_delegate_->SubmitBuffer(VASliceDataBufferType, size,
1054 VABufferID slice_data_buf_id; 441 non_const_ptr);
1055 VAStatus va_res = VAAPI_CreateBuffer(va_display_, va_context_id_,
1056 VASliceDataBufferType, size, 1,
1057 non_const_ptr, &slice_data_buf_id);
1058 VA_SUCCESS_OR_RETURN(va_res, "Failed creating a buffer for slice data",
1059 false);
1060
1061 pending_slice_bufs_.push(slice_data_buf_id);
1062 return true;
1063 } 442 }
1064 443
1065 bool VaapiH264Decoder::QueueSlice(H264SliceHeader* slice_hdr) { 444 bool VaapiH264Decoder::QueueSlice(H264SliceHeader* slice_hdr) {
1066 DCHECK(curr_pic_.get()); 445 DCHECK(curr_pic_.get());
1067 446
1068 if (!SendVASliceParam(slice_hdr)) 447 if (!SendVASliceParam(slice_hdr))
1069 return false; 448 return false;
1070 449
1071 if (!SendSliceData(slice_hdr->nalu_data, slice_hdr->nalu_size)) 450 if (!SendSliceData(slice_hdr->nalu_data, slice_hdr->nalu_size))
1072 return false; 451 return false;
1073 452
1074 return true; 453 return true;
1075 } 454 }
1076 455
456 // TODO(posciak) start using vaMapBuffer instead of vaCreateBuffer wherever
457 // possible.
1077 bool VaapiH264Decoder::DecodePicture() { 458 bool VaapiH264Decoder::DecodePicture() {
1078 DCHECK(!frame_ready_at_hw_);
1079 DCHECK(curr_pic_.get()); 459 DCHECK(curr_pic_.get());
1080 460
1081 // Find the surface associated with the picture to be decoded. 461 DVLOG(4) << "Decoding POC " << curr_pic_->pic_order_cnt;
1082 DecodeSurface* dec_surface = 462 DecodeSurface* dec_surface = DecodeSurfaceByPoC(curr_pic_->pic_order_cnt);
1083 poc_to_decode_surfaces_[curr_pic_->pic_order_cnt]; 463 if (!dec_surface) {
1084 DVLOG(4) << "Decoding POC " << curr_pic_->pic_order_cnt 464 DVLOG(1) << "Asked to decode an invalid POC " << curr_pic_->pic_order_cnt;
1085 << " into surface " << dec_surface->va_surface_id(); 465 return false;
466 }
1086 467
1087 DVLOG(4) << "Pending VA bufs to commit: " << pending_va_bufs_.size(); 468 if (!vaapi_delegate_->DecodeAndDestroyPendingBuffers(
1088 DVLOG(4) << "Pending slice bufs to commit: " << pending_slice_bufs_.size(); 469 dec_surface->va_surface_id())) {
1089 470 DVLOG(1) << "Failed decoding picture";
1090 DCHECK(pending_slice_bufs_.size()); 471 return false;
1091 scoped_ptr<std::queue<VABufferID> > va_bufs(new std::queue<VABufferID>()); 472 }
1092 std::swap(*va_bufs, pending_va_bufs_);
1093 scoped_ptr<std::queue<VABufferID> > slice_bufs(new std::queue<VABufferID>());
1094 std::swap(*slice_bufs, pending_slice_bufs_);
1095
1096 // Fire up a parallel job on the GPU on the ChildThread to decode and put
1097 // the decoded/converted/scaled picture into the pixmap.
1098 // Callee will take care of freeing the buffer queues.
1099 submit_decode_cb_.Run(
1100 dec_surface->picture_buffer_id(), va_bufs.Pass(), slice_bufs.Pass());
1101
1102 // Used to notify clients that we had sufficient data to start decoding
1103 // a new frame.
1104 frame_ready_at_hw_ = true;
1105 473
1106 return true; 474 return true;
1107 } 475 }
1108 476
1109 void VaapiH264Decoder::DestroyBuffers(size_t num_va_buffers,
1110 const VABufferID* va_buffers) {
1111 for (size_t i = 0; i < num_va_buffers; ++i) {
1112 VAStatus va_res = VAAPI_DestroyBuffer(va_display_, va_buffers[i]);
1113 VA_LOG_ON_ERROR(va_res, "vaDestroyBuffer failed");
1114 }
1115 }
1116 477
1117 // TODO(posciak) start using vaMapBuffer instead of vaCreateBuffer wherever
1118 // possible.
1119 bool VaapiH264Decoder::SubmitDecode(
1120 int32 picture_buffer_id,
1121 scoped_ptr<std::queue<VABufferID> > va_bufs,
1122 scoped_ptr<std::queue<VABufferID> > slice_bufs) {
1123
1124 static const size_t kMaxVABuffers = 32;
1125 DCHECK_LE(va_bufs->size(), kMaxVABuffers);
1126 DCHECK_LE(slice_bufs->size(), kMaxVABuffers);
1127
1128 DecodeSurfaces::iterator it = decode_surfaces_.find(picture_buffer_id);
1129 if (it == decode_surfaces_.end()) {
1130 DVLOG(1) << "Asked to put an invalid buffer";
1131 return false;
1132 }
1133
1134 // Get ready to decode into surface.
1135 VAStatus va_res = VAAPI_BeginPicture(va_display_, va_context_id_,
1136 it->second->va_surface_id());
1137 VA_SUCCESS_OR_RETURN(va_res, "vaBeginPicture failed", false);
1138
1139 // Put buffer IDs for pending parameter buffers into va_buffers[].
1140 VABufferID va_buffers[kMaxVABuffers];
1141 size_t num_va_buffers = va_bufs->size();
1142 for (size_t i = 0; i < num_va_buffers && i < kMaxVABuffers; ++i) {
1143 va_buffers[i] = va_bufs->front();
1144 va_bufs->pop();
1145 }
1146 base::Closure va_buffers_callback =
1147 base::Bind(&VaapiH264Decoder::DestroyBuffers, base::Unretained(this),
1148 num_va_buffers, va_buffers);
1149 base::ScopedClosureRunner va_buffers_deleter(va_buffers_callback);
1150
1151 // And send them to the HW decoder.
1152 va_res = VAAPI_RenderPicture(va_display_, va_context_id_, va_buffers,
1153 num_va_buffers);
1154 VA_SUCCESS_OR_RETURN(va_res, "vaRenderPicture for va_bufs failed", false);
1155
1156 DVLOG(4) << "Committed " << num_va_buffers << " VA buffers";
1157
1158 // Put buffer IDs for pending slice data buffers into slice_buffers[].
1159 VABufferID slice_buffers[kMaxVABuffers];
1160 size_t num_slice_buffers = slice_bufs->size();
1161 for (size_t i = 0; i < num_slice_buffers && i < kMaxVABuffers; ++i) {
1162 slice_buffers[i] = slice_bufs->front();
1163 slice_bufs->pop();
1164 }
1165 base::Closure va_slices_callback =
1166 base::Bind(&VaapiH264Decoder::DestroyBuffers, base::Unretained(this),
1167 num_slice_buffers, slice_buffers);
1168 base::ScopedClosureRunner slice_buffers_deleter(va_slices_callback);
1169
1170 // And send them to the HW decoder.
1171 va_res = VAAPI_RenderPicture(va_display_, va_context_id_, slice_buffers,
1172 num_slice_buffers);
1173 VA_SUCCESS_OR_RETURN(va_res, "vaRenderPicture for slices failed", false);
1174
1175 DVLOG(4) << "Committed " << num_slice_buffers << " slice buffers";
1176
1177 // Instruct HW decoder to start processing committed buffers (decode this
1178 // picture). This does not block until the end of decode.
1179 va_res = VAAPI_EndPicture(va_display_, va_context_id_);
1180 VA_SUCCESS_OR_RETURN(va_res, "vaEndPicture failed", false);
1181
1182 DVLOG(3) << "Will output from VASurface " << it->second->va_surface_id()
1183 << " to texture id " << it->second->texture_id();
1184
1185 return it->second->Sync();
1186 }
1187
1188
1189 bool VaapiH264Decoder::InitCurrPicture(H264SliceHeader* slice_hdr) { 478 bool VaapiH264Decoder::InitCurrPicture(H264SliceHeader* slice_hdr) {
1190 DCHECK(curr_pic_.get()); 479 DCHECK(curr_pic_.get());
1191 480
1192 memset(curr_pic_.get(), 0, sizeof(H264Picture)); 481 memset(curr_pic_.get(), 0, sizeof(H264Picture));
1193 482
1194 curr_pic_->idr = slice_hdr->idr_pic_flag; 483 curr_pic_->idr = slice_hdr->idr_pic_flag;
1195 484
1196 if (slice_hdr->field_pic_flag) { 485 if (slice_hdr->field_pic_flag) {
1197 curr_pic_->field = slice_hdr->bottom_field_flag ? H264Picture::FIELD_BOTTOM 486 curr_pic_->field = slice_hdr->bottom_field_flag ? H264Picture::FIELD_BOTTOM
1198 : H264Picture::FIELD_TOP; 487 : H264Picture::FIELD_TOP;
1199 } else { 488 } else {
1200 curr_pic_->field = H264Picture::FIELD_NONE; 489 curr_pic_->field = H264Picture::FIELD_NONE;
1201 } 490 }
1202 491
1203 curr_pic_->ref = slice_hdr->nal_ref_idc != 0; 492 curr_pic_->ref = slice_hdr->nal_ref_idc != 0;
1204 // This assumes a non-interlaced stream. 493 // This assumes a non-interlaced stream.
1205 curr_pic_->frame_num = curr_pic_->pic_num = slice_hdr->frame_num; 494 curr_pic_->frame_num = curr_pic_->pic_num = slice_hdr->frame_num;
1206 495
1207 if (!CalculatePicOrderCounts(slice_hdr)) 496 if (!CalculatePicOrderCounts(slice_hdr))
1208 return false; 497 return false;
1209 498
1210 // Try to get an empty surface to decode this picture to. 499 // Try to get an empty surface to decode this picture to.
1211 if (!AssignSurfaceToPoC(curr_pic_->pic_order_cnt)) { 500 if (!AssignSurfaceToPoC(curr_input_id_, curr_pic_->pic_order_cnt)) {
1212 DVLOG(1) << "Failed getting a free surface for a picture"; 501 DVLOG(1) << "Failed getting a free surface for a picture";
1213 return false; 502 return false;
1214 } 503 }
1215 504
1216 curr_pic_->long_term_reference_flag = slice_hdr->long_term_reference_flag; 505 curr_pic_->long_term_reference_flag = slice_hdr->long_term_reference_flag;
1217 curr_pic_->adaptive_ref_pic_marking_mode_flag = 506 curr_pic_->adaptive_ref_pic_marking_mode_flag =
1218 slice_hdr->adaptive_ref_pic_marking_mode_flag; 507 slice_hdr->adaptive_ref_pic_marking_mode_flag;
1219 508
1220 // If the slice header indicates we will have to perform reference marking 509 // If the slice header indicates we will have to perform reference marking
1221 // process after this picture is decoded, store required data for that 510 // process after this picture is decoded, store required data for that
(...skipping 479 matching lines...)
1701 990
1702 // Per NOTE 2 in 8.2.4.3.2, the ref_pic_listx size in the above loop is 991 // Per NOTE 2 in 8.2.4.3.2, the ref_pic_listx size in the above loop is
1703 // temporarily made one element longer than the required final list. 992 // temporarily made one element longer than the required final list.
1704 // Resize the list back to its required size. 993 // Resize the list back to its required size.
1705 ref_pic_listx->resize(num_ref_idx_lX_active_minus1 + 1); 994 ref_pic_listx->resize(num_ref_idx_lX_active_minus1 + 1);
1706 995
1707 return true; 996 return true;
1708 } 997 }
1709 998
1710 bool VaapiH264Decoder::OutputPic(H264Picture* pic) { 999 bool VaapiH264Decoder::OutputPic(H264Picture* pic) {
1711 DCHECK(!pic->outputted); 1000 if (pic->outputted)
1001 return true;
1002
1712 pic->outputted = true; 1003 pic->outputted = true;
1713 POCToDecodeSurfaces::iterator iter = poc_to_decode_surfaces_.find( 1004 last_output_poc_ = pic->pic_order_cnt;
1714 pic->pic_order_cnt); 1005
1715 if (iter == poc_to_decode_surfaces_.end()) 1006 DecodeSurface* dec_surface = DecodeSurfaceByPoC(pic->pic_order_cnt);
1007 if (!dec_surface)
1716 return false; 1008 return false;
1717 DecodeSurface* dec_surface = iter->second;
1718 1009
1719 dec_surface->set_at_client(true); 1010 DCHECK_GE(dec_surface->input_id(), 0);
1720 last_output_poc_ = pic->pic_order_cnt;
1721 // Notify the client that a picture can be output.
1722 DVLOG(4) << "Posting output task for POC: " << pic->pic_order_cnt 1011 DVLOG(4) << "Posting output task for POC: " << pic->pic_order_cnt
1723 << " input_id: " << dec_surface->input_id() 1012 << " input_id: " << dec_surface->input_id();
1724 << "output_id: " << dec_surface->picture_buffer_id(); 1013 output_pic_cb_.Run(dec_surface->input_id(), dec_surface->va_surface());
1725 output_pic_cb_.Run(dec_surface->input_id(),
1726 dec_surface->picture_buffer_id());
1727 1014
1728 return true; 1015 return true;
1729 } 1016 }
1730 1017
1731 bool VaapiH264Decoder::Flush() { 1018 void VaapiH264Decoder::ClearDPB() {
1019 // Clear DPB contents, marking the pictures as unused first.
1020 for (H264DPB::Pictures::iterator it = dpb_.begin(); it != dpb_.end(); ++it)
1021 UnassignSurfaceFromPoC((*it)->pic_order_cnt);
1022
1023 dpb_.Clear();
1024 last_output_poc_ = 0;
1025 }
1026
1027 bool VaapiH264Decoder::OutputAllRemainingPics() {
1732 // Output all pictures that are waiting to be outputted. 1028 // Output all pictures that are waiting to be outputted.
1733 FinishPrevFrameIfPresent(); 1029 FinishPrevFrameIfPresent();
1734 H264Picture::PtrVector to_output; 1030 H264Picture::PtrVector to_output;
1735 dpb_.GetNotOutputtedPicsAppending(to_output); 1031 dpb_.GetNotOutputtedPicsAppending(to_output);
1736 // Sort them by ascending POC to output in order. 1032 // Sort them by ascending POC to output in order.
1737 std::sort(to_output.begin(), to_output.end(), POCAscCompare()); 1033 std::sort(to_output.begin(), to_output.end(), POCAscCompare());
1738 1034
1739 H264Picture::PtrVector::iterator it; 1035 H264Picture::PtrVector::iterator it;
1740 for (it = to_output.begin(); it != to_output.end(); ++it) { 1036 for (it = to_output.begin(); it != to_output.end(); ++it) {
1741 if (!OutputPic(*it)) { 1037 if (!OutputPic(*it)) {
1742 DVLOG(1) << "Failed to output pic POC: " << (*it)->pic_order_cnt; 1038 DVLOG(1) << "Failed to output pic POC: " << (*it)->pic_order_cnt;
1743 return false; 1039 return false;
1744 } 1040 }
1745 } 1041 }
1746 1042
1747 // And clear DPB contents, marking the pictures as unused first.
1748 // The surfaces will be released after they have been displayed and returned.
1749 for (H264DPB::Pictures::iterator it = dpb_.begin(); it != dpb_.end(); ++it) {
1750 UnassignSurfaceFromPoC((*it)->pic_order_cnt);
1751 }
1752 dpb_.Clear();
1753 last_output_poc_ = 0;
1754
1755 return true; 1043 return true;
1756 } 1044 }
1757 1045
1046 bool VaapiH264Decoder::Flush() {
1047 if (!OutputAllRemainingPics())
1048 return false;
1049
1050 ClearDPB();
1051
1052 DCHECK(decode_surfaces_in_use_.empty());
1053 return true;
1054 }
1055
1758 bool VaapiH264Decoder::StartNewFrame(H264SliceHeader* slice_hdr) { 1056 bool VaapiH264Decoder::StartNewFrame(H264SliceHeader* slice_hdr) {
1759 // TODO posciak: add handling of max_num_ref_frames per spec. 1057 // TODO posciak: add handling of max_num_ref_frames per spec.
1760 1058
1761 // If the new frame is an IDR, output what's left to output and clear DPB 1059 // If the new frame is an IDR, output what's left to output and clear DPB
1762 if (slice_hdr->idr_pic_flag) { 1060 if (slice_hdr->idr_pic_flag) {
1763 // (unless we are explicitly instructed not to do so). 1061 // (unless we are explicitly instructed not to do so).
1764 if (!slice_hdr->no_output_of_prior_pics_flag) { 1062 if (!slice_hdr->no_output_of_prior_pics_flag) {
1765 // Output DPB contents. 1063 // Output DPB contents.
1766 if (!Flush()) 1064 if (!Flush())
1767 return false; 1065 return false;
(...skipping 190 matching lines...)
1958 H264Picture* to_unmark = dpb_.GetLowestFrameNumWrapShortRefPic(); 1256 H264Picture* to_unmark = dpb_.GetLowestFrameNumWrapShortRefPic();
1959 if (to_unmark == NULL) { 1257 if (to_unmark == NULL) {
1960 DVLOG(1) << "Couldn't find a short ref picture to unmark"; 1258 DVLOG(1) << "Couldn't find a short ref picture to unmark";
1961 return; 1259 return;
1962 } 1260 }
1963 to_unmark->ref = false; 1261 to_unmark->ref = false;
1964 } 1262 }
1965 } else { 1263 } else {
1966 // Shouldn't get here. 1264 // Shouldn't get here.
1967 DVLOG(1) << "Interlaced video not supported."; 1265 DVLOG(1) << "Interlaced video not supported.";
1968 ReportToUMA(INTERLACED_STREAM); 1266 report_error_to_uma_cb_.Run(INTERLACED_STREAM);
1969 } 1267 }
1970 } else { 1268 } else {
1971 // Stream has instructions on how to discard pictures from DPB and how 1269 // Stream has instructions on how to discard pictures from DPB and how
1972 // to mark/unmark existing reference pictures. Do it. 1270 // to mark/unmark existing reference pictures. Do it.
1973 // Spec 8.2.5.4. 1271 // Spec 8.2.5.4.
1974 if (curr_pic_->field == H264Picture::FIELD_NONE) { 1272 if (curr_pic_->field == H264Picture::FIELD_NONE) {
1975 HandleMemoryManagementOps(); 1273 HandleMemoryManagementOps();
1976 } else { 1274 } else {
1977 // Shouldn't get here. 1275 // Shouldn't get here.
1978 DVLOG(1) << "Interlaced video not supported."; 1276 DVLOG(1) << "Interlaced video not supported.";
1979 ReportToUMA(INTERLACED_STREAM); 1277 report_error_to_uma_cb_.Run(INTERLACED_STREAM);
1980 } 1278 }
1981 } 1279 }
1982 } 1280 }
1983 } 1281 }
1984 1282
1985 bool VaapiH264Decoder::FinishPicture() { 1283 bool VaapiH264Decoder::FinishPicture() {
1986 DCHECK(curr_pic_.get()); 1284 DCHECK(curr_pic_.get());
1987 1285
1988 // Finish processing previous picture. 1286 // Finish processing previous picture.
1989 // Start by storing previous reference picture data for later use, 1287 // Start by storing previous reference picture data for later use,
1990 // if picture being finished is a reference picture. 1288 // if picture being finished is a reference picture.
1991 if (curr_pic_->ref) { 1289 if (curr_pic_->ref) {
1992 ReferencePictureMarking(); 1290 ReferencePictureMarking();
1993 prev_ref_has_memmgmnt5_ = curr_pic_->mem_mgmt_5; 1291 prev_ref_has_memmgmnt5_ = curr_pic_->mem_mgmt_5;
1994 prev_ref_top_field_order_cnt_ = curr_pic_->top_field_order_cnt; 1292 prev_ref_top_field_order_cnt_ = curr_pic_->top_field_order_cnt;
1995 prev_ref_pic_order_cnt_msb_ = curr_pic_->pic_order_cnt_msb; 1293 prev_ref_pic_order_cnt_msb_ = curr_pic_->pic_order_cnt_msb;
1996 prev_ref_pic_order_cnt_lsb_ = curr_pic_->pic_order_cnt_lsb; 1294 prev_ref_pic_order_cnt_lsb_ = curr_pic_->pic_order_cnt_lsb;
1997 prev_ref_field_ = curr_pic_->field; 1295 prev_ref_field_ = curr_pic_->field;
1998 } 1296 }
1999 prev_has_memmgmnt5_ = curr_pic_->mem_mgmt_5; 1297 prev_has_memmgmnt5_ = curr_pic_->mem_mgmt_5;
2000 prev_frame_num_offset_ = curr_pic_->frame_num_offset; 1298 prev_frame_num_offset_ = curr_pic_->frame_num_offset;
2001 1299
2002 // Remove unused (for reference or later output) pictures from DPB, marking 1300 // Remove unused (for reference or later output) pictures from DPB, marking
2003 // them as such. 1301 // them as such.
2004 for (H264DPB::Pictures::iterator it = dpb_.begin(); it != dpb_.end(); ++it) { 1302 for (H264DPB::Pictures::iterator it = dpb_.begin(); it != dpb_.end(); ++it) {
2005 if ((*it)->outputted && !(*it)->ref) 1303 if ((*it)->outputted && !(*it)->ref)
2006 UnassignSurfaceFromPoC((*it)->pic_order_cnt); 1304 UnassignSurfaceFromPoC((*it)->pic_order_cnt);
2007 } 1305 }
2008 dpb_.RemoveUnused(); 1306 dpb_.DeleteUnused();
2009 1307
2010 DVLOG(4) << "Finishing picture, DPB entries: " << dpb_.size() 1308 DVLOG(4) << "Finishing picture, entries in DPB: " << dpb_.size();
2011 << " Num available dec surfaces: "
2012 << num_available_decode_surfaces_;
2013 1309
2014 // Whatever happens below, curr_pic_ will stop managing the pointer to the 1310 // Whatever happens below, curr_pic_ will stop managing the pointer to the
2015 // picture after this function returns. The ownership will either be 1311 // picture after this function returns. The ownership will either be
2016 // transferred to DPB, if the image is still needed (for output and/or 1312 // transferred to DPB, if the image is still needed (for output and/or
2017 // reference), or the memory will be released if we manage to output it here 1313 // reference), or the memory will be released if we manage to output it here
2018 // without having to store it for future reference. 1314 // without having to store it for future reference.
2019 scoped_ptr<H264Picture> pic(curr_pic_.release()); 1315 scoped_ptr<H264Picture> pic(curr_pic_.release());
2020 1316
2021 // Get all pictures that haven't been outputted yet. 1317 // Get all pictures that haven't been outputted yet.
2022 H264Picture::PtrVector not_outputted; 1318 H264Picture::PtrVector not_outputted;
(...skipping 16 matching lines...)
2039 (*output_candidate)->pic_order_cnt <= last_output_poc_ + 2; 1335 (*output_candidate)->pic_order_cnt <= last_output_poc_ + 2;
2040 ++output_candidate) { 1336 ++output_candidate) {
2041 DCHECK_GE((*output_candidate)->pic_order_cnt, last_output_poc_); 1337 DCHECK_GE((*output_candidate)->pic_order_cnt, last_output_poc_);
2042 if (!OutputPic(*output_candidate)) 1338 if (!OutputPic(*output_candidate))
2043 return false; 1339 return false;
2044 1340
2045 if (!(*output_candidate)->ref) { 1341 if (!(*output_candidate)->ref) {
2046 // Current picture hasn't been inserted into DPB yet, so don't remove it 1342 // Current picture hasn't been inserted into DPB yet, so don't remove it
2047 // if we managed to output it immediately. 1343 // if we managed to output it immediately.
2048 if (*output_candidate != pic) 1344 if (*output_candidate != pic)
2049 dpb_.RemoveByPOC((*output_candidate)->pic_order_cnt); 1345 dpb_.DeleteByPOC((*output_candidate)->pic_order_cnt);
2050 // Mark as unused. 1346 // Mark as unused.
2051 UnassignSurfaceFromPoC((*output_candidate)->pic_order_cnt); 1347 UnassignSurfaceFromPoC((*output_candidate)->pic_order_cnt);
2052 } 1348 }
2053 } 1349 }
2054 1350
2055 // If we haven't managed to output the picture that we just decoded, or if 1351 // If we haven't managed to output the picture that we just decoded, or if
2056 // it's a reference picture, we have to store it in DPB. 1352 // it's a reference picture, we have to store it in DPB.
2057 if (!pic->outputted || pic->ref) { 1353 if (!pic->outputted || pic->ref) {
2058 if (dpb_.IsFull()) { 1354 if (dpb_.IsFull()) {
2059 // If we haven't managed to output anything to free up space in DPB 1355 // If we haven't managed to output anything to free up space in DPB
(...skipping 32 matching lines...)
2092 return 0; 1388 return 0;
2093 } 1389 }
2094 } 1390 }
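ProcessSPS() below relies on LevelToMaxDpbMbs(), whose body falls in the skipped region (only what looks like its fallback return 0; is visible above). It maps level_idc to the MaxDpbMbs limit of H.264 Table A-1. A sketch with a few representative entries copied from the spec (the real function may handle more levels):

// Returns MaxDpbMbs for a given level_idc, or 0 if the level is unsupported.
static int LevelToMaxDpbMbsSketch(int level_idc) {
  switch (level_idc) {
    case 30: return 8100;    // Level 3.0
    case 31: return 18000;   // Level 3.1
    case 40:                 // Level 4.0
    case 41: return 32768;   // Level 4.1
    case 50: return 110400;  // Level 5.0
    case 51: return 184320;  // Level 5.1
    default: return 0;
  }
}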
2095 1391
2096 bool VaapiH264Decoder::ProcessSPS(int sps_id) { 1392 bool VaapiH264Decoder::ProcessSPS(int sps_id) {
2097 const H264SPS* sps = parser_.GetSPS(sps_id); 1393 const H264SPS* sps = parser_.GetSPS(sps_id);
2098 DCHECK(sps); 1394 DCHECK(sps);
2099 1395
2100 if (sps->frame_mbs_only_flag == 0) { 1396 if (sps->frame_mbs_only_flag == 0) {
2101 DVLOG(1) << "frame_mbs_only_flag != 1 not supported"; 1397 DVLOG(1) << "frame_mbs_only_flag != 1 not supported";
2102 ReportToUMA(FRAME_MBS_ONLY_FLAG_NOT_ONE); 1398 report_error_to_uma_cb_.Run(FRAME_MBS_ONLY_FLAG_NOT_ONE);
2103 return false; 1399 return false;
2104 } 1400 }
2105 1401
2106 if (sps->gaps_in_frame_num_value_allowed_flag) { 1402 if (sps->gaps_in_frame_num_value_allowed_flag) {
2107 DVLOG(1) << "Gaps in frame numbers not supported"; 1403 DVLOG(1) << "Gaps in frame numbers not supported";
2108 ReportToUMA(GAPS_IN_FRAME_NUM); 1404 report_error_to_uma_cb_.Run(GAPS_IN_FRAME_NUM);
2109 return false; 1405 return false;
2110 } 1406 }
2111 1407
2112 curr_sps_id_ = sps->seq_parameter_set_id; 1408 curr_sps_id_ = sps->seq_parameter_set_id;
2113 1409
2114 // Calculate picture height/width in macroblocks and pixels 1410 // Calculate picture height/width in macroblocks and pixels
2115 // (spec 7.4.2.1.1, 7.4.3). 1411 // (spec 7.4.2.1.1, 7.4.3).
2116 int width_mb = sps->pic_width_in_mbs_minus1 + 1; 1412 int width_mb = sps->pic_width_in_mbs_minus1 + 1;
2117 int height_mb = (2 - sps->frame_mbs_only_flag) * 1413 int height_mb = (2 - sps->frame_mbs_only_flag) *
2118 (sps->pic_height_in_map_units_minus1 + 1); 1414 (sps->pic_height_in_map_units_minus1 + 1);
2119 1415
2120 int width = 16 * width_mb; 1416 int width = 16 * width_mb;
2121 int height = 16 * height_mb; 1417 int height = 16 * height_mb;
2122 1418
2123 DVLOG(1) << "New picture size: " << width << "x" << height; 1419 DVLOG(1) << "New picture size: " << width << "x" << height;
2124 if (width == 0 || height == 0) { 1420 if (width == 0 || height == 0) {
2125 DVLOG(1) << "Invalid picture size!"; 1421 DVLOG(1) << "Invalid picture size!";
2126 return false; 1422 return false;
2127 } 1423 }
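As a worked example of the derivation above, with values typical of a 1080p stream (not taken from this CL):

// pic_width_in_mbs_minus1 = 119, frame_mbs_only_flag = 1,
// pic_height_in_map_units_minus1 = 67:
//   width_mb  = 119 + 1            = 120  ->  width  = 16 * 120 = 1920
//   height_mb = (2 - 1) * (67 + 1) = 68   ->  height = 16 * 68  = 1088
// (1080-line content is coded as 1088 rows; cropping trims the extra 8.)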
2128 1424
2129 if ((pic_width_ != -1 || pic_height_ != -1) && 1425 if (!pic_size_.IsEmpty() &&
2130 (width != pic_width_ || height != pic_height_)) { 1426 (width != pic_size_.width() || height != pic_size_.height())) {
2131 DVLOG(1) << "Picture size changed mid-stream"; 1427 DVLOG(1) << "Picture size changed mid-stream";
2132 ReportToUMA(MID_STREAM_RESOLUTION_CHANGE); 1428 report_error_to_uma_cb_.Run(MID_STREAM_RESOLUTION_CHANGE);
2133 return false; 1429 return false;
2134 } 1430 }
2135 1431
2136 pic_width_ = width; 1432 pic_size_.SetSize(width, height);
2137 pic_height_ = height;
2138 1433
2139 max_pic_order_cnt_lsb_ = 1 << (sps->log2_max_pic_order_cnt_lsb_minus4 + 4); 1434 max_pic_order_cnt_lsb_ = 1 << (sps->log2_max_pic_order_cnt_lsb_minus4 + 4);
2140 max_frame_num_ = 1 << (sps->log2_max_frame_num_minus4 + 4); 1435 max_frame_num_ = 1 << (sps->log2_max_frame_num_minus4 + 4);
2141 1436
2142 int level = sps->level_idc; 1437 int level = sps->level_idc;
2143 int max_dpb_mbs = LevelToMaxDpbMbs(level); 1438 int max_dpb_mbs = LevelToMaxDpbMbs(level);
2144 if (max_dpb_mbs == 0) 1439 if (max_dpb_mbs == 0)
2145 return false; 1440 return false;
2146 1441
2147 size_t max_dpb_size = std::min(max_dpb_mbs / (width_mb * height_mb), 1442 size_t max_dpb_size = std::min(max_dpb_mbs / (width_mb * height_mb),
(...skipping 24 matching lines...)
2172 1467
2173 return true; 1468 return true;
2174 } 1469 }
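Continuing that example through the DPB sizing near the end of ProcessSPS() (the second argument of the std::min() in the skipped lines is assumed to be the spec's 16-frame DPB cap):

// Level 4.1, 1920x1088:
//   max_dpb_mbs          = 32768 (Table A-1)
//   width_mb * height_mb = 120 * 68 = 8160
//   max_dpb_size         = min(32768 / 8160, 16) = min(4, 16) = 4
// So at most four decoded frames must be retained for reference/reordering.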
2175 1470
2176 bool VaapiH264Decoder::ProcessSlice(H264SliceHeader* slice_hdr) { 1471 bool VaapiH264Decoder::ProcessSlice(H264SliceHeader* slice_hdr) {
2177 prev_frame_num_ = frame_num_; 1472 prev_frame_num_ = frame_num_;
2178 frame_num_ = slice_hdr->frame_num; 1473 frame_num_ = slice_hdr->frame_num;
2179 1474
2180 if (prev_frame_num_ > 0 && prev_frame_num_ < frame_num_ - 1) { 1475 if (prev_frame_num_ > 0 && prev_frame_num_ < frame_num_ - 1) {
2181 DVLOG(1) << "Gap in frame_num!"; 1476 DVLOG(1) << "Gap in frame_num!";
2182 ReportToUMA(GAPS_IN_FRAME_NUM); 1477 report_error_to_uma_cb_.Run(GAPS_IN_FRAME_NUM);
2183 return false; 1478 return false;
2184 } 1479 }
2185 1480
2186 if (slice_hdr->field_pic_flag == 0) 1481 if (slice_hdr->field_pic_flag == 0)
2187 max_pic_num_ = max_frame_num_; 1482 max_pic_num_ = max_frame_num_;
2188 else 1483 else
2189 max_pic_num_ = 2 * max_frame_num_; 1484 max_pic_num_ = 2 * max_frame_num_;
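A quick numeric example of the wrap limits used above (spec 7.4.3; the SPS value is illustrative):

// log2_max_frame_num_minus4 = 4  ->  max_frame_num_ = 1 << 8 = 256
//   frame coding (field_pic_flag == 0): max_pic_num_ = 256
//   field coding (field_pic_flag == 1): max_pic_num_ = 2 * 256 = 512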
2190 1485
2191 // TODO(posciak): switch to new picture detection per 7.4.1.2.4. 1486 // TODO(posciak): switch to new picture detection per 7.4.1.2.4.
2192 if (curr_pic_ != NULL && slice_hdr->first_mb_in_slice != 0) { 1487 if (curr_pic_ != NULL && slice_hdr->first_mb_in_slice != 0) {
(...skipping 17 matching lines...)
2210 state_ = kError; \ 1505 state_ = kError; \
2211 return VaapiH264Decoder::kDecodeError; \ 1506 return VaapiH264Decoder::kDecodeError; \
2212 } while (0) 1507 } while (0)
2213 1508
2214 VaapiH264Decoder::DecResult VaapiH264Decoder::DecodeInitial(int32 input_id) { 1509 VaapiH264Decoder::DecResult VaapiH264Decoder::DecodeInitial(int32 input_id) {
2215 // Decode enough to get required picture size (i.e. until we find an SPS); 1510 // Decode enough to get required picture size (i.e. until we find an SPS);
2216 // if we see slice data first, we are missing the beginning of the stream. 1511 // if we see slice data first, we are missing the beginning of the stream.
2217 H264NALU nalu; 1512 H264NALU nalu;
2218 H264Parser::Result res; 1513 H264Parser::Result res;
2219 1514
2220 DCHECK_NE(state_, kUninitialized); 1515 if (state_ == kDecoding)
1516 return kReadyToDecode;
2221 1517
2222 curr_input_id_ = input_id; 1518 curr_input_id_ = input_id;
2223 1519
2224 while (1) { 1520 while (1) {
2225 if (state_ == kAfterReset && num_available_decode_surfaces_ == 0) { 1521 // If we have already decoded part of the stream (i.e. after a reset), we can
1522 // enter the decoding state not only at an SPS, but also at other resume
1523 // points, such as IDRs. In that case we need an output surface in case we
1524 // end up decoding a frame; otherwise we are only looking for an SPS and
1525 // don't need any output surfaces.
1526 if (curr_sps_id_ != -1 && available_va_surfaces_.empty()) {
2226 DVLOG(4) << "No output surfaces available"; 1527 DVLOG(4) << "No output surfaces available";
2227 return kNoOutputAvailable; 1528 return kNoOutputAvailable;
2228 } 1529 }
2229 1530
2230 // Get next NALU looking for SPS or IDR if after reset. 1531 // Get next NALU looking for SPS or IDR if after reset.
2231 res = parser_.AdvanceToNextNALU(&nalu); 1532 res = parser_.AdvanceToNextNALU(&nalu);
2232 if (res == H264Parser::kEOStream) { 1533 if (res == H264Parser::kEOStream) {
2233 DVLOG(1) << "Could not find SPS before EOS"; 1534 DVLOG(1) << "Could not find SPS before EOS";
2234 return kNeedMoreStreamData; 1535 return kNeedMoreStreamData;
2235 } else if (res != H264Parser::kOk) { 1536 } else if (res != H264Parser::kOk) {
2236 SET_ERROR_AND_RETURN(); 1537 SET_ERROR_AND_RETURN();
2237 } 1538 }
2238 1539
2239 DVLOG(4) << " NALU found: " << static_cast<int>(nalu.nal_unit_type); 1540 DVLOG(4) << " NALU found: " << static_cast<int>(nalu.nal_unit_type);
2240 1541
2241 switch (nalu.nal_unit_type) { 1542 switch (nalu.nal_unit_type) {
2242 case H264NALU::kSPS: 1543 case H264NALU::kSPS:
2243 res = parser_.ParseSPS(&curr_sps_id_); 1544 res = parser_.ParseSPS(&curr_sps_id_);
2244 if (res != H264Parser::kOk) 1545 if (res != H264Parser::kOk)
2245 SET_ERROR_AND_RETURN(); 1546 SET_ERROR_AND_RETURN();
2246 1547
2247 if (!ProcessSPS(curr_sps_id_)) 1548 if (!ProcessSPS(curr_sps_id_))
2248 SET_ERROR_AND_RETURN(); 1549 SET_ERROR_AND_RETURN();
2249 1550
2250 // Just got information about the video size from SPS, so we can
2251 // now allocate surfaces and let the client know we are ready to
2252 // accept output buffers and decode.
2253 if (!CreateVASurfaces())
2254 SET_ERROR_AND_RETURN();
2255
2256 state_ = kDecoding; 1551 state_ = kDecoding;
2257 return kReadyToDecode; 1552 return kReadyToDecode;
2258 1553
2259 case H264NALU::kIDRSlice: 1554 case H264NALU::kIDRSlice:
2260 // If after reset, should be able to recover from an IDR. 1555 // If after reset, should be able to recover from an IDR.
2261 if (state_ == kAfterReset) { 1556 // TODO(posciak): the IDR may require an SPS that we don't have
1557 // available. For now we'd fail if that happens, but ideally we'd like
1558 // to keep going until the next SPS in the stream.
1559 if (curr_sps_id_ != -1) {
2262 H264SliceHeader slice_hdr; 1560 H264SliceHeader slice_hdr;
2263 1561
2264 res = parser_.ParseSliceHeader(nalu, &slice_hdr); 1562 res = parser_.ParseSliceHeader(nalu, &slice_hdr);
2265 if (res != H264Parser::kOk) 1563 if (res != H264Parser::kOk)
2266 SET_ERROR_AND_RETURN(); 1564 SET_ERROR_AND_RETURN();
2267 1565
2268 if (!ProcessSlice(&slice_hdr)) 1566 if (!ProcessSlice(&slice_hdr))
2269 SET_ERROR_AND_RETURN(); 1567 SET_ERROR_AND_RETURN();
2270 1568
2271 state_ = kDecoding; 1569 state_ = kDecoding;
(...skipping 34 matching lines...)
2306 DVLOG(1) << "Decoder not ready: error in stream or not initialized"; 1604 DVLOG(1) << "Decoder not ready: error in stream or not initialized";
2307 return kDecodeError; 1605 return kDecodeError;
2308 } 1606 }
2309 1607
2310 // All of the actions below might result in decoding a picture from 1608 // All of the actions below might result in decoding a picture from
2311 // previously parsed data, but we still have to handle/parse current input 1609 // previously parsed data, but we still have to handle/parse current input
2312 // first. 1610 // first.
2313 // Note: this may drop some already decoded frames if there are errors 1611 // Note: this may drop some already decoded frames if there are errors
2314 // further in the stream, but we are OK with that. 1612 // further in the stream, but we are OK with that.
2315 while (1) { 1613 while (1) {
2316 if (num_available_decode_surfaces_ == 0) { 1614 if (available_va_surfaces_.empty()) {
2317 DVLOG(4) << "No output surfaces available"; 1615 DVLOG(4) << "No output surfaces available";
2318 return kNoOutputAvailable; 1616 return kNoOutputAvailable;
2319 } 1617 }
1618
2320 par_res = parser_.AdvanceToNextNALU(&nalu); 1619 par_res = parser_.AdvanceToNextNALU(&nalu);
2321 if (par_res == H264Parser::kEOStream) 1620 if (par_res == H264Parser::kEOStream)
2322 return kNeedMoreStreamData; 1621 return kNeedMoreStreamData;
2323 else if (par_res != H264Parser::kOk) 1622 else if (par_res != H264Parser::kOk)
2324 SET_ERROR_AND_RETURN(); 1623 SET_ERROR_AND_RETURN();
2325 1624
2326 DVLOG(4) << "NALU found: " << static_cast<int>(nalu.nal_unit_type); 1625 DVLOG(4) << "NALU found: " << static_cast<int>(nalu.nal_unit_type);
2327 1626
2328 switch (nalu.nal_unit_type) { 1627 switch (nalu.nal_unit_type) {
2329 case H264NALU::kNonIDRSlice: 1628 case H264NALU::kNonIDRSlice:
(...skipping 34 matching lines...)
2364 SET_ERROR_AND_RETURN(); 1663 SET_ERROR_AND_RETURN();
2365 1664
2366 if (!ProcessPPS(pps_id)) 1665 if (!ProcessPPS(pps_id))
2367 SET_ERROR_AND_RETURN(); 1666 SET_ERROR_AND_RETURN();
2368 break; 1667 break;
2369 1668
2370 default: 1669 default:
2371 // skip NALU 1670 // skip NALU
2372 break; 1671 break;
2373 } 1672 }
2374
2375 // If the last action resulted in decoding a frame, possibly from older
2376 // data, return. Otherwise keep reading the stream.
2377 if (frame_ready_at_hw_) {
2378 frame_ready_at_hw_ = false;
2379 return kDecodedFrame;
2380 }
2381 } 1673 }
2382 } 1674 }
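For readers following the control flow, the DecResult values seen in this chunk drive the caller's next step. A self-contained sketch of how a client might react to each result (the enum below only mirrors the names visible in this diff, and NextActionFor() is hypothetical, not VAVDA code):

enum DecResult {
  kReadyToDecode,
  kNeedMoreStreamData,
  kNoOutputAvailable,
  kDecodeError
};

// Typical client reaction for each result (illustrative only).
static const char* NextActionFor(DecResult res) {
  switch (res) {
    case kReadyToDecode:      return "allocate output pictures, start decoding";
    case kNeedMoreStreamData: return "queue the next bitstream buffer";
    case kNoOutputAvailable:  return "wait until the client returns a picture";
    case kDecodeError:        return "reset or tear down the decoder";
  }
  return "";
}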
2383 1675
2384 size_t VaapiH264Decoder::GetRequiredNumOfPictures() { 1676 size_t VaapiH264Decoder::GetRequiredNumOfPictures() {
2385 return dpb_.max_num_pics() + kPicsInPipeline; 1677 return dpb_.max_num_pics() + kPicsInPipeline;
2386 } 1678 }
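GetRequiredNumOfPictures() is what the decoder reports when asking the client for output pictures: enough to cover the worst-case DPB plus the pictures in flight through the decode/output pipeline. For instance, with the max_dpb_size of 4 computed in the example above and a hypothetical kPicsInPipeline of 2 (its actual value is defined outside this chunk), the client would be asked for 6 pictures.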
2387 1679
2388 // static
2389 void VaapiH264Decoder::PreSandboxInitialization() {
2390 DCHECK(!pre_sandbox_init_done_);
2391 vaapi_handle = dlopen("libva.so", RTLD_NOW);
2392 vaapi_x11_handle = dlopen("libva-x11.so", RTLD_NOW);
2393 pre_sandbox_init_done_ = vaapi_handle && vaapi_x11_handle;
2394 }
2395
2396 // static
2397 bool VaapiH264Decoder::PostSandboxInitialization() {
2398 if (!pre_sandbox_init_done_)
2399 return false;
2400 #define VAAPI_DLSYM(name, handle) \
2401 VAAPI_##name = reinterpret_cast<Vaapi##name>(dlsym((handle), "va"#name)) \
2402
2403 VAAPI_DLSYM(GetDisplay, vaapi_x11_handle);
2404 VAAPI_DLSYM(DisplayIsValid, vaapi_handle);
2405 VAAPI_DLSYM(Initialize, vaapi_handle);
2406 VAAPI_DLSYM(Terminate, vaapi_handle);
2407 VAAPI_DLSYM(GetConfigAttributes, vaapi_handle);
2408 VAAPI_DLSYM(CreateConfig, vaapi_handle);
2409 VAAPI_DLSYM(DestroyConfig, vaapi_handle);
2410 VAAPI_DLSYM(CreateSurfaces, vaapi_handle);
2411 VAAPI_DLSYM(DestroySurfaces, vaapi_handle);
2412 VAAPI_DLSYM(CreateContext, vaapi_handle);
2413 VAAPI_DLSYM(DestroyContext, vaapi_handle);
2414 VAAPI_DLSYM(PutSurface, vaapi_x11_handle);
2415 VAAPI_DLSYM(SyncSurface, vaapi_x11_handle);
2416 VAAPI_DLSYM(BeginPicture, vaapi_handle);
2417 VAAPI_DLSYM(RenderPicture, vaapi_handle);
2418 VAAPI_DLSYM(EndPicture, vaapi_handle);
2419 VAAPI_DLSYM(CreateBuffer, vaapi_handle);
2420 VAAPI_DLSYM(DestroyBuffer, vaapi_handle);
2421 VAAPI_DLSYM(ErrorStr, vaapi_handle);
2422 #undef VAAPI_DLSYM
2423
2424 return VAAPI_GetDisplay &&
2425 VAAPI_DisplayIsValid &&
2426 VAAPI_Initialize &&
2427 VAAPI_Terminate &&
2428 VAAPI_GetConfigAttributes &&
2429 VAAPI_CreateConfig &&
2430 VAAPI_DestroyConfig &&
2431 VAAPI_CreateSurfaces &&
2432 VAAPI_DestroySurfaces &&
2433 VAAPI_CreateContext &&
2434 VAAPI_DestroyContext &&
2435 VAAPI_PutSurface &&
2436 VAAPI_SyncSurface &&
2437 VAAPI_BeginPicture &&
2438 VAAPI_RenderPicture &&
2439 VAAPI_EndPicture &&
2440 VAAPI_CreateBuffer &&
2441 VAAPI_DestroyBuffer &&
2442 VAAPI_ErrorStr;
2443 }
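The two functions above (removed by this CL) split dynamic-library setup around the GPU sandbox: dlopen() needs filesystem access and must run before the sandbox is engaged, while dlsym() only resolves symbols from already-open handles and can run afterwards. A sketch of the intended call order (the surrounding error handling is illustrative, not from this CL):

// Before the GPU sandbox is initialized (filesystem still reachable):
VaapiH264Decoder::PreSandboxInitialization();  // dlopen() libva / libva-x11.

// ...sandbox engaged...

// Afterwards, resolve entry points from the handles opened earlier:
if (!VaapiH264Decoder::PostSandboxInitialization()) {
  // Symbols missing; hardware decode should be disabled.
}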
2444
2445 } // namespace content 1680 } // namespace content