OLD | NEW |
| (Empty) |
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | |
2 // Use of this source code is governed by a BSD-style license that can be | |
3 // found in the LICENSE file. | |
4 | |
5 #include <dlfcn.h> | |
6 | |
7 #include <algorithm> | |
8 | |
9 #include "base/bind.h" | |
10 #include "base/stl_util.h" | |
11 #include "content/common/gpu/media/vaapi_h264_decoder.h" | |
12 #include "third_party/libva/va/va.h" | |
13 #include "third_party/libva/va/va_x11.h" | |
14 #include "ui/gl/gl_bindings.h" | |
15 | |
// Logs (at DVLOG level 1) |err_msg| plus the textual VA error when |va_res|
// is not VA_STATUS_SUCCESS. Execution continues; use VA_SUCCESS_OR_RETURN
// below when the caller should bail out on failure.
// Wrapped in do { } while (0) so it acts as a single statement; spacing of
// "while (0)" made consistent with the sibling macro.
#define VA_LOG_ON_ERROR(va_res, err_msg)                    \
    do {                                                    \
      if ((va_res) != VA_STATUS_SUCCESS) {                  \
        DVLOG(1) << err_msg                                 \
                 << " VA error: " << VAAPI_ErrorStr(va_res);\
      }                                                     \
    } while (0)
23 | |
// Like VA_LOG_ON_ERROR, but additionally returns |ret| from the enclosing
// function when |va_res| is not VA_STATUS_SUCCESS. |ret| must be a valid
// return value for the function this macro is expanded in.
#define VA_SUCCESS_OR_RETURN(va_res, err_msg, ret)          \
    do {                                                    \
      if ((va_res) != VA_STATUS_SUCCESS) {                  \
        DVLOG(1) << err_msg                                 \
                 << " VA error: " << VAAPI_ErrorStr(va_res);\
        return (ret);                                       \
      }                                                     \
    } while (0)
32 | |
33 namespace content { | |
34 | |
// Load the VA-API libraries at static-initialization time. Any of these
// handles may be NULL (library not installed); that case surfaces as NULL
// function pointers below, checked by AreVaapiFunctionPointersInitialized().
// The handles are intentionally never dlclose()d — they live for the whole
// process lifetime.
void *vaapi_handle = dlopen("libva.so", RTLD_NOW);
void *vaapi_x11_handle = dlopen("libva-x11.so", RTLD_NOW);
void *vaapi_glx_handle = dlopen("libva-glx.so", RTLD_NOW);
38 | |
// Function pointer types mirroring the libva/libva-x11/libva-glx entry points
// that are resolved dynamically with dlsym() (see VAAPI_DLSYM below).
// Each signature must match the corresponding va* function declared in the
// libva headers.
typedef VADisplay (*VaapiGetDisplayGLX)(Display *dpy);
typedef int (*VaapiDisplayIsValid)(VADisplay dpy);
typedef VAStatus (*VaapiInitialize)(VADisplay dpy,
                                    int *major_version,
                                    int *minor_version);
typedef VAStatus (*VaapiTerminate)(VADisplay dpy);
typedef VAStatus (*VaapiGetConfigAttributes)(VADisplay dpy,
                                             VAProfile profile,
                                             VAEntrypoint entrypoint,
                                             VAConfigAttrib *attrib_list,
                                             int num_attribs);
typedef VAStatus (*VaapiCreateConfig)(VADisplay dpy,
                                      VAProfile profile,
                                      VAEntrypoint entrypoint,
                                      VAConfigAttrib *attrib_list,
                                      int num_attribs,
                                      VAConfigID *config_id);
typedef VAStatus (*VaapiDestroyConfig)(VADisplay dpy, VAConfigID config_id);
typedef VAStatus (*VaapiCreateSurfaces)(VADisplay dpy,
                                        int width,
                                        int height,
                                        int format,
                                        int num_surfaces,
                                        VASurfaceID *surfaces);
typedef VAStatus (*VaapiDestroySurfaces)(VADisplay dpy,
                                         VASurfaceID *surfaces,
                                         int num_surfaces);
typedef VAStatus (*VaapiCreateContext)(VADisplay dpy,
                                       VAConfigID config_id,
                                       int picture_width,
                                       int picture_height,
                                       int flag,
                                       VASurfaceID *render_targets,
                                       int num_render_targets,
                                       VAContextID *context);
typedef VAStatus (*VaapiDestroyContext)(VADisplay dpy, VAContextID context);
typedef VAStatus (*VaapiPutSurface)(VADisplay dpy,
                                    VASurfaceID surface,
                                    Drawable draw,
                                    short srcx,
                                    short srcy,
                                    unsigned short srcw,
                                    unsigned short srch,
                                    short destx,
                                    short desty,
                                    unsigned short destw,
                                    unsigned short desth,
                                    VARectangle *cliprects,
                                    unsigned int number_cliprects,
                                    unsigned int flags);
typedef VAStatus (*VaapiSyncSurface)(VADisplay dpy, VASurfaceID render_target);
typedef VAStatus (*VaapiBeginPicture)(VADisplay dpy,
                                      VAContextID context,
                                      VASurfaceID render_target);
typedef VAStatus (*VaapiRenderPicture)(VADisplay dpy,
                                       VAContextID context,
                                       VABufferID *buffers,
                                       int num_buffers);
typedef VAStatus (*VaapiEndPicture)(VADisplay dpy, VAContextID context);
typedef VAStatus (*VaapiCreateBuffer)(VADisplay dpy,
                                      VAContextID context,
                                      VABufferType type,
                                      unsigned int size,
                                      unsigned int num_elements,
                                      void *data,
                                      VABufferID *buf_id);
typedef VAStatus (*VaapiDestroyBuffer)(VADisplay dpy, VABufferID buffer_id);
typedef const char* (*VaapiErrorStr)(VAStatus error_status);
107 | |
// Defines a file-scope function pointer VAAPI_<name> of type Vaapi<name>,
// initialized by looking up the symbol "va<name>" in |handle|. The pointer
// is NULL if the library or the symbol is missing.
#define VAAPI_DLSYM(name, handle) \
Vaapi##name VAAPI_##name = \
    reinterpret_cast<Vaapi##name>(dlsym((handle), "va"#name))
111 | |
// Resolve every VA-API entry point used in this file. Any of these may end
// up NULL; AreVaapiFunctionPointersInitialized() below verifies them all
// before the decoder is allowed to initialize.
// NOTE(review): SyncSurface is looked up via the libva-x11 handle even
// though it is a core libva call — presumably it resolves through that
// library's dependency on libva.so; confirm before changing.
VAAPI_DLSYM(GetDisplayGLX, vaapi_glx_handle);
VAAPI_DLSYM(DisplayIsValid, vaapi_handle);
VAAPI_DLSYM(Initialize, vaapi_handle);
VAAPI_DLSYM(Terminate, vaapi_handle);
VAAPI_DLSYM(GetConfigAttributes, vaapi_handle);
VAAPI_DLSYM(CreateConfig, vaapi_handle);
VAAPI_DLSYM(DestroyConfig, vaapi_handle);
VAAPI_DLSYM(CreateSurfaces, vaapi_handle);
VAAPI_DLSYM(DestroySurfaces, vaapi_handle);
VAAPI_DLSYM(CreateContext, vaapi_handle);
VAAPI_DLSYM(DestroyContext, vaapi_handle);
VAAPI_DLSYM(PutSurface, vaapi_x11_handle);
VAAPI_DLSYM(SyncSurface, vaapi_x11_handle);
VAAPI_DLSYM(BeginPicture, vaapi_handle);
VAAPI_DLSYM(RenderPicture, vaapi_handle);
VAAPI_DLSYM(EndPicture, vaapi_handle);
VAAPI_DLSYM(CreateBuffer, vaapi_handle);
VAAPI_DLSYM(DestroyBuffer, vaapi_handle);
VAAPI_DLSYM(ErrorStr, vaapi_handle);
131 | |
132 static bool AreVaapiFunctionPointersInitialized() { | |
133 return VAAPI_GetDisplayGLX && | |
134 VAAPI_DisplayIsValid && | |
135 VAAPI_Initialize && | |
136 VAAPI_Terminate && | |
137 VAAPI_GetConfigAttributes && | |
138 VAAPI_CreateConfig && | |
139 VAAPI_DestroyConfig && | |
140 VAAPI_CreateSurfaces && | |
141 VAAPI_DestroySurfaces && | |
142 VAAPI_CreateContext && | |
143 VAAPI_DestroyContext && | |
144 VAAPI_PutSurface && | |
145 VAAPI_SyncSurface && | |
146 VAAPI_BeginPicture && | |
147 VAAPI_RenderPicture && | |
148 VAAPI_EndPicture && | |
149 VAAPI_CreateBuffer && | |
150 VAAPI_DestroyBuffer && | |
151 VAAPI_ErrorStr; | |
152 } | |
153 | |
154 class VaapiH264Decoder::DecodeSurface { | |
155 public: | |
156 DecodeSurface(const GLXFBConfig& fb_config, | |
157 Display* x_display, | |
158 VADisplay va_display, | |
159 VASurfaceID va_surface_id, | |
160 int32 picture_buffer_id, | |
161 uint32 texture_id, | |
162 int width, int height); | |
163 ~DecodeSurface(); | |
164 | |
165 VASurfaceID va_surface_id() { | |
166 return va_surface_id_; | |
167 } | |
168 | |
169 int32 picture_buffer_id() { | |
170 return picture_buffer_id_; | |
171 } | |
172 | |
173 uint32 texture_id() { | |
174 return texture_id_; | |
175 } | |
176 | |
177 bool available() { | |
178 return available_; | |
179 } | |
180 | |
181 int32 input_id() { | |
182 return input_id_; | |
183 } | |
184 | |
185 int poc() { | |
186 return poc_; | |
187 } | |
188 | |
189 Pixmap x_pixmap() { | |
190 return x_pixmap_; | |
191 } | |
192 | |
193 // Associate the surface with |input_id| and |poc|, and make it unavailable | |
194 // (in use). | |
195 void Acquire(int32 input_id, int poc); | |
196 | |
197 // Make this surface available, ready to be reused. | |
198 void Release(); | |
199 | |
200 // Has to be called before output to sync texture contents. | |
201 // Returns true if successful. | |
202 bool Sync(); | |
203 | |
204 private: | |
205 Display* x_display_; | |
206 VADisplay va_display_; | |
207 VASurfaceID va_surface_id_; | |
208 | |
209 // Client-provided ids. | |
210 int32 input_id_; | |
211 int32 picture_buffer_id_; | |
212 uint32 texture_id_; | |
213 | |
214 int width_; | |
215 int height_; | |
216 | |
217 // Available for decoding (data no longer used for reference or output). | |
218 bool available_; | |
219 | |
220 // PicOrderCount | |
221 int poc_; | |
222 | |
223 // Pixmaps bound to this texture. | |
224 Pixmap x_pixmap_; | |
225 GLXPixmap glx_pixmap_; | |
226 | |
227 DISALLOW_COPY_AND_ASSIGN(DecodeSurface); | |
228 }; | |
229 | |
230 VaapiH264Decoder::DecodeSurface::DecodeSurface(const GLXFBConfig& fb_config, | |
231 Display* x_display, | |
232 VADisplay va_display, | |
233 VASurfaceID va_surface_id, | |
234 int32 picture_buffer_id, | |
235 uint32 texture_id, | |
236 int width, int height) | |
237 : x_display_(x_display), | |
238 va_display_(va_display), | |
239 va_surface_id_(va_surface_id), | |
240 picture_buffer_id_(picture_buffer_id), | |
241 texture_id_(texture_id), | |
242 width_(width), | |
243 height_(height), | |
244 available_(false) { | |
245 // Bind the surface to a texture of the given width and height, | |
246 // allocating pixmaps as needed. | |
247 glEnable(GL_TEXTURE_2D); | |
248 glBindTexture(GL_TEXTURE_2D, texture_id_); | |
249 glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); | |
250 glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); | |
251 | |
252 XWindowAttributes win_attr; | |
253 int screen = DefaultScreen(x_display_); | |
254 XGetWindowAttributes(x_display_, RootWindow(x_display_, screen), &win_attr); | |
255 x_pixmap_ = XCreatePixmap(x_display_, RootWindow(x_display_, screen), | |
256 width_, height_, win_attr.depth); | |
257 if (!x_pixmap_) { | |
258 DVLOG(1) << "Failed creating an X Pixmap for TFP"; | |
259 return; | |
260 } | |
261 | |
262 static const int pixmap_attr[] = { | |
263 GLX_TEXTURE_TARGET_EXT, GLX_TEXTURE_2D_EXT, | |
264 GLX_TEXTURE_FORMAT_EXT, GLX_TEXTURE_FORMAT_RGB_EXT, | |
265 GL_NONE, | |
266 }; | |
267 | |
268 glx_pixmap_ = glXCreatePixmap(x_display_, fb_config, x_pixmap_, | |
269 pixmap_attr); | |
270 | |
271 glBindTexture(GL_TEXTURE_2D, texture_id_); | |
272 glXBindTexImageEXT(x_display_, glx_pixmap_, GLX_FRONT_LEFT_EXT, NULL); | |
273 | |
274 available_ = true; | |
275 } | |
276 | |
277 VaapiH264Decoder::DecodeSurface::~DecodeSurface() { | |
278 // Unbind surface from texture and deallocate resources. | |
279 glXReleaseTexImageEXT(x_display_, glx_pixmap_, GLX_FRONT_LEFT_EXT); | |
280 glXDestroyGLXPixmap(x_display_, glx_pixmap_); | |
281 XFreePixmap(x_display_, x_pixmap_); | |
282 } | |
283 | |
284 void VaapiH264Decoder::DecodeSurface::Acquire(int32 input_id, int poc) { | |
285 DCHECK_EQ(available_, true); | |
286 available_ = false; | |
287 input_id_ = input_id; | |
288 poc_ = poc; | |
289 } | |
290 | |
// Returns the surface to the available pool; its previous input_id/poc
// associations become meaningless. Counterpart to Acquire().
void VaapiH264Decoder::DecodeSurface::Release() {
  available_ = true;
}
294 | |
// Copies the decoded picture into the X pixmap bound to the output texture
// and blocks until the copy has completed, so the texture contents are valid
// for display. Returns false on any VA error.
bool VaapiH264Decoder::DecodeSurface::Sync() {
  // Put the decoded data into XPixmap bound to the texture.
  VAStatus va_res = VAAPI_PutSurface(va_display_,
                                     va_surface_id_, x_pixmap_,
                                     0, 0, width_, height_,
                                     0, 0, width_, height_,
                                     NULL, 0, 0);
  VA_SUCCESS_OR_RETURN(va_res, "Failed putting decoded picture to texture",
                       false);

  // Wait for the data to be put into the buffer so it'd ready for output.
  va_res = VAAPI_SyncSurface(va_display_, va_surface_id_);
  VA_SUCCESS_OR_RETURN(va_res, "Failed syncing decoded picture", false);

  return true;
}
311 | |
// Constructs the decoder in the kUninitialized state. Reset() is reused to
// establish per-stream defaults; note it sets state_ to kAfterReset, which
// the assignments below deliberately override back to kUninitialized.
VaapiH264Decoder::VaapiH264Decoder() {
  Reset();
  curr_sps_id_ = -1;
  curr_pps_id_ = -1;
  pic_width_ = -1;
  pic_height_ = -1;
  max_frame_num_ = 0;
  max_pic_num_ = 0;
  max_long_term_frame_idx_ = 0;
  max_pic_order_cnt_lsb_ = 0;
  state_ = kUninitialized;
  num_available_decode_surfaces_ = 0;
}
325 | |
// Tears down all VA/GLX state; Destroy() is a no-op if never initialized.
VaapiH264Decoder::~VaapiH264Decoder() {
  Destroy();
}
329 | |
// This puts the decoder in state where it keeps stream data and is ready
// to resume playback from a random location in the stream, but drops all
// inputs and outputs and makes all surfaces available for use.
void VaapiH264Decoder::Reset() {
  frame_ready_at_hw_ = false;

  curr_pic_.reset();

  // Per-stream H.264 decoding state (frame numbering, POC derivation).
  frame_num_ = 0;
  prev_frame_num_ = -1;
  prev_frame_num_offset_ = -1;

  prev_ref_has_memmgmnt5_ = false;
  prev_ref_top_field_order_cnt_ = -1;
  prev_ref_pic_order_cnt_msb_ = -1;
  prev_ref_pic_order_cnt_lsb_ = -1;
  prev_ref_field_ = H264Picture::FIELD_NONE;

  // Drop any parameter/slice buffers queued but not yet submitted to HW.
  // NOTE(review): the queued VABufferIDs are discarded without
  // VAAPI_DestroyBuffer — confirm the driver reclaims them on context
  // destruction.
  pending_slice_bufs_ = std::queue<VABufferID>();
  pending_va_bufs_ = std::queue<VABufferID>();

  ref_pic_list0_.clear();
  ref_pic_list1_.clear();

  // Release every surface still assigned to a POC back to the free pool.
  for (POCToDecodeSurfaces::iterator it = poc_to_decode_surfaces_.begin();
       it != poc_to_decode_surfaces_.end(); ) {
    int poc = it->second->poc();
    // Must be incremented before UnassignSurfaceFromPoC as this call
    // invalidates |it|.
    ++it;
    DecodeSurface *dec_surface = UnassignSurfaceFromPoC(poc);
    if (dec_surface) {
      dec_surface->Release();
      ++num_available_decode_surfaces_;
    }
  }
  DCHECK(poc_to_decode_surfaces_.empty());

  dpb_.Clear();
  parser_.Reset();

  // Still initialized and ready to decode, unless called from constructor,
  // which will change it back.
  state_ = kAfterReset;
}
375 | |
// Releases all VA resources appropriate for the current state and moves the
// decoder to kUninitialized. Safe to call repeatedly. The switch cases fall
// through intentionally: later states own strictly more resources, so each
// state tears down its own additions, then the ones below it.
void VaapiH264Decoder::Destroy() {
  VAStatus va_res;

  if (state_ == kUninitialized)
    return;

  switch (state_) {
    case kDecoding:
    case kAfterReset:
    case kError:
      DestroyVASurfaces();
      // fallthrough
    case kInitialized:
      va_res = VAAPI_DestroyConfig(va_display_, va_config_id_);
      VA_LOG_ON_ERROR(va_res, "vaDestroyConfig failed");
      va_res = VAAPI_Terminate(va_display_);
      VA_LOG_ON_ERROR(va_res, "vaTerminate failed");
      // fallthrough
    case kUninitialized:
      break;
  }

  state_ = kUninitialized;
}
400 | |
401 // Maps Profile enum values to VaProfile values. | |
402 bool VaapiH264Decoder::SetProfile(media::VideoCodecProfile profile) { | |
403 switch (profile) { | |
404 case media::H264PROFILE_BASELINE: | |
405 profile_ = VAProfileH264Baseline; | |
406 break; | |
407 case media::H264PROFILE_MAIN: | |
408 profile_ = VAProfileH264Main; | |
409 break; | |
410 case media::H264PROFILE_HIGH: | |
411 profile_ = VAProfileH264High; | |
412 break; | |
413 default: | |
414 return false; | |
415 } | |
416 return true; | |
417 } | |
418 | |
// Deleter functor for scoped_ptr_malloc: releases memory that Xlib handed
// out (e.g. from glXChooseFBConfig) with XFree() instead of free().
class ScopedPtrXFree {
 public:
  void operator()(void* x) const {
    ::XFree(x);
  }
};
425 | |
// Selects a GLX framebuffer config usable for texture-from-pixmap output
// (2D RGB texture target, Y-inverted) and caches the first match in
// fb_config_. Returns false if the X server offers no suitable config.
bool VaapiH264Decoder::InitializeFBConfig() {
  const int fbconfig_attr[] = {
    GLX_DRAWABLE_TYPE, GLX_PIXMAP_BIT,
    GLX_BIND_TO_TEXTURE_TARGETS_EXT, GLX_TEXTURE_2D_BIT_EXT,
    GLX_BIND_TO_TEXTURE_RGB_EXT, GL_TRUE,
    GLX_Y_INVERTED_EXT, GL_TRUE,
    GL_NONE,
  };

  int num_fbconfigs;
  // The returned array is Xlib-allocated; freed via XFree by ScopedPtrXFree.
  scoped_ptr_malloc<GLXFBConfig, ScopedPtrXFree> glx_fb_configs(
      glXChooseFBConfig(x_display_, DefaultScreen(x_display_), fbconfig_attr,
                        &num_fbconfigs));
  if (!glx_fb_configs.get())
    return false;
  if (!num_fbconfigs)
    return false;

  fb_config_ = glx_fb_configs.get()[0];
  return true;
}
447 | |
// One-time decoder initialization: verifies libva is loaded, picks a GLX
// FB config, opens and initializes a VA display for |x_display|, confirms
// the profile/entrypoint supports YUV420, and creates the VA config.
// On success moves state_ to kInitialized. |output_pic_cb| is stored and
// later invoked for each decoded picture ready for output. Returns false
// (with a DVLOG reason) on any failure; must be called in kUninitialized.
bool VaapiH264Decoder::Initialize(media::VideoCodecProfile profile,
                                  Display* x_display,
                                  GLXContext glx_context,
                                  const OutputPicCB& output_pic_cb) {
  DCHECK_EQ(state_, kUninitialized);

  output_pic_cb_ = output_pic_cb;

  x_display_ = x_display;
  parent_glx_context_ = glx_context;

  if (!SetProfile(profile)) {
    DVLOG(1) << "Unsupported profile";
    return false;
  }

  if (!AreVaapiFunctionPointersInitialized()) {
    DVLOG(1) << "Could not load libva";
    return false;
  }

  if (!InitializeFBConfig()) {
    DVLOG(1) << "Could not get a usable FBConfig";
    return false;
  }

  va_display_ = VAAPI_GetDisplayGLX(x_display_);
  if (!VAAPI_DisplayIsValid(va_display_)) {
    DVLOG(1) << "Could not get a valid VA display";
    return false;
  }

  int major_version, minor_version;
  VAStatus va_res;
  va_res = VAAPI_Initialize(va_display_, &major_version, &minor_version);
  VA_SUCCESS_OR_RETURN(va_res, "vaInitialize failed", false);
  DVLOG(1) << "VAAPI version: " << major_version << "." << minor_version;

  // The HW decoder output must be obtainable as YUV420.
  VAConfigAttrib attrib;
  attrib.type = VAConfigAttribRTFormat;

  VAEntrypoint entrypoint = VAEntrypointVLD;
  va_res = VAAPI_GetConfigAttributes(va_display_, profile_, entrypoint,
                                     &attrib, 1);
  VA_SUCCESS_OR_RETURN(va_res, "vaGetConfigAttributes failed", false);

  if (!(attrib.value & VA_RT_FORMAT_YUV420)) {
    DVLOG(1) << "YUV420 not supported";
    return false;
  }

  va_res = VAAPI_CreateConfig(va_display_, profile_, entrypoint,
                              &attrib, 1, &va_config_id_);
  VA_SUCCESS_OR_RETURN(va_res, "vaCreateConfig failed", false);

  state_ = kInitialized;
  return true;
}
506 | |
507 void VaapiH264Decoder::ReusePictureBuffer(int32 picture_buffer_id) { | |
508 DecodeSurfaces::iterator it = decode_surfaces_.find(picture_buffer_id); | |
509 if (it == decode_surfaces_.end() || it->second->available()) { | |
510 DVLOG(1) << "Asked to reuse an invalid/already available surface"; | |
511 return; | |
512 } | |
513 it->second->Release(); | |
514 ++num_available_decode_surfaces_; | |
515 } | |
516 | |
// Binds the client-provided picture buffer/texture pair to the next unused
// VA surface, creating a DecodeSurface for it. Must be called once per
// picture buffer, after CreateVASurfaces(), while in kDecoding state.
// Returns false if the client supplies more buffers than requested or the
// texture binding fails.
bool VaapiH264Decoder::AssignPictureBuffer(int32 picture_buffer_id,
                                           uint32 texture_id) {
  DCHECK_EQ(state_, kDecoding);

  if (decode_surfaces_.size() >= GetRequiredNumOfPictures()) {
    DVLOG(1) << "Got more surfaces than required";
    return false;
  }

  // This will not work if we start using VDA.DismissPicture()
  // (surfaces are assigned to va_surface_ids_ slots by arrival order).
  linked_ptr<DecodeSurface> dec_surface(new DecodeSurface(
      fb_config_, x_display_, va_display_,
      va_surface_ids_[decode_surfaces_.size()], picture_buffer_id, texture_id,
      pic_width_, pic_height_));
  if (!dec_surface->available()) {
    DVLOG(1) << "Error creating a decoding surface (binding to texture?)";
    return false;
  }

  DVLOG(2) << "New picture assigned, texture id: " << dec_surface->texture_id()
           << " pic buf id: " << dec_surface->picture_buffer_id()
           << " will use va surface " << dec_surface->va_surface_id();

  bool inserted = decode_surfaces_.insert(std::make_pair(picture_buffer_id,
                                                         dec_surface)).second;
  DCHECK(inserted);
  ++num_available_decode_surfaces_;

  return true;
}
547 | |
// Allocates the full set of VA surfaces for the current picture dimensions
// and creates the VA decoding context over them. Requires picture size to
// be known (from the SPS) and state kInitialized. Returns false on VA error.
bool VaapiH264Decoder::CreateVASurfaces() {
  DCHECK_NE(pic_width_, -1);
  DCHECK_NE(pic_height_, -1);
  DCHECK_EQ(state_, kInitialized);

  // Allocate VASurfaces in driver.
  VAStatus va_res = VAAPI_CreateSurfaces(va_display_, pic_width_,
                                         pic_height_, VA_RT_FORMAT_YUV420,
                                         GetRequiredNumOfPictures(),
                                         va_surface_ids_);
  VA_SUCCESS_OR_RETURN(va_res, "vaCreateSurfaces failed", false);

  DCHECK(decode_surfaces_.empty());

  // And create a context associated with them.
  va_res = VAAPI_CreateContext(va_display_, va_config_id_,
                               pic_width_, pic_height_, VA_PROGRESSIVE,
                               va_surface_ids_, GetRequiredNumOfPictures(),
                               &va_context_id_);
  VA_SUCCESS_OR_RETURN(va_res, "vaCreateContext failed", false);

  return true;
}
571 | |
// Inverse of CreateVASurfaces/AssignPictureBuffer: drops all DecodeSurfaces
// (releasing their pixmaps via ~DecodeSurface), then destroys the VA context
// and the VA surfaces. VA errors are logged but not propagated.
void VaapiH264Decoder::DestroyVASurfaces() {
  DCHECK(state_ == kDecoding || state_ == kError || state_ == kAfterReset);

  decode_surfaces_.clear();

  VAStatus va_res = VAAPI_DestroyContext(va_display_, va_context_id_);
  VA_LOG_ON_ERROR(va_res, "vaDestroyContext failed");

  va_res = VAAPI_DestroySurfaces(va_display_, va_surface_ids_,
                                 GetRequiredNumOfPictures());
  VA_LOG_ON_ERROR(va_res, "vaDestroySurfaces failed");
}
584 | |
// Fill |va_pic| with default/neutral values: zeroed fields, an invalid
// surface id and the INVALID flag, so the driver ignores the entry.
static void InitVAPicture(VAPictureH264* va_pic) {
  memset(va_pic, 0, sizeof(*va_pic));
  va_pic->picture_id = VA_INVALID_ID;
  va_pic->flags = VA_PICTURE_H264_INVALID;
}
591 | |
// Translates an H264Picture into the VAPictureH264 the driver expects:
// resolves the picture's POC to its VA surface and copies frame number,
// field/reference flags and field order counts. If no surface is assigned
// to the POC, |va_pic| is set to the neutral/invalid entry instead (output
// may be corrupted but decoding can continue).
void VaapiH264Decoder::FillVAPicture(VAPictureH264 *va_pic, H264Picture* pic) {
  POCToDecodeSurfaces::iterator iter = poc_to_decode_surfaces_.find(
      pic->pic_order_cnt);
  if (iter == poc_to_decode_surfaces_.end()) {
    DVLOG(1) << "Could not find surface with POC: " << pic->pic_order_cnt;
    // Cannot provide a ref picture, will corrupt output, but may be able
    // to recover.
    InitVAPicture(va_pic);
    return;
  }

  va_pic->picture_id = iter->second->va_surface_id();
  va_pic->frame_idx = pic->frame_num;
  va_pic->flags = 0;

  switch (pic->field) {
    case H264Picture::FIELD_NONE:
      // Progressive frame: no field flag set.
      break;
    case H264Picture::FIELD_TOP:
      va_pic->flags |= VA_PICTURE_H264_TOP_FIELD;
      break;
    case H264Picture::FIELD_BOTTOM:
      va_pic->flags |= VA_PICTURE_H264_BOTTOM_FIELD;
      break;
  }

  if (pic->ref) {
    va_pic->flags |= pic->long_term ? VA_PICTURE_H264_LONG_TERM_REFERENCE
                                    : VA_PICTURE_H264_SHORT_TERM_REFERENCE;
  }

  va_pic->TopFieldOrderCnt = pic->top_field_order_cnt;
  va_pic->BottomFieldOrderCnt = pic->bottom_field_order_cnt;
}
626 | |
627 int VaapiH264Decoder::FillVARefFramesFromDPB(VAPictureH264 *va_pics, | |
628 int num_pics) { | |
629 H264DPB::Pictures::reverse_iterator rit; | |
630 int i; | |
631 | |
632 // Return reference frames in reverse order of insertion. | |
633 // Libva does not document this, but other implementations (e.g. mplayer) | |
634 // do it this way as well. | |
635 for (rit = dpb_.rbegin(), i = 0; rit != dpb_.rend() && i < num_pics; ++rit) { | |
636 if ((*rit)->ref) | |
637 FillVAPicture(&va_pics[i++], *rit); | |
638 } | |
639 | |
640 return i; | |
641 } | |
642 | |
// Can only be called when all surfaces are already bound
// to textures (cannot be run at the same time as AssignPictureBuffer).
// Picks the first available surface, acquires it for the current input and
// the given |poc|, and records the poc->surface mapping. Returns false when
// every surface is in use (caller must wait for ReusePictureBuffer).
bool VaapiH264Decoder::AssignSurfaceToPoC(int poc) {
  // Find a surface not currently holding data used for reference and/or
  // to be displayed and mark it as used.
  DecodeSurfaces::iterator iter = decode_surfaces_.begin();
  for (; iter != decode_surfaces_.end(); ++iter) {
    if (iter->second->available()) {
      --num_available_decode_surfaces_;
      DCHECK_GE(num_available_decode_surfaces_, 0);

      // Associate with input id and poc and mark as unavailable.
      iter->second->Acquire(curr_input_id_, poc);
      DVLOG(4) << "Will use surface " << iter->second->va_surface_id()
               << " for POC " << iter->second->poc()
               << " input ID: " << iter->second->input_id();
      bool inserted = poc_to_decode_surfaces_.insert(std::make_pair(poc,
          iter->second.get())).second;
      DCHECK(inserted);
      return true;
    }
  }

  // Could not find an available surface.
  return false;
}
669 | |
670 // Can only be called when all surfaces are already bound | |
671 // to textures (cannot be run at the same time as AssignPictureBuffer). | |
672 VaapiH264Decoder::DecodeSurface* VaapiH264Decoder::UnassignSurfaceFromPoC( | |
673 int poc) { | |
674 DecodeSurface* dec_surface; | |
675 POCToDecodeSurfaces::iterator it = poc_to_decode_surfaces_.find(poc); | |
676 if (it == poc_to_decode_surfaces_.end()) { | |
677 DVLOG(1) << "Asked to unassign an unassigned POC"; | |
678 return NULL; | |
679 } | |
680 dec_surface = it->second; | |
681 DVLOG(4) << "POC " << poc << " no longer using surface " | |
682 << dec_surface->va_surface_id(); | |
683 poc_to_decode_surfaces_.erase(it); | |
684 return dec_surface; | |
685 } | |
686 | |
// Fill a VAPictureParameterBufferH264 to be later sent to the HW decoder.
// Builds the picture parameter buffer for the current picture from the
// active SPS/PPS and the DPB, uploads it into a driver buffer, and queues
// its id on pending_va_bufs_ for submission at decode time. Returns false
// if the driver buffer could not be created.
bool VaapiH264Decoder::SendPPS() {
  const H264PPS* pps = parser_.GetPPS(curr_pps_id_);
  DCHECK(pps);

  const H264SPS* sps = parser_.GetSPS(pps->seq_parameter_set_id);
  DCHECK(sps);

  DCHECK(curr_pic_.get());

  VAPictureParameterBufferH264 pic_param;
  memset(&pic_param, 0, sizeof(VAPictureParameterBufferH264));

  // The FROM_* helper macros copy a parser field into the identically (or
  // explicitly renamed) named field of the VA parameter struct.
#define FROM_SPS_TO_PP(a) pic_param.a = sps->a;
#define FROM_SPS_TO_PP2(a, b) pic_param.b = sps->a;
  FROM_SPS_TO_PP2(pic_width_in_mbs_minus1, picture_width_in_mbs_minus1);
  // This assumes non-interlaced video
  FROM_SPS_TO_PP2(pic_height_in_map_units_minus1,
                  picture_height_in_mbs_minus1);
  FROM_SPS_TO_PP(bit_depth_luma_minus8);
  FROM_SPS_TO_PP(bit_depth_chroma_minus8);
#undef FROM_SPS_TO_PP
#undef FROM_SPS_TO_PP2

#define FROM_SPS_TO_PP_SF(a) pic_param.seq_fields.bits.a = sps->a;
#define FROM_SPS_TO_PP_SF2(a, b) pic_param.seq_fields.bits.b = sps->a;
  FROM_SPS_TO_PP_SF(chroma_format_idc);
  FROM_SPS_TO_PP_SF2(separate_colour_plane_flag,
                     residual_colour_transform_flag);
  FROM_SPS_TO_PP_SF(gaps_in_frame_num_value_allowed_flag);
  FROM_SPS_TO_PP_SF(frame_mbs_only_flag);
  FROM_SPS_TO_PP_SF(mb_adaptive_frame_field_flag);
  FROM_SPS_TO_PP_SF(direct_8x8_inference_flag);
  // Levels >= 3.1 restrict bi-pred block size; see H.264 spec level limits.
  pic_param.seq_fields.bits.MinLumaBiPredSize8x8 = (sps->level_idc >= 31);
  FROM_SPS_TO_PP_SF(log2_max_frame_num_minus4);
  FROM_SPS_TO_PP_SF(pic_order_cnt_type);
  FROM_SPS_TO_PP_SF(log2_max_pic_order_cnt_lsb_minus4);
  FROM_SPS_TO_PP_SF(delta_pic_order_always_zero_flag);
#undef FROM_SPS_TO_PP_SF
#undef FROM_SPS_TO_PP_SF2

#define FROM_PPS_TO_PP(a) pic_param.a = pps->a;
  FROM_PPS_TO_PP(num_slice_groups_minus1);
  // Slice group maps (FMO) are not supported; leave neutral values.
  pic_param.slice_group_map_type = 0;
  pic_param.slice_group_change_rate_minus1 = 0;
  FROM_PPS_TO_PP(pic_init_qp_minus26);
  FROM_PPS_TO_PP(pic_init_qs_minus26);
  FROM_PPS_TO_PP(chroma_qp_index_offset);
  FROM_PPS_TO_PP(second_chroma_qp_index_offset);
#undef FROM_PPS_TO_PP

#define FROM_PPS_TO_PP_PF(a) pic_param.pic_fields.bits.a = pps->a;
#define FROM_PPS_TO_PP_PF2(a, b) pic_param.pic_fields.bits.b = pps->a;
  FROM_PPS_TO_PP_PF(entropy_coding_mode_flag);
  FROM_PPS_TO_PP_PF(weighted_pred_flag);
  FROM_PPS_TO_PP_PF(weighted_bipred_idc);
  FROM_PPS_TO_PP_PF(transform_8x8_mode_flag);

  // Non-interlaced assumption again: always frame, never field pictures.
  pic_param.pic_fields.bits.field_pic_flag = 0;
  FROM_PPS_TO_PP_PF(constrained_intra_pred_flag);
  FROM_PPS_TO_PP_PF2(bottom_field_pic_order_in_frame_present_flag,
                     pic_order_present_flag);
  FROM_PPS_TO_PP_PF(deblocking_filter_control_present_flag);
  FROM_PPS_TO_PP_PF(redundant_pic_cnt_present_flag);
  pic_param.pic_fields.bits.reference_pic_flag = curr_pic_->ref;
#undef FROM_PPS_TO_PP_PF
#undef FROM_PPS_TO_PP_PF2

  pic_param.frame_num = curr_pic_->frame_num;

  InitVAPicture(&pic_param.CurrPic);
  FillVAPicture(&pic_param.CurrPic, curr_pic_.get());

  // Init reference pictures' array.
  for (int i = 0; i < 16; ++i)
    InitVAPicture(&pic_param.ReferenceFrames[i]);

  // And fill it with picture info from DPB.
  FillVARefFramesFromDPB(pic_param.ReferenceFrames,
                         arraysize(pic_param.ReferenceFrames));

  pic_param.num_ref_frames = sps->max_num_ref_frames;

  // Allocate a buffer in driver for this parameter buffer and upload data.
  VABufferID pic_param_buf_id;
  VAStatus va_res = VAAPI_CreateBuffer(va_display_, va_context_id_,
                                       VAPictureParameterBufferType,
                                       sizeof(VAPictureParameterBufferH264),
                                       1, &pic_param, &pic_param_buf_id);
  VA_SUCCESS_OR_RETURN(va_res, "Failed to create a buffer for PPS", false);

  // Queue its VA buffer ID to be committed on HW decode run.
  pending_va_bufs_.push(pic_param_buf_id);

  return true;
}
783 | |
// Fill a VAIQMatrixBufferH264 to be later sent to the HW decoder.
// Uses the PPS scaling lists when the PPS overrides them, otherwise falls
// back to the SPS lists (both sides zero-filled by the memset when absent).
// The resulting driver buffer id is queued on pending_va_bufs_.
bool VaapiH264Decoder::SendIQMatrix() {
  const H264PPS* pps = parser_.GetPPS(curr_pps_id_);
  DCHECK(pps);

  VAIQMatrixBufferH264 iq_matrix_buf;
  memset(&iq_matrix_buf, 0, sizeof(VAIQMatrixBufferH264));

  if (pps->pic_scaling_matrix_present_flag) {
    // 6 4x4 lists (16 coefficients each) and 2 8x8 lists (64 each).
    for (int i = 0; i < 6; ++i) {
      for (int j = 0; j < 16; ++j)
        iq_matrix_buf.ScalingList4x4[i][j] = pps->scaling_list4x4[i][j];
    }

    for (int i = 0; i < 2; ++i) {
      for (int j = 0; j < 64; ++j)
        iq_matrix_buf.ScalingList8x8[i][j] = pps->scaling_list8x8[i][j];
    }
  } else {
    const H264SPS* sps = parser_.GetSPS(pps->seq_parameter_set_id);
    DCHECK(sps);
    for (int i = 0; i < 6; ++i) {
      for (int j = 0; j < 16; ++j)
        iq_matrix_buf.ScalingList4x4[i][j] = sps->scaling_list4x4[i][j];
    }

    for (int i = 0; i < 2; ++i) {
      for (int j = 0; j < 64; ++j)
        iq_matrix_buf.ScalingList8x8[i][j] = sps->scaling_list8x8[i][j];
    }
  }

  // Allocate a buffer in driver for this parameter buffer and upload data.
  VABufferID iq_matrix_buf_id;
  VAStatus va_res = VAAPI_CreateBuffer(va_display_, va_context_id_,
                                       VAIQMatrixBufferType,
                                       sizeof(VAIQMatrixBufferH264), 1,
                                       &iq_matrix_buf, &iq_matrix_buf_id);
  VA_SUCCESS_OR_RETURN(va_res, "Failed to create a buffer for IQMatrix",
                       false);

  // Queue its VA buffer ID to be committed on HW decode run.
  pending_va_bufs_.push(iq_matrix_buf_id);

  return true;
}
830 | |
// Builds a VASliceParameterBufferH264 from |slice_hdr|: slice geometry,
// entropy/deblocking settings, explicit weighted-prediction tables (when the
// PPS enables them for this slice type), and the RefPicList0/1 arrays from
// the decoder's current reference lists. The resulting driver buffer id is
// queued on pending_slice_bufs_. Returns false if buffer creation fails.
bool VaapiH264Decoder::SendVASliceParam(H264SliceHeader* slice_hdr) {
  const H264PPS* pps = parser_.GetPPS(slice_hdr->pic_parameter_set_id);
  DCHECK(pps);

  const H264SPS* sps = parser_.GetSPS(pps->seq_parameter_set_id);
  DCHECK(sps);

  VASliceParameterBufferH264 slice_param;
  memset(&slice_param, 0, sizeof(VASliceParameterBufferH264));

  // The whole slice NALU is submitted as one data buffer (see flag ALL);
  // slice_data_bit_offset tells the HW where the header ends.
  slice_param.slice_data_size = slice_hdr->nalu_size;
  slice_param.slice_data_offset = 0;
  slice_param.slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
  slice_param.slice_data_bit_offset = slice_hdr->header_bit_size;

#define SHDRToSP(a) slice_param.a = slice_hdr->a;
  SHDRToSP(first_mb_in_slice);
  // Slice types 5-9 are the "all slices of this type" variants; reduce mod 5.
  slice_param.slice_type = slice_hdr->slice_type % 5;
  SHDRToSP(direct_spatial_mv_pred_flag);

  // TODO posciak: make sure parser sets those even when override flags
  // in slice header is off.
  SHDRToSP(num_ref_idx_l0_active_minus1);
  SHDRToSP(num_ref_idx_l1_active_minus1);
  SHDRToSP(cabac_init_idc);
  SHDRToSP(slice_qp_delta);
  SHDRToSP(disable_deblocking_filter_idc);
  SHDRToSP(slice_alpha_c0_offset_div2);
  SHDRToSP(slice_beta_offset_div2);

  // Explicit weighted prediction tables apply to P/SP slices when
  // weighted_pred_flag is set, and to B slices when weighted_bipred_idc == 1.
  if (((slice_hdr->IsPSlice() || slice_hdr->IsSPSlice()) &&
       pps->weighted_pred_flag) ||
      (slice_hdr->IsBSlice() && pps->weighted_bipred_idc == 1)) {
    SHDRToSP(luma_log2_weight_denom);
    SHDRToSP(chroma_log2_weight_denom);

    SHDRToSP(luma_weight_l0_flag);
    SHDRToSP(luma_weight_l1_flag);

    SHDRToSP(chroma_weight_l0_flag);
    SHDRToSP(chroma_weight_l1_flag);

    for (int i = 0; i <= slice_param.num_ref_idx_l0_active_minus1; ++i) {
      slice_param.luma_weight_l0[i] =
          slice_hdr->pred_weight_table_l0.luma_weight[i];
      slice_param.luma_offset_l0[i] =
          slice_hdr->pred_weight_table_l0.luma_offset[i];

      for (int j = 0; j < 2; ++j) {
        slice_param.chroma_weight_l0[i][j] =
            slice_hdr->pred_weight_table_l0.chroma_weight[i][j];
        slice_param.chroma_offset_l0[i][j] =
            slice_hdr->pred_weight_table_l0.chroma_offset[i][j];
      }
    }

    // List 1 weights only exist for B slices.
    if (slice_hdr->IsBSlice()) {
      for (int i = 0; i <= slice_param.num_ref_idx_l1_active_minus1; ++i) {
        slice_param.luma_weight_l1[i] =
            slice_hdr->pred_weight_table_l1.luma_weight[i];
        slice_param.luma_offset_l1[i] =
            slice_hdr->pred_weight_table_l1.luma_offset[i];

        for (int j = 0; j < 2; ++j) {
          slice_param.chroma_weight_l1[i][j] =
              slice_hdr->pred_weight_table_l1.chroma_weight[i][j];
          slice_param.chroma_offset_l1[i][j] =
              slice_hdr->pred_weight_table_l1.chroma_offset[i][j];
        }
      }
    }
  }

  // Initialize all 32 entries of both lists to neutral/invalid, then fill
  // the prefixes from the decoder's current reference picture lists.
  for (int i = 0; i < 32; ++i) {
    InitVAPicture(&slice_param.RefPicList0[i]);
    InitVAPicture(&slice_param.RefPicList1[i]);
  }

  int i;
  H264Picture::PtrVector::iterator it;
  for (it = ref_pic_list0_.begin(), i = 0; it != ref_pic_list0_.end();
       ++it, ++i)
    FillVAPicture(&slice_param.RefPicList0[i], *it);
  for (it = ref_pic_list1_.begin(), i = 0; it != ref_pic_list1_.end();
       ++it, ++i)
    FillVAPicture(&slice_param.RefPicList1[i], *it);

  // Allocate a buffer in driver for this parameter buffer and upload data.
  VABufferID slice_param_buf_id;
  VAStatus va_res = VAAPI_CreateBuffer(va_display_, va_context_id_,
                                       VASliceParameterBufferType,
                                       sizeof(VASliceParameterBufferH264),
                                       1, &slice_param, &slice_param_buf_id);
  VA_SUCCESS_OR_RETURN(va_res, "Failed creating a buffer for slice param",
                       false);

  // Queue its VA buffer ID to be committed on HW decode run.
  pending_slice_bufs_.push(slice_param_buf_id);

  return true;
}
932 | |
933 bool VaapiH264Decoder::SendSliceData(const uint8* ptr, size_t size) | |
934 { | |
935 // Can't help it, blame libva... | |
936 void* non_const_ptr = const_cast<uint8*>(ptr); | |
937 | |
938 VABufferID slice_data_buf_id; | |
939 VAStatus va_res = VAAPI_CreateBuffer(va_display_, va_context_id_, | |
940 VASliceDataBufferType, size, 1, | |
941 non_const_ptr, &slice_data_buf_id); | |
942 VA_SUCCESS_OR_RETURN(va_res, "Failed creating a buffer for slice data", | |
943 false); | |
944 | |
945 pending_slice_bufs_.push(slice_data_buf_id); | |
946 return true; | |
947 } | |
948 | |
949 bool VaapiH264Decoder::QueueSlice(H264SliceHeader* slice_hdr) { | |
950 DCHECK(curr_pic_.get()); | |
951 | |
952 if (!SendVASliceParam(slice_hdr)) | |
953 return false; | |
954 | |
955 if (!SendSliceData(slice_hdr->nalu_data, slice_hdr->nalu_size)) | |
956 return false; | |
957 | |
958 return true; | |
959 } | |
960 | |
961 // TODO(posciak) start using vaMapBuffer instead of vaCreateBuffer wherever | |
962 // possible. | |
963 | |
964 bool VaapiH264Decoder::DecodePicture() { | |
965 DCHECK(!frame_ready_at_hw_); | |
966 DCHECK(curr_pic_.get()); | |
967 | |
968 static const size_t kMaxVABuffers = 32; | |
969 DCHECK_LE(pending_va_bufs_.size(), kMaxVABuffers); | |
970 DCHECK_LE(pending_slice_bufs_.size(), kMaxVABuffers); | |
971 | |
972 DVLOG(4) << "Pending VA bufs to commit: " << pending_va_bufs_.size(); | |
973 DVLOG(4) << "Pending slice bufs to commit: " << pending_slice_bufs_.size(); | |
974 | |
975 // Find the surface associated with the picture to be decoded. | |
976 DCHECK(pending_slice_bufs_.size()); | |
977 DecodeSurface* dec_surface = | |
978 poc_to_decode_surfaces_[curr_pic_->pic_order_cnt]; | |
979 DVLOG(4) << "Decoding POC " << curr_pic_->pic_order_cnt | |
980 << " into surface " << dec_surface->va_surface_id(); | |
981 | |
982 // Get ready to decode into surface. | |
983 VAStatus va_res = VAAPI_BeginPicture(va_display_, va_context_id_, | |
984 dec_surface->va_surface_id()); | |
985 VA_SUCCESS_OR_RETURN(va_res, "vaBeginPicture failed", false); | |
986 | |
987 // Put buffer IDs for pending parameter buffers into buffers[]. | |
988 VABufferID buffers[kMaxVABuffers]; | |
989 size_t num_buffers = pending_va_bufs_.size(); | |
990 for (size_t i = 0; i < num_buffers && i < kMaxVABuffers; ++i) { | |
991 buffers[i] = pending_va_bufs_.front(); | |
992 pending_va_bufs_.pop(); | |
993 } | |
994 | |
995 // And send them to the HW decoder. | |
996 va_res = VAAPI_RenderPicture(va_display_, va_context_id_, buffers, | |
997 num_buffers); | |
998 VA_SUCCESS_OR_RETURN(va_res, "vaRenderPicture for va_bufs failed", false); | |
999 | |
1000 DVLOG(4) << "Committed " << num_buffers << "VA buffers"; | |
1001 | |
1002 for (size_t i = 0; i < num_buffers; ++i) { | |
1003 va_res = VAAPI_DestroyBuffer(va_display_, buffers[i]); | |
1004 VA_SUCCESS_OR_RETURN(va_res, "vaDestroyBuffer for va_bufs failed", false); | |
1005 } | |
1006 | |
1007 // Put buffer IDs for pending slice data buffers into buffers[]. | |
1008 num_buffers = pending_slice_bufs_.size(); | |
1009 for (size_t i = 0; i < num_buffers && i < kMaxVABuffers; ++i) { | |
1010 buffers[i] = pending_slice_bufs_.front(); | |
1011 pending_slice_bufs_.pop(); | |
1012 } | |
1013 | |
1014 // And send them to the Hw decoder. | |
1015 va_res = VAAPI_RenderPicture(va_display_, va_context_id_, buffers, | |
1016 num_buffers); | |
1017 VA_SUCCESS_OR_RETURN(va_res, "vaRenderPicture for slices failed", false); | |
1018 | |
1019 DVLOG(4) << "Committed " << num_buffers << "slice buffers"; | |
1020 | |
1021 for (size_t i = 0; i < num_buffers; ++i) { | |
1022 va_res = VAAPI_DestroyBuffer(va_display_, buffers[i]); | |
1023 VA_SUCCESS_OR_RETURN(va_res, "vaDestroyBuffer for slices failed", false); | |
1024 } | |
1025 | |
1026 // Instruct HW decoder to start processing committed buffers (decode this | |
1027 // picture). This does not block until the end of decode. | |
1028 va_res = VAAPI_EndPicture(va_display_, va_context_id_); | |
1029 VA_SUCCESS_OR_RETURN(va_res, "vaEndPicture failed", false); | |
1030 | |
1031 // Used to notify clients that we had sufficient data to start decoding | |
1032 // a new frame. | |
1033 frame_ready_at_hw_ = true; | |
1034 return true; | |
1035 } | |
1036 | |
1037 | |
// Initializes |curr_pic_| from |slice_hdr|: copies the relevant slice header
// fields, computes picture order counts and assigns a free VA surface to the
// picture's POC. Returns false if POC calculation fails or no free surface
// is available.
bool VaapiH264Decoder::InitCurrPicture(H264SliceHeader* slice_hdr) {
  DCHECK(curr_pic_.get());

  // NOTE(review): memset assumes H264Picture is POD-like; confirm this stays
  // true if the struct ever gains non-trivial members.
  memset(curr_pic_.get(), 0, sizeof(H264Picture));

  curr_pic_->idr = slice_hdr->idr_pic_flag;

  // Record which field (if any) this picture represents.
  if (slice_hdr->field_pic_flag) {
    curr_pic_->field = slice_hdr->bottom_field_flag ? H264Picture::FIELD_BOTTOM
                                                    : H264Picture::FIELD_TOP;
  } else {
    curr_pic_->field = H264Picture::FIELD_NONE;
  }

  // nal_ref_idc != 0 means this picture will be used for reference.
  curr_pic_->ref = slice_hdr->nal_ref_idc != 0;
  // This assumes non-interlaced stream.
  curr_pic_->frame_num = curr_pic_->pic_num = slice_hdr->frame_num;

  if (!CalculatePicOrderCounts(slice_hdr))
    return false;

  // Try to get an empty surface to decode this picture to.
  if (!AssignSurfaceToPoC(curr_pic_->pic_order_cnt)) {
    DVLOG(1) << "Failed getting a free surface for a picture";
    return false;
  }

  curr_pic_->long_term_reference_flag = slice_hdr->long_term_reference_flag;
  curr_pic_->adaptive_ref_pic_marking_mode_flag =
      slice_hdr->adaptive_ref_pic_marking_mode_flag;

  // If the slice header indicates we will have to perform reference marking
  // process after this picture is decoded, store required data for that
  // purpose.
  if (slice_hdr->adaptive_ref_pic_marking_mode_flag) {
    COMPILE_ASSERT(sizeof(curr_pic_->ref_pic_marking) ==
                   sizeof(slice_hdr->ref_pic_marking),
                   ref_pic_marking_array_sizes_do_not_match);
    memcpy(curr_pic_->ref_pic_marking, slice_hdr->ref_pic_marking,
           sizeof(curr_pic_->ref_pic_marking));
  }

  return true;
}
1082 | |
// Computes pic_order_cnt and the top/bottom field order counts for
// |curr_pic_| per spec 8.2.1.1. Only pic_order_cnt_type == 0 is supported;
// returns false for other types.
bool VaapiH264Decoder::CalculatePicOrderCounts(H264SliceHeader* slice_hdr) {
  DCHECK_NE(curr_sps_id_, -1);

  int pic_order_cnt_lsb = slice_hdr->pic_order_cnt_lsb;
  curr_pic_->pic_order_cnt_lsb = pic_order_cnt_lsb;
  if (parser_.GetSPS(curr_sps_id_)->pic_order_cnt_type != 0) {
    DVLOG(1) << "Unsupported pic_order_cnt_type";
    return false;
  }

  // See spec 8.2.1.1.
  // Establish the previous reference picture's POC values, which serve as
  // the prediction base below. IDR pictures reset the prediction to zero.
  int prev_pic_order_cnt_msb, prev_pic_order_cnt_lsb;
  if (slice_hdr->idr_pic_flag) {
    prev_pic_order_cnt_msb = prev_pic_order_cnt_lsb = 0;
  } else {
    if (prev_ref_has_memmgmnt5_) {
      // memory_management_control_operation == 5 on the previous reference
      // picture reset its POC (see HandleMemoryManagementOps()).
      if (prev_ref_field_ != H264Picture::FIELD_BOTTOM) {
        prev_pic_order_cnt_msb = 0;
        prev_pic_order_cnt_lsb = prev_ref_top_field_order_cnt_;
      } else {
        prev_pic_order_cnt_msb = 0;
        prev_pic_order_cnt_lsb = 0;
      }
    } else {
      prev_pic_order_cnt_msb = prev_ref_pic_order_cnt_msb_;
      prev_pic_order_cnt_lsb = prev_ref_pic_order_cnt_lsb_;
    }
  }

  // Derive pic_order_cnt_msb, detecting lsb wraparound in either direction.
  DCHECK_NE(max_pic_order_cnt_lsb_, 0);
  if ((pic_order_cnt_lsb < prev_pic_order_cnt_lsb) &&
      (prev_pic_order_cnt_lsb - pic_order_cnt_lsb >=
       max_pic_order_cnt_lsb_ / 2)) {
    curr_pic_->pic_order_cnt_msb = prev_pic_order_cnt_msb +
        max_pic_order_cnt_lsb_;
  } else if ((pic_order_cnt_lsb > prev_pic_order_cnt_lsb) &&
             (pic_order_cnt_lsb - prev_pic_order_cnt_lsb >
              max_pic_order_cnt_lsb_ / 2)) {
    curr_pic_->pic_order_cnt_msb = prev_pic_order_cnt_msb -
        max_pic_order_cnt_lsb_;
  } else {
    curr_pic_->pic_order_cnt_msb = prev_pic_order_cnt_msb;
  }

  // Compute order counts for whichever fields this picture contains.
  if (curr_pic_->field != H264Picture::FIELD_BOTTOM) {
    curr_pic_->top_field_order_cnt = curr_pic_->pic_order_cnt_msb +
        pic_order_cnt_lsb;
  }

  if (curr_pic_->field != H264Picture::FIELD_TOP) {
    // TODO posciak: perhaps replace with pic->field?
    if (!slice_hdr->field_pic_flag) {
      curr_pic_->bottom_field_order_cnt = curr_pic_->top_field_order_cnt +
          slice_hdr->delta_pic_order_cnt_bottom;
    } else {
      curr_pic_->bottom_field_order_cnt = curr_pic_->pic_order_cnt_msb +
          pic_order_cnt_lsb;
    }
  }

  // The picture's POC is derived from its field order counts.
  switch (curr_pic_->field) {
    case H264Picture::FIELD_NONE:
      curr_pic_->pic_order_cnt = std::min(curr_pic_->top_field_order_cnt,
                                          curr_pic_->bottom_field_order_cnt);
      break;
    case H264Picture::FIELD_TOP:
      curr_pic_->pic_order_cnt = curr_pic_->top_field_order_cnt;
      break;
    case H264Picture::FIELD_BOTTOM:
      curr_pic_->pic_order_cnt = curr_pic_->bottom_field_order_cnt;
      break;
  }

  return true;
}
1158 | |
1159 void VaapiH264Decoder::UpdatePicNums() { | |
1160 for (H264DPB::Pictures::iterator it = dpb_.begin(); it != dpb_.end(); ++it) { | |
1161 H264Picture* pic = *it; | |
1162 DCHECK(pic); | |
1163 if (!pic->ref) | |
1164 continue; | |
1165 | |
1166 // Below assumes non-interlaced stream. | |
1167 DCHECK_EQ(pic->field, H264Picture::FIELD_NONE); | |
1168 if (pic->long_term) { | |
1169 pic->long_term_pic_num = pic->long_term_frame_idx; | |
1170 } else { | |
1171 if (pic->frame_num > frame_num_) | |
1172 pic->frame_num_wrap = pic->frame_num - max_frame_num_; | |
1173 else | |
1174 pic->frame_num_wrap = pic->frame_num; | |
1175 | |
1176 pic->pic_num = pic->frame_num_wrap; | |
1177 } | |
1178 } | |
1179 } | |
1180 | |
1181 struct PicNumDescCompare { | |
1182 bool operator()(const H264Picture* a, const H264Picture* b) const { | |
1183 return a->pic_num > b->pic_num; | |
1184 } | |
1185 }; | |
1186 | |
1187 struct LongTermPicNumAscCompare { | |
1188 bool operator()(const H264Picture* a, const H264Picture* b) const { | |
1189 return a->long_term_pic_num < b->long_term_pic_num; | |
1190 } | |
1191 }; | |
1192 | |
// Builds ref_pic_list0_ for a P/SP slice per spec 8.2.4.2.1 from the
// reference pictures currently in the DPB.
void VaapiH264Decoder::ConstructReferencePicListsP(H264SliceHeader* slice_hdr) {
  // RefPicList0 (8.2.4.2.1) [[1] [2]], where:
  // [1] shortterm ref pics sorted by descending pic_num,
  // [2] longterm ref pics by ascending long_term_pic_num.
  DCHECK(ref_pic_list0_.empty() && ref_pic_list1_.empty());
  // First get the short ref pics...
  dpb_.GetShortTermRefPicsAppending(ref_pic_list0_);
  size_t num_short_refs = ref_pic_list0_.size();

  // and sort them to get [1].
  std::sort(ref_pic_list0_.begin(), ref_pic_list0_.end(), PicNumDescCompare());

  // Now get long term pics and sort them by long_term_pic_num to get [2].
  dpb_.GetLongTermRefPicsAppending(ref_pic_list0_);
  std::sort(ref_pic_list0_.begin() + num_short_refs, ref_pic_list0_.end(),
            LongTermPicNumAscCompare());

  // Cut off if we have more than requested in slice header.
  // NOTE(review): resize() can also grow the list with NULL entries when the
  // DPB holds fewer refs than requested — downstream code must tolerate
  // NULLs; confirm.
  ref_pic_list0_.resize(slice_hdr->num_ref_idx_l0_active_minus1 + 1);
}
1213 | |
1214 struct POCAscCompare { | |
1215 bool operator()(const H264Picture* a, const H264Picture* b) const { | |
1216 return a->pic_order_cnt < b->pic_order_cnt; | |
1217 } | |
1218 }; | |
1219 | |
1220 struct POCDescCompare { | |
1221 bool operator()(const H264Picture* a, const H264Picture* b) const { | |
1222 return a->pic_order_cnt > b->pic_order_cnt; | |
1223 } | |
1224 }; | |
1225 | |
1226 void VaapiH264Decoder::ConstructReferencePicListsB(H264SliceHeader* slice_hdr) { | |
1227 // RefPicList0 (8.2.4.2.3) [[1] [2] [3]], where: | |
1228 // [1] shortterm ref pics with POC < curr_pic's POC sorted by descending POC, | |
1229 // [2] shortterm ref pics with POC > curr_pic's POC by ascending POC, | |
1230 // [3] longterm ref pics by ascending long_term_pic_num. | |
1231 DCHECK(ref_pic_list0_.empty() && ref_pic_list1_.empty()); | |
1232 dpb_.GetShortTermRefPicsAppending(ref_pic_list0_); | |
1233 size_t num_short_refs = ref_pic_list0_.size(); | |
1234 | |
1235 // First sort ascending, this will put [1] in right place and finish [2]. | |
1236 std::sort(ref_pic_list0_.begin(), ref_pic_list0_.end(), POCAscCompare()); | |
1237 | |
1238 // Find first with POC > curr_pic's POC to get first element in [2]... | |
1239 H264Picture::PtrVector::iterator iter; | |
1240 iter = std::upper_bound(ref_pic_list0_.begin(), ref_pic_list0_.end(), | |
1241 curr_pic_.get(), POCAscCompare()); | |
1242 | |
1243 // and sort [1] descending, thus finishing sequence [1] [2]. | |
1244 std::sort(ref_pic_list0_.begin(), iter, POCDescCompare()); | |
1245 | |
1246 // Now add [3] and sort by ascending long_term_pic_num. | |
1247 dpb_.GetLongTermRefPicsAppending(ref_pic_list0_); | |
1248 std::sort(ref_pic_list0_.begin() + num_short_refs, ref_pic_list0_.end(), | |
1249 LongTermPicNumAscCompare()); | |
1250 | |
1251 // RefPicList1 (8.2.4.2.4) [[1] [2] [3]], where: | |
1252 // [1] shortterm ref pics with POC > curr_pic's POC sorted by ascending POC, | |
1253 // [2] shortterm ref pics with POC < curr_pic's POC by descending POC, | |
1254 // [3] longterm ref pics by ascending long_term_pic_num. | |
1255 | |
1256 dpb_.GetShortTermRefPicsAppending(ref_pic_list1_); | |
1257 num_short_refs = ref_pic_list1_.size(); | |
1258 | |
1259 // First sort by descending POC. | |
1260 std::sort(ref_pic_list1_.begin(), ref_pic_list1_.end(), POCDescCompare()); | |
1261 | |
1262 // Find first with POC < curr_pic's POC to get first element in [2]... | |
1263 iter = std::upper_bound(ref_pic_list1_.begin(), ref_pic_list1_.end(), | |
1264 curr_pic_.get(), POCDescCompare()); | |
1265 | |
1266 // and sort [1] ascending. | |
1267 std::sort(ref_pic_list1_.begin(), iter, POCAscCompare()); | |
1268 | |
1269 // Now add [3] and sort by ascending long_term_pic_num | |
1270 dpb_.GetShortTermRefPicsAppending(ref_pic_list1_); | |
1271 std::sort(ref_pic_list1_.begin() + num_short_refs, ref_pic_list1_.end(), | |
1272 LongTermPicNumAscCompare()); | |
1273 | |
1274 // If lists identical, swap first two entries in RefPicList1 (spec 8.2.4.2.3) | |
1275 if (ref_pic_list1_.size() > 1 && | |
1276 std::equal(ref_pic_list0_.begin(), ref_pic_list0_.end(), | |
1277 ref_pic_list1_.begin())) | |
1278 std::swap(ref_pic_list1_[0], ref_pic_list1_[1]); | |
1279 | |
1280 // Per 8.2.4.2 it's possible for num_ref_idx_lX_active_minus1 to indicate | |
1281 // there should be more ref pics on list than we constructed. | |
1282 // Those superfluous ones should be treated as non-reference. | |
1283 ref_pic_list0_.resize(slice_hdr->num_ref_idx_l0_active_minus1 + 1); | |
1284 ref_pic_list1_.resize(slice_hdr->num_ref_idx_l1_active_minus1 + 1); | |
1285 } | |
1286 | |
1287 // See 8.2.4 | |
1288 int VaapiH264Decoder::PicNumF(H264Picture *pic) { | |
1289 if (!pic) | |
1290 return -1; | |
1291 | |
1292 if (!pic->long_term) | |
1293 return pic->pic_num; | |
1294 else | |
1295 return max_pic_num_; | |
1296 } | |
1297 | |
1298 // See 8.2.4 | |
1299 int VaapiH264Decoder::LongTermPicNumF(H264Picture *pic) { | |
1300 if (pic->ref && pic->long_term) | |
1301 return pic->long_term_pic_num; | |
1302 else | |
1303 return 2 * (max_long_term_frame_idx_ + 1); | |
1304 } | |
1305 | |
1306 // Shift elements on the |v| starting from |from| to |to|, inclusive, | |
1307 // one position to the right and insert pic at |from|. | |
1308 static void ShiftRightAndInsert(H264Picture::PtrVector& v, | |
1309 int from, | |
1310 int to, | |
1311 H264Picture* pic) { | |
1312 DCHECK(pic); | |
1313 for (int i = to + 1; i > from; --i) | |
1314 v[i] = v[i - 1]; | |
1315 | |
1316 v[from] = pic; | |
1317 } | |
1318 | |
// Reorders ref_pic_list0_ (|list| == 0) or ref_pic_list1_ (|list| == 1)
// according to the ref_pic_list_modification commands in |slice_hdr|, per
// spec 8.2.4.3. Returns false on a malformed stream (a command referring to
// a picture that is not in the DPB); true otherwise, including when no
// modification was requested for the list.
bool VaapiH264Decoder::ModifyReferencePicList(H264SliceHeader *slice_hdr,
                                              int list) {
  int num_ref_idx_lX_active_minus1;
  H264Picture::PtrVector* ref_pic_listx;
  H264ModificationOfPicNum* list_mod;

  // This can process either ref_pic_list0 or ref_pic_list1, depending on
  // the list argument. Set up pointers to proper list to be processed here.
  if (list == 0) {
    if (!slice_hdr->ref_pic_list_modification_flag_l0)
      return true;

    list_mod = slice_hdr->ref_list_l0_modifications;
    num_ref_idx_lX_active_minus1 = ref_pic_list0_.size() - 1;

    ref_pic_listx = &ref_pic_list0_;
  } else {
    if (!slice_hdr->ref_pic_list_modification_flag_l1)
      return true;

    list_mod = slice_hdr->ref_list_l1_modifications;
    num_ref_idx_lX_active_minus1 = ref_pic_list1_.size() - 1;

    ref_pic_listx = &ref_pic_list1_;
  }

  // NOTE(review): this fires for a single-entry list (value 0); presumably
  // DCHECK_GE was intended — confirm.
  DCHECK_GT(num_ref_idx_lX_active_minus1, 0);

  // Spec 8.2.4.3:
  // Reorder pictures on the list in a way specified in the stream.
  int pic_num_lx_pred = curr_pic_->pic_num;
  int ref_idx_lx = 0;
  int pic_num_lx_no_wrap;
  int pic_num_lx;
  H264Picture *pic ;
  for (int i = 0; i < H264SliceHeader::kRefListModSize; ++i) {
    switch (list_mod->modification_of_pic_nums_idc) {
      case 0:
      case 1:
        // Modify short reference picture position.
        if (list_mod->modification_of_pic_nums_idc == 0) {
          // Subtract given value from predicted PicNum.
          pic_num_lx_no_wrap = pic_num_lx_pred -
              (static_cast<int>(list_mod->abs_diff_pic_num_minus1) + 1);
          // Wrap around max_pic_num_ if it becomes < 0 as result
          // of subtraction.
          if (pic_num_lx_no_wrap < 0)
            pic_num_lx_no_wrap += max_pic_num_;
        } else {
          // Add given value to predicted PicNum.
          pic_num_lx_no_wrap = pic_num_lx_pred +
              (static_cast<int>(list_mod->abs_diff_pic_num_minus1) + 1);
          // Wrap around max_pic_num_ if it becomes >= max_pic_num_ as result
          // of the addition.
          if (pic_num_lx_no_wrap >= max_pic_num_)
            pic_num_lx_no_wrap -= max_pic_num_;
        }

        // For use in next iteration.
        pic_num_lx_pred = pic_num_lx_no_wrap;

        // Translate the no-wrap value into an actual PicNum.
        if (pic_num_lx_no_wrap > curr_pic_->pic_num)
          pic_num_lx = pic_num_lx_no_wrap - max_pic_num_;
        else
          pic_num_lx = pic_num_lx_no_wrap;

        DCHECK_LT(num_ref_idx_lX_active_minus1 + 1,
                  H264SliceHeader::kRefListModSize);
        pic = dpb_.GetShortRefPicByPicNum(pic_num_lx);
        if (!pic) {
          DVLOG(1) << "Malformed stream, no pic num " << pic_num_lx;
          return false;
        }
        // Insert the picture at its new position, shifting the rest right.
        ShiftRightAndInsert(*ref_pic_listx, ref_idx_lx,
                            num_ref_idx_lX_active_minus1, pic);
        ref_idx_lx++;

        // Compact the tail of the list, dropping the old occurrence of |pic|.
        // NOTE(review): |src| reaches num_ref_idx_lX_active_minus1 + 1, one
        // past the constructed list length — this relies on the vector
        // having a spare slot at that index; verify.
        for (int src = ref_idx_lx, dst = ref_idx_lx;
             src <= num_ref_idx_lX_active_minus1 + 1; ++src) {
          if (PicNumF((*ref_pic_listx)[src]) != pic_num_lx)
            (*ref_pic_listx)[dst++] = (*ref_pic_listx)[src];
        }
        break;

      case 2:
        // Modify long term reference picture position.
        DCHECK_LT(num_ref_idx_lX_active_minus1 + 1,
                  H264SliceHeader::kRefListModSize);
        pic = dpb_.GetLongRefPicByLongTermPicNum(list_mod->long_term_pic_num);
        if (!pic) {
          DVLOG(1) << "Malformed stream, no pic num " << pic_num_lx;
          return false;
        }
        ShiftRightAndInsert(*ref_pic_listx, ref_idx_lx,
                            num_ref_idx_lX_active_minus1, pic);
        ref_idx_lx++;

        // Compact as above, matching on long term pic num this time.
        for (int src = ref_idx_lx, dst = ref_idx_lx;
             src <= num_ref_idx_lX_active_minus1 + 1; ++src) {
          if (LongTermPicNumF((*ref_pic_listx)[src])
              != static_cast<int>(list_mod->long_term_pic_num))
            (*ref_pic_listx)[dst++] = (*ref_pic_listx)[src];
        }
        break;

      case 3:
        // End of modification list.
        return true;

      default:
        // May be recoverable.
        DVLOG(1) << "Invalid modification_of_pic_nums_idc="
                 << list_mod->modification_of_pic_nums_idc
                 << " in position " << i;
        break;
    }

    ++list_mod;
  }

  return true;
}
1441 | |
1442 bool VaapiH264Decoder::PutPicToTexture(int32 picture_buffer_id) { | |
1443 DecodeSurfaces::iterator it = decode_surfaces_.find(picture_buffer_id); | |
1444 if (it == decode_surfaces_.end()) { | |
1445 DVLOG(1) << "Asked to put an invalid buffer"; | |
1446 return false; | |
1447 } | |
1448 | |
1449 DVLOG(3) << "Will output from VASurface " << it->second->va_surface_id() | |
1450 << " to texture id " << it->second->texture_id(); | |
1451 | |
1452 return it->second->Sync(); | |
1453 } | |
1454 | |
1455 bool VaapiH264Decoder::OutputPic(H264Picture* pic) { | |
1456 // No longer need to keep POC->surface mapping, since for decoder this POC | |
1457 // is finished with. When the client returns this surface via | |
1458 // ReusePictureBuffer(), it will be marked back as available for use. | |
1459 DecodeSurface* dec_surface = UnassignSurfaceFromPoC(pic->pic_order_cnt); | |
1460 if (!dec_surface) | |
1461 return false; | |
1462 | |
1463 // Notify the client that a picture can be output. The decoded picture may | |
1464 // not be synced with texture contents yet at this point. The client has | |
1465 // to use PutPicToTexture() to ensure that. | |
1466 DVLOG(4) << "Posting output task for input_id: " << dec_surface->input_id() | |
1467 << "output_id: " << dec_surface->picture_buffer_id(); | |
1468 output_pic_cb_.Run(dec_surface->input_id(), | |
1469 dec_surface->picture_buffer_id()); | |
1470 return true; | |
1471 } | |
1472 | |
1473 bool VaapiH264Decoder::Flush() { | |
1474 // Output all pictures that are waiting to be outputted. | |
1475 H264Picture::PtrVector to_output; | |
1476 dpb_.GetNotOutputtedPicsAppending(to_output); | |
1477 // Sort them by ascending POC to output in order. | |
1478 std::sort(to_output.begin(), to_output.end(), POCAscCompare()); | |
1479 | |
1480 H264Picture::PtrVector::iterator it; | |
1481 for (it = to_output.begin(); it != to_output.end(); ++it) { | |
1482 if (!OutputPic(*it)) { | |
1483 DVLOG(1) << "Failed to output pic POC: " << (*it)->pic_order_cnt; | |
1484 return false; | |
1485 } | |
1486 } | |
1487 | |
1488 // And clear DPB contents. | |
1489 dpb_.Clear(); | |
1490 | |
1491 return true; | |
1492 } | |
1493 | |
// Begins decoding a new frame whose first slice is |slice_hdr|: flushes the
// DPB on IDR, allocates and initializes |curr_pic_|, builds (and optionally
// modifies) the reference picture lists, then queues the picture-level
// parameter buffers and the first slice. Returns false on any failure.
bool VaapiH264Decoder::StartNewFrame(H264SliceHeader* slice_hdr) {
  // TODO posciak: add handling of max_num_ref_frames per spec.

  // If the new frame is an IDR, output what's left to output and clear DPB
  if (slice_hdr->idr_pic_flag) {
    // (unless we are explicitly instructed not to do so).
    if (!slice_hdr->no_output_of_prior_pics_flag) {
      // Output DPB contents.
      if (!Flush())
        return false;
    }
    dpb_.Clear();
  }

  // curr_pic_ should have either been added to DPB or discarded when finishing
  // the last frame. DPB is responsible for releasing that memory once it's
  // not needed anymore.
  DCHECK(!curr_pic_.get());
  curr_pic_.reset(new H264Picture);
  CHECK(curr_pic_.get());

  if (!InitCurrPicture(slice_hdr))
    return false;

  DCHECK_GT(max_frame_num_, 0);

  // Recompute pic nums for the reference pictures in the DPB, relative to
  // the new picture.
  UpdatePicNums();

  // Prepare reference picture lists if required (B and S/SP slices).
  ref_pic_list0_.clear();
  ref_pic_list1_.clear();
  if (slice_hdr->IsPSlice() || slice_hdr->IsSPSlice()) {
    ConstructReferencePicListsP(slice_hdr);
    if (!ModifyReferencePicList(slice_hdr, 0))
      return false;
  } else if (slice_hdr->IsBSlice()) {
    ConstructReferencePicListsB(slice_hdr);
    if (!ModifyReferencePicList(slice_hdr, 0))
      return false;
    if (!ModifyReferencePicList(slice_hdr, 1))
      return false;
  }

  // Send parameter buffers before each new picture, before the first slice.
  if (!SendPPS())
    return false;

  if (!SendIQMatrix())
    return false;

  if (!QueueSlice(slice_hdr))
    return false;

  return true;
}
1549 | |
// Executes the memory_management_control_operations stored on |curr_pic_|
// (spec 8.2.5.4), marking and unmarking short/long term reference pictures
// in the DPB. Returns false when an operation refers to a picture that is
// not present in the DPB.
bool VaapiH264Decoder::HandleMemoryManagementOps() {
  // 8.2.5.4
  for (unsigned int i = 0; i < arraysize(curr_pic_->ref_pic_marking); ++i) {
    // Code below does not support interlaced stream (per-field pictures).
    H264DecRefPicMarking* ref_pic_marking = &curr_pic_->ref_pic_marking[i];
    H264Picture* to_mark;
    int pic_num_x;

    switch (ref_pic_marking->memory_mgmnt_control_operation) {
      case 0:
        // Normal end of operations' specification.
        return true;

      case 1:
        // Mark a short term reference picture as unused so it can be removed
        // if outputted.
        pic_num_x = curr_pic_->pic_num -
            (ref_pic_marking->difference_of_pic_nums_minus1 + 1);
        to_mark = dpb_.GetShortRefPicByPicNum(pic_num_x);
        if (to_mark) {
          to_mark->ref = false;
        } else {
          DVLOG(1) << "Invalid short ref pic num to unmark";
          return false;
        }
        break;

      case 2:
        // Mark a long term reference picture as unused so it can be removed
        // if outputted.
        to_mark = dpb_.GetLongRefPicByLongTermPicNum(
            ref_pic_marking->long_term_pic_num);
        if (to_mark) {
          to_mark->ref = false;
        } else {
          DVLOG(1) << "Invalid long term ref pic num to unmark";
          return false;
        }
        break;

      case 3:
        // Mark a short term reference picture as long term reference.
        pic_num_x = curr_pic_->pic_num -
            (ref_pic_marking->difference_of_pic_nums_minus1 + 1);
        to_mark = dpb_.GetShortRefPicByPicNum(pic_num_x);
        if (to_mark) {
          DCHECK(to_mark->ref && !to_mark->long_term);
          to_mark->long_term = true;
          to_mark->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
        } else {
          DVLOG(1) << "Invalid short term ref pic num to mark as long ref";
          return false;
        }
        break;

      case 4: {
        // Unmark all reference pictures with long_term_frame_idx over new max.
        max_long_term_frame_idx_
            = ref_pic_marking->max_long_term_frame_idx_plus1 - 1;
        H264Picture::PtrVector long_terms;
        dpb_.GetLongTermRefPicsAppending(long_terms);
        // Note: this |i| shadows the outer loop variable.
        for (size_t i = 0; i < long_terms.size(); ++i) {
          H264Picture* pic = long_terms[i];
          DCHECK(pic->ref && pic->long_term);
          // Ok to cast, max_long_term_frame_idx is much smaller than 16bit.
          if (pic->long_term_frame_idx >
              static_cast<int>(max_long_term_frame_idx_))
            pic->ref = false;
        }
        break;
      }

      case 5:
        // Unmark all reference pictures.
        dpb_.MarkAllUnusedForRef();
        max_long_term_frame_idx_ = -1;
        curr_pic_->mem_mgmt_5 = true;
        break;

      case 6: {
        // Replace long term reference pictures with current picture.
        // First unmark if any existing with this long_term_frame_idx...
        H264Picture::PtrVector long_terms;
        dpb_.GetLongTermRefPicsAppending(long_terms);
        // Note: this |i| shadows the outer loop variable.
        for (size_t i = 0; i < long_terms.size(); ++i) {
          H264Picture* pic = long_terms[i];
          DCHECK(pic->ref && pic->long_term);
          // Ok to cast, long_term_frame_idx is much smaller than 16bit.
          if (pic->long_term_frame_idx ==
              static_cast<int>(ref_pic_marking->long_term_frame_idx))
            pic->ref = false;
        }

        // and mark the current one instead.
        curr_pic_->ref = true;
        curr_pic_->long_term = true;
        curr_pic_->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
        break;
      }

      default:
        // Would indicate a bug in parser.
        NOTREACHED();
    }
  }

  return true;
}
1658 | |
1659 // This method ensures that DPB does not overflow, either by removing | |
1660 // reference pictures as specified in the stream, or using a sliding window | |
1661 // procedure to remove the oldest one. | |
1662 // It also performs marking and unmarking pictures as reference. | |
1663 // See spac 8.2.5.1. | |
void VaapiH264Decoder::ReferencePictureMarking() {
  if (curr_pic_->idr) {
    // If current picture is an IDR, all reference pictures are unmarked.
    dpb_.MarkAllUnusedForRef();

    if (curr_pic_->long_term_reference_flag) {
      // Stream requested this IDR picture be kept as a long term reference;
      // it becomes long term frame index 0 (spec 8.2.5.1).
      curr_pic_->long_term = true;
      curr_pic_->long_term_frame_idx = 0;
      max_long_term_frame_idx_ = 0;
    } else {
      // No long term references remain in use after this IDR.
      curr_pic_->long_term = false;
      max_long_term_frame_idx_ = -1;
    }
  } else {
    if (!curr_pic_->adaptive_ref_pic_marking_mode_flag) {
      // If non-IDR, and the stream does not indicate what we should do to
      // ensure DPB doesn't overflow, discard oldest picture.
      // See spec 8.2.5.3.
      if (curr_pic_->field == H264Picture::FIELD_NONE) {
        // Sliding window: reference picture count must never exceed
        // max_num_ref_frames from the active SPS (floor of 1).
        DCHECK_LE(dpb_.CountRefPics(),
            std::max<int>(parser_.GetSPS(curr_sps_id_)->max_num_ref_frames,
                          1));
        if (dpb_.CountRefPics() ==
            std::max<int>(parser_.GetSPS(curr_sps_id_)->max_num_ref_frames,
                          1)) {
          // Max number of reference pics reached,
          // need to remove one of the short term ones.
          // Find smallest frame_num_wrap short reference picture and mark
          // it as unused.
          H264Picture* to_unmark = dpb_.GetLowestFrameNumWrapShortRefPic();
          if (to_unmark == NULL) {
            DVLOG(1) << "Couldn't find a short ref picture to unmark";
            return;
          }
          to_unmark->ref = false;
        }
      } else {
        // Shouldn't get here.
        DVLOG(1) << "Interlaced video not supported.";
      }
    } else {
      // Stream has instructions how to discard pictures from DPB and how
      // to mark/unmark existing reference pictures. Do it.
      // Spec 8.2.5.4.
      if (curr_pic_->field == H264Picture::FIELD_NONE) {
        HandleMemoryManagementOps();
      } else {
        // Shouldn't get here.
        DVLOG(1) << "Interlaced video not supported.";
      }
    }
  }
}
1717 | |
1718 bool VaapiH264Decoder::FinishPicture() { | |
1719 DCHECK(curr_pic_.get()); | |
1720 | |
1721 // Finish processing previous picture. | |
1722 // Start by storing previous reference picture data for later use, | |
1723 // if picture being finished is a reference picture. | |
1724 if (curr_pic_->ref) { | |
1725 ReferencePictureMarking(); | |
1726 prev_ref_has_memmgmnt5_ = curr_pic_->mem_mgmt_5; | |
1727 prev_ref_top_field_order_cnt_ = curr_pic_->top_field_order_cnt; | |
1728 prev_ref_pic_order_cnt_msb_ = curr_pic_->pic_order_cnt_msb; | |
1729 prev_ref_pic_order_cnt_lsb_ = curr_pic_->pic_order_cnt_lsb; | |
1730 prev_ref_field_ = curr_pic_->field; | |
1731 } | |
1732 | |
1733 // Remove unused (for reference or later output) pictures from DPB. | |
1734 dpb_.RemoveUnused(); | |
1735 | |
1736 DVLOG(4) << "Finishing picture, DPB entries: " << dpb_.size() | |
1737 << " Num available dec surfaces: " | |
1738 << num_available_decode_surfaces_; | |
1739 | |
1740 if (dpb_.IsFull()) { | |
1741 // DPB is full, we have to make space for the new picture. | |
1742 // Get all pictures that haven't been outputted yet. | |
1743 H264Picture::PtrVector not_outputted; | |
1744 dpb_.GetNotOutputtedPicsAppending(not_outputted); | |
1745 std::sort(not_outputted.begin(), not_outputted.end(), POCAscCompare()); | |
1746 H264Picture::PtrVector::iterator output_candidate = not_outputted.begin(); | |
1747 | |
1748 // Keep outputting pictures until we can either output the picture being | |
1749 // finished and discard it (if it is not a reference picture), or until | |
1750 // we can discard an older picture that was just waiting for output and | |
1751 // is not a reference picture, thus making space for the current one. | |
1752 while (dpb_.IsFull()) { | |
1753 // Maybe outputted enough to output current picture. | |
1754 if (!curr_pic_->ref && (output_candidate == not_outputted.end() || | |
1755 curr_pic_->pic_order_cnt < (*output_candidate)->pic_order_cnt)) { | |
1756 // curr_pic_ is not a reference picture and no preceding pictures are | |
1757 // waiting for output in DPB, so it can be outputted and discarded | |
1758 // without storing in DPB. | |
1759 if (!OutputPic(curr_pic_.get())) | |
1760 return false; | |
1761 | |
1762 // Managed to output current picture, return without adding to DPB. | |
1763 return true; | |
1764 } | |
1765 | |
1766 // Couldn't output current picture, so try to output the lowest PoC | |
1767 // from DPB. | |
1768 if (output_candidate != not_outputted.end()) { | |
1769 if (!OutputPic(*output_candidate)) | |
1770 return false; | |
1771 | |
1772 // If outputted picture wasn't a reference picture, it can be removed. | |
1773 if (!(*output_candidate)->ref) | |
1774 dpb_.RemoveByPOC((*output_candidate)->pic_order_cnt); | |
1775 } else { | |
1776 // Couldn't output current pic and couldn't do anything | |
1777 // with existing pictures in DPB, so we can't make space. | |
1778 // This should not happen. | |
1779 DVLOG(1) << "Could not free up space in DPB!"; | |
1780 return false; | |
1781 } | |
1782 } | |
1783 ++output_candidate; | |
1784 } | |
1785 | |
1786 // Store current picture for later output and/or reference (ownership now | |
1787 // with the DPB). | |
1788 dpb_.StorePic(curr_pic_.release()); | |
1789 | |
1790 return true; | |
1791 } | |
1792 | |
1793 bool VaapiH264Decoder::ProcessSPS(int sps_id) { | |
1794 const H264SPS* sps = parser_.GetSPS(sps_id); | |
1795 DCHECK(sps); | |
1796 | |
1797 if (sps->frame_mbs_only_flag == 0) { | |
1798 // Fields/interlaced video not supported. | |
1799 DVLOG(1) << "frame_mbs_only_flag != 1 not supported"; | |
1800 return false; | |
1801 } | |
1802 | |
1803 if (sps->gaps_in_frame_num_value_allowed_flag) { | |
1804 DVLOG(1) << "Gaps in frame numbers not supported"; | |
1805 return false; | |
1806 } | |
1807 | |
1808 if (sps->pic_order_cnt_type != 0) { | |
1809 DVLOG(1) << "Unsupported pic_order_cnt_type"; | |
1810 return false; | |
1811 } | |
1812 | |
1813 curr_sps_id_ = sps->seq_parameter_set_id; | |
1814 | |
1815 // Calculate picture height/width (spec 7.4.2.1.1, 7.4.3). | |
1816 int width = 16 * (sps->pic_width_in_mbs_minus1 + 1); | |
1817 int height = 16 * (2 - sps->frame_mbs_only_flag) * | |
1818 (sps->pic_height_in_map_units_minus1 + 1); | |
1819 | |
1820 if ((pic_width_ != -1 || pic_height_ != -1) && | |
1821 (width != pic_width_ || height != pic_height_)) { | |
1822 DVLOG(1) << "Picture size changed mid-stream"; | |
1823 return false; | |
1824 } | |
1825 | |
1826 pic_width_ = width; | |
1827 pic_height_ = height; | |
1828 DVLOG(1) << "New picture size: " << pic_width_ << "x" << pic_height_; | |
1829 | |
1830 max_pic_order_cnt_lsb_ = 1 << (sps->log2_max_pic_order_cnt_lsb_minus4 + 4); | |
1831 max_frame_num_ = 1 << (sps->log2_max_frame_num_minus4 + 4); | |
1832 | |
1833 return true; | |
1834 } | |
1835 | |
1836 bool VaapiH264Decoder::ProcessPPS(int pps_id) { | |
1837 const H264PPS* pps = parser_.GetPPS(pps_id); | |
1838 DCHECK(pps); | |
1839 | |
1840 curr_pps_id_ = pps->pic_parameter_set_id; | |
1841 | |
1842 return true; | |
1843 } | |
1844 | |
1845 bool VaapiH264Decoder::FinishPrevFrameIfPresent() { | |
1846 // If we already have a frame waiting to be decoded, decode it and finish. | |
1847 if (curr_pic_ != NULL) { | |
1848 if (!DecodePicture()) | |
1849 return false; | |
1850 return FinishPicture(); | |
1851 } | |
1852 | |
1853 return true; | |
1854 } | |
1855 | |
1856 bool VaapiH264Decoder::ProcessSlice(H264SliceHeader* slice_hdr) { | |
1857 prev_frame_num_ = frame_num_; | |
1858 frame_num_ = slice_hdr->frame_num; | |
1859 | |
1860 if (prev_frame_num_ > 0 && prev_frame_num_ < frame_num_ - 1) { | |
1861 DVLOG(1) << "Gap in frame_num!"; | |
1862 return false; | |
1863 } | |
1864 | |
1865 if (slice_hdr->field_pic_flag == 0) | |
1866 max_pic_num_ = max_frame_num_; | |
1867 else | |
1868 max_pic_num_ = 2 * max_frame_num_; | |
1869 | |
1870 // TODO posciak: switch to new picture detection per 7.4.1.2.4. | |
1871 if (curr_pic_ != NULL && slice_hdr->first_mb_in_slice != 0) { | |
1872 // This is just some more slice data of the current picture, so | |
1873 // just queue it and return. | |
1874 QueueSlice(slice_hdr); | |
1875 return true; | |
1876 } else { | |
1877 // A new frame, so first finish the previous one before processing it... | |
1878 if (!FinishPrevFrameIfPresent()) | |
1879 return false; | |
1880 | |
1881 // and then start a new one. | |
1882 return StartNewFrame(slice_hdr); | |
1883 } | |
1884 } | |
1885 | |
// Logs, puts the decoder into the unrecoverable kError state and returns
// kDecodeError from the enclosing Decode* method. Used by DecodeInitial()
// and DecodeOneFrame() below.
#define SET_ERROR_AND_RETURN()             \
  do {                                     \
    DVLOG(1) << "Error during decode";     \
    state_ = kError;                       \
    return VaapiH264Decoder::kDecodeError; \
  } while (0)
1892 | |
// Parses NALUs until a resume point is found: an SPS (fresh stream), or an
// IDR slice when recovering after a reset. Returns kReadyToDecode on
// success, kNeedMoreStreamData if the input ran out first.
VaapiH264Decoder::DecResult VaapiH264Decoder::DecodeInitial(int32 input_id) {
  // Decode enough to get required picture size (i.e. until we find an SPS),
  // if we get any slice data, we are missing the beginning of the stream.
  H264NALU nalu;
  H264Parser::Result res;

  DCHECK_NE(state_, kUninitialized);

  curr_input_id_ = input_id;

  while (1) {
    // Get next NALU looking for SPS or IDR if after reset.
    res = parser_.AdvanceToNextNALU(&nalu);
    if (res == H264Parser::kEOStream) {
      DVLOG(1) << "Could not find SPS before EOS";
      return kNeedMoreStreamData;
    } else if (res != H264Parser::kOk) {
      SET_ERROR_AND_RETURN();
    }

    DVLOG(4) << " NALU found: " << static_cast<int>(nalu.nal_unit_type);

    switch (nalu.nal_unit_type) {
      case H264NALU::kSPS:
        res = parser_.ParseSPS(&curr_sps_id_);
        if (res != H264Parser::kOk)
          SET_ERROR_AND_RETURN();

        if (!ProcessSPS(curr_sps_id_))
          SET_ERROR_AND_RETURN();

        // Just got information about the video size from SPS, so we can
        // now allocate surfaces and let the client know we are ready to
        // accept output buffers and decode.
        if (!CreateVASurfaces())
          SET_ERROR_AND_RETURN();

        state_ = kDecoding;
        return kReadyToDecode;

      case H264NALU::kIDRSlice:
        // If after reset, should be able to recover from an IDR.
        if (state_ == kAfterReset) {
          H264SliceHeader slice_hdr;

          res = parser_.ParseSliceHeader(nalu, &slice_hdr);
          if (res != H264Parser::kOk)
            SET_ERROR_AND_RETURN();

          if (!ProcessSlice(&slice_hdr))
            SET_ERROR_AND_RETURN();

          state_ = kDecoding;
          return kReadyToDecode;
        }  // else fallthrough
      case H264NALU::kNonIDRSlice:
      case H264NALU::kPPS:
        // Non-IDR slices cannot be used as resume points, as we may not
        // have all reference pictures that they may require.
        // fallthrough
      default:
        // Skip everything unless it's PPS or an IDR slice (if after reset).
        DVLOG(4) << "Skipping NALU";
        break;
    }
  }
}
1960 | |
1961 void VaapiH264Decoder::SetStream(uint8* ptr, size_t size) { | |
1962 DCHECK(ptr); | |
1963 DCHECK(size); | |
1964 | |
1965 // Got new input stream data from the client. | |
1966 DVLOG(4) << "New input stream chunk at " << (void*) ptr | |
1967 << " size: " << size; | |
1968 parser_.SetStream(ptr, size); | |
1969 } | |
1970 | |
// Consumes NALUs from the parser until a full frame has been decoded
// (kDecodedFrame), the input runs out (kNeedMoreStreamData), no output
// surface is free (kNoOutputAvailable), or an error occurs (kDecodeError).
VaapiH264Decoder::DecResult VaapiH264Decoder::DecodeOneFrame(int32 input_id) {
  // Decode until one full frame is decoded or return it or until end
  // of stream (end of input data is reached).
  H264Parser::Result par_res;
  H264NALU nalu;

  curr_input_id_ = input_id;

  if (state_ != kDecoding) {
    DVLOG(1) << "Decoder not ready: error in stream or not initialized";
    return kDecodeError;
  } else if (num_available_decode_surfaces_ < 1) {
    DVLOG(4) << "No output surfaces available";
    return kNoOutputAvailable;
  }

  // All of the actions below might result in decoding a picture from
  // previously parsed data, but we still have to handle/parse current input
  // first.
  // Note: this may drop some already decoded frames if there are errors
  // further in the stream, but we are OK with that.
  while (1) {
    par_res = parser_.AdvanceToNextNALU(&nalu);
    if (par_res == H264Parser::kEOStream)
      return kNeedMoreStreamData;
    else if (par_res != H264Parser::kOk)
      SET_ERROR_AND_RETURN();

    DVLOG(4) << "NALU found: " << static_cast<int>(nalu.nal_unit_type);

    switch (nalu.nal_unit_type) {
      case H264NALU::kNonIDRSlice:
      case H264NALU::kIDRSlice: {
        H264SliceHeader slice_hdr;

        par_res = parser_.ParseSliceHeader(nalu, &slice_hdr);
        if (par_res != H264Parser::kOk)
          SET_ERROR_AND_RETURN();

        if (!ProcessSlice(&slice_hdr))
          SET_ERROR_AND_RETURN();
        break;
      }

      case H264NALU::kSPS:
        int sps_id;

        // A new SPS marks a frame boundary; submit any pending frame first.
        if (!FinishPrevFrameIfPresent())
          SET_ERROR_AND_RETURN();

        par_res = parser_.ParseSPS(&sps_id);
        if (par_res != H264Parser::kOk)
          SET_ERROR_AND_RETURN();

        if (!ProcessSPS(sps_id))
          SET_ERROR_AND_RETURN();
        break;

      case H264NALU::kPPS:
        int pps_id;

        // A new PPS also marks a frame boundary; flush as above.
        if (!FinishPrevFrameIfPresent())
          SET_ERROR_AND_RETURN();

        par_res = parser_.ParsePPS(&pps_id);
        if (par_res != H264Parser::kOk)
          SET_ERROR_AND_RETURN();

        if (!ProcessPPS(pps_id))
          SET_ERROR_AND_RETURN();
        break;

      default:
        // skip NALU
        break;
    }

    // If the last action resulted in decoding a frame, possibly from older
    // data, return. Otherwise keep reading the stream.
    if (frame_ready_at_hw_) {
      frame_ready_at_hw_ = false;
      return kDecodedFrame;
    }
  }
}
2056 | |
// static
// Returns the number of output pictures the client must supply to the
// decoder; the value is the compile-time constant kNumReqPictures.
size_t VaapiH264Decoder::GetRequiredNumOfPictures() {
  return kNumReqPictures;
}
2061 | |
2062 } // namespace content | |
2063 | |
OLD | NEW |