OLD | NEW |
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include <algorithm> | 5 #include <algorithm> |
6 #include <limits> | 6 #include <limits> |
7 | 7 |
8 #include "base/bind.h" | 8 #include "base/bind.h" |
9 #include "base/bind_helpers.h" | 9 #include "base/bind_helpers.h" |
| 10 #include "base/callback_helpers.h" |
10 #include "base/numerics/safe_conversions.h" | 11 #include "base/numerics/safe_conversions.h" |
11 #include "base/stl_util.h" | 12 #include "base/stl_util.h" |
12 #include "content/common/gpu/media/vaapi_h264_decoder.h" | 13 #include "content/common/gpu/media/h264_decoder.h" |
13 | 14 |
14 namespace content { | 15 namespace content { |
15 | 16 |
16 // Decode surface, used for decoding and reference. input_id comes from client | 17 H264Decoder::H264Decoder(H264Accelerator* accelerator) |
17 // and is associated with the surface that was produced as the result | |
18 // of decoding a bitstream buffer with that id. | |
19 class VaapiH264Decoder::DecodeSurface { | |
20 public: | |
21 DecodeSurface(int poc, | |
22 int32 input_id, | |
23 const scoped_refptr<VASurface>& va_surface); | |
24 DecodeSurface(int poc, const scoped_refptr<DecodeSurface>& dec_surface); | |
25 ~DecodeSurface(); | |
26 | |
27 int poc() { | |
28 return poc_; | |
29 } | |
30 | |
31 scoped_refptr<VASurface> va_surface() { | |
32 return va_surface_; | |
33 } | |
34 | |
35 int32 input_id() { | |
36 return input_id_; | |
37 } | |
38 | |
39 private: | |
40 int poc_; | |
41 int32 input_id_; | |
42 scoped_refptr<VASurface> va_surface_; | |
43 }; | |
44 | |
45 VaapiH264Decoder::DecodeSurface::DecodeSurface( | |
46 int poc, | |
47 int32 input_id, | |
48 const scoped_refptr<VASurface>& va_surface) | |
49 : poc_(poc), | |
50 input_id_(input_id), | |
51 va_surface_(va_surface) { | |
52 DCHECK(va_surface_.get()); | |
53 } | |
54 | |
55 VaapiH264Decoder::DecodeSurface::~DecodeSurface() { | |
56 } | |
57 | |
58 VaapiH264Decoder::VaapiH264Decoder( | |
59 VaapiWrapper* vaapi_wrapper, | |
60 const OutputPicCB& output_pic_cb, | |
61 const ReportErrorToUmaCB& report_error_to_uma_cb) | |
62 : max_pic_order_cnt_lsb_(0), | 18 : max_pic_order_cnt_lsb_(0), |
63 max_frame_num_(0), | 19 max_frame_num_(0), |
64 max_pic_num_(0), | 20 max_pic_num_(0), |
65 max_long_term_frame_idx_(0), | 21 max_long_term_frame_idx_(0), |
66 max_num_reorder_frames_(0), | 22 max_num_reorder_frames_(0), |
67 curr_sps_id_(-1), | 23 curr_sps_id_(-1), |
68 curr_pps_id_(-1), | 24 curr_pps_id_(-1), |
69 vaapi_wrapper_(vaapi_wrapper), | 25 accelerator_(accelerator) { |
70 output_pic_cb_(output_pic_cb), | 26 DCHECK(accelerator_); |
71 report_error_to_uma_cb_(report_error_to_uma_cb) { | |
72 Reset(); | 27 Reset(); |
73 state_ = kNeedStreamMetadata; | 28 state_ = kNeedStreamMetadata; |
74 } | 29 } |
75 | 30 |
76 VaapiH264Decoder::~VaapiH264Decoder() { | 31 H264Decoder::~H264Decoder() { |
77 } | 32 } |
78 | 33 |
79 void VaapiH264Decoder::Reset() { | 34 void H264Decoder::Reset() { |
80 curr_pic_.reset(); | 35 curr_pic_ = nullptr; |
| 36 curr_nalu_ = nullptr; |
| 37 curr_slice_hdr_ = nullptr; |
81 | 38 |
82 curr_input_id_ = -1; | |
83 frame_num_ = 0; | 39 frame_num_ = 0; |
84 prev_frame_num_ = -1; | 40 prev_frame_num_ = -1; |
85 prev_frame_num_offset_ = -1; | 41 prev_frame_num_offset_ = -1; |
86 | 42 |
87 prev_ref_has_memmgmnt5_ = false; | 43 prev_ref_has_memmgmnt5_ = false; |
88 prev_ref_top_field_order_cnt_ = -1; | 44 prev_ref_top_field_order_cnt_ = -1; |
89 prev_ref_pic_order_cnt_msb_ = -1; | 45 prev_ref_pic_order_cnt_msb_ = -1; |
90 prev_ref_pic_order_cnt_lsb_ = -1; | 46 prev_ref_pic_order_cnt_lsb_ = -1; |
91 prev_ref_field_ = H264Picture::FIELD_NONE; | 47 prev_ref_field_ = H264Picture::FIELD_NONE; |
92 | 48 |
93 vaapi_wrapper_->DestroyPendingBuffers(); | 49 ref_pic_list_p0_.clear(); |
94 | 50 ref_pic_list_b0_.clear(); |
95 ref_pic_list0_.clear(); | 51 ref_pic_list_b1_.clear(); |
96 ref_pic_list1_.clear(); | |
97 | |
98 for (DecSurfacesInUse::iterator it = decode_surfaces_in_use_.begin(); | |
99 it != decode_surfaces_in_use_.end(); ) { | |
100 int poc = it->second->poc(); | |
101 // Must be incremented before UnassignSurfaceFromPoC as this call | |
102 // invalidates |it|. | |
103 ++it; | |
104 UnassignSurfaceFromPoC(poc); | |
105 } | |
106 DCHECK(decode_surfaces_in_use_.empty()); | |
107 | |
108 dpb_.Clear(); | 52 dpb_.Clear(); |
109 parser_.Reset(); | 53 parser_.Reset(); |
110 last_output_poc_ = std::numeric_limits<int>::min(); | 54 last_output_poc_ = std::numeric_limits<int>::min(); |
111 | 55 |
112 // If we are in kDecoding, we can resume without processing an SPS. | 56 // If we are in kDecoding, we can resume without processing an SPS. |
113 if (state_ == kDecoding) | 57 if (state_ == kDecoding) |
114 state_ = kAfterReset; | 58 state_ = kAfterReset; |
115 } | 59 } |
116 | 60 |
117 void VaapiH264Decoder::ReuseSurface( | 61 void H264Decoder::PrepareRefPicLists(media::H264SliceHeader* slice_hdr) { |
118 const scoped_refptr<VASurface>& va_surface) { | 62 ConstructReferencePicListsP(slice_hdr); |
119 available_va_surfaces_.push_back(va_surface); | 63 ConstructReferencePicListsB(slice_hdr); |
120 } | 64 } |
121 | 65 |
122 // Fill |va_pic| with default/neutral values. | 66 bool H264Decoder::ModifyReferencePicLists(media::H264SliceHeader* slice_hdr, |
123 static void InitVAPicture(VAPictureH264* va_pic) { | 67 H264Picture::Vector* ref_pic_list0, |
124 memset(va_pic, 0, sizeof(*va_pic)); | 68 H264Picture::Vector* ref_pic_list1) { |
125 va_pic->picture_id = VA_INVALID_ID; | 69 ref_pic_list0->clear(); |
126 va_pic->flags = VA_PICTURE_H264_INVALID; | 70 ref_pic_list1->clear(); |
127 } | |
128 | |
129 void VaapiH264Decoder::FillVAPicture(VAPictureH264 *va_pic, H264Picture* pic) { | |
130 DCHECK(pic); | |
131 | |
132 DecodeSurface* dec_surface = DecodeSurfaceByPoC(pic->pic_order_cnt); | |
133 if (!dec_surface) { | |
134 // Cannot provide a ref picture, will corrupt output, but may be able | |
135 // to recover. | |
136 InitVAPicture(va_pic); | |
137 return; | |
138 } | |
139 | |
140 va_pic->picture_id = dec_surface->va_surface()->id(); | |
141 va_pic->frame_idx = pic->frame_num; | |
142 va_pic->flags = 0; | |
143 | |
144 switch (pic->field) { | |
145 case H264Picture::FIELD_NONE: | |
146 break; | |
147 case H264Picture::FIELD_TOP: | |
148 va_pic->flags |= VA_PICTURE_H264_TOP_FIELD; | |
149 break; | |
150 case H264Picture::FIELD_BOTTOM: | |
151 va_pic->flags |= VA_PICTURE_H264_BOTTOM_FIELD; | |
152 break; | |
153 } | |
154 | |
155 if (pic->ref) { | |
156 va_pic->flags |= pic->long_term ? VA_PICTURE_H264_LONG_TERM_REFERENCE | |
157 : VA_PICTURE_H264_SHORT_TERM_REFERENCE; | |
158 } | |
159 | |
160 va_pic->TopFieldOrderCnt = pic->top_field_order_cnt; | |
161 va_pic->BottomFieldOrderCnt = pic->bottom_field_order_cnt; | |
162 } | |
163 | |
164 int VaapiH264Decoder::FillVARefFramesFromDPB(VAPictureH264 *va_pics, | |
165 int num_pics) { | |
166 H264DPB::Pictures::reverse_iterator rit; | |
167 int i; | |
168 | |
169 // Return reference frames in reverse order of insertion. | |
170 // Libva does not document this, but other implementations (e.g. mplayer) | |
171 // do it this way as well. | |
172 for (rit = dpb_.rbegin(), i = 0; rit != dpb_.rend() && i < num_pics; ++rit) { | |
173 if ((*rit)->ref) | |
174 FillVAPicture(&va_pics[i++], *rit); | |
175 } | |
176 | |
177 return i; | |
178 } | |
179 | |
180 VaapiH264Decoder::DecodeSurface* VaapiH264Decoder::DecodeSurfaceByPoC(int poc) { | |
181 DecSurfacesInUse::iterator iter = decode_surfaces_in_use_.find(poc); | |
182 if (iter == decode_surfaces_in_use_.end()) { | |
183 DVLOG(1) << "Could not find surface assigned to POC: " << poc; | |
184 return NULL; | |
185 } | |
186 | |
187 return iter->second.get(); | |
188 } | |
189 | |
190 bool VaapiH264Decoder::AssignSurfaceToPoC(int32 input_id, int poc) { | |
191 if (available_va_surfaces_.empty()) { | |
192 DVLOG(1) << "No VA Surfaces available"; | |
193 return false; | |
194 } | |
195 | |
196 linked_ptr<DecodeSurface> dec_surface(new DecodeSurface( | |
197 poc, input_id, available_va_surfaces_.back())); | |
198 available_va_surfaces_.pop_back(); | |
199 | |
200 DVLOG(4) << "POC " << poc | |
201 << " will use surface " << dec_surface->va_surface()->id(); | |
202 | |
203 bool inserted = decode_surfaces_in_use_.insert( | |
204 std::make_pair(poc, dec_surface)).second; | |
205 DCHECK(inserted); | |
206 | |
207 return true; | |
208 } | |
209 | |
210 void VaapiH264Decoder::UnassignSurfaceFromPoC(int poc) { | |
211 DecSurfacesInUse::iterator it = decode_surfaces_in_use_.find(poc); | |
212 if (it == decode_surfaces_in_use_.end()) { | |
213 DVLOG(1) << "Asked to unassign an unassigned POC " << poc; | |
214 return; | |
215 } | |
216 | |
217 DVLOG(4) << "POC " << poc << " no longer using VA surface " | |
218 << it->second->va_surface()->id(); | |
219 | |
220 decode_surfaces_in_use_.erase(it); | |
221 } | |
222 | |
223 bool VaapiH264Decoder::SendPPS() { | |
224 const media::H264PPS* pps = parser_.GetPPS(curr_pps_id_); | |
225 DCHECK(pps); | |
226 | |
227 const media::H264SPS* sps = parser_.GetSPS(pps->seq_parameter_set_id); | |
228 DCHECK(sps); | |
229 | |
230 DCHECK(curr_pic_.get()); | |
231 | |
232 VAPictureParameterBufferH264 pic_param; | |
233 memset(&pic_param, 0, sizeof(VAPictureParameterBufferH264)); | |
234 | |
235 #define FROM_SPS_TO_PP(a) pic_param.a = sps->a; | |
236 #define FROM_SPS_TO_PP2(a, b) pic_param.b = sps->a; | |
237 FROM_SPS_TO_PP2(pic_width_in_mbs_minus1, picture_width_in_mbs_minus1); | |
238 // This assumes non-interlaced video | |
239 FROM_SPS_TO_PP2(pic_height_in_map_units_minus1, | |
240 picture_height_in_mbs_minus1); | |
241 FROM_SPS_TO_PP(bit_depth_luma_minus8); | |
242 FROM_SPS_TO_PP(bit_depth_chroma_minus8); | |
243 #undef FROM_SPS_TO_PP | |
244 #undef FROM_SPS_TO_PP2 | |
245 | |
246 #define FROM_SPS_TO_PP_SF(a) pic_param.seq_fields.bits.a = sps->a; | |
247 #define FROM_SPS_TO_PP_SF2(a, b) pic_param.seq_fields.bits.b = sps->a; | |
248 FROM_SPS_TO_PP_SF(chroma_format_idc); | |
249 FROM_SPS_TO_PP_SF2(separate_colour_plane_flag, | |
250 residual_colour_transform_flag); | |
251 FROM_SPS_TO_PP_SF(gaps_in_frame_num_value_allowed_flag); | |
252 FROM_SPS_TO_PP_SF(frame_mbs_only_flag); | |
253 FROM_SPS_TO_PP_SF(mb_adaptive_frame_field_flag); | |
254 FROM_SPS_TO_PP_SF(direct_8x8_inference_flag); | |
255 pic_param.seq_fields.bits.MinLumaBiPredSize8x8 = (sps->level_idc >= 31); | |
256 FROM_SPS_TO_PP_SF(log2_max_frame_num_minus4); | |
257 FROM_SPS_TO_PP_SF(pic_order_cnt_type); | |
258 FROM_SPS_TO_PP_SF(log2_max_pic_order_cnt_lsb_minus4); | |
259 FROM_SPS_TO_PP_SF(delta_pic_order_always_zero_flag); | |
260 #undef FROM_SPS_TO_PP_SF | |
261 #undef FROM_SPS_TO_PP_SF2 | |
262 | |
263 #define FROM_PPS_TO_PP(a) pic_param.a = pps->a; | |
264 FROM_PPS_TO_PP(num_slice_groups_minus1); | |
265 pic_param.slice_group_map_type = 0; | |
266 pic_param.slice_group_change_rate_minus1 = 0; | |
267 FROM_PPS_TO_PP(pic_init_qp_minus26); | |
268 FROM_PPS_TO_PP(pic_init_qs_minus26); | |
269 FROM_PPS_TO_PP(chroma_qp_index_offset); | |
270 FROM_PPS_TO_PP(second_chroma_qp_index_offset); | |
271 #undef FROM_PPS_TO_PP | |
272 | |
273 #define FROM_PPS_TO_PP_PF(a) pic_param.pic_fields.bits.a = pps->a; | |
274 #define FROM_PPS_TO_PP_PF2(a, b) pic_param.pic_fields.bits.b = pps->a; | |
275 FROM_PPS_TO_PP_PF(entropy_coding_mode_flag); | |
276 FROM_PPS_TO_PP_PF(weighted_pred_flag); | |
277 FROM_PPS_TO_PP_PF(weighted_bipred_idc); | |
278 FROM_PPS_TO_PP_PF(transform_8x8_mode_flag); | |
279 | |
280 pic_param.pic_fields.bits.field_pic_flag = 0; | |
281 FROM_PPS_TO_PP_PF(constrained_intra_pred_flag); | |
282 FROM_PPS_TO_PP_PF2(bottom_field_pic_order_in_frame_present_flag, | |
283 pic_order_present_flag); | |
284 FROM_PPS_TO_PP_PF(deblocking_filter_control_present_flag); | |
285 FROM_PPS_TO_PP_PF(redundant_pic_cnt_present_flag); | |
286 pic_param.pic_fields.bits.reference_pic_flag = curr_pic_->ref; | |
287 #undef FROM_PPS_TO_PP_PF | |
288 #undef FROM_PPS_TO_PP_PF2 | |
289 | |
290 pic_param.frame_num = curr_pic_->frame_num; | |
291 | |
292 InitVAPicture(&pic_param.CurrPic); | |
293 FillVAPicture(&pic_param.CurrPic, curr_pic_.get()); | |
294 | |
295 // Init reference pictures' array. | |
296 for (int i = 0; i < 16; ++i) | |
297 InitVAPicture(&pic_param.ReferenceFrames[i]); | |
298 | |
299 // And fill it with picture info from DPB. | |
300 FillVARefFramesFromDPB(pic_param.ReferenceFrames, | |
301 arraysize(pic_param.ReferenceFrames)); | |
302 | |
303 pic_param.num_ref_frames = sps->max_num_ref_frames; | |
304 | |
305 return vaapi_wrapper_->SubmitBuffer(VAPictureParameterBufferType, | |
306 sizeof(VAPictureParameterBufferH264), | |
307 &pic_param); | |
308 } | |
309 | |
310 bool VaapiH264Decoder::SendIQMatrix() { | |
311 const media::H264PPS* pps = parser_.GetPPS(curr_pps_id_); | |
312 DCHECK(pps); | |
313 | |
314 VAIQMatrixBufferH264 iq_matrix_buf; | |
315 memset(&iq_matrix_buf, 0, sizeof(VAIQMatrixBufferH264)); | |
316 | |
317 if (pps->pic_scaling_matrix_present_flag) { | |
318 for (int i = 0; i < 6; ++i) { | |
319 for (int j = 0; j < 16; ++j) | |
320 iq_matrix_buf.ScalingList4x4[i][j] = pps->scaling_list4x4[i][j]; | |
321 } | |
322 | |
323 for (int i = 0; i < 2; ++i) { | |
324 for (int j = 0; j < 64; ++j) | |
325 iq_matrix_buf.ScalingList8x8[i][j] = pps->scaling_list8x8[i][j]; | |
326 } | |
327 } else { | |
328 const media::H264SPS* sps = parser_.GetSPS(pps->seq_parameter_set_id); | |
329 DCHECK(sps); | |
330 for (int i = 0; i < 6; ++i) { | |
331 for (int j = 0; j < 16; ++j) | |
332 iq_matrix_buf.ScalingList4x4[i][j] = sps->scaling_list4x4[i][j]; | |
333 } | |
334 | |
335 for (int i = 0; i < 2; ++i) { | |
336 for (int j = 0; j < 64; ++j) | |
337 iq_matrix_buf.ScalingList8x8[i][j] = sps->scaling_list8x8[i][j]; | |
338 } | |
339 } | |
340 | |
341 return vaapi_wrapper_->SubmitBuffer(VAIQMatrixBufferType, | |
342 sizeof(VAIQMatrixBufferH264), | |
343 &iq_matrix_buf); | |
344 } | |
345 | |
346 bool VaapiH264Decoder::SendVASliceParam(media::H264SliceHeader* slice_hdr) { | |
347 const media::H264PPS* pps = parser_.GetPPS(slice_hdr->pic_parameter_set_id); | |
348 DCHECK(pps); | |
349 | |
350 const media::H264SPS* sps = parser_.GetSPS(pps->seq_parameter_set_id); | |
351 DCHECK(sps); | |
352 | |
353 VASliceParameterBufferH264 slice_param; | |
354 memset(&slice_param, 0, sizeof(VASliceParameterBufferH264)); | |
355 | |
356 slice_param.slice_data_size = slice_hdr->nalu_size; | |
357 slice_param.slice_data_offset = 0; | |
358 slice_param.slice_data_flag = VA_SLICE_DATA_FLAG_ALL; | |
359 slice_param.slice_data_bit_offset = slice_hdr->header_bit_size; | |
360 | |
361 #define SHDRToSP(a) slice_param.a = slice_hdr->a; | |
362 SHDRToSP(first_mb_in_slice); | |
363 slice_param.slice_type = slice_hdr->slice_type % 5; | |
364 SHDRToSP(direct_spatial_mv_pred_flag); | |
365 | |
366 // TODO posciak: make sure parser sets those even when override flags | |
367 // in slice header is off. | |
368 SHDRToSP(num_ref_idx_l0_active_minus1); | |
369 SHDRToSP(num_ref_idx_l1_active_minus1); | |
370 SHDRToSP(cabac_init_idc); | |
371 SHDRToSP(slice_qp_delta); | |
372 SHDRToSP(disable_deblocking_filter_idc); | |
373 SHDRToSP(slice_alpha_c0_offset_div2); | |
374 SHDRToSP(slice_beta_offset_div2); | |
375 | |
376 if (((slice_hdr->IsPSlice() || slice_hdr->IsSPSlice()) && | |
377 pps->weighted_pred_flag) || | |
378 (slice_hdr->IsBSlice() && pps->weighted_bipred_idc == 1)) { | |
379 SHDRToSP(luma_log2_weight_denom); | |
380 SHDRToSP(chroma_log2_weight_denom); | |
381 | |
382 SHDRToSP(luma_weight_l0_flag); | |
383 SHDRToSP(luma_weight_l1_flag); | |
384 | |
385 SHDRToSP(chroma_weight_l0_flag); | |
386 SHDRToSP(chroma_weight_l1_flag); | |
387 | |
388 for (int i = 0; i <= slice_param.num_ref_idx_l0_active_minus1; ++i) { | |
389 slice_param.luma_weight_l0[i] = | |
390 slice_hdr->pred_weight_table_l0.luma_weight[i]; | |
391 slice_param.luma_offset_l0[i] = | |
392 slice_hdr->pred_weight_table_l0.luma_offset[i]; | |
393 | |
394 for (int j = 0; j < 2; ++j) { | |
395 slice_param.chroma_weight_l0[i][j] = | |
396 slice_hdr->pred_weight_table_l0.chroma_weight[i][j]; | |
397 slice_param.chroma_offset_l0[i][j] = | |
398 slice_hdr->pred_weight_table_l0.chroma_offset[i][j]; | |
399 } | |
400 } | |
401 | |
402 if (slice_hdr->IsBSlice()) { | |
403 for (int i = 0; i <= slice_param.num_ref_idx_l1_active_minus1; ++i) { | |
404 slice_param.luma_weight_l1[i] = | |
405 slice_hdr->pred_weight_table_l1.luma_weight[i]; | |
406 slice_param.luma_offset_l1[i] = | |
407 slice_hdr->pred_weight_table_l1.luma_offset[i]; | |
408 | |
409 for (int j = 0; j < 2; ++j) { | |
410 slice_param.chroma_weight_l1[i][j] = | |
411 slice_hdr->pred_weight_table_l1.chroma_weight[i][j]; | |
412 slice_param.chroma_offset_l1[i][j] = | |
413 slice_hdr->pred_weight_table_l1.chroma_offset[i][j]; | |
414 } | |
415 } | |
416 } | |
417 } | |
418 | |
419 for (int i = 0; i < 32; ++i) { | |
420 InitVAPicture(&slice_param.RefPicList0[i]); | |
421 InitVAPicture(&slice_param.RefPicList1[i]); | |
422 } | |
423 | |
424 int i; | |
425 H264Picture::PtrVector::iterator it; | |
426 for (it = ref_pic_list0_.begin(), i = 0; it != ref_pic_list0_.end() && *it; | |
427 ++it, ++i) | |
428 FillVAPicture(&slice_param.RefPicList0[i], *it); | |
429 for (it = ref_pic_list1_.begin(), i = 0; it != ref_pic_list1_.end() && *it; | |
430 ++it, ++i) | |
431 FillVAPicture(&slice_param.RefPicList1[i], *it); | |
432 | |
433 return vaapi_wrapper_->SubmitBuffer(VASliceParameterBufferType, | |
434 sizeof(VASliceParameterBufferH264), | |
435 &slice_param); | |
436 } | |
437 | |
438 bool VaapiH264Decoder::SendSliceData(const uint8* ptr, size_t size) { | |
439 // Can't help it, blame libva... | |
440 void* non_const_ptr = const_cast<uint8*>(ptr); | |
441 return vaapi_wrapper_->SubmitBuffer(VASliceDataBufferType, size, | |
442 non_const_ptr); | |
443 } | |
444 | |
445 bool VaapiH264Decoder::PrepareRefPicLists(media::H264SliceHeader* slice_hdr) { | |
446 ref_pic_list0_.clear(); | |
447 ref_pic_list1_.clear(); | |
448 | 71 |
449 // Fill reference picture lists for B and S/SP slices. | 72 // Fill reference picture lists for B and S/SP slices. |
450 if (slice_hdr->IsPSlice() || slice_hdr->IsSPSlice()) { | 73 if (slice_hdr->IsPSlice() || slice_hdr->IsSPSlice()) { |
451 ConstructReferencePicListsP(slice_hdr); | 74 *ref_pic_list0 = ref_pic_list_p0_; |
452 return ModifyReferencePicList(slice_hdr, 0); | 75 return ModifyReferencePicList(slice_hdr, 0, ref_pic_list0); |
453 } | 76 } else if (slice_hdr->IsBSlice()) { |
454 | 77 *ref_pic_list0 = ref_pic_list_b0_; |
455 if (slice_hdr->IsBSlice()) { | 78 *ref_pic_list1 = ref_pic_list_b1_; |
456 ConstructReferencePicListsB(slice_hdr); | 79 return ModifyReferencePicList(slice_hdr, 0, ref_pic_list0) && |
457 return ModifyReferencePicList(slice_hdr, 0) && | 80 ModifyReferencePicList(slice_hdr, 1, ref_pic_list1); |
458 ModifyReferencePicList(slice_hdr, 1); | |
459 } | 81 } |
460 | 82 |
461 return true; | 83 return true; |
462 } | 84 } |
463 | 85 |
464 bool VaapiH264Decoder::QueueSlice(media::H264SliceHeader* slice_hdr) { | 86 bool H264Decoder::DecodePicture() { |
465 DCHECK(curr_pic_.get()); | |
466 | |
467 if (!PrepareRefPicLists(slice_hdr)) | |
468 return false; | |
469 | |
470 if (!SendVASliceParam(slice_hdr)) | |
471 return false; | |
472 | |
473 if (!SendSliceData(slice_hdr->nalu_data, slice_hdr->nalu_size)) | |
474 return false; | |
475 | |
476 return true; | |
477 } | |
478 | |
479 // TODO(posciak) start using vaMapBuffer instead of vaCreateBuffer wherever | |
480 // possible. | |
481 bool VaapiH264Decoder::DecodePicture() { | |
482 DCHECK(curr_pic_.get()); | 87 DCHECK(curr_pic_.get()); |
483 | 88 |
484 DVLOG(4) << "Decoding POC " << curr_pic_->pic_order_cnt; | 89 DVLOG(4) << "Decoding POC " << curr_pic_->pic_order_cnt; |
485 DecodeSurface* dec_surface = DecodeSurfaceByPoC(curr_pic_->pic_order_cnt); | 90 return accelerator_->SubmitDecode(curr_pic_); |
486 if (!dec_surface) { | |
487 DVLOG(1) << "Asked to decode an invalid POC " << curr_pic_->pic_order_cnt; | |
488 return false; | |
489 } | |
490 | |
491 if (!vaapi_wrapper_->ExecuteAndDestroyPendingBuffers( | |
492 dec_surface->va_surface()->id())) { | |
493 DVLOG(1) << "Failed decoding picture"; | |
494 return false; | |
495 } | |
496 | |
497 return true; | |
498 } | 91 } |
499 | 92 |
500 bool VaapiH264Decoder::InitCurrPicture(media::H264SliceHeader* slice_hdr) { | 93 bool H264Decoder::InitCurrPicture(media::H264SliceHeader* slice_hdr) { |
501 DCHECK(curr_pic_.get()); | 94 DCHECK(curr_pic_.get()); |
502 | 95 |
503 memset(curr_pic_.get(), 0, sizeof(H264Picture)); | |
504 | |
505 curr_pic_->idr = slice_hdr->idr_pic_flag; | 96 curr_pic_->idr = slice_hdr->idr_pic_flag; |
506 | 97 |
507 if (slice_hdr->field_pic_flag) { | 98 if (slice_hdr->field_pic_flag) { |
508 curr_pic_->field = slice_hdr->bottom_field_flag ? H264Picture::FIELD_BOTTOM | 99 curr_pic_->field = slice_hdr->bottom_field_flag ? H264Picture::FIELD_BOTTOM |
509 : H264Picture::FIELD_TOP; | 100 : H264Picture::FIELD_TOP; |
510 } else { | 101 } else { |
511 curr_pic_->field = H264Picture::FIELD_NONE; | 102 curr_pic_->field = H264Picture::FIELD_NONE; |
512 } | 103 } |
513 | 104 |
514 curr_pic_->ref = slice_hdr->nal_ref_idc != 0; | 105 curr_pic_->ref = slice_hdr->nal_ref_idc != 0; |
515 // This assumes non-interlaced stream. | 106 // This assumes non-interlaced stream. |
516 curr_pic_->frame_num = curr_pic_->pic_num = slice_hdr->frame_num; | 107 curr_pic_->frame_num = curr_pic_->pic_num = slice_hdr->frame_num; |
517 | 108 |
518 if (!CalculatePicOrderCounts(slice_hdr)) | 109 if (!CalculatePicOrderCounts(slice_hdr)) |
519 return false; | 110 return false; |
520 | 111 |
521 // Try to get an empty surface to decode this picture to. | |
522 if (!AssignSurfaceToPoC(curr_input_id_, curr_pic_->pic_order_cnt)) { | |
523 DVLOG(1) << "Failed getting a free surface for a picture"; | |
524 return false; | |
525 } | |
526 | |
527 curr_pic_->long_term_reference_flag = slice_hdr->long_term_reference_flag; | 112 curr_pic_->long_term_reference_flag = slice_hdr->long_term_reference_flag; |
528 curr_pic_->adaptive_ref_pic_marking_mode_flag = | 113 curr_pic_->adaptive_ref_pic_marking_mode_flag = |
529 slice_hdr->adaptive_ref_pic_marking_mode_flag; | 114 slice_hdr->adaptive_ref_pic_marking_mode_flag; |
530 | 115 |
531 // If the slice header indicates we will have to perform reference marking | 116 // If the slice header indicates we will have to perform reference marking |
532 // process after this picture is decoded, store required data for that | 117 // process after this picture is decoded, store required data for that |
533 // purpose. | 118 // purpose. |
534 if (slice_hdr->adaptive_ref_pic_marking_mode_flag) { | 119 if (slice_hdr->adaptive_ref_pic_marking_mode_flag) { |
535 COMPILE_ASSERT(sizeof(curr_pic_->ref_pic_marking) == | 120 COMPILE_ASSERT(sizeof(curr_pic_->ref_pic_marking) == |
536 sizeof(slice_hdr->ref_pic_marking), | 121 sizeof(slice_hdr->ref_pic_marking), |
537 ref_pic_marking_array_sizes_do_not_match); | 122 ref_pic_marking_array_sizes_do_not_match); |
538 memcpy(curr_pic_->ref_pic_marking, slice_hdr->ref_pic_marking, | 123 memcpy(curr_pic_->ref_pic_marking, slice_hdr->ref_pic_marking, |
539 sizeof(curr_pic_->ref_pic_marking)); | 124 sizeof(curr_pic_->ref_pic_marking)); |
540 } | 125 } |
541 | 126 |
542 return true; | 127 return true; |
543 } | 128 } |
544 | 129 |
545 bool VaapiH264Decoder::CalculatePicOrderCounts( | 130 bool H264Decoder::CalculatePicOrderCounts(media::H264SliceHeader* slice_hdr) { |
546 media::H264SliceHeader* slice_hdr) { | |
547 DCHECK_NE(curr_sps_id_, -1); | 131 DCHECK_NE(curr_sps_id_, -1); |
548 const media::H264SPS* sps = parser_.GetSPS(curr_sps_id_); | 132 const media::H264SPS* sps = parser_.GetSPS(curr_sps_id_); |
549 | 133 |
550 int pic_order_cnt_lsb = slice_hdr->pic_order_cnt_lsb; | 134 int pic_order_cnt_lsb = slice_hdr->pic_order_cnt_lsb; |
551 curr_pic_->pic_order_cnt_lsb = pic_order_cnt_lsb; | 135 curr_pic_->pic_order_cnt_lsb = pic_order_cnt_lsb; |
552 | 136 |
553 switch (sps->pic_order_cnt_type) { | 137 switch (sps->pic_order_cnt_type) { |
554 case 0: | 138 case 0: |
555 // See spec 8.2.1.1. | 139 // See spec 8.2.1.1. |
556 int prev_pic_order_cnt_msb, prev_pic_order_cnt_lsb; | 140 int prev_pic_order_cnt_msb, prev_pic_order_cnt_lsb; |
(...skipping 85 matching lines...)
642 // frame_num_in_pic_order_cnt_cycle is verified < 255 in parser | 226 // frame_num_in_pic_order_cnt_cycle is verified < 255 in parser |
643 for (int i = 0; i <= frame_num_in_pic_order_cnt_cycle; ++i) | 227 for (int i = 0; i <= frame_num_in_pic_order_cnt_cycle; ++i) |
644 expected_pic_order_cnt += sps->offset_for_ref_frame[i]; | 228 expected_pic_order_cnt += sps->offset_for_ref_frame[i]; |
645 } | 229 } |
646 | 230 |
647 if (!slice_hdr->nal_ref_idc) | 231 if (!slice_hdr->nal_ref_idc) |
648 expected_pic_order_cnt += sps->offset_for_non_ref_pic; | 232 expected_pic_order_cnt += sps->offset_for_non_ref_pic; |
649 | 233 |
650 if (!slice_hdr->field_pic_flag) { | 234 if (!slice_hdr->field_pic_flag) { |
651 curr_pic_->top_field_order_cnt = expected_pic_order_cnt + | 235 curr_pic_->top_field_order_cnt = expected_pic_order_cnt + |
652 slice_hdr->delta_pic_order_cnt[0]; | 236 slice_hdr->delta_pic_order_cnt0; |
653 curr_pic_->bottom_field_order_cnt = curr_pic_->top_field_order_cnt + | 237 curr_pic_->bottom_field_order_cnt = curr_pic_->top_field_order_cnt + |
654 sps->offset_for_top_to_bottom_field + | 238 sps->offset_for_top_to_bottom_field + |
655 slice_hdr->delta_pic_order_cnt[1]; | 239 slice_hdr->delta_pic_order_cnt1; |
656 } else if (!slice_hdr->bottom_field_flag) { | 240 } else if (!slice_hdr->bottom_field_flag) { |
657 curr_pic_->top_field_order_cnt = expected_pic_order_cnt + | 241 curr_pic_->top_field_order_cnt = expected_pic_order_cnt + |
658 slice_hdr->delta_pic_order_cnt[0]; | 242 slice_hdr->delta_pic_order_cnt0; |
659 } else { | 243 } else { |
660 curr_pic_->bottom_field_order_cnt = expected_pic_order_cnt + | 244 curr_pic_->bottom_field_order_cnt = expected_pic_order_cnt + |
661 sps->offset_for_top_to_bottom_field + | 245 sps->offset_for_top_to_bottom_field + |
662 slice_hdr->delta_pic_order_cnt[0]; | 246 slice_hdr->delta_pic_order_cnt0; |
663 } | 247 } |
664 break; | 248 break; |
665 } | 249 } |
666 | 250 |
667 case 2: | 251 case 2: |
668 // See spec 8.2.1.3. | 252 // See spec 8.2.1.3. |
669 if (prev_has_memmgmnt5_) | 253 if (prev_has_memmgmnt5_) |
670 prev_frame_num_offset_ = 0; | 254 prev_frame_num_offset_ = 0; |
671 | 255 |
672 if (slice_hdr->idr_pic_flag) | 256 if (slice_hdr->idr_pic_flag) |
(...skipping 38 matching lines...)
711 curr_pic_->pic_order_cnt = curr_pic_->top_field_order_cnt; | 295 curr_pic_->pic_order_cnt = curr_pic_->top_field_order_cnt; |
712 break; | 296 break; |
713 case H264Picture::FIELD_BOTTOM: | 297 case H264Picture::FIELD_BOTTOM: |
714 curr_pic_->pic_order_cnt = curr_pic_->bottom_field_order_cnt; | 298 curr_pic_->pic_order_cnt = curr_pic_->bottom_field_order_cnt; |
715 break; | 299 break; |
716 } | 300 } |
717 | 301 |
718 return true; | 302 return true; |
719 } | 303 } |
720 | 304 |
721 void VaapiH264Decoder::UpdatePicNums() { | 305 void H264Decoder::UpdatePicNums() { |
722 for (H264DPB::Pictures::iterator it = dpb_.begin(); it != dpb_.end(); ++it) { | 306 for (auto& pic : dpb_) { |
723 H264Picture* pic = *it; | |
724 DCHECK(pic); | |
725 if (!pic->ref) | 307 if (!pic->ref) |
726 continue; | 308 continue; |
727 | 309 |
728 // Below assumes non-interlaced stream. | 310 // Below assumes non-interlaced stream. |
729 DCHECK_EQ(pic->field, H264Picture::FIELD_NONE); | 311 DCHECK_EQ(pic->field, H264Picture::FIELD_NONE); |
730 if (pic->long_term) { | 312 if (pic->long_term) { |
731 pic->long_term_pic_num = pic->long_term_frame_idx; | 313 pic->long_term_pic_num = pic->long_term_frame_idx; |
732 } else { | 314 } else { |
733 if (pic->frame_num > frame_num_) | 315 if (pic->frame_num > frame_num_) |
734 pic->frame_num_wrap = pic->frame_num - max_frame_num_; | 316 pic->frame_num_wrap = pic->frame_num - max_frame_num_; |
735 else | 317 else |
736 pic->frame_num_wrap = pic->frame_num; | 318 pic->frame_num_wrap = pic->frame_num; |
737 | 319 |
738 pic->pic_num = pic->frame_num_wrap; | 320 pic->pic_num = pic->frame_num_wrap; |
739 } | 321 } |
740 } | 322 } |
741 } | 323 } |
742 | 324 |
743 struct PicNumDescCompare { | 325 struct PicNumDescCompare { |
744 bool operator()(const H264Picture* a, const H264Picture* b) const { | 326 bool operator()(const scoped_refptr<H264Picture>& a, |
| 327 const scoped_refptr<H264Picture>& b) const { |
745 return a->pic_num > b->pic_num; | 328 return a->pic_num > b->pic_num; |
746 } | 329 } |
747 }; | 330 }; |
748 | 331 |
749 struct LongTermPicNumAscCompare { | 332 struct LongTermPicNumAscCompare { |
750 bool operator()(const H264Picture* a, const H264Picture* b) const { | 333 bool operator()(const scoped_refptr<H264Picture>& a, |
| 334 const scoped_refptr<H264Picture>& b) const { |
751 return a->long_term_pic_num < b->long_term_pic_num; | 335 return a->long_term_pic_num < b->long_term_pic_num; |
752 } | 336 } |
753 }; | 337 }; |
754 | 338 |
755 void VaapiH264Decoder::ConstructReferencePicListsP( | 339 void H264Decoder::ConstructReferencePicListsP( |
756 media::H264SliceHeader* slice_hdr) { | 340 media::H264SliceHeader* slice_hdr) { |
757 // RefPicList0 (8.2.4.2.1) [[1] [2]], where: | 341 // RefPicList0 (8.2.4.2.1) [[1] [2]], where: |
758 // [1] shortterm ref pics sorted by descending pic_num, | 342 // [1] shortterm ref pics sorted by descending pic_num, |
759 // [2] longterm ref pics by ascending long_term_pic_num. | 343 // [2] longterm ref pics by ascending long_term_pic_num. |
760 DCHECK(ref_pic_list0_.empty() && ref_pic_list1_.empty()); | 344 ref_pic_list_p0_.clear(); |
| 345 |
761 // First get the short ref pics... | 346 // First get the short ref pics... |
762 dpb_.GetShortTermRefPicsAppending(ref_pic_list0_); | 347 dpb_.GetShortTermRefPicsAppending(&ref_pic_list_p0_); |
763 size_t num_short_refs = ref_pic_list0_.size(); | 348 size_t num_short_refs = ref_pic_list_p0_.size(); |
764 | 349 |
765 // and sort them to get [1]. | 350 // and sort them to get [1]. |
766 std::sort(ref_pic_list0_.begin(), ref_pic_list0_.end(), PicNumDescCompare()); | 351 std::sort(ref_pic_list_p0_.begin(), ref_pic_list_p0_.end(), |
| 352 PicNumDescCompare()); |
767 | 353 |
768 // Now get long term pics and sort them by long_term_pic_num to get [2]. | 354 // Now get long term pics and sort them by long_term_pic_num to get [2]. |
769 dpb_.GetLongTermRefPicsAppending(ref_pic_list0_); | 355 dpb_.GetLongTermRefPicsAppending(&ref_pic_list_p0_); |
770 std::sort(ref_pic_list0_.begin() + num_short_refs, ref_pic_list0_.end(), | 356 std::sort(ref_pic_list_p0_.begin() + num_short_refs, ref_pic_list_p0_.end(), |
771 LongTermPicNumAscCompare()); | 357 LongTermPicNumAscCompare()); |
772 | 358 |
773 // Cut off if we have more than requested in slice header. | 359 // Cut off if we have more than requested in slice header. |
774 ref_pic_list0_.resize(slice_hdr->num_ref_idx_l0_active_minus1 + 1); | 360 ref_pic_list_p0_.resize(slice_hdr->num_ref_idx_l0_active_minus1 + 1); |
775 } | 361 } |
776 | 362 |
777 struct POCAscCompare { | 363 struct POCAscCompare { |
778 bool operator()(const H264Picture* a, const H264Picture* b) const { | 364 bool operator()(const scoped_refptr<H264Picture>& a, |
| 365 const scoped_refptr<H264Picture>& b) const { |
779 return a->pic_order_cnt < b->pic_order_cnt; | 366 return a->pic_order_cnt < b->pic_order_cnt; |
780 } | 367 } |
781 }; | 368 }; |
782 | 369 |
783 struct POCDescCompare { | 370 struct POCDescCompare { |
784 bool operator()(const H264Picture* a, const H264Picture* b) const { | 371 bool operator()(const scoped_refptr<H264Picture>& a, |
| 372 const scoped_refptr<H264Picture>& b) const { |
785 return a->pic_order_cnt > b->pic_order_cnt; | 373 return a->pic_order_cnt > b->pic_order_cnt; |
786 } | 374 } |
787 }; | 375 }; |
788 | 376 |
789 void VaapiH264Decoder::ConstructReferencePicListsB( | 377 void H264Decoder::ConstructReferencePicListsB( |
790 media::H264SliceHeader* slice_hdr) { | 378 media::H264SliceHeader* slice_hdr) { |
791 // RefPicList0 (8.2.4.2.3) [[1] [2] [3]], where: | 379 // RefPicList0 (8.2.4.2.3) [[1] [2] [3]], where: |
792 // [1] shortterm ref pics with POC < curr_pic's POC sorted by descending POC, | 380 // [1] shortterm ref pics with POC < curr_pic's POC sorted by descending POC, |
793 // [2] shortterm ref pics with POC > curr_pic's POC by ascending POC, | 381 // [2] shortterm ref pics with POC > curr_pic's POC by ascending POC, |
794 // [3] longterm ref pics by ascending long_term_pic_num. | 382 // [3] longterm ref pics by ascending long_term_pic_num. |
795 DCHECK(ref_pic_list0_.empty() && ref_pic_list1_.empty()); | 383 ref_pic_list_b0_.clear(); |
796 dpb_.GetShortTermRefPicsAppending(ref_pic_list0_); | 384 ref_pic_list_b1_.clear(); |
797 size_t num_short_refs = ref_pic_list0_.size(); | 385 dpb_.GetShortTermRefPicsAppending(&ref_pic_list_b0_); |
| 386 size_t num_short_refs = ref_pic_list_b0_.size(); |
798 | 387 |
799 // First sort ascending, this will put [1] in right place and finish [2]. | 388 // First sort ascending, this will put [1] in right place and finish [2]. |
800 std::sort(ref_pic_list0_.begin(), ref_pic_list0_.end(), POCAscCompare()); | 389 std::sort(ref_pic_list_b0_.begin(), ref_pic_list_b0_.end(), POCAscCompare()); |
801 | 390 |
802 // Find first with POC > curr_pic's POC to get first element in [2]... | 391 // Find first with POC > curr_pic's POC to get first element in [2]... |
803 H264Picture::PtrVector::iterator iter; | 392 H264Picture::Vector::iterator iter; |
804 iter = std::upper_bound(ref_pic_list0_.begin(), ref_pic_list0_.end(), | 393 iter = std::upper_bound(ref_pic_list_b0_.begin(), ref_pic_list_b0_.end(), |
805 curr_pic_.get(), POCAscCompare()); | 394 curr_pic_.get(), POCAscCompare()); |
806 | 395 |
807 // and sort [1] descending, thus finishing sequence [1] [2]. | 396 // and sort [1] descending, thus finishing sequence [1] [2]. |
808 std::sort(ref_pic_list0_.begin(), iter, POCDescCompare()); | 397 std::sort(ref_pic_list_b0_.begin(), iter, POCDescCompare()); |
809 | 398 |
810 // Now add [3] and sort by ascending long_term_pic_num. | 399 // Now add [3] and sort by ascending long_term_pic_num. |
811 dpb_.GetLongTermRefPicsAppending(ref_pic_list0_); | 400 dpb_.GetLongTermRefPicsAppending(&ref_pic_list_b0_); |
812 std::sort(ref_pic_list0_.begin() + num_short_refs, ref_pic_list0_.end(), | 401 std::sort(ref_pic_list_b0_.begin() + num_short_refs, ref_pic_list_b0_.end(), |
813 LongTermPicNumAscCompare()); | 402 LongTermPicNumAscCompare()); |
814 | 403 |
815 // RefPicList1 (8.2.4.2.4) [[1] [2] [3]], where: | 404 // RefPicList1 (8.2.4.2.4) [[1] [2] [3]], where: |
816 // [1] shortterm ref pics with POC > curr_pic's POC sorted by ascending POC, | 405 // [1] shortterm ref pics with POC > curr_pic's POC sorted by ascending POC, |
817 // [2] shortterm ref pics with POC < curr_pic's POC by descending POC, | 406 // [2] shortterm ref pics with POC < curr_pic's POC by descending POC, |
818 // [3] longterm ref pics by ascending long_term_pic_num. | 407 // [3] longterm ref pics by ascending long_term_pic_num. |
819 | 408 |
820 dpb_.GetShortTermRefPicsAppending(ref_pic_list1_); | 409 dpb_.GetShortTermRefPicsAppending(&ref_pic_list_b1_); |
821 num_short_refs = ref_pic_list1_.size(); | 410 num_short_refs = ref_pic_list_b1_.size(); |
822 | 411 |
823 // First sort by descending POC. | 412 // First sort by descending POC. |
824 std::sort(ref_pic_list1_.begin(), ref_pic_list1_.end(), POCDescCompare()); | 413 std::sort(ref_pic_list_b1_.begin(), ref_pic_list_b1_.end(), POCDescCompare()); |
825 | 414 |
826 // Find first with POC < curr_pic's POC to get first element in [2]... | 415 // Find first with POC < curr_pic's POC to get first element in [2]... |
827 iter = std::upper_bound(ref_pic_list1_.begin(), ref_pic_list1_.end(), | 416 iter = std::upper_bound(ref_pic_list_b1_.begin(), ref_pic_list_b1_.end(), |
828 curr_pic_.get(), POCDescCompare()); | 417 curr_pic_.get(), POCDescCompare()); |
829 | 418 |
830 // and sort [1] ascending. | 419 // and sort [1] ascending. |
831 std::sort(ref_pic_list1_.begin(), iter, POCAscCompare()); | 420 std::sort(ref_pic_list_b1_.begin(), iter, POCAscCompare()); |
832 | 421 |
833 // Now add [3] and sort by ascending long_term_pic_num | 422 // Now add [3] and sort by ascending long_term_pic_num |
834 dpb_.GetShortTermRefPicsAppending(ref_pic_list1_); | 423 dpb_.GetShortTermRefPicsAppending(&ref_pic_list_b1_); |
835 std::sort(ref_pic_list1_.begin() + num_short_refs, ref_pic_list1_.end(), | 424 std::sort(ref_pic_list_b1_.begin() + num_short_refs, ref_pic_list_b1_.end(), |
836 LongTermPicNumAscCompare()); | 425 LongTermPicNumAscCompare()); |
837 | 426 |
838 // If lists identical, swap first two entries in RefPicList1 (spec 8.2.4.2.3) | 427 // If lists identical, swap first two entries in RefPicList1 (spec 8.2.4.2.3) |
839 if (ref_pic_list1_.size() > 1 && | 428 if (ref_pic_list_b1_.size() > 1 && |
840 std::equal(ref_pic_list0_.begin(), ref_pic_list0_.end(), | 429 std::equal(ref_pic_list_b0_.begin(), ref_pic_list_b0_.end(), |
841 ref_pic_list1_.begin())) | 430 ref_pic_list_b1_.begin())) |
842 std::swap(ref_pic_list1_[0], ref_pic_list1_[1]); | 431 std::swap(ref_pic_list_b1_[0], ref_pic_list_b1_[1]); |
843 | 432 |
844 // Per 8.2.4.2 it's possible for num_ref_idx_lX_active_minus1 to indicate | 433 // Per 8.2.4.2 it's possible for num_ref_idx_lX_active_minus1 to indicate |
845 // there should be more ref pics on list than we constructed. | 434 // there should be more ref pics on list than we constructed. |
846 // Those superfluous ones should be treated as non-reference. | 435 // Those superfluous ones should be treated as non-reference. |
847 ref_pic_list0_.resize(slice_hdr->num_ref_idx_l0_active_minus1 + 1); | 436 ref_pic_list_b0_.resize(slice_hdr->num_ref_idx_l0_active_minus1 + 1); |
848 ref_pic_list1_.resize(slice_hdr->num_ref_idx_l1_active_minus1 + 1); | 437 ref_pic_list_b1_.resize(slice_hdr->num_ref_idx_l1_active_minus1 + 1); |
849 } | 438 } |
850 | 439 |
851 // See 8.2.4 | 440 // See 8.2.4 |
852 int VaapiH264Decoder::PicNumF(H264Picture *pic) { | 441 int H264Decoder::PicNumF(const scoped_refptr<H264Picture>& pic) { |
853 if (!pic) | 442 if (!pic) |
854 return -1; | 443 return -1; |
855 | 444 |
856 if (!pic->long_term) | 445 if (!pic->long_term) |
857 return pic->pic_num; | 446 return pic->pic_num; |
858 else | 447 else |
859 return max_pic_num_; | 448 return max_pic_num_; |
860 } | 449 } |
861 | 450 |
862 // See 8.2.4 | 451 // See 8.2.4 |
863 int VaapiH264Decoder::LongTermPicNumF(H264Picture *pic) { | 452 int H264Decoder::LongTermPicNumF(const scoped_refptr<H264Picture>& pic) { |
864 if (pic->ref && pic->long_term) | 453 if (pic->ref && pic->long_term) |
865 return pic->long_term_pic_num; | 454 return pic->long_term_pic_num; |
866 else | 455 else |
867 return 2 * (max_long_term_frame_idx_ + 1); | 456 return 2 * (max_long_term_frame_idx_ + 1); |
868 } | 457 } |
869 | 458 |
870 // Shift elements on the |v| starting from |from| to |to|, inclusive, | 459 // Shift elements on the |v| starting from |from| to |to|, inclusive, |
871 // one position to the right and insert pic at |from|. | 460 // one position to the right and insert pic at |from|. |
872 static void ShiftRightAndInsert(H264Picture::PtrVector *v, | 461 static void ShiftRightAndInsert(H264Picture::Vector* v, |
873 int from, | 462 int from, |
874 int to, | 463 int to, |
875 H264Picture* pic) { | 464 const scoped_refptr<H264Picture>& pic) { |
876 // Security checks, do not disable in Debug mode. | 465 // Security checks, do not disable in Debug mode. |
877 CHECK(from <= to); | 466 CHECK(from <= to); |
878 CHECK(to <= std::numeric_limits<int>::max() - 2); | 467 CHECK(to <= std::numeric_limits<int>::max() - 2); |
879 // Additional checks. Debug mode ok. | 468 // Additional checks. Debug mode ok. |
880 DCHECK(v); | 469 DCHECK(v); |
881 DCHECK(pic); | 470 DCHECK(pic); |
882 DCHECK((to + 1 == static_cast<int>(v->size())) || | 471 DCHECK((to + 1 == static_cast<int>(v->size())) || |
883 (to + 2 == static_cast<int>(v->size()))); | 472 (to + 2 == static_cast<int>(v->size()))); |
884 | 473 |
885 v->resize(to + 2); | 474 v->resize(to + 2); |
886 | 475 |
887 for (int i = to + 1; i > from; --i) | 476 for (int i = to + 1; i > from; --i) |
888 (*v)[i] = (*v)[i - 1]; | 477 (*v)[i] = (*v)[i - 1]; |
889 | 478 |
890 (*v)[from] = pic; | 479 (*v)[from] = pic; |
891 } | 480 } |
892 | 481 |
893 bool VaapiH264Decoder::ModifyReferencePicList(media::H264SliceHeader* slice_hdr, | 482 bool H264Decoder::ModifyReferencePicList(media::H264SliceHeader* slice_hdr, |
894 int list) { | 483 int list, |
| 484 H264Picture::Vector* ref_pic_listx) { |
895 int num_ref_idx_lX_active_minus1; | 485 int num_ref_idx_lX_active_minus1; |
896 H264Picture::PtrVector* ref_pic_listx; | |
897 media::H264ModificationOfPicNum* list_mod; | 486 media::H264ModificationOfPicNum* list_mod; |
898 | 487 |
899 // This can process either ref_pic_list0 or ref_pic_list1, depending on | 488 // This can process either ref_pic_list0 or ref_pic_list1, depending on |
900 // the list argument. Set up pointers to proper list to be processed here. | 489 // the list argument. Set up pointers to proper list to be processed here. |
901 if (list == 0) { | 490 if (list == 0) { |
902 if (!slice_hdr->ref_pic_list_modification_flag_l0) | 491 if (!slice_hdr->ref_pic_list_modification_flag_l0) |
903 return true; | 492 return true; |
904 | 493 |
905 list_mod = slice_hdr->ref_list_l0_modifications; | 494 list_mod = slice_hdr->ref_list_l0_modifications; |
906 num_ref_idx_lX_active_minus1 = ref_pic_list0_.size() - 1; | |
907 | |
908 ref_pic_listx = &ref_pic_list0_; | |
909 } else { | 495 } else { |
910 if (!slice_hdr->ref_pic_list_modification_flag_l1) | 496 if (!slice_hdr->ref_pic_list_modification_flag_l1) |
911 return true; | 497 return true; |
912 | 498 |
913 list_mod = slice_hdr->ref_list_l1_modifications; | 499 list_mod = slice_hdr->ref_list_l1_modifications; |
914 num_ref_idx_lX_active_minus1 = ref_pic_list1_.size() - 1; | |
915 | |
916 ref_pic_listx = &ref_pic_list1_; | |
917 } | 500 } |
918 | 501 |
| 502 num_ref_idx_lX_active_minus1 = ref_pic_listx->size() - 1; |
919 DCHECK_GE(num_ref_idx_lX_active_minus1, 0); | 503 DCHECK_GE(num_ref_idx_lX_active_minus1, 0); |
920 | 504 |
921 // Spec 8.2.4.3: | 505 // Spec 8.2.4.3: |
922 // Reorder pictures on the list in a way specified in the stream. | 506 // Reorder pictures on the list in a way specified in the stream. |
923 int pic_num_lx_pred = curr_pic_->pic_num; | 507 int pic_num_lx_pred = curr_pic_->pic_num; |
924 int ref_idx_lx = 0; | 508 int ref_idx_lx = 0; |
925 int pic_num_lx_no_wrap; | 509 int pic_num_lx_no_wrap; |
926 int pic_num_lx; | 510 int pic_num_lx; |
927 bool done = false; | 511 bool done = false; |
928 H264Picture* pic; | 512 scoped_refptr<H264Picture> pic; |
929 for (int i = 0; i < media::H264SliceHeader::kRefListModSize && !done; ++i) { | 513 for (int i = 0; i < media::H264SliceHeader::kRefListModSize && !done; ++i) { |
930 switch (list_mod->modification_of_pic_nums_idc) { | 514 switch (list_mod->modification_of_pic_nums_idc) { |
931 case 0: | 515 case 0: |
932 case 1: | 516 case 1: |
933 // Modify short reference picture position. | 517 // Modify short reference picture position. |
934 if (list_mod->modification_of_pic_nums_idc == 0) { | 518 if (list_mod->modification_of_pic_nums_idc == 0) { |
935 // Subtract given value from predicted PicNum. | 519 // Subtract given value from predicted PicNum. |
936 pic_num_lx_no_wrap = pic_num_lx_pred - | 520 pic_num_lx_no_wrap = pic_num_lx_pred - |
937 (static_cast<int>(list_mod->abs_diff_pic_num_minus1) + 1); | 521 (static_cast<int>(list_mod->abs_diff_pic_num_minus1) + 1); |
938 // Wrap around max_pic_num_ if it becomes < 0 as result | 522 // Wrap around max_pic_num_ if it becomes < 0 as result |
(...skipping 45 matching lines...)
984 DVLOG(1) << "Malformed stream, no pic num " | 568 DVLOG(1) << "Malformed stream, no pic num " |
985 << list_mod->long_term_pic_num; | 569 << list_mod->long_term_pic_num; |
986 return false; | 570 return false; |
987 } | 571 } |
988 ShiftRightAndInsert(ref_pic_listx, ref_idx_lx, | 572 ShiftRightAndInsert(ref_pic_listx, ref_idx_lx, |
989 num_ref_idx_lX_active_minus1, pic); | 573 num_ref_idx_lX_active_minus1, pic); |
990 ref_idx_lx++; | 574 ref_idx_lx++; |
991 | 575 |
992 for (int src = ref_idx_lx, dst = ref_idx_lx; | 576 for (int src = ref_idx_lx, dst = ref_idx_lx; |
993 src <= num_ref_idx_lX_active_minus1 + 1; ++src) { | 577 src <= num_ref_idx_lX_active_minus1 + 1; ++src) { |
994 if (LongTermPicNumF((*ref_pic_listx)[src]) | 578 if (LongTermPicNumF((*ref_pic_listx)[src]) != |
995 != static_cast<int>(list_mod->long_term_pic_num)) | 579 static_cast<int>(list_mod->long_term_pic_num)) |
996 (*ref_pic_listx)[dst++] = (*ref_pic_listx)[src]; | 580 (*ref_pic_listx)[dst++] = (*ref_pic_listx)[src]; |
997 } | 581 } |
998 break; | 582 break; |
999 | 583 |
1000 case 3: | 584 case 3: |
1001 // End of modification list. | 585 // End of modification list. |
1002 done = true; | 586 done = true; |
1003 break; | 587 break; |
1004 | 588 |
1005 default: | 589 default: |
1006 // May be recoverable. | 590 // May be recoverable. |
1007 DVLOG(1) << "Invalid modification_of_pic_nums_idc=" | 591 DVLOG(1) << "Invalid modification_of_pic_nums_idc=" |
1008 << list_mod->modification_of_pic_nums_idc | 592 << list_mod->modification_of_pic_nums_idc |
1009 << " in position " << i; | 593 << " in position " << i; |
1010 break; | 594 break; |
1011 } | 595 } |
1012 | 596 |
1013 ++list_mod; | 597 ++list_mod; |
1014 } | 598 } |
1015 | 599 |
1016 // Per NOTE 2 in 8.2.4.3.2, the ref_pic_listx size in the above loop is | 600 // Per NOTE 2 in 8.2.4.3.2, the ref_pic_listx size in the above loop is |
1017 // temporarily made one element longer than the required final list. | 601 // temporarily made one element longer than the required final list. |
1018 // Resize the list back to its required size. | 602 // Resize the list back to its required size. |
1019 ref_pic_listx->resize(num_ref_idx_lX_active_minus1 + 1); | 603 ref_pic_listx->resize(num_ref_idx_lX_active_minus1 + 1); |
1020 | 604 |
1021 return true; | 605 return true; |
1022 } | 606 } |
1023 | 607 |
1024 bool VaapiH264Decoder::OutputPic(H264Picture* pic) { | 608 void H264Decoder::OutputPic(scoped_refptr<H264Picture> pic) { |
1025 DCHECK(!pic->outputted); | 609 DCHECK(!pic->outputted); |
1026 pic->outputted = true; | 610 pic->outputted = true; |
1027 last_output_poc_ = pic->pic_order_cnt; | 611 last_output_poc_ = pic->pic_order_cnt; |
1028 | 612 |
1029 DecodeSurface* dec_surface = DecodeSurfaceByPoC(pic->pic_order_cnt); | 613 DVLOG(4) << "Posting output task for POC: " << pic->pic_order_cnt; |
1030 if (!dec_surface) | 614 accelerator_->OutputPicture(pic); |
1031 return false; | 615 } |
1032 | 616 |
1033 DCHECK_GE(dec_surface->input_id(), 0); | 617 void H264Decoder::ClearDPB() { |
1034 DVLOG(4) << "Posting output task for POC: " << pic->pic_order_cnt | 618 // Clear DPB contents, marking the pictures as unused first. |
1035 << " input_id: " << dec_surface->input_id(); | 619 dpb_.Clear(); |
1036 output_pic_cb_.Run(dec_surface->input_id(), dec_surface->va_surface()); | 620 last_output_poc_ = std::numeric_limits<int>::min(); |
| 621 } |
| 622 |
| 623 bool H264Decoder::OutputAllRemainingPics() { |
| 624 // Output all pictures that are waiting to be outputted. |
| 625 FinishPrevFrameIfPresent(); |
| 626 H264Picture::Vector to_output; |
| 627 dpb_.GetNotOutputtedPicsAppending(&to_output); |
| 628 // Sort them by ascending POC to output in order. |
| 629 std::sort(to_output.begin(), to_output.end(), POCAscCompare()); |
| 630 |
| 631 for (auto& pic : to_output) |
| 632 OutputPic(pic); |
1037 | 633 |
1038 return true; | 634 return true; |
1039 } | 635 } |
1040 | 636 |
1041 void VaapiH264Decoder::ClearDPB() { | 637 bool H264Decoder::Flush() { |
1042 // Clear DPB contents, marking the pictures as unused first. | |
1043 for (H264DPB::Pictures::iterator it = dpb_.begin(); it != dpb_.end(); ++it) | |
1044 UnassignSurfaceFromPoC((*it)->pic_order_cnt); | |
1045 | |
1046 dpb_.Clear(); | |
1047 last_output_poc_ = std::numeric_limits<int>::min(); | |
1048 } | |
1049 | |
1050 bool VaapiH264Decoder::OutputAllRemainingPics() { | |
1051 // Output all pictures that are waiting to be outputted. | |
1052 FinishPrevFrameIfPresent(); | |
1053 H264Picture::PtrVector to_output; | |
1054 dpb_.GetNotOutputtedPicsAppending(to_output); | |
1055 // Sort them by ascending POC to output in order. | |
1056 std::sort(to_output.begin(), to_output.end(), POCAscCompare()); | |
1057 | |
1058 H264Picture::PtrVector::iterator it; | |
1059 for (it = to_output.begin(); it != to_output.end(); ++it) { | |
1060 if (!OutputPic(*it)) { | |
1061 DVLOG(1) << "Failed to output pic POC: " << (*it)->pic_order_cnt; | |
1062 return false; | |
1063 } | |
1064 } | |
1065 | |
1066 return true; | |
1067 } | |
1068 | |
1069 bool VaapiH264Decoder::Flush() { | |
1070 DVLOG(2) << "Decoder flush"; | 638 DVLOG(2) << "Decoder flush"; |
1071 | 639 |
1072 if (!OutputAllRemainingPics()) | 640 if (!OutputAllRemainingPics()) |
1073 return false; | 641 return false; |
1074 | 642 |
1075 ClearDPB(); | 643 ClearDPB(); |
1076 | 644 DVLOG(2) << "Decoder flush finished"; |
1077 DCHECK(decode_surfaces_in_use_.empty()); | |
1078 return true; | 645 return true; |
1079 } | 646 } |
1080 | 647 |
1081 bool VaapiH264Decoder::StartNewFrame(media::H264SliceHeader* slice_hdr) { | 648 bool H264Decoder::StartNewFrame(media::H264SliceHeader* slice_hdr) { |
1082 // TODO posciak: add handling of max_num_ref_frames per spec. | 649 // TODO posciak: add handling of max_num_ref_frames per spec. |
1083 | |
1084 // If the new frame is an IDR, output what's left to output and clear DPB | |
1085 if (slice_hdr->idr_pic_flag) { | |
1086 // (unless we are explicitly instructed not to do so). | |
1087 if (!slice_hdr->no_output_of_prior_pics_flag) { | |
1088 // Output DPB contents. | |
1089 if (!Flush()) | |
1090 return false; | |
1091 } | |
1092 dpb_.Clear(); | |
1093 last_output_poc_ = std::numeric_limits<int>::min(); | |
1094 } | |
1095 | |
1096 // curr_pic_ should have either been added to DPB or discarded when finishing | |
1097 // the last frame. DPB is responsible for releasing that memory once it's | |
1098 // not needed anymore. | |
1099 DCHECK(!curr_pic_.get()); | |
1100 curr_pic_.reset(new H264Picture); | |
1101 CHECK(curr_pic_.get()); | 650 CHECK(curr_pic_.get()); |
1102 | 651 |
1103 if (!InitCurrPicture(slice_hdr)) | 652 if (!InitCurrPicture(slice_hdr)) |
1104 return false; | 653 return false; |
1105 | 654 |
1106 DCHECK_GT(max_frame_num_, 0); | 655 DCHECK_GT(max_frame_num_, 0); |
1107 | 656 |
1108 UpdatePicNums(); | 657 UpdatePicNums(); |
| 658 DCHECK(slice_hdr); |
| 659 PrepareRefPicLists(slice_hdr); |
1109 | 660 |
1110 // Send parameter buffers before each new picture, before the first slice. | 661 const media::H264PPS* pps = parser_.GetPPS(curr_pps_id_); |
1111 if (!SendPPS()) | 662 DCHECK(pps); |
1112 return false; | 663 const media::H264SPS* sps = parser_.GetSPS(pps->seq_parameter_set_id); |
| 664 DCHECK(sps); |
1113 | 665 |
1114 if (!SendIQMatrix()) | 666 if (!accelerator_->SubmitFrameMetadata(sps, pps, dpb_, ref_pic_list_p0_, |
1115 return false; | 667 ref_pic_list_b0_, ref_pic_list_b1_, |
1116 | 668 curr_pic_.get())) |
1117 if (!QueueSlice(slice_hdr)) | |
1118 return false; | 669 return false; |
1119 | 670 |
1120 return true; | 671 return true; |
1121 } | 672 } |
1122 | 673 |
1123 bool VaapiH264Decoder::HandleMemoryManagementOps() { | 674 bool H264Decoder::HandleMemoryManagementOps() { |
1124 // 8.2.5.4 | 675 // 8.2.5.4 |
1125 for (unsigned int i = 0; i < arraysize(curr_pic_->ref_pic_marking); ++i) { | 676 for (unsigned int i = 0; i < arraysize(curr_pic_->ref_pic_marking); ++i) { |
1126 // Code below does not support interlaced stream (per-field pictures). | 677 // Code below does not support interlaced stream (per-field pictures). |
1127 media::H264DecRefPicMarking* ref_pic_marking = | 678 media::H264DecRefPicMarking* ref_pic_marking = |
1128 &curr_pic_->ref_pic_marking[i]; | 679 &curr_pic_->ref_pic_marking[i]; |
1129 H264Picture* to_mark; | 680 scoped_refptr<H264Picture> to_mark; |
1130 int pic_num_x; | 681 int pic_num_x; |
1131 | 682 |
1132 switch (ref_pic_marking->memory_mgmnt_control_operation) { | 683 switch (ref_pic_marking->memory_mgmnt_control_operation) { |
1133 case 0: | 684 case 0: |
1134 // Normal end of operations' specification. | 685 // Normal end of operations' specification. |
1135 return true; | 686 return true; |
1136 | 687 |
1137 case 1: | 688 case 1: |
1138 // Mark a short term reference picture as unused so it can be removed | 689 // Mark a short term reference picture as unused so it can be removed |
1139 // if outputted. | 690 // if outputted. |
1140 pic_num_x = curr_pic_->pic_num - | 691 pic_num_x = curr_pic_->pic_num - |
1141 (ref_pic_marking->difference_of_pic_nums_minus1 + 1); | 692 (ref_pic_marking->difference_of_pic_nums_minus1 + 1); |
1142 to_mark = dpb_.GetShortRefPicByPicNum(pic_num_x); | 693 to_mark = dpb_.GetShortRefPicByPicNum(pic_num_x); |
1143 if (to_mark) { | 694 if (to_mark) { |
1144 to_mark->ref = false; | 695 to_mark->ref = false; |
1145 } else { | 696 } else { |
1146 DVLOG(1) << "Invalid short ref pic num to unmark"; | 697 DVLOG(1) << "Invalid short ref pic num to unmark"; |
1147 return false; | 698 return false; |
1148 } | 699 } |
1149 break; | 700 break; |
1150 | 701 |
1151 case 2: | 702 case 2: |
1152 // Mark a long term reference picture as unused so it can be removed | 703 // Mark a long term reference picture as unused so it can be removed |
1153 // if outputted. | 704 // if outputted. |
1154 to_mark = dpb_.GetLongRefPicByLongTermPicNum( | 705 to_mark = dpb_.GetLongRefPicByLongTermPicNum( |
1155 ref_pic_marking->long_term_pic_num); | 706 ref_pic_marking->long_term_pic_num); |
1156 if (to_mark) { | 707 if (to_mark) { |
1157 to_mark->ref = false; | 708 to_mark->ref = false; |
1158 } else { | 709 } else { |
1159 DVLOG(1) << "Invalid long term ref pic num to unmark"; | 710 DVLOG(1) << "Invalid long term ref pic num to unmark"; |
1160 return false; | 711 return false; |
1161 } | 712 } |
1162 break; | 713 break; |
1163 | 714 |
1164 case 3: | 715 case 3: |
1165 // Mark a short term reference picture as long term reference. | 716 // Mark a short term reference picture as long term reference. |
1166 pic_num_x = curr_pic_->pic_num - | 717 pic_num_x = curr_pic_->pic_num - |
1167 (ref_pic_marking->difference_of_pic_nums_minus1 + 1); | 718 (ref_pic_marking->difference_of_pic_nums_minus1 + 1); |
1168 to_mark = dpb_.GetShortRefPicByPicNum(pic_num_x); | 719 to_mark = dpb_.GetShortRefPicByPicNum(pic_num_x); |
1169 if (to_mark) { | 720 if (to_mark) { |
1170 DCHECK(to_mark->ref && !to_mark->long_term); | 721 DCHECK(to_mark->ref && !to_mark->long_term); |
1171 to_mark->long_term = true; | 722 to_mark->long_term = true; |
1172 to_mark->long_term_frame_idx = ref_pic_marking->long_term_frame_idx; | 723 to_mark->long_term_frame_idx = ref_pic_marking->long_term_frame_idx; |
1173 } else { | 724 } else { |
1174 DVLOG(1) << "Invalid short term ref pic num to mark as long ref"; | 725 DVLOG(1) << "Invalid short term ref pic num to mark as long ref"; |
1175 return false; | 726 return false; |
1176 } | 727 } |
1177 break; | 728 break; |
1178 | 729 |
1179 case 4: { | 730 case 4: { |
1180 // Unmark all reference pictures with long_term_frame_idx over new max. | 731 // Unmark all reference pictures with long_term_frame_idx over new max. |
1181 max_long_term_frame_idx_ | 732 max_long_term_frame_idx_ = |
1182 = ref_pic_marking->max_long_term_frame_idx_plus1 - 1; | 733 ref_pic_marking->max_long_term_frame_idx_plus1 - 1; |
1183 H264Picture::PtrVector long_terms; | 734 H264Picture::Vector long_terms; |
1184 dpb_.GetLongTermRefPicsAppending(long_terms); | 735 dpb_.GetLongTermRefPicsAppending(&long_terms); |
1185 for (size_t i = 0; i < long_terms.size(); ++i) { | 736 for (size_t i = 0; i < long_terms.size(); ++i) { |
1186 H264Picture* pic = long_terms[i]; | 737 scoped_refptr<H264Picture>& pic = long_terms[i]; |
1187 DCHECK(pic->ref && pic->long_term); | 738 DCHECK(pic->ref && pic->long_term); |
1188 // Ok to cast, max_long_term_frame_idx is much smaller than 16bit. | 739 // Ok to cast, max_long_term_frame_idx is much smaller than 16bit. |
1189 if (pic->long_term_frame_idx > | 740 if (pic->long_term_frame_idx > |
1190 static_cast<int>(max_long_term_frame_idx_)) | 741 static_cast<int>(max_long_term_frame_idx_)) |
1191 pic->ref = false; | 742 pic->ref = false; |
1192 } | 743 } |
1193 break; | 744 break; |
1194 } | 745 } |
1195 | 746 |
1196 case 5: | 747 case 5: |
1197 // Unmark all reference pictures. | 748 // Unmark all reference pictures. |
1198 dpb_.MarkAllUnusedForRef(); | 749 dpb_.MarkAllUnusedForRef(); |
1199 max_long_term_frame_idx_ = -1; | 750 max_long_term_frame_idx_ = -1; |
1200 curr_pic_->mem_mgmt_5 = true; | 751 curr_pic_->mem_mgmt_5 = true; |
1201 break; | 752 break; |
1202 | 753 |
1203 case 6: { | 754 case 6: { |
1204 // Replace long term reference pictures with current picture. | 755 // Replace long term reference pictures with current picture. |
1205 // First unmark if any existing with this long_term_frame_idx... | 756 // First unmark if any existing with this long_term_frame_idx... |
1206 H264Picture::PtrVector long_terms; | 757 H264Picture::Vector long_terms; |
1207 dpb_.GetLongTermRefPicsAppending(long_terms); | 758 dpb_.GetLongTermRefPicsAppending(&long_terms); |
1208 for (size_t i = 0; i < long_terms.size(); ++i) { | 759 for (size_t i = 0; i < long_terms.size(); ++i) { |
1209 H264Picture* pic = long_terms[i]; | 760 scoped_refptr<H264Picture>& pic = long_terms[i]; |
1210 DCHECK(pic->ref && pic->long_term); | 761 DCHECK(pic->ref && pic->long_term); |
1211 // Ok to cast, long_term_frame_idx is much smaller than 16 bits. | 762 // Ok to cast, long_term_frame_idx is much smaller than 16 bits. |
1212 if (pic->long_term_frame_idx == | 763 if (pic->long_term_frame_idx == |
1213 static_cast<int>(ref_pic_marking->long_term_frame_idx)) | 764 static_cast<int>(ref_pic_marking->long_term_frame_idx)) |
1214 pic->ref = false; | 765 pic->ref = false; |
1215 } | 766 } |
1216 | 767 |
1217 // and mark the current one instead. | 768 // and mark the current one instead. |
1218 curr_pic_->ref = true; | 769 curr_pic_->ref = true; |
1219 curr_pic_->long_term = true; | 770 curr_pic_->long_term = true; |
1220 curr_pic_->long_term_frame_idx = ref_pic_marking->long_term_frame_idx; | 771 curr_pic_->long_term_frame_idx = ref_pic_marking->long_term_frame_idx; |
1221 break; | 772 break; |
1222 } | 773 } |
1223 | 774 |
1224 default: | 775 default: |
1225 // Would indicate a bug in the parser. | 776 // Would indicate a bug in the parser. |
1226 NOTREACHED(); | 777 NOTREACHED(); |
1227 } | 778 } |
1228 } | 779 } |
1229 | 780 |
1230 return true; | 781 return true; |
1231 } | 782 } |
1232 | 783 |
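For reference, MMCO types 1 and 3 in the switch above both locate their target picture indirectly through pic_num_x rather than by naming it, per spec 8.2.5.4.1. A minimal, self-contained sketch of that lookup, using a hypothetical ShortTermRef type rather than the real H264Picture/H264DPB classes:

    #include <vector>

    struct ShortTermRef {        // Hypothetical stand-in for H264Picture.
      int pic_num;
      bool ref;
      bool long_term;
    };

    // Spec 8.2.5.4.1: picNumX = CurrPicNum - (difference_of_pic_nums_minus1 + 1).
    // Returns the matching short-term reference, or nullptr if the stream
    // refers to a picture that is not present (treated as an error above).
    ShortTermRef* FindByPicNumX(std::vector<ShortTermRef>& short_refs,
                                int curr_pic_num,
                                int difference_of_pic_nums_minus1) {
      const int pic_num_x =
          curr_pic_num - (difference_of_pic_nums_minus1 + 1);
      for (ShortTermRef& r : short_refs) {
        if (r.ref && !r.long_term && r.pic_num == pic_num_x)
          return &r;
      }
      return nullptr;
    }
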
1233 // This method ensures that the DPB does not overflow, either by removing | 784 // This method ensures that the DPB does not overflow, either by removing |
1234 // reference pictures as specified in the stream, or by using a sliding | 785 // reference pictures as specified in the stream, or by using a sliding |
1235 // window procedure to remove the oldest one. | 786 // window procedure to remove the oldest one. |
1236 // It also marks and unmarks pictures as reference. | 787 // It also marks and unmarks pictures as reference. |
1237 // See spec 8.2.5.1. | 788 // See spec 8.2.5.1. |
1238 void VaapiH264Decoder::ReferencePictureMarking() { | 789 void H264Decoder::ReferencePictureMarking() { |
1239 if (curr_pic_->idr) { | 790 if (curr_pic_->idr) { |
1240 // If current picture is an IDR, all reference pictures are unmarked. | 791 // If current picture is an IDR, all reference pictures are unmarked. |
1241 dpb_.MarkAllUnusedForRef(); | 792 dpb_.MarkAllUnusedForRef(); |
1242 | 793 |
1243 if (curr_pic_->long_term_reference_flag) { | 794 if (curr_pic_->long_term_reference_flag) { |
1244 curr_pic_->long_term = true; | 795 curr_pic_->long_term = true; |
1245 curr_pic_->long_term_frame_idx = 0; | 796 curr_pic_->long_term_frame_idx = 0; |
1246 max_long_term_frame_idx_ = 0; | 797 max_long_term_frame_idx_ = 0; |
1247 } else { | 798 } else { |
1248 curr_pic_->long_term = false; | 799 curr_pic_->long_term = false; |
1249 max_long_term_frame_idx_ = -1; | 800 max_long_term_frame_idx_ = -1; |
1250 } | 801 } |
1251 } else { | 802 } else { |
1252 if (!curr_pic_->adaptive_ref_pic_marking_mode_flag) { | 803 if (!curr_pic_->adaptive_ref_pic_marking_mode_flag) { |
1253 // If non-IDR, and the stream does not indicate what we should do to | 804 // If non-IDR, and the stream does not indicate what we should do to |
1254 // ensure DPB doesn't overflow, discard oldest picture. | 805 // ensure DPB doesn't overflow, discard oldest picture. |
1255 // See spec 8.2.5.3. | 806 // See spec 8.2.5.3. |
1256 if (curr_pic_->field == H264Picture::FIELD_NONE) { | 807 if (curr_pic_->field == H264Picture::FIELD_NONE) { |
1257 DCHECK_LE(dpb_.CountRefPics(), | 808 DCHECK_LE( |
1258 std::max<int>(parser_.GetSPS(curr_sps_id_)->max_num_ref_frames, | 809 dpb_.CountRefPics(), |
1259 1)); | 810 std::max<int>(parser_.GetSPS(curr_sps_id_)->max_num_ref_frames, 1)); |
1260 if (dpb_.CountRefPics() == | 811 if (dpb_.CountRefPics() == |
1261 std::max<int>(parser_.GetSPS(curr_sps_id_)->max_num_ref_frames, | 812 std::max<int>(parser_.GetSPS(curr_sps_id_)->max_num_ref_frames, |
1262 1)) { | 813 1)) { |
1263 // Max number of reference pics reached, | 814 // Max number of reference pics reached, |
1264 // need to remove one of the short term ones. | 815 // need to remove one of the short term ones. |
1265 // Find smallest frame_num_wrap short reference picture and mark | 816 // Find smallest frame_num_wrap short reference picture and mark |
1266 // it as unused. | 817 // it as unused. |
1267 H264Picture* to_unmark = dpb_.GetLowestFrameNumWrapShortRefPic(); | 818 scoped_refptr<H264Picture> to_unmark = |
| 819 dpb_.GetLowestFrameNumWrapShortRefPic(); |
1268 if (to_unmark == NULL) { | 820 if (to_unmark == NULL) { |
1269 DVLOG(1) << "Couldn't find a short ref picture to unmark"; | 821 DVLOG(1) << "Couldn't find a short ref picture to unmark"; |
1270 return; | 822 return; |
1271 } | 823 } |
1272 to_unmark->ref = false; | 824 to_unmark->ref = false; |
1273 } | 825 } |
1274 } else { | 826 } else { |
1275 // Shouldn't get here. | 827 // Shouldn't get here. |
1276 DVLOG(1) << "Interlaced video not supported."; | 828 DVLOG(1) << "Interlaced video not supported."; |
1277 report_error_to_uma_cb_.Run(INTERLACED_STREAM); | |
1278 } | 829 } |
1279 } else { | 830 } else { |
1280 // The stream has instructions on how to discard pictures from the DPB and | 831 // The stream has instructions on how to discard pictures from the DPB and |
1281 // how to mark/unmark existing reference pictures. Do it. | 832 // how to mark/unmark existing reference pictures. Do it. |
1282 // Spec 8.2.5.4. | 833 // Spec 8.2.5.4. |
1283 if (curr_pic_->field == H264Picture::FIELD_NONE) { | 834 if (curr_pic_->field == H264Picture::FIELD_NONE) { |
1284 HandleMemoryManagementOps(); | 835 HandleMemoryManagementOps(); |
1285 } else { | 836 } else { |
1286 // Shouldn't get here. | 837 // Shouldn't get here. |
1287 DVLOG(1) << "Interlaced video not supported."; | 838 DVLOG(1) << "Interlaced video not supported."; |
1288 report_error_to_uma_cb_.Run(INTERLACED_STREAM); | |
1289 } | 839 } |
1290 } | 840 } |
1291 } | 841 } |
1292 } | 842 } |
1293 | 843 |
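The sliding-window path above (spec 8.2.5.3) can be summarized in isolation: once the DPB holds max(max_num_ref_frames, 1) reference pictures, the short-term reference with the smallest FrameNumWrap is unmarked. A self-contained sketch with a hypothetical RefPic type, not the decoder's own classes:

    #include <algorithm>
    #include <vector>

    struct RefPic {              // Hypothetical stand-in for H264Picture.
      int frame_num_wrap;
      bool ref;
      bool long_term;
    };

    // Sliding-window marking (spec 8.2.5.3): once max(max_num_ref_frames, 1)
    // reference pictures are held, unmark the short-term reference with the
    // smallest FrameNumWrap.
    void SlidingWindowUnmark(std::vector<RefPic>& refs, int max_num_ref_frames) {
      const int max_refs = std::max(max_num_ref_frames, 1);
      const int num_refs = static_cast<int>(
          std::count_if(refs.begin(), refs.end(),
                        [](const RefPic& p) { return p.ref; }));
      if (num_refs < max_refs)
        return;

      RefPic* oldest = nullptr;
      for (RefPic& p : refs) {
        if (p.ref && !p.long_term &&
            (!oldest || p.frame_num_wrap < oldest->frame_num_wrap)) {
          oldest = &p;
        }
      }
      if (oldest)
        oldest->ref = false;  // Mirrors to_unmark->ref = false above.
    }
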
1294 bool VaapiH264Decoder::FinishPicture() { | 844 bool H264Decoder::FinishPicture() { |
1295 DCHECK(curr_pic_.get()); | 845 DCHECK(curr_pic_.get()); |
1296 | 846 |
1297 // Finish processing previous picture. | 847 // Finish processing previous picture. |
1298 // Start by storing previous reference picture data for later use, | 848 // Start by storing previous reference picture data for later use, |
1299 // if picture being finished is a reference picture. | 849 // if picture being finished is a reference picture. |
1300 if (curr_pic_->ref) { | 850 if (curr_pic_->ref) { |
1301 ReferencePictureMarking(); | 851 ReferencePictureMarking(); |
1302 prev_ref_has_memmgmnt5_ = curr_pic_->mem_mgmt_5; | 852 prev_ref_has_memmgmnt5_ = curr_pic_->mem_mgmt_5; |
1303 prev_ref_top_field_order_cnt_ = curr_pic_->top_field_order_cnt; | 853 prev_ref_top_field_order_cnt_ = curr_pic_->top_field_order_cnt; |
1304 prev_ref_pic_order_cnt_msb_ = curr_pic_->pic_order_cnt_msb; | 854 prev_ref_pic_order_cnt_msb_ = curr_pic_->pic_order_cnt_msb; |
1305 prev_ref_pic_order_cnt_lsb_ = curr_pic_->pic_order_cnt_lsb; | 855 prev_ref_pic_order_cnt_lsb_ = curr_pic_->pic_order_cnt_lsb; |
1306 prev_ref_field_ = curr_pic_->field; | 856 prev_ref_field_ = curr_pic_->field; |
1307 } | 857 } |
1308 prev_has_memmgmnt5_ = curr_pic_->mem_mgmt_5; | 858 prev_has_memmgmnt5_ = curr_pic_->mem_mgmt_5; |
1309 prev_frame_num_offset_ = curr_pic_->frame_num_offset; | 859 prev_frame_num_offset_ = curr_pic_->frame_num_offset; |
1310 | 860 |
1311 // Remove unused (for reference or later output) pictures from DPB, marking | 861 // Remove unused (for reference or later output) pictures from DPB, marking |
1312 // them as such. | 862 // them as such. |
1313 for (H264DPB::Pictures::iterator it = dpb_.begin(); it != dpb_.end(); ++it) { | |
1314 if ((*it)->outputted && !(*it)->ref) | |
1315 UnassignSurfaceFromPoC((*it)->pic_order_cnt); | |
1316 } | |
1317 dpb_.DeleteUnused(); | 863 dpb_.DeleteUnused(); |
1318 | 864 |
1319 DVLOG(4) << "Finishing picture, entries in DPB: " << dpb_.size(); | 865 DVLOG(4) << "Finishing picture, entries in DPB: " << dpb_.size(); |
1320 | 866 |
1321 // Whatever happens below, curr_pic_ will stop managing the pointer to the | 867 // Whatever happens below, curr_pic_ will stop managing the pointer to the |
1322 // picture after this function returns. The ownership will either be | 868 // picture after this. The ownership will either be transferred to DPB, if |
1323 // transferred to DPB, if the image is still needed (for output and/or | 869 // the image is still needed (for output and/or reference), or the memory |
1324 // reference), or the memory will be released if we manage to output it here | 870 // will be released if we manage to output it here without having to store |
1325 // without having to store it for future reference. | 871 // it for future reference. |
1326 scoped_ptr<H264Picture> pic(curr_pic_.release()); | 872 scoped_refptr<H264Picture> pic = curr_pic_; |
| 873 curr_pic_ = nullptr; |
1327 | 874 |
1328 // Get all pictures that haven't been outputted yet. | 875 // Get all pictures that haven't been outputted yet. |
1329 H264Picture::PtrVector not_outputted; | 876 H264Picture::Vector not_outputted; |
1330 // TODO(posciak): pass as pointer, not reference (violates coding style). | 877 dpb_.GetNotOutputtedPicsAppending(¬_outputted); |
1331 dpb_.GetNotOutputtedPicsAppending(not_outputted); | |
1332 // Include the one we've just decoded. | 878 // Include the one we've just decoded. |
1333 not_outputted.push_back(pic.get()); | 879 not_outputted.push_back(pic); |
1334 | 880 |
1335 // Sort in output order. | 881 // Sort in output order. |
1336 std::sort(not_outputted.begin(), not_outputted.end(), POCAscCompare()); | 882 std::sort(not_outputted.begin(), not_outputted.end(), POCAscCompare()); |
1337 | 883 |
1338 // Try to output as many pictures as we can. A picture can be output if | 884 // Try to output as many pictures as we can. A picture can be output if |
1339 // the number of decoded but not yet outputted pictures that would remain | 885 // the number of decoded but not yet outputted pictures that would remain |
1340 // in the DPB afterwards is at least max_num_reorder_frames. | 886 // in the DPB afterwards is at least max_num_reorder_frames. |
1341 // If the outputted picture is not a reference picture, it doesn't have | 887 // If the outputted picture is not a reference picture, it doesn't have |
1342 // to remain in the DPB and can be removed. | 888 // to remain in the DPB and can be removed. |
1343 H264Picture::PtrVector::iterator output_candidate = not_outputted.begin(); | 889 H264Picture::Vector::iterator output_candidate = not_outputted.begin(); |
1344 size_t num_remaining = not_outputted.size(); | 890 size_t num_remaining = not_outputted.size(); |
1345 while (num_remaining > max_num_reorder_frames_) { | 891 while (num_remaining > max_num_reorder_frames_) { |
1346 int poc = (*output_candidate)->pic_order_cnt; | 892 int poc = (*output_candidate)->pic_order_cnt; |
1347 DCHECK_GE(poc, last_output_poc_); | 893 DCHECK_GE(poc, last_output_poc_); |
1348 if (!OutputPic(*output_candidate)) | 894 OutputPic(*output_candidate); |
1349 return false; | |
1350 | 895 |
1351 if (!(*output_candidate)->ref) { | 896 if (!(*output_candidate)->ref) { |
1352 // Current picture hasn't been inserted into DPB yet, so don't remove it | 897 // Current picture hasn't been inserted into DPB yet, so don't remove it |
1353 // if we managed to output it immediately. | 898 // if we managed to output it immediately. |
1354 if (*output_candidate != pic) | 899 if ((*output_candidate)->pic_order_cnt != pic->pic_order_cnt) |
1355 dpb_.DeleteByPOC(poc); | 900 dpb_.DeleteByPOC(poc); |
1356 // Mark as unused. | |
1357 UnassignSurfaceFromPoC(poc); | |
1358 } | 901 } |
1359 | 902 |
1360 ++output_candidate; | 903 ++output_candidate; |
1361 --num_remaining; | 904 --num_remaining; |
1362 } | 905 } |
1363 | 906 |
1364 // If we haven't managed to output the picture that we just decoded, or if | 907 // If we haven't managed to output the picture that we just decoded, or if |
1365 // it's a reference picture, we have to store it in DPB. | 908 // it's a reference picture, we have to store it in DPB. |
1366 if (!pic->outputted || pic->ref) { | 909 if (!pic->outputted || pic->ref) { |
1367 if (dpb_.IsFull()) { | 910 if (dpb_.IsFull()) { |
1368 // If we haven't managed to output anything to free up space in DPB | 911 // If we haven't managed to output anything to free up space in DPB |
1369 // to store this picture, it's an error in the stream. | 912 // to store this picture, it's an error in the stream. |
1370 DVLOG(1) << "Could not free up space in DPB!"; | 913 DVLOG(1) << "Could not free up space in DPB!"; |
1371 return false; | 914 return false; |
1372 } | 915 } |
1373 | 916 |
1374 dpb_.StorePic(pic.release()); | 917 dpb_.StorePic(pic); |
1375 } | 918 } |
1376 | 919 |
1377 return true; | 920 return true; |
1378 } | 921 } |
1379 | 922 |
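The output ("bumping") rule used in FinishPicture can be stated compactly: sort the decoded-but-not-outputted pictures by POC, then keep outputting while more than max_num_reorder_frames of them would remain. A standalone sketch with a hypothetical OutPic type; the real code above additionally deletes non-reference pictures from the DPB and stores the current picture when it must stay around:

    #include <algorithm>
    #include <cstddef>
    #include <vector>

    struct OutPic {              // Hypothetical stand-in for H264Picture.
      int pic_order_cnt;
      bool outputted;
      bool ref;
    };

    // Sort by POC and output pictures while more than max_num_reorder_frames
    // decoded-but-not-outputted pictures would remain afterwards.
    void BumpPictures(std::vector<OutPic*>& not_outputted,
                      size_t max_num_reorder_frames) {
      std::sort(not_outputted.begin(), not_outputted.end(),
                [](const OutPic* a, const OutPic* b) {
                  return a->pic_order_cnt < b->pic_order_cnt;
                });
      size_t num_remaining = not_outputted.size();
      for (OutPic* pic : not_outputted) {
        if (num_remaining <= max_num_reorder_frames)
          break;
        pic->outputted = true;  // Stand-in for OutputPic(*output_candidate).
        --num_remaining;
      }
    }
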
1380 static int LevelToMaxDpbMbs(int level) { | 923 static int LevelToMaxDpbMbs(int level) { |
1381 // See table A-1 in spec. | 924 // See table A-1 in spec. |
1382 switch (level) { | 925 switch (level) { |
1383 case 10: return 396; | 926 case 10: return 396; |
1384 case 11: return 900; | 927 case 11: return 900; |
(...skipping 10 matching lines...) |
1395 case 42: return 34816; | 938 case 42: return 34816; |
1396 case 50: return 110400; | 939 case 50: return 110400; |
1397 case 51: // fallthrough | 940 case 51: // fallthrough |
1398 case 52: return 184320; | 941 case 52: return 184320; |
1399 default: | 942 default: |
1400 DVLOG(1) << "Invalid codec level (" << level << ")"; | 943 DVLOG(1) << "Invalid codec level (" << level << ")"; |
1401 return 0; | 944 return 0; |
1402 } | 945 } |
1403 } | 946 } |
1404 | 947 |
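The table above gives MaxDpbMbs per level (spec Table A-1). For reference, spec A.3.1 bounds the DPB as MaxDpbFrames = min(MaxDpbMbs / (PicWidthInMbs * FrameHeightInMbs), 16); the decoder's own use of the table sits in lines collapsed from this diff, so the following is only a worked example of the spec formula:

    #include <algorithm>

    // Spec A.3.1: the DPB may hold at most
    // min(MaxDpbMbs / (PicWidthInMbs * FrameHeightInMbs), 16) frames.
    int MaxDpbFramesFromLevel(int max_dpb_mbs, int width_mb, int height_mb) {
      if (max_dpb_mbs <= 0 || width_mb <= 0 || height_mb <= 0)
        return 0;
      return std::min(max_dpb_mbs / (width_mb * height_mb), 16);
    }

    // E.g. level 4.2 (34816 MBs in the table above) at a coded size of
    // 1920x1088, i.e. 120x68 macroblocks: min(34816 / 8160, 16) = 4 frames.
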
1405 bool VaapiH264Decoder::UpdateMaxNumReorderFrames(const media::H264SPS* sps) { | 948 bool H264Decoder::UpdateMaxNumReorderFrames(const media::H264SPS* sps) { |
1406 if (sps->vui_parameters_present_flag && sps->bitstream_restriction_flag) { | 949 if (sps->vui_parameters_present_flag && sps->bitstream_restriction_flag) { |
1407 max_num_reorder_frames_ = | 950 max_num_reorder_frames_ = |
1408 base::checked_cast<size_t>(sps->max_num_reorder_frames); | 951 base::checked_cast<size_t>(sps->max_num_reorder_frames); |
1409 if (max_num_reorder_frames_ > dpb_.max_num_pics()) { | 952 if (max_num_reorder_frames_ > dpb_.max_num_pics()) { |
1410 DVLOG(1) | 953 DVLOG(1) |
1411 << "max_num_reorder_frames present, but larger than MaxDpbFrames (" | 954 << "max_num_reorder_frames present, but larger than MaxDpbFrames (" |
1412 << max_num_reorder_frames_ << " > " << dpb_.max_num_pics() << ")"; | 955 << max_num_reorder_frames_ << " > " << dpb_.max_num_pics() << ")"; |
1413 max_num_reorder_frames_ = 0; | 956 max_num_reorder_frames_ = 0; |
1414 return false; | 957 return false; |
1415 } | 958 } |
(...skipping 16 matching lines...) |
1432 max_num_reorder_frames_ = dpb_.max_num_pics(); | 975 max_num_reorder_frames_ = dpb_.max_num_pics(); |
1433 break; | 976 break; |
1434 } | 977 } |
1435 } else { | 978 } else { |
1436 max_num_reorder_frames_ = dpb_.max_num_pics(); | 979 max_num_reorder_frames_ = dpb_.max_num_pics(); |
1437 } | 980 } |
1438 | 981 |
1439 return true; | 982 return true; |
1440 } | 983 } |
1441 | 984 |
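The visible part of UpdateMaxNumReorderFrames boils down to: trust the VUI bitstream-restriction value when present (rejecting anything larger than the DPB can hold), otherwise fall back to the full DPB size. The remaining branches are collapsed in this diff and are not reproduced here; a minimal sketch of just that decision:

    #include <cstddef>

    // Trust the VUI bitstream-restriction value when present (rejecting values
    // larger than the DPB can hold); otherwise fall back to the worst case,
    // the full DPB size. Returns false for an invalid stream.
    bool ChooseMaxNumReorderFrames(bool vui_parameters_present,
                                   bool bitstream_restriction,
                                   size_t vui_max_num_reorder_frames,
                                   size_t max_dpb_frames,
                                   size_t* max_num_reorder_frames) {
      if (vui_parameters_present && bitstream_restriction) {
        if (vui_max_num_reorder_frames > max_dpb_frames)
          return false;  // Larger than MaxDpbFrames: invalid.
        *max_num_reorder_frames = vui_max_num_reorder_frames;
        return true;
      }
      *max_num_reorder_frames = max_dpb_frames;  // Conservative default.
      return true;
    }
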
1442 bool VaapiH264Decoder::ProcessSPS(int sps_id, bool* need_new_buffers) { | 985 bool H264Decoder::ProcessSPS(int sps_id, bool* need_new_buffers) { |
1443 const media::H264SPS* sps = parser_.GetSPS(sps_id); | 986 const media::H264SPS* sps = parser_.GetSPS(sps_id); |
1444 DCHECK(sps); | 987 DCHECK(sps); |
1445 DVLOG(4) << "Processing SPS"; | 988 DVLOG(4) << "Processing SPS"; |
1446 | 989 |
1447 *need_new_buffers = false; | 990 *need_new_buffers = false; |
1448 | 991 |
1449 if (sps->frame_mbs_only_flag == 0) { | 992 if (sps->frame_mbs_only_flag == 0) { |
1450 DVLOG(1) << "frame_mbs_only_flag != 1 not supported"; | 993 DVLOG(1) << "frame_mbs_only_flag != 1 not supported"; |
1451 report_error_to_uma_cb_.Run(FRAME_MBS_ONLY_FLAG_NOT_ONE); | |
1452 return false; | 994 return false; |
1453 } | 995 } |
1454 | 996 |
1455 if (sps->gaps_in_frame_num_value_allowed_flag) { | 997 if (sps->gaps_in_frame_num_value_allowed_flag) { |
1456 DVLOG(1) << "Gaps in frame numbers not supported"; | 998 DVLOG(1) << "Gaps in frame numbers not supported"; |
1457 report_error_to_uma_cb_.Run(GAPS_IN_FRAME_NUM); | |
1458 return false; | 999 return false; |
1459 } | 1000 } |
1460 | 1001 |
1461 curr_sps_id_ = sps->seq_parameter_set_id; | 1002 curr_sps_id_ = sps->seq_parameter_set_id; |
1462 | 1003 |
1463 // Calculate picture height/width in macroblocks and pixels | 1004 // Calculate picture height/width in macroblocks and pixels |
1464 // (spec 7.4.2.1.1, 7.4.3). | 1005 // (spec 7.4.2.1.1, 7.4.3). |
1465 int width_mb = sps->pic_width_in_mbs_minus1 + 1; | 1006 int width_mb = sps->pic_width_in_mbs_minus1 + 1; |
1466 int height_mb = (2 - sps->frame_mbs_only_flag) * | 1007 int height_mb = (2 - sps->frame_mbs_only_flag) * |
1467 (sps->pic_height_in_map_units_minus1 + 1); | 1008 (sps->pic_height_in_map_units_minus1 + 1); |
1468 | 1009 |
1469 gfx::Size new_pic_size(16 * width_mb, 16 * height_mb); | 1010 gfx::Size new_pic_size(16 * width_mb, 16 * height_mb); |
1470 if (new_pic_size.IsEmpty()) { | 1011 if (new_pic_size.IsEmpty()) { |
1471 DVLOG(1) << "Invalid picture size: " << new_pic_size.ToString(); | 1012 DVLOG(1) << "Invalid picture size: " << new_pic_size.ToString(); |
1472 return false; | 1013 return false; |
1473 } | 1014 } |
1474 | 1015 |
1475 if (!pic_size_.IsEmpty() && new_pic_size == pic_size_) { | 1016 if (!pic_size_.IsEmpty() && new_pic_size == pic_size_) { |
1476 // Already have surfaces and this SPS keeps the same resolution, | 1017 // Already have surfaces and this SPS keeps the same resolution, |
1477 // no need to request a new set. | 1018 // no need to request a new set. |
(...skipping 22 matching lines...) |
1500 dpb_.set_max_num_pics(max_dpb_size); | 1041 dpb_.set_max_num_pics(max_dpb_size); |
1501 | 1042 |
1502 if (!UpdateMaxNumReorderFrames(sps)) | 1043 if (!UpdateMaxNumReorderFrames(sps)) |
1503 return false; | 1044 return false; |
1504 DVLOG(1) << "max_num_reorder_frames: " << max_num_reorder_frames_; | 1045 DVLOG(1) << "max_num_reorder_frames: " << max_num_reorder_frames_; |
1505 | 1046 |
1506 *need_new_buffers = true; | 1047 *need_new_buffers = true; |
1507 return true; | 1048 return true; |
1508 } | 1049 } |
1509 | 1050 |
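As a quick sanity check of the size computation in ProcessSPS (values are illustrative, not from any particular stream): a typical 1080p SPS carries pic_width_in_mbs_minus1 = 119, pic_height_in_map_units_minus1 = 67 and frame_mbs_only_flag = 1, giving a 1920x1088 coded size; display cropping is signalled separately in the SPS and is not handled by this function. A standalone version of the arithmetic:

    struct SpsDims {             // Hypothetical; mirrors the fields used above.
      int pic_width_in_mbs_minus1;
      int pic_height_in_map_units_minus1;
      int frame_mbs_only_flag;
    };

    // Spec 7.4.2.1.1 / 7.4.3: coded size in pixels from 16x16 macroblocks.
    void CodedSizeFromSps(const SpsDims& sps, int* width, int* height) {
      const int width_mb = sps.pic_width_in_mbs_minus1 + 1;
      const int height_mb = (2 - sps.frame_mbs_only_flag) *
                            (sps.pic_height_in_map_units_minus1 + 1);
      *width = 16 * width_mb;    // e.g. 16 * (119 + 1) = 1920
      *height = 16 * height_mb;  // e.g. 16 * ((2 - 1) * (67 + 1)) = 1088
    }
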
1510 bool VaapiH264Decoder::ProcessPPS(int pps_id) { | 1051 bool H264Decoder::ProcessPPS(int pps_id) { |
1511 const media::H264PPS* pps = parser_.GetPPS(pps_id); | 1052 const media::H264PPS* pps = parser_.GetPPS(pps_id); |
1512 DCHECK(pps); | 1053 DCHECK(pps); |
1513 | 1054 |
1514 curr_pps_id_ = pps->pic_parameter_set_id; | 1055 curr_pps_id_ = pps->pic_parameter_set_id; |
1515 | 1056 |
1516 return true; | 1057 return true; |
1517 } | 1058 } |
1518 | 1059 |
1519 bool VaapiH264Decoder::FinishPrevFrameIfPresent() { | 1060 bool H264Decoder::FinishPrevFrameIfPresent() { |
1520 // If we already have a frame waiting to be decoded, decode it and finish. | 1061 // If we already have a frame waiting to be decoded, decode it and finish. |
1521 if (curr_pic_ != NULL) { | 1062 if (curr_pic_ != NULL) { |
1522 if (!DecodePicture()) | 1063 if (!DecodePicture()) |
1523 return false; | 1064 return false; |
1524 return FinishPicture(); | 1065 return FinishPicture(); |
1525 } | 1066 } |
1526 | 1067 |
1527 return true; | 1068 return true; |
1528 } | 1069 } |
1529 | 1070 |
1530 bool VaapiH264Decoder::ProcessSlice(media::H264SliceHeader* slice_hdr) { | 1071 bool H264Decoder::PreprocessSlice(media::H264SliceHeader* slice_hdr) { |
1531 prev_frame_num_ = frame_num_; | 1072 prev_frame_num_ = frame_num_; |
1532 frame_num_ = slice_hdr->frame_num; | 1073 frame_num_ = slice_hdr->frame_num; |
1533 | 1074 |
1534 if (prev_frame_num_ > 0 && prev_frame_num_ < frame_num_ - 1) { | 1075 if (prev_frame_num_ > 0 && prev_frame_num_ < frame_num_ - 1) { |
1535 DVLOG(1) << "Gap in frame_num!"; | 1076 DVLOG(1) << "Gap in frame_num!"; |
1536 report_error_to_uma_cb_.Run(GAPS_IN_FRAME_NUM); | |
1537 return false; | 1077 return false; |
1538 } | 1078 } |
1539 | 1079 |
1540 if (slice_hdr->field_pic_flag == 0) | 1080 if (slice_hdr->field_pic_flag == 0) |
1541 max_pic_num_ = max_frame_num_; | 1081 max_pic_num_ = max_frame_num_; |
1542 else | 1082 else |
1543 max_pic_num_ = 2 * max_frame_num_; | 1083 max_pic_num_ = 2 * max_frame_num_; |
1544 | 1084 |
1545 // TODO(posciak): switch to new picture detection per 7.4.1.2.4. | 1085 // TODO(posciak): switch to new picture detection per 7.4.1.2.4. |
1546 if (curr_pic_ != NULL && slice_hdr->first_mb_in_slice != 0) { | 1086 if (curr_pic_ != NULL && slice_hdr->first_mb_in_slice != 0) { |
1547 // This is just some more slice data of the current picture, so | 1087 // More slice data of the current picture. |
1548 // just queue it and return. | |
1549 QueueSlice(slice_hdr); | |
1550 return true; | 1088 return true; |
1551 } else { | 1089 } else { |
1552 // A new frame, so first finish the previous one before processing it... | 1090 // A new frame, so first finish the previous one before processing it... |
1553 if (!FinishPrevFrameIfPresent()) | 1091 if (!FinishPrevFrameIfPresent()) |
1554 return false; | 1092 return false; |
| 1093 } |
1555 | 1094 |
1556 // and then start a new one. | 1095 // If the new frame is an IDR, output what's left to output and clear DPB |
1557 return StartNewFrame(slice_hdr); | 1096 if (slice_hdr->idr_pic_flag) { |
| 1097 // (unless we are explicitly instructed not to do so). |
| 1098 if (!slice_hdr->no_output_of_prior_pics_flag) { |
| 1099 // Output DPB contents. |
| 1100 if (!Flush()) |
| 1101 return false; |
| 1102 } |
| 1103 dpb_.Clear(); |
| 1104 last_output_poc_ = std::numeric_limits<int>::min(); |
1558 } | 1105 } |
| 1106 |
| 1107 return true; |
1559 } | 1108 } |
1560 | 1109 |
1561 #define SET_ERROR_AND_RETURN() \ | 1110 bool H264Decoder::ProcessSlice(media::H264SliceHeader* slice_hdr) { |
1562 do { \ | 1111 DCHECK(curr_pic_.get()); |
1563 DVLOG(1) << "Error during decode"; \ | 1112 H264Picture::Vector ref_pic_list0, ref_pic_list1; |
1564 state_ = kError; \ | 1113 |
1565 return VaapiH264Decoder::kDecodeError; \ | 1114 if (!ModifyReferencePicLists(slice_hdr, &ref_pic_list0, &ref_pic_list1)) |
| 1115 return false; |
| 1116 |
| 1117 const media::H264PPS* pps = parser_.GetPPS(slice_hdr->pic_parameter_set_id); |
| 1118 DCHECK(pps); |
| 1119 |
| 1120 if (!accelerator_->SubmitSlice(pps, slice_hdr, ref_pic_list0, ref_pic_list1, |
| 1121 curr_pic_.get(), slice_hdr->nalu_data, |
| 1122 slice_hdr->nalu_size)) |
| 1123 return false; |
| 1124 |
| 1125 curr_slice_hdr_.reset(); |
| 1126 return true; |
| 1127 } |
| 1128 |
| 1129 #define SET_ERROR_AND_RETURN() \ |
| 1130 do { \ |
| 1131 DVLOG(1) << "Error during decode"; \ |
| 1132 state_ = kError; \ |
| 1133 return H264Decoder::kDecodeError; \ |
1566 } while (0) | 1134 } while (0) |
1567 | 1135 |
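The do { ... } while (0) wrapper above is the standard trick for making a multi-statement macro behave as a single statement, so SET_ERROR_AND_RETURN() composes safely with if/else and a trailing semicolon. A generic, self-contained illustration (LOG_AND_BAIL is hypothetical and not part of this file):

    #include <cstdio>

    // The do { ... } while (0) wrapper makes the macro expand to exactly one
    // statement, so the trailing semicolon and the else both bind correctly.
    #define LOG_AND_BAIL() \
      do {                 \
        std::puts("bail"); \
        return false;      \
      } while (0)

    bool Check(int value) {
      if (value < 0)
        LOG_AND_BAIL();    // Expands to a single statement.
      else
        std::puts("ok");   // Still binds to the if above.
      return true;
    }
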
1568 void VaapiH264Decoder::SetStream(const uint8* ptr, | 1136 void H264Decoder::SetStream(const uint8* ptr, size_t size) { |
1569 size_t size, | |
1570 int32 input_id) { | |
1571 DCHECK(ptr); | 1137 DCHECK(ptr); |
1572 DCHECK(size); | 1138 DCHECK(size); |
1573 | 1139 |
1574 // Got new input stream data from the client. | 1140 DVLOG(4) << "New input stream at: " << (void*)ptr << " size: " << size; |
1575 DVLOG(4) << "New input stream id: " << input_id << " at: " << (void*) ptr | |
1576 << " size: " << size; | |
1577 parser_.SetStream(ptr, size); | 1141 parser_.SetStream(ptr, size); |
1578 curr_input_id_ = input_id; | |
1579 } | 1142 } |
1580 | 1143 |
1581 VaapiH264Decoder::DecResult VaapiH264Decoder::Decode() { | 1144 H264Decoder::DecResult H264Decoder::Decode() { |
1582 media::H264Parser::Result par_res; | |
1583 media::H264NALU nalu; | |
1584 DCHECK_NE(state_, kError); | 1145 DCHECK_NE(state_, kError); |
1585 | 1146 |
1586 while (1) { | 1147 while (1) { |
1587 // If we've already decoded some of the stream (after reset, i.e. we are | 1148 media::H264Parser::Result par_res; |
1588 // not in kNeedStreamMetadata state), we may be able to go back into | 1149 |
1589 // decoding state not only starting at/resuming from an SPS, but also from | 1150 if (!curr_nalu_) { |
1590 // other resume points, such as IDRs. In the latter case we need an output | 1151 curr_nalu_.reset(new media::H264NALU()); |
1591 // surface, because we will end up decoding that IDR in the process. | 1152 par_res = parser_.AdvanceToNextNALU(curr_nalu_.get()); |
1592 // Otherwise we just look for an SPS and don't produce any output frames. | 1153 if (par_res == media::H264Parser::kEOStream) |
1593 if (state_ != kNeedStreamMetadata && available_va_surfaces_.empty()) { | 1154 return kRanOutOfStreamData; |
1594 DVLOG(4) << "No output surfaces available"; | 1155 else if (par_res != media::H264Parser::kOk) |
1595 return kRanOutOfSurfaces; | 1156 SET_ERROR_AND_RETURN(); |
1596 } | 1157 } |
1597 | 1158 |
1598 par_res = parser_.AdvanceToNextNALU(&nalu); | 1159 DVLOG(4) << "NALU found: " << static_cast<int>(curr_nalu_->nal_unit_type); |
1599 if (par_res == media::H264Parser::kEOStream) | |
1600 return kRanOutOfStreamData; | |
1601 else if (par_res != media::H264Parser::kOk) | |
1602 SET_ERROR_AND_RETURN(); | |
1603 | 1160 |
1604 DVLOG(4) << "NALU found: " << static_cast<int>(nalu.nal_unit_type); | 1161 switch (curr_nalu_->nal_unit_type) { |
1605 | |
1606 switch (nalu.nal_unit_type) { | |
1607 case media::H264NALU::kNonIDRSlice: | 1162 case media::H264NALU::kNonIDRSlice: |
1608 // We can't resume from a non-IDR slice. | 1163 // We can't resume from a non-IDR slice. |
1609 if (state_ != kDecoding) | 1164 if (state_ != kDecoding) |
1610 break; | 1165 break; |
1611 // else fallthrough | 1166 // else fallthrough |
1612 case media::H264NALU::kIDRSlice: { | 1167 case media::H264NALU::kIDRSlice: { |
1613 // TODO(posciak): the IDR may require an SPS that we don't have | 1168 // TODO(posciak): the IDR may require an SPS that we don't have |
1614 // available. For now we'd fail if that happens, but ideally we'd like | 1169 // available. For now we'd fail if that happens, but ideally we'd like |
1615 // to keep going until the next SPS in the stream. | 1170 // to keep going until the next SPS in the stream. |
1616 if (state_ == kNeedStreamMetadata) { | 1171 if (state_ == kNeedStreamMetadata) { |
1617 // We need an SPS, skip this IDR and keep looking. | 1172 // We need an SPS, skip this IDR and keep looking. |
1618 break; | 1173 break; |
1619 } | 1174 } |
1620 | 1175 |
1621 // If after reset, we should be able to recover from an IDR. | 1176 // If after reset, we should be able to recover from an IDR. |
1622 media::H264SliceHeader slice_hdr; | 1177 if (!curr_slice_hdr_) { |
| 1178 curr_slice_hdr_.reset(new media::H264SliceHeader()); |
| 1179 par_res = |
| 1180 parser_.ParseSliceHeader(*curr_nalu_, curr_slice_hdr_.get()); |
| 1181 if (par_res != media::H264Parser::kOk) |
| 1182 SET_ERROR_AND_RETURN(); |
1623 | 1183 |
1624 par_res = parser_.ParseSliceHeader(nalu, &slice_hdr); | 1184 if (!PreprocessSlice(curr_slice_hdr_.get())) |
1625 if (par_res != media::H264Parser::kOk) | 1185 SET_ERROR_AND_RETURN(); |
1626 SET_ERROR_AND_RETURN(); | 1186 } |
1627 | 1187 |
1628 if (!ProcessSlice(&slice_hdr)) | 1188 if (!curr_pic_) { |
| 1189 // New picture/finished previous one, try to start a new one |
| 1190 // or tell the client we need more surfaces. |
| 1191 curr_pic_ = accelerator_->CreateH264Picture(); |
| 1192 if (!curr_pic_) |
| 1193 return kRanOutOfSurfaces; |
| 1194 |
| 1195 if (!StartNewFrame(curr_slice_hdr_.get())) |
| 1196 SET_ERROR_AND_RETURN(); |
| 1197 } |
| 1198 |
| 1199 if (!ProcessSlice(curr_slice_hdr_.get())) |
1629 SET_ERROR_AND_RETURN(); | 1200 SET_ERROR_AND_RETURN(); |
1630 | 1201 |
1631 state_ = kDecoding; | 1202 state_ = kDecoding; |
1632 break; | 1203 break; |
1633 } | 1204 } |
1634 | 1205 |
1635 case media::H264NALU::kSPS: { | 1206 case media::H264NALU::kSPS: { |
1636 int sps_id; | 1207 int sps_id; |
1637 | 1208 |
1638 if (!FinishPrevFrameIfPresent()) | 1209 if (!FinishPrevFrameIfPresent()) |
1639 SET_ERROR_AND_RETURN(); | 1210 SET_ERROR_AND_RETURN(); |
1640 | 1211 |
1641 par_res = parser_.ParseSPS(&sps_id); | 1212 par_res = parser_.ParseSPS(&sps_id); |
1642 if (par_res != media::H264Parser::kOk) | 1213 if (par_res != media::H264Parser::kOk) |
1643 SET_ERROR_AND_RETURN(); | 1214 SET_ERROR_AND_RETURN(); |
1644 | 1215 |
1645 bool need_new_buffers = false; | 1216 bool need_new_buffers = false; |
1646 if (!ProcessSPS(sps_id, &need_new_buffers)) | 1217 if (!ProcessSPS(sps_id, &need_new_buffers)) |
1647 SET_ERROR_AND_RETURN(); | 1218 SET_ERROR_AND_RETURN(); |
1648 | 1219 |
1649 state_ = kDecoding; | 1220 state_ = kDecoding; |
1650 | 1221 |
1651 if (need_new_buffers) { | 1222 if (need_new_buffers) { |
1652 if (!Flush()) | 1223 if (!Flush()) |
1653 return kDecodeError; | 1224 return kDecodeError; |
1654 | 1225 |
1655 available_va_surfaces_.clear(); | 1226 curr_pic_ = nullptr; |
| 1227 curr_nalu_ = nullptr; |
| 1228 ref_pic_list_p0_.clear(); |
| 1229 ref_pic_list_b0_.clear(); |
| 1230 ref_pic_list_b1_.clear(); |
| 1231 |
1656 return kAllocateNewSurfaces; | 1232 return kAllocateNewSurfaces; |
1657 } | 1233 } |
1658 break; | 1234 break; |
1659 } | 1235 } |
1660 | 1236 |
1661 case media::H264NALU::kPPS: { | 1237 case media::H264NALU::kPPS: { |
1662 if (state_ != kDecoding) | 1238 if (state_ != kDecoding) |
1663 break; | 1239 break; |
1664 | 1240 |
1665 int pps_id; | 1241 int pps_id; |
1666 | 1242 |
1667 if (!FinishPrevFrameIfPresent()) | 1243 if (!FinishPrevFrameIfPresent()) |
1668 SET_ERROR_AND_RETURN(); | 1244 SET_ERROR_AND_RETURN(); |
1669 | 1245 |
1670 par_res = parser_.ParsePPS(&pps_id); | 1246 par_res = parser_.ParsePPS(&pps_id); |
1671 if (par_res != media::H264Parser::kOk) | 1247 if (par_res != media::H264Parser::kOk) |
1672 SET_ERROR_AND_RETURN(); | 1248 SET_ERROR_AND_RETURN(); |
1673 | 1249 |
1674 if (!ProcessPPS(pps_id)) | 1250 if (!ProcessPPS(pps_id)) |
1675 SET_ERROR_AND_RETURN(); | 1251 SET_ERROR_AND_RETURN(); |
1676 break; | 1252 break; |
1677 } | 1253 } |
1678 | 1254 |
1679 case media::H264NALU::kAUD: | |
1680 case media::H264NALU::kEOSeq: | |
1681 case media::H264NALU::kEOStream: | |
1682 if (state_ != kDecoding) | |
1683 break; | |
1684 if (!FinishPrevFrameIfPresent()) | |
1685 SET_ERROR_AND_RETURN(); | |
1686 | |
1687 break; | |
1688 | |
1689 default: | 1255 default: |
1690 DVLOG(4) << "Skipping NALU type: " << nalu.nal_unit_type; | 1256 DVLOG(4) << "Skipping NALU type: " << curr_nalu_->nal_unit_type; |
1691 break; | 1257 break; |
1692 } | 1258 } |
| 1259 |
| 1260 DVLOG(4) << "Dropping nalu"; |
| 1261 curr_nalu_.reset(); |
1693 } | 1262 } |
1694 } | 1263 } |
1695 | 1264 |
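A hedged sketch of how a client might drive Decode() based on the result codes visible in this diff (kRanOutOfStreamData, kAllocateNewSurfaces, kRanOutOfSurfaces, kDecodeError); the buffer type and the On*/Wait*/Handle* hooks are hypothetical stand-ins for whatever the embedder actually provides:

    #include <cstddef>
    #include <cstdint>

    // Hypothetical input buffer and client hooks, declared only so the sketch
    // hangs together; the real plumbing lives outside this file. Assumes
    // h264_decoder.h is available for content::H264Decoder.
    struct InputBuffer { const uint8_t* data; size_t size; };
    bool HasMoreBuffers();
    InputBuffer NextBuffer();
    void OnNeedNewSurfaces(size_t num_pictures);
    void WaitForFreeSurface();
    void HandleFatalError();

    void DriveDecoder(content::H264Decoder* decoder) {
      while (HasMoreBuffers()) {
        const InputBuffer buf = NextBuffer();
        decoder->SetStream(buf.data, buf.size);

        bool need_more_data = false;
        while (!need_more_data) {
          switch (decoder->Decode()) {
            case content::H264Decoder::kRanOutOfStreamData:
              need_more_data = true;  // Feed the next bitstream buffer.
              break;
            case content::H264Decoder::kAllocateNewSurfaces:
              // A new SPS changed the picture set; provision, then resume.
              OnNeedNewSurfaces(decoder->GetRequiredNumOfPictures());
              break;
            case content::H264Decoder::kRanOutOfSurfaces:
              WaitForFreeSurface();  // A picture must be freed before resuming.
              break;
            case content::H264Decoder::kDecodeError:
              HandleFatalError();
              return;
            default:
              break;  // Other result codes are not handled in this sketch.
          }
        }
      }
    }
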
1696 size_t VaapiH264Decoder::GetRequiredNumOfPictures() { | 1265 size_t H264Decoder::GetRequiredNumOfPictures() { |
1697 return dpb_.max_num_pics() + kPicsInPipeline; | 1266 return dpb_.max_num_pics() + kPicsInPipeline; |
1698 } | 1267 } |
1699 | 1268 |
1700 } // namespace content | 1269 } // namespace content |