OLD | NEW |
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include <algorithm> | 5 #include <algorithm> |
6 #include <limits> | 6 #include <limits> |
7 | 7 |
8 #include "base/bind.h" | 8 #include "base/bind.h" |
9 #include "base/bind_helpers.h" | 9 #include "base/bind_helpers.h" |
| 10 #include "base/callback_helpers.h" |
10 #include "base/numerics/safe_conversions.h" | 11 #include "base/numerics/safe_conversions.h" |
11 #include "base/stl_util.h" | 12 #include "base/stl_util.h" |
12 #include "content/common/gpu/media/vaapi_h264_decoder.h" | 13 #include "content/common/gpu/media/h264_decoder.h" |
13 | 14 |
14 namespace content { | 15 namespace content { |
15 | 16 |
16 // Decode surface, used for decoding and reference. input_id comes from client | 17 H264Decoder::H264Accelerator::H264Accelerator() { |
17 // and is associated with the surface that was produced as the result | |
18 // of decoding a bitstream buffer with that id. | |
19 class VaapiH264Decoder::DecodeSurface { | |
20 public: | |
21 DecodeSurface(int poc, | |
22 int32 input_id, | |
23 const scoped_refptr<VASurface>& va_surface); | |
24 DecodeSurface(int poc, const scoped_refptr<DecodeSurface>& dec_surface); | |
25 ~DecodeSurface(); | |
26 | |
27 int poc() { | |
28 return poc_; | |
29 } | |
30 | |
31 scoped_refptr<VASurface> va_surface() { | |
32 return va_surface_; | |
33 } | |
34 | |
35 int32 input_id() { | |
36 return input_id_; | |
37 } | |
38 | |
39 private: | |
40 int poc_; | |
41 int32 input_id_; | |
42 scoped_refptr<VASurface> va_surface_; | |
43 }; | |
44 | |
45 VaapiH264Decoder::DecodeSurface::DecodeSurface( | |
46 int poc, | |
47 int32 input_id, | |
48 const scoped_refptr<VASurface>& va_surface) | |
49 : poc_(poc), | |
50 input_id_(input_id), | |
51 va_surface_(va_surface) { | |
52 DCHECK(va_surface_.get()); | |
53 } | 18 } |
54 | 19 |
55 VaapiH264Decoder::DecodeSurface::~DecodeSurface() { | 20 H264Decoder::H264Accelerator::~H264Accelerator() { |
56 } | 21 } |
57 | 22 |
58 VaapiH264Decoder::VaapiH264Decoder( | 23 H264Decoder::H264Decoder(H264Accelerator* accelerator) |
59 VaapiWrapper* vaapi_wrapper, | |
60 const OutputPicCB& output_pic_cb, | |
61 const ReportErrorToUmaCB& report_error_to_uma_cb) | |
62 : max_pic_order_cnt_lsb_(0), | 24 : max_pic_order_cnt_lsb_(0), |
63 max_frame_num_(0), | 25 max_frame_num_(0), |
64 max_pic_num_(0), | 26 max_pic_num_(0), |
65 max_long_term_frame_idx_(0), | 27 max_long_term_frame_idx_(0), |
66 max_num_reorder_frames_(0), | 28 max_num_reorder_frames_(0), |
67 curr_sps_id_(-1), | 29 curr_sps_id_(-1), |
68 curr_pps_id_(-1), | 30 curr_pps_id_(-1), |
69 vaapi_wrapper_(vaapi_wrapper), | 31 accelerator_(accelerator) { |
70 output_pic_cb_(output_pic_cb), | 32 DCHECK(accelerator_); |
71 report_error_to_uma_cb_(report_error_to_uma_cb) { | |
72 Reset(); | 33 Reset(); |
73 state_ = kNeedStreamMetadata; | 34 state_ = kNeedStreamMetadata; |
74 } | 35 } |
75 | 36 |
76 VaapiH264Decoder::~VaapiH264Decoder() { | 37 H264Decoder::~H264Decoder() { |
77 } | 38 } |
78 | 39 |
79 void VaapiH264Decoder::Reset() { | 40 void H264Decoder::Reset() { |
80 curr_pic_.reset(); | 41 curr_pic_ = nullptr; |
| 42 curr_nalu_ = nullptr; |
| 43 curr_slice_hdr_ = nullptr; |
81 | 44 |
82 curr_input_id_ = -1; | |
83 frame_num_ = 0; | 45 frame_num_ = 0; |
84 prev_frame_num_ = -1; | 46 prev_frame_num_ = -1; |
85 prev_frame_num_offset_ = -1; | 47 prev_frame_num_offset_ = -1; |
86 | 48 |
87 prev_ref_has_memmgmnt5_ = false; | 49 prev_ref_has_memmgmnt5_ = false; |
88 prev_ref_top_field_order_cnt_ = -1; | 50 prev_ref_top_field_order_cnt_ = -1; |
89 prev_ref_pic_order_cnt_msb_ = -1; | 51 prev_ref_pic_order_cnt_msb_ = -1; |
90 prev_ref_pic_order_cnt_lsb_ = -1; | 52 prev_ref_pic_order_cnt_lsb_ = -1; |
91 prev_ref_field_ = H264Picture::FIELD_NONE; | 53 prev_ref_field_ = H264Picture::FIELD_NONE; |
92 | 54 |
93 vaapi_wrapper_->DestroyPendingBuffers(); | 55 ref_pic_list_p0_.clear(); |
94 | 56 ref_pic_list_b0_.clear(); |
95 ref_pic_list0_.clear(); | 57 ref_pic_list_b1_.clear(); |
96 ref_pic_list1_.clear(); | |
97 | |
98 for (DecSurfacesInUse::iterator it = decode_surfaces_in_use_.begin(); | |
99 it != decode_surfaces_in_use_.end(); ) { | |
100 int poc = it->second->poc(); | |
101 // Must be incremented before UnassignSurfaceFromPoC as this call | |
102 // invalidates |it|. | |
103 ++it; | |
104 UnassignSurfaceFromPoC(poc); | |
105 } | |
106 DCHECK(decode_surfaces_in_use_.empty()); | |
107 | |
108 dpb_.Clear(); | 58 dpb_.Clear(); |
109 parser_.Reset(); | 59 parser_.Reset(); |
110 last_output_poc_ = std::numeric_limits<int>::min(); | 60 last_output_poc_ = std::numeric_limits<int>::min(); |
111 | 61 |
112 // If we are in kDecoding, we can resume without processing an SPS. | 62 // If we are in kDecoding, we can resume without processing an SPS. |
113 if (state_ == kDecoding) | 63 if (state_ == kDecoding) |
114 state_ = kAfterReset; | 64 state_ = kAfterReset; |
115 } | 65 } |
116 | 66 |
117 void VaapiH264Decoder::ReuseSurface( | 67 void H264Decoder::PrepareRefPicLists(media::H264SliceHeader* slice_hdr) { |
118 const scoped_refptr<VASurface>& va_surface) { | 68 ConstructReferencePicListsP(slice_hdr); |
119 available_va_surfaces_.push_back(va_surface); | 69 ConstructReferencePicListsB(slice_hdr); |
120 } | 70 } |
121 | 71 |
122 // Fill |va_pic| with default/neutral values. | 72 bool H264Decoder::ModifyReferencePicLists(media::H264SliceHeader* slice_hdr, |
123 static void InitVAPicture(VAPictureH264* va_pic) { | 73 H264Picture::Vector* ref_pic_list0, |
124 memset(va_pic, 0, sizeof(*va_pic)); | 74 H264Picture::Vector* ref_pic_list1) { |
125 va_pic->picture_id = VA_INVALID_ID; | 75 ref_pic_list0->clear(); |
126 va_pic->flags = VA_PICTURE_H264_INVALID; | 76 ref_pic_list1->clear(); |
127 } | |
128 | |
129 void VaapiH264Decoder::FillVAPicture(VAPictureH264 *va_pic, H264Picture* pic) { | |
130 DCHECK(pic); | |
131 | |
132 DecodeSurface* dec_surface = DecodeSurfaceByPoC(pic->pic_order_cnt); | |
133 if (!dec_surface) { | |
134 // Cannot provide a ref picture, will corrupt output, but may be able | |
135 // to recover. | |
136 InitVAPicture(va_pic); | |
137 return; | |
138 } | |
139 | |
140 va_pic->picture_id = dec_surface->va_surface()->id(); | |
141 va_pic->frame_idx = pic->frame_num; | |
142 va_pic->flags = 0; | |
143 | |
144 switch (pic->field) { | |
145 case H264Picture::FIELD_NONE: | |
146 break; | |
147 case H264Picture::FIELD_TOP: | |
148 va_pic->flags |= VA_PICTURE_H264_TOP_FIELD; | |
149 break; | |
150 case H264Picture::FIELD_BOTTOM: | |
151 va_pic->flags |= VA_PICTURE_H264_BOTTOM_FIELD; | |
152 break; | |
153 } | |
154 | |
155 if (pic->ref) { | |
156 va_pic->flags |= pic->long_term ? VA_PICTURE_H264_LONG_TERM_REFERENCE | |
157 : VA_PICTURE_H264_SHORT_TERM_REFERENCE; | |
158 } | |
159 | |
160 va_pic->TopFieldOrderCnt = pic->top_field_order_cnt; | |
161 va_pic->BottomFieldOrderCnt = pic->bottom_field_order_cnt; | |
162 } | |
163 | |
164 int VaapiH264Decoder::FillVARefFramesFromDPB(VAPictureH264 *va_pics, | |
165 int num_pics) { | |
166 H264DPB::Pictures::reverse_iterator rit; | |
167 int i; | |
168 | |
169 // Return reference frames in reverse order of insertion. | |
170 // Libva does not document this, but other implementations (e.g. mplayer) | |
171 // do it this way as well. | |
172 for (rit = dpb_.rbegin(), i = 0; rit != dpb_.rend() && i < num_pics; ++rit) { | |
173 if ((*rit)->ref) | |
174 FillVAPicture(&va_pics[i++], *rit); | |
175 } | |
176 | |
177 return i; | |
178 } | |
179 | |
180 VaapiH264Decoder::DecodeSurface* VaapiH264Decoder::DecodeSurfaceByPoC(int poc) { | |
181 DecSurfacesInUse::iterator iter = decode_surfaces_in_use_.find(poc); | |
182 if (iter == decode_surfaces_in_use_.end()) { | |
183 DVLOG(1) << "Could not find surface assigned to POC: " << poc; | |
184 return NULL; | |
185 } | |
186 | |
187 return iter->second.get(); | |
188 } | |
189 | |
190 bool VaapiH264Decoder::AssignSurfaceToPoC(int32 input_id, int poc) { | |
191 if (available_va_surfaces_.empty()) { | |
192 DVLOG(1) << "No VA Surfaces available"; | |
193 return false; | |
194 } | |
195 | |
196 linked_ptr<DecodeSurface> dec_surface(new DecodeSurface( | |
197 poc, input_id, available_va_surfaces_.back())); | |
198 available_va_surfaces_.pop_back(); | |
199 | |
200 DVLOG(4) << "POC " << poc | |
201 << " will use surface " << dec_surface->va_surface()->id(); | |
202 | |
203 bool inserted = decode_surfaces_in_use_.insert( | |
204 std::make_pair(poc, dec_surface)).second; | |
205 DCHECK(inserted); | |
206 | |
207 return true; | |
208 } | |
209 | |
210 void VaapiH264Decoder::UnassignSurfaceFromPoC(int poc) { | |
211 DecSurfacesInUse::iterator it = decode_surfaces_in_use_.find(poc); | |
212 if (it == decode_surfaces_in_use_.end()) { | |
213 DVLOG(1) << "Asked to unassign an unassigned POC " << poc; | |
214 return; | |
215 } | |
216 | |
217 DVLOG(4) << "POC " << poc << " no longer using VA surface " | |
218 << it->second->va_surface()->id(); | |
219 | |
220 decode_surfaces_in_use_.erase(it); | |
221 } | |
222 | |
223 bool VaapiH264Decoder::SendPPS() { | |
224 const media::H264PPS* pps = parser_.GetPPS(curr_pps_id_); | |
225 DCHECK(pps); | |
226 | |
227 const media::H264SPS* sps = parser_.GetSPS(pps->seq_parameter_set_id); | |
228 DCHECK(sps); | |
229 | |
230 DCHECK(curr_pic_.get()); | |
231 | |
232 VAPictureParameterBufferH264 pic_param; | |
233 memset(&pic_param, 0, sizeof(VAPictureParameterBufferH264)); | |
234 | |
235 #define FROM_SPS_TO_PP(a) pic_param.a = sps->a; | |
236 #define FROM_SPS_TO_PP2(a, b) pic_param.b = sps->a; | |
237 FROM_SPS_TO_PP2(pic_width_in_mbs_minus1, picture_width_in_mbs_minus1); | |
238 // This assumes non-interlaced video | |
239 FROM_SPS_TO_PP2(pic_height_in_map_units_minus1, | |
240 picture_height_in_mbs_minus1); | |
241 FROM_SPS_TO_PP(bit_depth_luma_minus8); | |
242 FROM_SPS_TO_PP(bit_depth_chroma_minus8); | |
243 #undef FROM_SPS_TO_PP | |
244 #undef FROM_SPS_TO_PP2 | |
245 | |
246 #define FROM_SPS_TO_PP_SF(a) pic_param.seq_fields.bits.a = sps->a; | |
247 #define FROM_SPS_TO_PP_SF2(a, b) pic_param.seq_fields.bits.b = sps->a; | |
248 FROM_SPS_TO_PP_SF(chroma_format_idc); | |
249 FROM_SPS_TO_PP_SF2(separate_colour_plane_flag, | |
250 residual_colour_transform_flag); | |
251 FROM_SPS_TO_PP_SF(gaps_in_frame_num_value_allowed_flag); | |
252 FROM_SPS_TO_PP_SF(frame_mbs_only_flag); | |
253 FROM_SPS_TO_PP_SF(mb_adaptive_frame_field_flag); | |
254 FROM_SPS_TO_PP_SF(direct_8x8_inference_flag); | |
255 pic_param.seq_fields.bits.MinLumaBiPredSize8x8 = (sps->level_idc >= 31); | |
256 FROM_SPS_TO_PP_SF(log2_max_frame_num_minus4); | |
257 FROM_SPS_TO_PP_SF(pic_order_cnt_type); | |
258 FROM_SPS_TO_PP_SF(log2_max_pic_order_cnt_lsb_minus4); | |
259 FROM_SPS_TO_PP_SF(delta_pic_order_always_zero_flag); | |
260 #undef FROM_SPS_TO_PP_SF | |
261 #undef FROM_SPS_TO_PP_SF2 | |
262 | |
263 #define FROM_PPS_TO_PP(a) pic_param.a = pps->a; | |
264 FROM_PPS_TO_PP(num_slice_groups_minus1); | |
265 pic_param.slice_group_map_type = 0; | |
266 pic_param.slice_group_change_rate_minus1 = 0; | |
267 FROM_PPS_TO_PP(pic_init_qp_minus26); | |
268 FROM_PPS_TO_PP(pic_init_qs_minus26); | |
269 FROM_PPS_TO_PP(chroma_qp_index_offset); | |
270 FROM_PPS_TO_PP(second_chroma_qp_index_offset); | |
271 #undef FROM_PPS_TO_PP | |
272 | |
273 #define FROM_PPS_TO_PP_PF(a) pic_param.pic_fields.bits.a = pps->a; | |
274 #define FROM_PPS_TO_PP_PF2(a, b) pic_param.pic_fields.bits.b = pps->a; | |
275 FROM_PPS_TO_PP_PF(entropy_coding_mode_flag); | |
276 FROM_PPS_TO_PP_PF(weighted_pred_flag); | |
277 FROM_PPS_TO_PP_PF(weighted_bipred_idc); | |
278 FROM_PPS_TO_PP_PF(transform_8x8_mode_flag); | |
279 | |
280 pic_param.pic_fields.bits.field_pic_flag = 0; | |
281 FROM_PPS_TO_PP_PF(constrained_intra_pred_flag); | |
282 FROM_PPS_TO_PP_PF2(bottom_field_pic_order_in_frame_present_flag, | |
283 pic_order_present_flag); | |
284 FROM_PPS_TO_PP_PF(deblocking_filter_control_present_flag); | |
285 FROM_PPS_TO_PP_PF(redundant_pic_cnt_present_flag); | |
286 pic_param.pic_fields.bits.reference_pic_flag = curr_pic_->ref; | |
287 #undef FROM_PPS_TO_PP_PF | |
288 #undef FROM_PPS_TO_PP_PF2 | |
289 | |
290 pic_param.frame_num = curr_pic_->frame_num; | |
291 | |
292 InitVAPicture(&pic_param.CurrPic); | |
293 FillVAPicture(&pic_param.CurrPic, curr_pic_.get()); | |
294 | |
295 // Init reference pictures' array. | |
296 for (int i = 0; i < 16; ++i) | |
297 InitVAPicture(&pic_param.ReferenceFrames[i]); | |
298 | |
299 // And fill it with picture info from DPB. | |
300 FillVARefFramesFromDPB(pic_param.ReferenceFrames, | |
301 arraysize(pic_param.ReferenceFrames)); | |
302 | |
303 pic_param.num_ref_frames = sps->max_num_ref_frames; | |
304 | |
305 return vaapi_wrapper_->SubmitBuffer(VAPictureParameterBufferType, | |
306 sizeof(VAPictureParameterBufferH264), | |
307 &pic_param); | |
308 } | |
309 | |
310 bool VaapiH264Decoder::SendIQMatrix() { | |
311 const media::H264PPS* pps = parser_.GetPPS(curr_pps_id_); | |
312 DCHECK(pps); | |
313 | |
314 VAIQMatrixBufferH264 iq_matrix_buf; | |
315 memset(&iq_matrix_buf, 0, sizeof(VAIQMatrixBufferH264)); | |
316 | |
317 if (pps->pic_scaling_matrix_present_flag) { | |
318 for (int i = 0; i < 6; ++i) { | |
319 for (int j = 0; j < 16; ++j) | |
320 iq_matrix_buf.ScalingList4x4[i][j] = pps->scaling_list4x4[i][j]; | |
321 } | |
322 | |
323 for (int i = 0; i < 2; ++i) { | |
324 for (int j = 0; j < 64; ++j) | |
325 iq_matrix_buf.ScalingList8x8[i][j] = pps->scaling_list8x8[i][j]; | |
326 } | |
327 } else { | |
328 const media::H264SPS* sps = parser_.GetSPS(pps->seq_parameter_set_id); | |
329 DCHECK(sps); | |
330 for (int i = 0; i < 6; ++i) { | |
331 for (int j = 0; j < 16; ++j) | |
332 iq_matrix_buf.ScalingList4x4[i][j] = sps->scaling_list4x4[i][j]; | |
333 } | |
334 | |
335 for (int i = 0; i < 2; ++i) { | |
336 for (int j = 0; j < 64; ++j) | |
337 iq_matrix_buf.ScalingList8x8[i][j] = sps->scaling_list8x8[i][j]; | |
338 } | |
339 } | |
340 | |
341 return vaapi_wrapper_->SubmitBuffer(VAIQMatrixBufferType, | |
342 sizeof(VAIQMatrixBufferH264), | |
343 &iq_matrix_buf); | |
344 } | |
345 | |
346 bool VaapiH264Decoder::SendVASliceParam(media::H264SliceHeader* slice_hdr) { | |
347 const media::H264PPS* pps = parser_.GetPPS(slice_hdr->pic_parameter_set_id); | |
348 DCHECK(pps); | |
349 | |
350 const media::H264SPS* sps = parser_.GetSPS(pps->seq_parameter_set_id); | |
351 DCHECK(sps); | |
352 | |
353 VASliceParameterBufferH264 slice_param; | |
354 memset(&slice_param, 0, sizeof(VASliceParameterBufferH264)); | |
355 | |
356 slice_param.slice_data_size = slice_hdr->nalu_size; | |
357 slice_param.slice_data_offset = 0; | |
358 slice_param.slice_data_flag = VA_SLICE_DATA_FLAG_ALL; | |
359 slice_param.slice_data_bit_offset = slice_hdr->header_bit_size; | |
360 | |
361 #define SHDRToSP(a) slice_param.a = slice_hdr->a; | |
362 SHDRToSP(first_mb_in_slice); | |
363 slice_param.slice_type = slice_hdr->slice_type % 5; | |
364 SHDRToSP(direct_spatial_mv_pred_flag); | |
365 | |
366 // TODO posciak: make sure parser sets those even when override flags | |
367 // in slice header is off. | |
368 SHDRToSP(num_ref_idx_l0_active_minus1); | |
369 SHDRToSP(num_ref_idx_l1_active_minus1); | |
370 SHDRToSP(cabac_init_idc); | |
371 SHDRToSP(slice_qp_delta); | |
372 SHDRToSP(disable_deblocking_filter_idc); | |
373 SHDRToSP(slice_alpha_c0_offset_div2); | |
374 SHDRToSP(slice_beta_offset_div2); | |
375 | |
376 if (((slice_hdr->IsPSlice() || slice_hdr->IsSPSlice()) && | |
377 pps->weighted_pred_flag) || | |
378 (slice_hdr->IsBSlice() && pps->weighted_bipred_idc == 1)) { | |
379 SHDRToSP(luma_log2_weight_denom); | |
380 SHDRToSP(chroma_log2_weight_denom); | |
381 | |
382 SHDRToSP(luma_weight_l0_flag); | |
383 SHDRToSP(luma_weight_l1_flag); | |
384 | |
385 SHDRToSP(chroma_weight_l0_flag); | |
386 SHDRToSP(chroma_weight_l1_flag); | |
387 | |
388 for (int i = 0; i <= slice_param.num_ref_idx_l0_active_minus1; ++i) { | |
389 slice_param.luma_weight_l0[i] = | |
390 slice_hdr->pred_weight_table_l0.luma_weight[i]; | |
391 slice_param.luma_offset_l0[i] = | |
392 slice_hdr->pred_weight_table_l0.luma_offset[i]; | |
393 | |
394 for (int j = 0; j < 2; ++j) { | |
395 slice_param.chroma_weight_l0[i][j] = | |
396 slice_hdr->pred_weight_table_l0.chroma_weight[i][j]; | |
397 slice_param.chroma_offset_l0[i][j] = | |
398 slice_hdr->pred_weight_table_l0.chroma_offset[i][j]; | |
399 } | |
400 } | |
401 | |
402 if (slice_hdr->IsBSlice()) { | |
403 for (int i = 0; i <= slice_param.num_ref_idx_l1_active_minus1; ++i) { | |
404 slice_param.luma_weight_l1[i] = | |
405 slice_hdr->pred_weight_table_l1.luma_weight[i]; | |
406 slice_param.luma_offset_l1[i] = | |
407 slice_hdr->pred_weight_table_l1.luma_offset[i]; | |
408 | |
409 for (int j = 0; j < 2; ++j) { | |
410 slice_param.chroma_weight_l1[i][j] = | |
411 slice_hdr->pred_weight_table_l1.chroma_weight[i][j]; | |
412 slice_param.chroma_offset_l1[i][j] = | |
413 slice_hdr->pred_weight_table_l1.chroma_offset[i][j]; | |
414 } | |
415 } | |
416 } | |
417 } | |
418 | |
419 for (int i = 0; i < 32; ++i) { | |
420 InitVAPicture(&slice_param.RefPicList0[i]); | |
421 InitVAPicture(&slice_param.RefPicList1[i]); | |
422 } | |
423 | |
424 int i; | |
425 H264Picture::PtrVector::iterator it; | |
426 for (it = ref_pic_list0_.begin(), i = 0; it != ref_pic_list0_.end() && *it; | |
427 ++it, ++i) | |
428 FillVAPicture(&slice_param.RefPicList0[i], *it); | |
429 for (it = ref_pic_list1_.begin(), i = 0; it != ref_pic_list1_.end() && *it; | |
430 ++it, ++i) | |
431 FillVAPicture(&slice_param.RefPicList1[i], *it); | |
432 | |
433 return vaapi_wrapper_->SubmitBuffer(VASliceParameterBufferType, | |
434 sizeof(VASliceParameterBufferH264), | |
435 &slice_param); | |
436 } | |
437 | |
438 bool VaapiH264Decoder::SendSliceData(const uint8* ptr, size_t size) { | |
439 // Can't help it, blame libva... | |
440 void* non_const_ptr = const_cast<uint8*>(ptr); | |
441 return vaapi_wrapper_->SubmitBuffer(VASliceDataBufferType, size, | |
442 non_const_ptr); | |
443 } | |
444 | |
445 bool VaapiH264Decoder::PrepareRefPicLists(media::H264SliceHeader* slice_hdr) { | |
446 ref_pic_list0_.clear(); | |
447 ref_pic_list1_.clear(); | |
448 | 77 |
449 // Fill reference picture lists for P/SP and B slices. | 78 // Fill reference picture lists for P/SP and B slices. |
450 if (slice_hdr->IsPSlice() || slice_hdr->IsSPSlice()) { | 79 if (slice_hdr->IsPSlice() || slice_hdr->IsSPSlice()) { |
451 ConstructReferencePicListsP(slice_hdr); | 80 *ref_pic_list0 = ref_pic_list_p0_; |
452 return ModifyReferencePicList(slice_hdr, 0); | 81 return ModifyReferencePicList(slice_hdr, 0, ref_pic_list0); |
453 } | 82 } else if (slice_hdr->IsBSlice()) { |
454 | 83 *ref_pic_list0 = ref_pic_list_b0_; |
455 if (slice_hdr->IsBSlice()) { | 84 *ref_pic_list1 = ref_pic_list_b1_; |
456 ConstructReferencePicListsB(slice_hdr); | 85 return ModifyReferencePicList(slice_hdr, 0, ref_pic_list0) && |
457 return ModifyReferencePicList(slice_hdr, 0) && | 86 ModifyReferencePicList(slice_hdr, 1, ref_pic_list1); |
458 ModifyReferencePicList(slice_hdr, 1); | |
459 } | 87 } |
460 | 88 |
461 return true; | 89 return true; |
462 } | 90 } |
463 | 91 |
464 bool VaapiH264Decoder::QueueSlice(media::H264SliceHeader* slice_hdr) { | 92 bool H264Decoder::DecodePicture() { |
465 DCHECK(curr_pic_.get()); | |
466 | |
467 if (!PrepareRefPicLists(slice_hdr)) | |
468 return false; | |
469 | |
470 if (!SendVASliceParam(slice_hdr)) | |
471 return false; | |
472 | |
473 if (!SendSliceData(slice_hdr->nalu_data, slice_hdr->nalu_size)) | |
474 return false; | |
475 | |
476 return true; | |
477 } | |
478 | |
479 // TODO(posciak) start using vaMapBuffer instead of vaCreateBuffer wherever | |
480 // possible. | |
481 bool VaapiH264Decoder::DecodePicture() { | |
482 DCHECK(curr_pic_.get()); | 93 DCHECK(curr_pic_.get()); |
483 | 94 |
484 DVLOG(4) << "Decoding POC " << curr_pic_->pic_order_cnt; | 95 DVLOG(4) << "Decoding POC " << curr_pic_->pic_order_cnt; |
485 DecodeSurface* dec_surface = DecodeSurfaceByPoC(curr_pic_->pic_order_cnt); | 96 return accelerator_->SubmitDecode(curr_pic_); |
486 if (!dec_surface) { | |
487 DVLOG(1) << "Asked to decode an invalid POC " << curr_pic_->pic_order_cnt; | |
488 return false; | |
489 } | |
490 | |
491 if (!vaapi_wrapper_->ExecuteAndDestroyPendingBuffers( | |
492 dec_surface->va_surface()->id())) { | |
493 DVLOG(1) << "Failed decoding picture"; | |
494 return false; | |
495 } | |
496 | |
497 return true; | |
498 } | 97 } |
499 | 98 |
500 bool VaapiH264Decoder::InitCurrPicture(media::H264SliceHeader* slice_hdr) { | 99 bool H264Decoder::InitCurrPicture(media::H264SliceHeader* slice_hdr) { |
501 DCHECK(curr_pic_.get()); | 100 DCHECK(curr_pic_.get()); |
502 | 101 |
503 memset(curr_pic_.get(), 0, sizeof(H264Picture)); | |
504 | |
505 curr_pic_->idr = slice_hdr->idr_pic_flag; | 102 curr_pic_->idr = slice_hdr->idr_pic_flag; |
506 | 103 |
507 if (slice_hdr->field_pic_flag) { | 104 if (slice_hdr->field_pic_flag) { |
508 curr_pic_->field = slice_hdr->bottom_field_flag ? H264Picture::FIELD_BOTTOM | 105 curr_pic_->field = slice_hdr->bottom_field_flag ? H264Picture::FIELD_BOTTOM |
509 : H264Picture::FIELD_TOP; | 106 : H264Picture::FIELD_TOP; |
510 } else { | 107 } else { |
511 curr_pic_->field = H264Picture::FIELD_NONE; | 108 curr_pic_->field = H264Picture::FIELD_NONE; |
512 } | 109 } |
513 | 110 |
514 curr_pic_->ref = slice_hdr->nal_ref_idc != 0; | 111 curr_pic_->ref = slice_hdr->nal_ref_idc != 0; |
515 // This assumes non-interlaced stream. | 112 // This assumes non-interlaced stream. |
516 curr_pic_->frame_num = curr_pic_->pic_num = slice_hdr->frame_num; | 113 curr_pic_->frame_num = curr_pic_->pic_num = slice_hdr->frame_num; |
517 | 114 |
518 if (!CalculatePicOrderCounts(slice_hdr)) | 115 if (!CalculatePicOrderCounts(slice_hdr)) |
519 return false; | 116 return false; |
520 | 117 |
521 // Try to get an empty surface to decode this picture to. | |
522 if (!AssignSurfaceToPoC(curr_input_id_, curr_pic_->pic_order_cnt)) { | |
523 DVLOG(1) << "Failed getting a free surface for a picture"; | |
524 return false; | |
525 } | |
526 | |
527 curr_pic_->long_term_reference_flag = slice_hdr->long_term_reference_flag; | 118 curr_pic_->long_term_reference_flag = slice_hdr->long_term_reference_flag; |
528 curr_pic_->adaptive_ref_pic_marking_mode_flag = | 119 curr_pic_->adaptive_ref_pic_marking_mode_flag = |
529 slice_hdr->adaptive_ref_pic_marking_mode_flag; | 120 slice_hdr->adaptive_ref_pic_marking_mode_flag; |
530 | 121 |
531 // If the slice header indicates we will have to perform reference marking | 122 // If the slice header indicates we will have to perform reference marking |
532 // process after this picture is decoded, store required data for that | 123 // process after this picture is decoded, store required data for that |
533 // purpose. | 124 // purpose. |
534 if (slice_hdr->adaptive_ref_pic_marking_mode_flag) { | 125 if (slice_hdr->adaptive_ref_pic_marking_mode_flag) { |
535 static_assert(sizeof(curr_pic_->ref_pic_marking) == | 126 COMPILE_ASSERT(sizeof(curr_pic_->ref_pic_marking) == |
536 sizeof(slice_hdr->ref_pic_marking), | 127 sizeof(slice_hdr->ref_pic_marking), |
537 "ref_pic_marking array sizes do not match"); | 128 ref_pic_marking_array_sizes_do_not_match); |
538 memcpy(curr_pic_->ref_pic_marking, slice_hdr->ref_pic_marking, | 129 memcpy(curr_pic_->ref_pic_marking, slice_hdr->ref_pic_marking, |
539 sizeof(curr_pic_->ref_pic_marking)); | 130 sizeof(curr_pic_->ref_pic_marking)); |
540 } | 131 } |
541 | 132 |
542 return true; | 133 return true; |
543 } | 134 } |
544 | 135 |
545 bool VaapiH264Decoder::CalculatePicOrderCounts( | 136 bool H264Decoder::CalculatePicOrderCounts(media::H264SliceHeader* slice_hdr) { |
546 media::H264SliceHeader* slice_hdr) { | |
547 DCHECK_NE(curr_sps_id_, -1); | 137 DCHECK_NE(curr_sps_id_, -1); |
548 const media::H264SPS* sps = parser_.GetSPS(curr_sps_id_); | 138 const media::H264SPS* sps = parser_.GetSPS(curr_sps_id_); |
549 | 139 |
550 int pic_order_cnt_lsb = slice_hdr->pic_order_cnt_lsb; | 140 int pic_order_cnt_lsb = slice_hdr->pic_order_cnt_lsb; |
551 curr_pic_->pic_order_cnt_lsb = pic_order_cnt_lsb; | 141 curr_pic_->pic_order_cnt_lsb = pic_order_cnt_lsb; |
552 | 142 |
553 switch (sps->pic_order_cnt_type) { | 143 switch (sps->pic_order_cnt_type) { |
554 case 0: | 144 case 0: |
555 // See spec 8.2.1.1. | 145 // See spec 8.2.1.1. |
556 int prev_pic_order_cnt_msb, prev_pic_order_cnt_lsb; | 146 int prev_pic_order_cnt_msb, prev_pic_order_cnt_lsb; |
(...skipping 85 matching lines...) |
642 // frame_num_in_pic_order_cnt_cycle is verified < 255 in parser | 232 // frame_num_in_pic_order_cnt_cycle is verified < 255 in parser |
643 for (int i = 0; i <= frame_num_in_pic_order_cnt_cycle; ++i) | 233 for (int i = 0; i <= frame_num_in_pic_order_cnt_cycle; ++i) |
644 expected_pic_order_cnt += sps->offset_for_ref_frame[i]; | 234 expected_pic_order_cnt += sps->offset_for_ref_frame[i]; |
645 } | 235 } |
646 | 236 |
647 if (!slice_hdr->nal_ref_idc) | 237 if (!slice_hdr->nal_ref_idc) |
648 expected_pic_order_cnt += sps->offset_for_non_ref_pic; | 238 expected_pic_order_cnt += sps->offset_for_non_ref_pic; |
649 | 239 |
650 if (!slice_hdr->field_pic_flag) { | 240 if (!slice_hdr->field_pic_flag) { |
651 curr_pic_->top_field_order_cnt = expected_pic_order_cnt + | 241 curr_pic_->top_field_order_cnt = expected_pic_order_cnt + |
652 slice_hdr->delta_pic_order_cnt[0]; | 242 slice_hdr->delta_pic_order_cnt0; |
653 curr_pic_->bottom_field_order_cnt = curr_pic_->top_field_order_cnt + | 243 curr_pic_->bottom_field_order_cnt = curr_pic_->top_field_order_cnt + |
654 sps->offset_for_top_to_bottom_field + | 244 sps->offset_for_top_to_bottom_field + |
655 slice_hdr->delta_pic_order_cnt[1]; | 245 slice_hdr->delta_pic_order_cnt1; |
656 } else if (!slice_hdr->bottom_field_flag) { | 246 } else if (!slice_hdr->bottom_field_flag) { |
657 curr_pic_->top_field_order_cnt = expected_pic_order_cnt + | 247 curr_pic_->top_field_order_cnt = expected_pic_order_cnt + |
658 slice_hdr->delta_pic_order_cnt[0]; | 248 slice_hdr->delta_pic_order_cnt0; |
659 } else { | 249 } else { |
660 curr_pic_->bottom_field_order_cnt = expected_pic_order_cnt + | 250 curr_pic_->bottom_field_order_cnt = expected_pic_order_cnt + |
661 sps->offset_for_top_to_bottom_field + | 251 sps->offset_for_top_to_bottom_field + |
662 slice_hdr->delta_pic_order_cnt[0]; | 252 slice_hdr->delta_pic_order_cnt0; |
663 } | 253 } |
664 break; | 254 break; |
665 } | 255 } |
666 | 256 |
667 case 2: | 257 case 2: |
668 // See spec 8.2.1.3. | 258 // See spec 8.2.1.3. |
669 if (prev_has_memmgmnt5_) | 259 if (prev_has_memmgmnt5_) |
670 prev_frame_num_offset_ = 0; | 260 prev_frame_num_offset_ = 0; |
671 | 261 |
672 if (slice_hdr->idr_pic_flag) | 262 if (slice_hdr->idr_pic_flag) |
(...skipping 38 matching lines...) |
711 curr_pic_->pic_order_cnt = curr_pic_->top_field_order_cnt; | 301 curr_pic_->pic_order_cnt = curr_pic_->top_field_order_cnt; |
712 break; | 302 break; |
713 case H264Picture::FIELD_BOTTOM: | 303 case H264Picture::FIELD_BOTTOM: |
714 curr_pic_->pic_order_cnt = curr_pic_->bottom_field_order_cnt; | 304 curr_pic_->pic_order_cnt = curr_pic_->bottom_field_order_cnt; |
715 break; | 305 break; |
716 } | 306 } |
717 | 307 |
718 return true; | 308 return true; |
719 } | 309 } |
720 | 310 |
721 void VaapiH264Decoder::UpdatePicNums() { | 311 void H264Decoder::UpdatePicNums() { |
722 for (H264DPB::Pictures::iterator it = dpb_.begin(); it != dpb_.end(); ++it) { | 312 for (auto& pic : dpb_) { |
723 H264Picture* pic = *it; | |
724 DCHECK(pic); | |
725 if (!pic->ref) | 313 if (!pic->ref) |
726 continue; | 314 continue; |
727 | 315 |
728 // Below assumes non-interlaced stream. | 316 // Below assumes non-interlaced stream. |
729 DCHECK_EQ(pic->field, H264Picture::FIELD_NONE); | 317 DCHECK_EQ(pic->field, H264Picture::FIELD_NONE); |
730 if (pic->long_term) { | 318 if (pic->long_term) { |
731 pic->long_term_pic_num = pic->long_term_frame_idx; | 319 pic->long_term_pic_num = pic->long_term_frame_idx; |
732 } else { | 320 } else { |
733 if (pic->frame_num > frame_num_) | 321 if (pic->frame_num > frame_num_) |
734 pic->frame_num_wrap = pic->frame_num - max_frame_num_; | 322 pic->frame_num_wrap = pic->frame_num - max_frame_num_; |
735 else | 323 else |
736 pic->frame_num_wrap = pic->frame_num; | 324 pic->frame_num_wrap = pic->frame_num; |
737 | 325 |
738 pic->pic_num = pic->frame_num_wrap; | 326 pic->pic_num = pic->frame_num_wrap; |
739 } | 327 } |
740 } | 328 } |
741 } | 329 } |
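The wrap handling above follows spec 8.2.4.1: a short-term reference coded with a frame_num larger than the current frame_num_ belongs to the previous wrap cycle and is shifted down by max_frame_num_. A minimal standalone sketch of the same arithmetic (the plain-int helper and the sample values are illustrative, not part of this patch):

#include <cassert>

// Mirrors the frame_num_wrap computation in UpdatePicNums() with plain ints.
int FrameNumWrap(int ref_frame_num, int curr_frame_num, int max_frame_num) {
  return (ref_frame_num > curr_frame_num) ? ref_frame_num - max_frame_num
                                          : ref_frame_num;
}

int main() {
  // With MaxFrameNum = 16 and the current picture at frame_num = 2, a
  // short-term reference coded with frame_num = 14 came from the previous
  // cycle and wraps to -2, so it sorts below current-cycle references.
  assert(FrameNumWrap(14, 2, 16) == -2);
  // A reference from the current wrap cycle keeps its frame_num as pic_num.
  assert(FrameNumWrap(1, 2, 16) == 1);
  return 0;
}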
742 | 330 |
743 struct PicNumDescCompare { | 331 struct PicNumDescCompare { |
744 bool operator()(const H264Picture* a, const H264Picture* b) const { | 332 bool operator()(const scoped_refptr<H264Picture>& a, |
| 333 const scoped_refptr<H264Picture>& b) const { |
745 return a->pic_num > b->pic_num; | 334 return a->pic_num > b->pic_num; |
746 } | 335 } |
747 }; | 336 }; |
748 | 337 |
749 struct LongTermPicNumAscCompare { | 338 struct LongTermPicNumAscCompare { |
750 bool operator()(const H264Picture* a, const H264Picture* b) const { | 339 bool operator()(const scoped_refptr<H264Picture>& a, |
| 340 const scoped_refptr<H264Picture>& b) const { |
751 return a->long_term_pic_num < b->long_term_pic_num; | 341 return a->long_term_pic_num < b->long_term_pic_num; |
752 } | 342 } |
753 }; | 343 }; |
754 | 344 |
755 void VaapiH264Decoder::ConstructReferencePicListsP( | 345 void H264Decoder::ConstructReferencePicListsP( |
756 media::H264SliceHeader* slice_hdr) { | 346 media::H264SliceHeader* slice_hdr) { |
757 // RefPicList0 (8.2.4.2.1) [[1] [2]], where: | 347 // RefPicList0 (8.2.4.2.1) [[1] [2]], where: |
758 // [1] shortterm ref pics sorted by descending pic_num, | 348 // [1] shortterm ref pics sorted by descending pic_num, |
759 // [2] longterm ref pics by ascending long_term_pic_num. | 349 // [2] longterm ref pics by ascending long_term_pic_num. |
760 DCHECK(ref_pic_list0_.empty() && ref_pic_list1_.empty()); | 350 ref_pic_list_p0_.clear(); |
| 351 |
761 // First get the short ref pics... | 352 // First get the short ref pics... |
762 dpb_.GetShortTermRefPicsAppending(ref_pic_list0_); | 353 dpb_.GetShortTermRefPicsAppending(&ref_pic_list_p0_); |
763 size_t num_short_refs = ref_pic_list0_.size(); | 354 size_t num_short_refs = ref_pic_list_p0_.size(); |
764 | 355 |
765 // and sort them to get [1]. | 356 // and sort them to get [1]. |
766 std::sort(ref_pic_list0_.begin(), ref_pic_list0_.end(), PicNumDescCompare()); | 357 std::sort(ref_pic_list_p0_.begin(), ref_pic_list_p0_.end(), |
| 358 PicNumDescCompare()); |
767 | 359 |
768 // Now get long term pics and sort them by long_term_pic_num to get [2]. | 360 // Now get long term pics and sort them by long_term_pic_num to get [2]. |
769 dpb_.GetLongTermRefPicsAppending(ref_pic_list0_); | 361 dpb_.GetLongTermRefPicsAppending(&ref_pic_list_p0_); |
770 std::sort(ref_pic_list0_.begin() + num_short_refs, ref_pic_list0_.end(), | 362 std::sort(ref_pic_list_p0_.begin() + num_short_refs, ref_pic_list_p0_.end(), |
771 LongTermPicNumAscCompare()); | 363 LongTermPicNumAscCompare()); |
772 | 364 |
773 // Cut off if we have more than requested in slice header. | 365 // Cut off if we have more than requested in slice header. |
774 ref_pic_list0_.resize(slice_hdr->num_ref_idx_l0_active_minus1 + 1); | 366 ref_pic_list_p0_.resize(slice_hdr->num_ref_idx_l0_active_minus1 + 1); |
775 } | 367 } |
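A self-contained sketch of the two-stage ordering used above for P slices, with a simplified struct standing in for H264Picture (names and sample values are illustrative only, not from this patch):

#include <algorithm>
#include <cassert>
#include <vector>

struct Pic {
  bool long_term;
  int pic_num;            // meaningful for short-term references
  int long_term_pic_num;  // meaningful for long-term references
};

int main() {
  // Short-term references first, then long-term ones, in the order the DPB
  // getters would append them.
  std::vector<Pic> list = {{false, 3, 0}, {false, 7, 0}, {false, 5, 0},
                           {true, 0, 2},  {true, 0, 1}};
  int num_short = 3;

  // [1] short-term refs sorted by descending pic_num...
  std::sort(list.begin(), list.begin() + num_short,
            [](const Pic& a, const Pic& b) { return a.pic_num > b.pic_num; });
  // [2] ...followed by long-term refs sorted by ascending long_term_pic_num.
  std::sort(list.begin() + num_short, list.end(),
            [](const Pic& a, const Pic& b) {
              return a.long_term_pic_num < b.long_term_pic_num;
            });

  assert(list[0].pic_num == 7 && list[1].pic_num == 5 && list[2].pic_num == 3);
  assert(list[3].long_term_pic_num == 1 && list[4].long_term_pic_num == 2);
  return 0;
}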
776 | 368 |
777 struct POCAscCompare { | 369 struct POCAscCompare { |
778 bool operator()(const H264Picture* a, const H264Picture* b) const { | 370 bool operator()(const scoped_refptr<H264Picture>& a, |
| 371 const scoped_refptr<H264Picture>& b) const { |
779 return a->pic_order_cnt < b->pic_order_cnt; | 372 return a->pic_order_cnt < b->pic_order_cnt; |
780 } | 373 } |
781 }; | 374 }; |
782 | 375 |
783 struct POCDescCompare { | 376 struct POCDescCompare { |
784 bool operator()(const H264Picture* a, const H264Picture* b) const { | 377 bool operator()(const scoped_refptr<H264Picture>& a, |
| 378 const scoped_refptr<H264Picture>& b) const { |
785 return a->pic_order_cnt > b->pic_order_cnt; | 379 return a->pic_order_cnt > b->pic_order_cnt; |
786 } | 380 } |
787 }; | 381 }; |
788 | 382 |
789 void VaapiH264Decoder::ConstructReferencePicListsB( | 383 void H264Decoder::ConstructReferencePicListsB( |
790 media::H264SliceHeader* slice_hdr) { | 384 media::H264SliceHeader* slice_hdr) { |
791 // RefPicList0 (8.2.4.2.3) [[1] [2] [3]], where: | 385 // RefPicList0 (8.2.4.2.3) [[1] [2] [3]], where: |
792 // [1] shortterm ref pics with POC < curr_pic's POC sorted by descending POC, | 386 // [1] shortterm ref pics with POC < curr_pic's POC sorted by descending POC, |
793 // [2] shortterm ref pics with POC > curr_pic's POC by ascending POC, | 387 // [2] shortterm ref pics with POC > curr_pic's POC by ascending POC, |
794 // [3] longterm ref pics by ascending long_term_pic_num. | 388 // [3] longterm ref pics by ascending long_term_pic_num. |
795 DCHECK(ref_pic_list0_.empty() && ref_pic_list1_.empty()); | 389 ref_pic_list_b0_.clear(); |
796 dpb_.GetShortTermRefPicsAppending(ref_pic_list0_); | 390 ref_pic_list_b1_.clear(); |
797 size_t num_short_refs = ref_pic_list0_.size(); | 391 dpb_.GetShortTermRefPicsAppending(&ref_pic_list_b0_); |
| 392 size_t num_short_refs = ref_pic_list_b0_.size(); |
798 | 393 |
799 // First sort ascending, this will put [1] in right place and finish [2]. | 394 // First sort ascending, this will put [1] in right place and finish [2]. |
800 std::sort(ref_pic_list0_.begin(), ref_pic_list0_.end(), POCAscCompare()); | 395 std::sort(ref_pic_list_b0_.begin(), ref_pic_list_b0_.end(), POCAscCompare()); |
801 | 396 |
802 // Find first with POC > curr_pic's POC to get first element in [2]... | 397 // Find first with POC > curr_pic's POC to get first element in [2]... |
803 H264Picture::PtrVector::iterator iter; | 398 H264Picture::Vector::iterator iter; |
804 iter = std::upper_bound(ref_pic_list0_.begin(), ref_pic_list0_.end(), | 399 iter = std::upper_bound(ref_pic_list_b0_.begin(), ref_pic_list_b0_.end(), |
805 curr_pic_.get(), POCAscCompare()); | 400 curr_pic_.get(), POCAscCompare()); |
806 | 401 |
807 // and sort [1] descending, thus finishing sequence [1] [2]. | 402 // and sort [1] descending, thus finishing sequence [1] [2]. |
808 std::sort(ref_pic_list0_.begin(), iter, POCDescCompare()); | 403 std::sort(ref_pic_list_b0_.begin(), iter, POCDescCompare()); |
809 | 404 |
810 // Now add [3] and sort by ascending long_term_pic_num. | 405 // Now add [3] and sort by ascending long_term_pic_num. |
811 dpb_.GetLongTermRefPicsAppending(ref_pic_list0_); | 406 dpb_.GetLongTermRefPicsAppending(&ref_pic_list_b0_); |
812 std::sort(ref_pic_list0_.begin() + num_short_refs, ref_pic_list0_.end(), | 407 std::sort(ref_pic_list_b0_.begin() + num_short_refs, ref_pic_list_b0_.end(), |
813 LongTermPicNumAscCompare()); | 408 LongTermPicNumAscCompare()); |
814 | 409 |
815 // RefPicList1 (8.2.4.2.4) [[1] [2] [3]], where: | 410 // RefPicList1 (8.2.4.2.4) [[1] [2] [3]], where: |
816 // [1] shortterm ref pics with POC > curr_pic's POC sorted by ascending POC, | 411 // [1] shortterm ref pics with POC > curr_pic's POC sorted by ascending POC, |
817 // [2] shortterm ref pics with POC < curr_pic's POC by descending POC, | 412 // [2] shortterm ref pics with POC < curr_pic's POC by descending POC, |
818 // [3] longterm ref pics by ascending long_term_pic_num. | 413 // [3] longterm ref pics by ascending long_term_pic_num. |
819 | 414 |
820 dpb_.GetShortTermRefPicsAppending(ref_pic_list1_); | 415 dpb_.GetShortTermRefPicsAppending(&ref_pic_list_b1_); |
821 num_short_refs = ref_pic_list1_.size(); | 416 num_short_refs = ref_pic_list_b1_.size(); |
822 | 417 |
823 // First sort by descending POC. | 418 // First sort by descending POC. |
824 std::sort(ref_pic_list1_.begin(), ref_pic_list1_.end(), POCDescCompare()); | 419 std::sort(ref_pic_list_b1_.begin(), ref_pic_list_b1_.end(), POCDescCompare()); |
825 | 420 |
826 // Find first with POC < curr_pic's POC to get first element in [2]... | 421 // Find first with POC < curr_pic's POC to get first element in [2]... |
827 iter = std::upper_bound(ref_pic_list1_.begin(), ref_pic_list1_.end(), | 422 iter = std::upper_bound(ref_pic_list_b1_.begin(), ref_pic_list_b1_.end(), |
828 curr_pic_.get(), POCDescCompare()); | 423 curr_pic_.get(), POCDescCompare()); |
829 | 424 |
830 // and sort [1] ascending. | 425 // and sort [1] ascending. |
831 std::sort(ref_pic_list1_.begin(), iter, POCAscCompare()); | 426 std::sort(ref_pic_list_b1_.begin(), iter, POCAscCompare()); |
832 | 427 |
833 // Now add [3] and sort by ascending long_term_pic_num | 428 // Now add [3] and sort by ascending long_term_pic_num |
834 dpb_.GetLongTermRefPicsAppending(ref_pic_list1_); | 429 dpb_.GetLongTermRefPicsAppending(&ref_pic_list_b1_); |
835 std::sort(ref_pic_list1_.begin() + num_short_refs, ref_pic_list1_.end(), | 430 std::sort(ref_pic_list_b1_.begin() + num_short_refs, ref_pic_list_b1_.end(), |
836 LongTermPicNumAscCompare()); | 431 LongTermPicNumAscCompare()); |
837 | 432 |
838 // If lists identical, swap first two entries in RefPicList1 (spec 8.2.4.2.3) | 433 // If lists identical, swap first two entries in RefPicList1 (spec 8.2.4.2.3) |
839 if (ref_pic_list1_.size() > 1 && | 434 if (ref_pic_list_b1_.size() > 1 && |
840 std::equal(ref_pic_list0_.begin(), ref_pic_list0_.end(), | 435 std::equal(ref_pic_list_b0_.begin(), ref_pic_list_b0_.end(), |
841 ref_pic_list1_.begin())) | 436 ref_pic_list_b1_.begin())) |
842 std::swap(ref_pic_list1_[0], ref_pic_list1_[1]); | 437 std::swap(ref_pic_list_b1_[0], ref_pic_list_b1_[1]); |
843 | 438 |
844 // Per 8.2.4.2 it's possible for num_ref_idx_lX_active_minus1 to indicate | 439 // Per 8.2.4.2 it's possible for num_ref_idx_lX_active_minus1 to indicate |
845 // there should be more ref pics on list than we constructed. | 440 // there should be more ref pics on list than we constructed. |
846 // Those superfluous ones should be treated as non-reference. | 441 // Those superfluous ones should be treated as non-reference. |
847 ref_pic_list0_.resize(slice_hdr->num_ref_idx_l0_active_minus1 + 1); | 442 ref_pic_list_b0_.resize(slice_hdr->num_ref_idx_l0_active_minus1 + 1); |
848 ref_pic_list1_.resize(slice_hdr->num_ref_idx_l1_active_minus1 + 1); | 443 ref_pic_list_b1_.resize(slice_hdr->num_ref_idx_l1_active_minus1 + 1); |
849 } | 444 } |
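As a worked illustration of the B-slice ordering above (POC values assumed, not taken from this patch): with the current picture at POC 10 and short-term references in the DPB at POCs 2, 6, 12 and 16, ref_pic_list_b0_ comes out as [6, 2, 12, 16] (past references by descending POC, then future references by ascending POC) and ref_pic_list_b1_ as [12, 16, 6, 2] (future references first), with any long-term references appended after each list in ascending long_term_pic_num order.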
850 | 445 |
851 // See 8.2.4 | 446 // See 8.2.4 |
852 int VaapiH264Decoder::PicNumF(H264Picture *pic) { | 447 int H264Decoder::PicNumF(const scoped_refptr<H264Picture>& pic) { |
853 if (!pic) | 448 if (!pic) |
854 return -1; | 449 return -1; |
855 | 450 |
856 if (!pic->long_term) | 451 if (!pic->long_term) |
857 return pic->pic_num; | 452 return pic->pic_num; |
858 else | 453 else |
859 return max_pic_num_; | 454 return max_pic_num_; |
860 } | 455 } |
861 | 456 |
862 // See 8.2.4 | 457 // See 8.2.4 |
863 int VaapiH264Decoder::LongTermPicNumF(H264Picture *pic) { | 458 int H264Decoder::LongTermPicNumF(const scoped_refptr<H264Picture>& pic) { |
864 if (pic->ref && pic->long_term) | 459 if (pic->ref && pic->long_term) |
865 return pic->long_term_pic_num; | 460 return pic->long_term_pic_num; |
866 else | 461 else |
867 return 2 * (max_long_term_frame_idx_ + 1); | 462 return 2 * (max_long_term_frame_idx_ + 1); |
868 } | 463 } |
869 | 464 |
870 // Shift elements of |v| starting from |from| to |to|, inclusive, | 465 // Shift elements of |v| starting from |from| to |to|, inclusive, |
871 // one position to the right and insert pic at |from|. | 466 // one position to the right and insert pic at |from|. |
872 static void ShiftRightAndInsert(H264Picture::PtrVector *v, | 467 static void ShiftRightAndInsert(H264Picture::Vector* v, |
873 int from, | 468 int from, |
874 int to, | 469 int to, |
875 H264Picture* pic) { | 470 const scoped_refptr<H264Picture>& pic) { |
876 // Security checks, do not disable in Debug mode. | 471 // Security checks, do not disable in Debug mode. |
877 CHECK(from <= to); | 472 CHECK(from <= to); |
878 CHECK(to <= std::numeric_limits<int>::max() - 2); | 473 CHECK(to <= std::numeric_limits<int>::max() - 2); |
879 // Additional checks. Debug mode ok. | 474 // Additional checks. Debug mode ok. |
880 DCHECK(v); | 475 DCHECK(v); |
881 DCHECK(pic); | 476 DCHECK(pic); |
882 DCHECK((to + 1 == static_cast<int>(v->size())) || | 477 DCHECK((to + 1 == static_cast<int>(v->size())) || |
883 (to + 2 == static_cast<int>(v->size()))); | 478 (to + 2 == static_cast<int>(v->size()))); |
884 | 479 |
885 v->resize(to + 2); | 480 v->resize(to + 2); |
886 | 481 |
887 for (int i = to + 1; i > from; --i) | 482 for (int i = to + 1; i > from; --i) |
888 (*v)[i] = (*v)[i - 1]; | 483 (*v)[i] = (*v)[i - 1]; |
889 | 484 |
890 (*v)[from] = pic; | 485 (*v)[from] = pic; |
891 } | 486 } |
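A minimal sketch of the behavior of ShiftRightAndInsert() above, using plain ints in place of picture refs (data chosen only for illustration):

#include <cassert>
#include <vector>

// Same shape as ShiftRightAndInsert(): grow |v| by one slot, move elements
// [from, to] one position to the right, and place |val| at index |from|.
static void ShiftRightAndInsertInt(std::vector<int>* v, int from, int to,
                                   int val) {
  v->resize(to + 2);
  for (int i = to + 1; i > from; --i)
    (*v)[i] = (*v)[i - 1];
  (*v)[from] = val;
}

int main() {
  std::vector<int> v = {10, 20, 30, 40};
  // Insert 99 at index 1, shifting indices 1..3 one slot to the right.
  ShiftRightAndInsertInt(&v, 1, 3, 99);
  assert((v == std::vector<int>{10, 99, 20, 30, 40}));
  return 0;
}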
892 | 487 |
893 bool VaapiH264Decoder::ModifyReferencePicList(media::H264SliceHeader* slice_hdr, | 488 bool H264Decoder::ModifyReferencePicList(media::H264SliceHeader* slice_hdr, |
894 int list) { | 489 int list, |
| 490 H264Picture::Vector* ref_pic_listx) { |
895 int num_ref_idx_lX_active_minus1; | 491 int num_ref_idx_lX_active_minus1; |
896 H264Picture::PtrVector* ref_pic_listx; | |
897 media::H264ModificationOfPicNum* list_mod; | 492 media::H264ModificationOfPicNum* list_mod; |
898 | 493 |
899 // This can process either ref_pic_list0 or ref_pic_list1, depending on | 494 // This can process either ref_pic_list0 or ref_pic_list1, depending on |
900 // the list argument. Set up pointers to proper list to be processed here. | 495 // the list argument. Set up pointers to proper list to be processed here. |
901 if (list == 0) { | 496 if (list == 0) { |
902 if (!slice_hdr->ref_pic_list_modification_flag_l0) | 497 if (!slice_hdr->ref_pic_list_modification_flag_l0) |
903 return true; | 498 return true; |
904 | 499 |
905 list_mod = slice_hdr->ref_list_l0_modifications; | 500 list_mod = slice_hdr->ref_list_l0_modifications; |
906 num_ref_idx_lX_active_minus1 = ref_pic_list0_.size() - 1; | |
907 | |
908 ref_pic_listx = &ref_pic_list0_; | |
909 } else { | 501 } else { |
910 if (!slice_hdr->ref_pic_list_modification_flag_l1) | 502 if (!slice_hdr->ref_pic_list_modification_flag_l1) |
911 return true; | 503 return true; |
912 | 504 |
913 list_mod = slice_hdr->ref_list_l1_modifications; | 505 list_mod = slice_hdr->ref_list_l1_modifications; |
914 num_ref_idx_lX_active_minus1 = ref_pic_list1_.size() - 1; | |
915 | |
916 ref_pic_listx = &ref_pic_list1_; | |
917 } | 506 } |
918 | 507 |
| 508 num_ref_idx_lX_active_minus1 = ref_pic_listx->size() - 1; |
919 DCHECK_GE(num_ref_idx_lX_active_minus1, 0); | 509 DCHECK_GE(num_ref_idx_lX_active_minus1, 0); |
920 | 510 |
921 // Spec 8.2.4.3: | 511 // Spec 8.2.4.3: |
922 // Reorder pictures on the list in a way specified in the stream. | 512 // Reorder pictures on the list in a way specified in the stream. |
923 int pic_num_lx_pred = curr_pic_->pic_num; | 513 int pic_num_lx_pred = curr_pic_->pic_num; |
924 int ref_idx_lx = 0; | 514 int ref_idx_lx = 0; |
925 int pic_num_lx_no_wrap; | 515 int pic_num_lx_no_wrap; |
926 int pic_num_lx; | 516 int pic_num_lx; |
927 bool done = false; | 517 bool done = false; |
928 H264Picture* pic; | 518 scoped_refptr<H264Picture> pic; |
929 for (int i = 0; i < media::H264SliceHeader::kRefListModSize && !done; ++i) { | 519 for (int i = 0; i < media::H264SliceHeader::kRefListModSize && !done; ++i) { |
930 switch (list_mod->modification_of_pic_nums_idc) { | 520 switch (list_mod->modification_of_pic_nums_idc) { |
931 case 0: | 521 case 0: |
932 case 1: | 522 case 1: |
933 // Modify short reference picture position. | 523 // Modify short reference picture position. |
934 if (list_mod->modification_of_pic_nums_idc == 0) { | 524 if (list_mod->modification_of_pic_nums_idc == 0) { |
935 // Subtract given value from predicted PicNum. | 525 // Subtract given value from predicted PicNum. |
936 pic_num_lx_no_wrap = pic_num_lx_pred - | 526 pic_num_lx_no_wrap = pic_num_lx_pred - |
937 (static_cast<int>(list_mod->abs_diff_pic_num_minus1) + 1); | 527 (static_cast<int>(list_mod->abs_diff_pic_num_minus1) + 1); |
938 // Wrap around max_pic_num_ if it becomes < 0 as result | 528 // Wrap around max_pic_num_ if it becomes < 0 as result |
(...skipping 45 matching lines...) |
984 DVLOG(1) << "Malformed stream, no pic num " | 574 DVLOG(1) << "Malformed stream, no pic num " |
985 << list_mod->long_term_pic_num; | 575 << list_mod->long_term_pic_num; |
986 return false; | 576 return false; |
987 } | 577 } |
988 ShiftRightAndInsert(ref_pic_listx, ref_idx_lx, | 578 ShiftRightAndInsert(ref_pic_listx, ref_idx_lx, |
989 num_ref_idx_lX_active_minus1, pic); | 579 num_ref_idx_lX_active_minus1, pic); |
990 ref_idx_lx++; | 580 ref_idx_lx++; |
991 | 581 |
992 for (int src = ref_idx_lx, dst = ref_idx_lx; | 582 for (int src = ref_idx_lx, dst = ref_idx_lx; |
993 src <= num_ref_idx_lX_active_minus1 + 1; ++src) { | 583 src <= num_ref_idx_lX_active_minus1 + 1; ++src) { |
994 if (LongTermPicNumF((*ref_pic_listx)[src]) | 584 if (LongTermPicNumF((*ref_pic_listx)[src]) != |
995 != static_cast<int>(list_mod->long_term_pic_num)) | 585 static_cast<int>(list_mod->long_term_pic_num)) |
996 (*ref_pic_listx)[dst++] = (*ref_pic_listx)[src]; | 586 (*ref_pic_listx)[dst++] = (*ref_pic_listx)[src]; |
997 } | 587 } |
998 break; | 588 break; |
999 | 589 |
1000 case 3: | 590 case 3: |
1001 // End of modification list. | 591 // End of modification list. |
1002 done = true; | 592 done = true; |
1003 break; | 593 break; |
1004 | 594 |
1005 default: | 595 default: |
1006 // May be recoverable. | 596 // May be recoverable. |
1007 DVLOG(1) << "Invalid modification_of_pic_nums_idc=" | 597 DVLOG(1) << "Invalid modification_of_pic_nums_idc=" |
1008 << list_mod->modification_of_pic_nums_idc | 598 << list_mod->modification_of_pic_nums_idc |
1009 << " in position " << i; | 599 << " in position " << i; |
1010 break; | 600 break; |
1011 } | 601 } |
1012 | 602 |
1013 ++list_mod; | 603 ++list_mod; |
1014 } | 604 } |
1015 | 605 |
1016 // Per NOTE 2 in 8.2.4.3.2, the ref_pic_listx size in the above loop is | 606 // Per NOTE 2 in 8.2.4.3.2, the ref_pic_listx size in the above loop is |
1017 // temporarily made one element longer than the required final list. | 607 // temporarily made one element longer than the required final list. |
1018 // Resize the list back to its required size. | 608 // Resize the list back to its required size. |
1019 ref_pic_listx->resize(num_ref_idx_lX_active_minus1 + 1); | 609 ref_pic_listx->resize(num_ref_idx_lX_active_minus1 + 1); |
1020 | 610 |
1021 return true; | 611 return true; |
1022 } | 612 } |
1023 | 613 |
1024 bool VaapiH264Decoder::OutputPic(H264Picture* pic) { | 614 void H264Decoder::OutputPic(scoped_refptr<H264Picture> pic) { |
1025 DCHECK(!pic->outputted); | 615 DCHECK(!pic->outputted); |
1026 pic->outputted = true; | 616 pic->outputted = true; |
1027 last_output_poc_ = pic->pic_order_cnt; | 617 last_output_poc_ = pic->pic_order_cnt; |
1028 | 618 |
1029 DecodeSurface* dec_surface = DecodeSurfaceByPoC(pic->pic_order_cnt); | 619 DVLOG(4) << "Posting output task for POC: " << pic->pic_order_cnt; |
1030 if (!dec_surface) | 620 accelerator_->OutputPicture(pic); |
1031 return false; | 621 } |
1032 | 622 |
1033 DCHECK_GE(dec_surface->input_id(), 0); | 623 void H264Decoder::ClearDPB() { |
1034 DVLOG(4) << "Posting output task for POC: " << pic->pic_order_cnt | 624 // Clear DPB contents, marking the pictures as unused first. |
1035 << " input_id: " << dec_surface->input_id(); | 625 dpb_.Clear(); |
1036 output_pic_cb_.Run(dec_surface->input_id(), dec_surface->va_surface()); | 626 last_output_poc_ = std::numeric_limits<int>::min(); |
| 627 } |
| 628 |
| 629 bool H264Decoder::OutputAllRemainingPics() { |
| 630 // Output all pictures that are waiting to be outputted. |
| 631 FinishPrevFrameIfPresent(); |
| 632 H264Picture::Vector to_output; |
| 633 dpb_.GetNotOutputtedPicsAppending(&to_output); |
| 634 // Sort them by ascending POC to output in order. |
| 635 std::sort(to_output.begin(), to_output.end(), POCAscCompare()); |
| 636 |
| 637 for (auto& pic : to_output) |
| 638 OutputPic(pic); |
1037 | 639 |
1038 return true; | 640 return true; |
1039 } | 641 } |
1040 | 642 |
1041 void VaapiH264Decoder::ClearDPB() { | 643 bool H264Decoder::Flush() { |
1042 // Clear DPB contents, marking the pictures as unused first. | |
1043 for (H264DPB::Pictures::iterator it = dpb_.begin(); it != dpb_.end(); ++it) | |
1044 UnassignSurfaceFromPoC((*it)->pic_order_cnt); | |
1045 | |
1046 dpb_.Clear(); | |
1047 last_output_poc_ = std::numeric_limits<int>::min(); | |
1048 } | |
1049 | |
1050 bool VaapiH264Decoder::OutputAllRemainingPics() { | |
1051 // Output all pictures that are waiting to be outputted. | |
1052 FinishPrevFrameIfPresent(); | |
1053 H264Picture::PtrVector to_output; | |
1054 dpb_.GetNotOutputtedPicsAppending(to_output); | |
1055 // Sort them by ascending POC to output in order. | |
1056 std::sort(to_output.begin(), to_output.end(), POCAscCompare()); | |
1057 | |
1058 H264Picture::PtrVector::iterator it; | |
1059 for (it = to_output.begin(); it != to_output.end(); ++it) { | |
1060 if (!OutputPic(*it)) { | |
1061 DVLOG(1) << "Failed to output pic POC: " << (*it)->pic_order_cnt; | |
1062 return false; | |
1063 } | |
1064 } | |
1065 | |
1066 return true; | |
1067 } | |
1068 | |
1069 bool VaapiH264Decoder::Flush() { | |
1070 DVLOG(2) << "Decoder flush"; | 644 DVLOG(2) << "Decoder flush"; |
1071 | 645 |
1072 if (!OutputAllRemainingPics()) | 646 if (!OutputAllRemainingPics()) |
1073 return false; | 647 return false; |
1074 | 648 |
1075 ClearDPB(); | 649 ClearDPB(); |
1076 | 650 DVLOG(2) << "Decoder flush finished"; |
1077 DCHECK(decode_surfaces_in_use_.empty()); | |
1078 return true; | 651 return true; |
1079 } | 652 } |
1080 | 653 |
1081 bool VaapiH264Decoder::StartNewFrame(media::H264SliceHeader* slice_hdr) { | 654 bool H264Decoder::StartNewFrame(media::H264SliceHeader* slice_hdr) { |
1082 // TODO posciak: add handling of max_num_ref_frames per spec. | 655 // TODO posciak: add handling of max_num_ref_frames per spec. |
1083 | |
1084 // If the new frame is an IDR, output what's left to output and clear DPB | |
1085 if (slice_hdr->idr_pic_flag) { | |
1086 // (unless we are explicitly instructed not to do so). | |
1087 if (!slice_hdr->no_output_of_prior_pics_flag) { | |
1088 // Output DPB contents. | |
1089 if (!Flush()) | |
1090 return false; | |
1091 } | |
1092 dpb_.Clear(); | |
1093 last_output_poc_ = std::numeric_limits<int>::min(); | |
1094 } | |
1095 | |
1096 // curr_pic_ should have either been added to DPB or discarded when finishing | |
1097 // the last frame. DPB is responsible for releasing that memory once it's | |
1098 // not needed anymore. | |
1099 DCHECK(!curr_pic_.get()); | |
1100 curr_pic_.reset(new H264Picture); | |
1101 CHECK(curr_pic_.get()); | 656 CHECK(curr_pic_.get()); |
1102 | 657 |
1103 if (!InitCurrPicture(slice_hdr)) | 658 if (!InitCurrPicture(slice_hdr)) |
1104 return false; | 659 return false; |
1105 | 660 |
1106 DCHECK_GT(max_frame_num_, 0); | 661 DCHECK_GT(max_frame_num_, 0); |
1107 | 662 |
1108 UpdatePicNums(); | 663 UpdatePicNums(); |
| 664 DCHECK(slice_hdr); |
| 665 PrepareRefPicLists(slice_hdr); |
1109 | 666 |
1110 // Send parameter buffers before each new picture, before the first slice. | 667 const media::H264PPS* pps = parser_.GetPPS(curr_pps_id_); |
1111 if (!SendPPS()) | 668 DCHECK(pps); |
1112 return false; | 669 const media::H264SPS* sps = parser_.GetSPS(pps->seq_parameter_set_id); |
| 670 DCHECK(sps); |
1113 | 671 |
1114 if (!SendIQMatrix()) | 672 if (!accelerator_->SubmitFrameMetadata(sps, pps, dpb_, ref_pic_list_p0_, |
1115 return false; | 673 ref_pic_list_b0_, ref_pic_list_b1_, |
1116 | 674 curr_pic_.get())) |
1117 if (!QueueSlice(slice_hdr)) | |
1118 return false; | 675 return false; |
1119 | 676 |
1120 return true; | 677 return true; |
1121 } | 678 } |
1122 | 679 |
1123 bool VaapiH264Decoder::HandleMemoryManagementOps() { | 680 bool H264Decoder::HandleMemoryManagementOps() { |
1124 // 8.2.5.4 | 681 // 8.2.5.4 |
1125 for (unsigned int i = 0; i < arraysize(curr_pic_->ref_pic_marking); ++i) { | 682 for (unsigned int i = 0; i < arraysize(curr_pic_->ref_pic_marking); ++i) { |
1126 // Code below does not support interlaced stream (per-field pictures). | 683 // Code below does not support interlaced stream (per-field pictures). |
1127 media::H264DecRefPicMarking* ref_pic_marking = | 684 media::H264DecRefPicMarking* ref_pic_marking = |
1128 &curr_pic_->ref_pic_marking[i]; | 685 &curr_pic_->ref_pic_marking[i]; |
1129 H264Picture* to_mark; | 686 scoped_refptr<H264Picture> to_mark; |
1130 int pic_num_x; | 687 int pic_num_x; |
1131 | 688 |
1132 switch (ref_pic_marking->memory_mgmnt_control_operation) { | 689 switch (ref_pic_marking->memory_mgmnt_control_operation) { |
1133 case 0: | 690 case 0: |
1134 // Normal end of operations' specification. | 691 // Normal end of operations' specification. |
1135 return true; | 692 return true; |
1136 | 693 |
1137 case 1: | 694 case 1: |
1138 // Mark a short term reference picture as unused so it can be removed | 695 // Mark a short term reference picture as unused so it can be removed |
1139 // if outputted. | 696 // if outputted. |
1140 pic_num_x = curr_pic_->pic_num - | 697 pic_num_x = curr_pic_->pic_num - |
1141 (ref_pic_marking->difference_of_pic_nums_minus1 + 1); | 698 (ref_pic_marking->difference_of_pic_nums_minus1 + 1); |
1142 to_mark = dpb_.GetShortRefPicByPicNum(pic_num_x); | 699 to_mark = dpb_.GetShortRefPicByPicNum(pic_num_x); |
1143 if (to_mark) { | 700 if (to_mark) { |
1144 to_mark->ref = false; | 701 to_mark->ref = false; |
1145 } else { | 702 } else { |
1146 DVLOG(1) << "Invalid short ref pic num to unmark"; | 703 DVLOG(1) << "Invalid short ref pic num to unmark"; |
1147 return false; | 704 return false; |
1148 } | 705 } |
1149 break; | 706 break; |
1150 | 707 |
1151 case 2: | 708 case 2: |
1152 // Mark a long term reference picture as unused so it can be removed | 709 // Mark a long term reference picture as unused so it can be removed |
1153 // if outputted. | 710 // if outputted. |
1154 to_mark = dpb_.GetLongRefPicByLongTermPicNum( | 711 to_mark = dpb_.GetLongRefPicByLongTermPicNum( |
1155 ref_pic_marking->long_term_pic_num); | 712 ref_pic_marking->long_term_pic_num); |
1156 if (to_mark) { | 713 if (to_mark) { |
1157 to_mark->ref = false; | 714 to_mark->ref = false; |
1158 } else { | 715 } else { |
1159 DVLOG(1) << "Invalid long term ref pic num to unmark"; | 716 DVLOG(1) << "Invalid long term ref pic num to unmark"; |
1160 return false; | 717 return false; |
1161 } | 718 } |
1162 break; | 719 break; |
1163 | 720 |
1164 case 3: | 721 case 3: |
1165 // Mark a short term reference picture as long term reference. | 722 // Mark a short term reference picture as long term reference. |
1166 pic_num_x = curr_pic_->pic_num - | 723 pic_num_x = curr_pic_->pic_num - |
1167 (ref_pic_marking->difference_of_pic_nums_minus1 + 1); | 724 (ref_pic_marking->difference_of_pic_nums_minus1 + 1); |
1168 to_mark = dpb_.GetShortRefPicByPicNum(pic_num_x); | 725 to_mark = dpb_.GetShortRefPicByPicNum(pic_num_x); |
1169 if (to_mark) { | 726 if (to_mark) { |
1170 DCHECK(to_mark->ref && !to_mark->long_term); | 727 DCHECK(to_mark->ref && !to_mark->long_term); |
1171 to_mark->long_term = true; | 728 to_mark->long_term = true; |
1172 to_mark->long_term_frame_idx = ref_pic_marking->long_term_frame_idx; | 729 to_mark->long_term_frame_idx = ref_pic_marking->long_term_frame_idx; |
1173 } else { | 730 } else { |
1174 DVLOG(1) << "Invalid short term ref pic num to mark as long ref"; | 731 DVLOG(1) << "Invalid short term ref pic num to mark as long ref"; |
1175 return false; | 732 return false; |
1176 } | 733 } |
1177 break; | 734 break; |
1178 | 735 |
1179 case 4: { | 736 case 4: { |
1180 // Unmark all reference pictures with long_term_frame_idx over new max. | 737 // Unmark all reference pictures with long_term_frame_idx over new max. |
1181 max_long_term_frame_idx_ | 738 max_long_term_frame_idx_ = |
1182 = ref_pic_marking->max_long_term_frame_idx_plus1 - 1; | 739 ref_pic_marking->max_long_term_frame_idx_plus1 - 1; |
1183 H264Picture::PtrVector long_terms; | 740 H264Picture::Vector long_terms; |
1184 dpb_.GetLongTermRefPicsAppending(long_terms); | 741 dpb_.GetLongTermRefPicsAppending(&long_terms); |
1185 for (size_t i = 0; i < long_terms.size(); ++i) { | 742 for (size_t i = 0; i < long_terms.size(); ++i) { |
1186 H264Picture* pic = long_terms[i]; | 743 scoped_refptr<H264Picture>& pic = long_terms[i]; |
1187 DCHECK(pic->ref && pic->long_term); | 744 DCHECK(pic->ref && pic->long_term); |
1188 // Ok to cast, max_long_term_frame_idx is much smaller than 16bit. | 745 // Ok to cast, max_long_term_frame_idx is much smaller than 16bit. |
1189 if (pic->long_term_frame_idx > | 746 if (pic->long_term_frame_idx > |
1190 static_cast<int>(max_long_term_frame_idx_)) | 747 static_cast<int>(max_long_term_frame_idx_)) |
1191 pic->ref = false; | 748 pic->ref = false; |
1192 } | 749 } |
1193 break; | 750 break; |
1194 } | 751 } |
1195 | 752 |
1196 case 5: | 753 case 5: |
1197 // Unmark all reference pictures. | 754 // Unmark all reference pictures. |
1198 dpb_.MarkAllUnusedForRef(); | 755 dpb_.MarkAllUnusedForRef(); |
1199 max_long_term_frame_idx_ = -1; | 756 max_long_term_frame_idx_ = -1; |
1200 curr_pic_->mem_mgmt_5 = true; | 757 curr_pic_->mem_mgmt_5 = true; |
1201 break; | 758 break; |
1202 | 759 |
1203 case 6: { | 760 case 6: { |
1204 // Replace long term reference pictures with current picture. | 761 // Replace long term reference pictures with current picture. |
1205 // First unmark if any existing with this long_term_frame_idx... | 762 // First unmark if any existing with this long_term_frame_idx... |
1206 H264Picture::PtrVector long_terms; | 763 H264Picture::Vector long_terms; |
1207 dpb_.GetLongTermRefPicsAppending(long_terms); | 764 dpb_.GetLongTermRefPicsAppending(&long_terms); |
1208 for (size_t i = 0; i < long_terms.size(); ++i) { | 765 for (size_t i = 0; i < long_terms.size(); ++i) { |
1209 H264Picture* pic = long_terms[i]; | 766 scoped_refptr<H264Picture>& pic = long_terms[i]; |
1210 DCHECK(pic->ref && pic->long_term); | 767 DCHECK(pic->ref && pic->long_term); |
1211 // Ok to cast, long_term_frame_idx is much smaller than 16bit. | 768 // Ok to cast, long_term_frame_idx is much smaller than 16bit. |
1212 if (pic->long_term_frame_idx == | 769 if (pic->long_term_frame_idx == |
1213 static_cast<int>(ref_pic_marking->long_term_frame_idx)) | 770 static_cast<int>(ref_pic_marking->long_term_frame_idx)) |
1214 pic->ref = false; | 771 pic->ref = false; |
1215 } | 772 } |
1216 | 773 |
1217 // and mark the current one instead. | 774 // and mark the current one instead. |
1218 curr_pic_->ref = true; | 775 curr_pic_->ref = true; |
1219 curr_pic_->long_term = true; | 776 curr_pic_->long_term = true; |
1220 curr_pic_->long_term_frame_idx = ref_pic_marking->long_term_frame_idx; | 777 curr_pic_->long_term_frame_idx = ref_pic_marking->long_term_frame_idx; |
1221 break; | 778 break; |
1222 } | 779 } |
1223 | 780 |
1224 default: | 781 default: |
1225 // Would indicate a bug in parser. | 782 // Would indicate a bug in parser. |
1226 NOTREACHED(); | 783 NOTREACHED(); |
1227 } | 784 } |
1228 } | 785 } |
1229 | 786 |
1230 return true; | 787 return true; |
1231 } | 788 } |
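For readers less familiar with H.264 reference marking: memory_management_control_operation 1 above derives pic_num_x from the current picture and unmarks the matching short-term reference (spec 8.2.5.4.1). The following is a minimal, self-contained sketch of that single operation, assuming a simplified Picture struct and a plain std::vector in place of H264DPB; the decoder itself goes through dpb_.GetShortRefPicByPicNum() as shown in the diff.

#include <vector>

struct Picture {
  int pic_num = 0;
  bool ref = false;
  bool long_term = false;
};

// Mirrors MMCO op 1: pic_num_x = curr_pic_num - (difference_of_pic_nums_minus1 + 1),
// then the short-term reference with that pic_num is marked unused for reference.
bool UnmarkShortTermByDifference(std::vector<Picture>* dpb,
                                 int curr_pic_num,
                                 int difference_of_pic_nums_minus1) {
  const int pic_num_x = curr_pic_num - (difference_of_pic_nums_minus1 + 1);
  for (Picture& pic : *dpb) {
    if (pic.ref && !pic.long_term && pic.pic_num == pic_num_x) {
      pic.ref = false;  // Removable from the DPB once it has been output.
      return true;
    }
  }
  return false;  // Referenced picture not in the DPB; treated as an error above.
}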
1232 | 789 |
1233 // This method ensures that DPB does not overflow, either by removing | 790 // This method ensures that DPB does not overflow, either by removing |
1234 // reference pictures as specified in the stream, or by using a sliding window | 791 // reference pictures as specified in the stream, or by using a sliding window |
1235 // procedure to remove the oldest one. | 792 // procedure to remove the oldest one. |
1236 // It also marks and unmarks pictures as reference. | 793 // It also marks and unmarks pictures as reference. |
1237 // See spec 8.2.5.1. | 794 // See spec 8.2.5.1. |
1238 void VaapiH264Decoder::ReferencePictureMarking() { | 795 void H264Decoder::ReferencePictureMarking() { |
1239 if (curr_pic_->idr) { | 796 if (curr_pic_->idr) { |
1240 // If current picture is an IDR, all reference pictures are unmarked. | 797 // If current picture is an IDR, all reference pictures are unmarked. |
1241 dpb_.MarkAllUnusedForRef(); | 798 dpb_.MarkAllUnusedForRef(); |
1242 | 799 |
1243 if (curr_pic_->long_term_reference_flag) { | 800 if (curr_pic_->long_term_reference_flag) { |
1244 curr_pic_->long_term = true; | 801 curr_pic_->long_term = true; |
1245 curr_pic_->long_term_frame_idx = 0; | 802 curr_pic_->long_term_frame_idx = 0; |
1246 max_long_term_frame_idx_ = 0; | 803 max_long_term_frame_idx_ = 0; |
1247 } else { | 804 } else { |
1248 curr_pic_->long_term = false; | 805 curr_pic_->long_term = false; |
1249 max_long_term_frame_idx_ = -1; | 806 max_long_term_frame_idx_ = -1; |
1250 } | 807 } |
1251 } else { | 808 } else { |
1252 if (!curr_pic_->adaptive_ref_pic_marking_mode_flag) { | 809 if (!curr_pic_->adaptive_ref_pic_marking_mode_flag) { |
1253 // If non-IDR, and the stream does not indicate what we should do to | 810 // If non-IDR, and the stream does not indicate what we should do to |
1254 // ensure DPB doesn't overflow, discard oldest picture. | 811 // ensure DPB doesn't overflow, discard oldest picture. |
1255 // See spec 8.2.5.3. | 812 // See spec 8.2.5.3. |
1256 if (curr_pic_->field == H264Picture::FIELD_NONE) { | 813 if (curr_pic_->field == H264Picture::FIELD_NONE) { |
1257 DCHECK_LE(dpb_.CountRefPics(), | 814 DCHECK_LE( |
1258 std::max<int>(parser_.GetSPS(curr_sps_id_)->max_num_ref_frames, | 815 dpb_.CountRefPics(), |
1259 1)); | 816 std::max<int>(parser_.GetSPS(curr_sps_id_)->max_num_ref_frames, 1)); |
1260 if (dpb_.CountRefPics() == | 817 if (dpb_.CountRefPics() == |
1261 std::max<int>(parser_.GetSPS(curr_sps_id_)->max_num_ref_frames, | 818 std::max<int>(parser_.GetSPS(curr_sps_id_)->max_num_ref_frames, |
1262 1)) { | 819 1)) { |
1263 // Max number of reference pics reached, | 820 // Max number of reference pics reached, |
1264 // need to remove one of the short term ones. | 821 // need to remove one of the short term ones. |
1265 // Find smallest frame_num_wrap short reference picture and mark | 822 // Find smallest frame_num_wrap short reference picture and mark |
1266 // it as unused. | 823 // it as unused. |
1267 H264Picture* to_unmark = dpb_.GetLowestFrameNumWrapShortRefPic(); | 824 scoped_refptr<H264Picture> to_unmark = |
| 825 dpb_.GetLowestFrameNumWrapShortRefPic(); |
1268 if (to_unmark == NULL) { | 826 if (to_unmark == NULL) { |
1269 DVLOG(1) << "Couldn't find a short ref picture to unmark"; | 827 DVLOG(1) << "Couldn't find a short ref picture to unmark"; |
1270 return; | 828 return; |
1271 } | 829 } |
1272 to_unmark->ref = false; | 830 to_unmark->ref = false; |
1273 } | 831 } |
1274 } else { | 832 } else { |
1275 // Shouldn't get here. | 833 // Shouldn't get here. |
1276 DVLOG(1) << "Interlaced video not supported."; | 834 DVLOG(1) << "Interlaced video not supported."; |
1277 report_error_to_uma_cb_.Run(INTERLACED_STREAM); | |
1278 } | 835 } |
1279 } else { | 836 } else { |
1280 // Stream has instructions on how to discard pictures from the DPB and how | 837 // Stream has instructions on how to discard pictures from the DPB and how |
1281 // to mark/unmark existing reference pictures. Do it. | 838 // to mark/unmark existing reference pictures. Do it. |
1282 // Spec 8.2.5.4. | 839 // Spec 8.2.5.4. |
1283 if (curr_pic_->field == H264Picture::FIELD_NONE) { | 840 if (curr_pic_->field == H264Picture::FIELD_NONE) { |
1284 HandleMemoryManagementOps(); | 841 HandleMemoryManagementOps(); |
1285 } else { | 842 } else { |
1286 // Shouldn't get here. | 843 // Shouldn't get here. |
1287 DVLOG(1) << "Interlaced video not supported."; | 844 DVLOG(1) << "Interlaced video not supported."; |
1288 report_error_to_uma_cb_.Run(INTERLACED_STREAM); | |
1289 } | 845 } |
1290 } | 846 } |
1291 } | 847 } |
1292 } | 848 } |
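The non-adaptive branch above implements sliding-window marking (spec 8.2.5.3): once the DPB already holds max(max_num_ref_frames, 1) references, the short-term reference with the smallest frame_num_wrap is unmarked. A standalone sketch of that eviction, assuming a simplified picture type rather than H264Picture/H264DPB:

#include <algorithm>
#include <vector>

struct ShortRefPic {
  int frame_num_wrap = 0;
  bool ref = false;
  bool long_term = false;
};

void SlidingWindowEvict(std::vector<ShortRefPic>* dpb, int max_num_ref_frames) {
  int ref_count = 0;
  for (const ShortRefPic& pic : *dpb)
    ref_count += pic.ref ? 1 : 0;

  // Nothing to do until storing the current picture would exceed the limit.
  if (ref_count < std::max(max_num_ref_frames, 1))
    return;

  ShortRefPic* oldest = nullptr;
  for (ShortRefPic& pic : *dpb) {
    if (pic.ref && !pic.long_term &&
        (!oldest || pic.frame_num_wrap < oldest->frame_num_wrap))
      oldest = &pic;
  }
  if (oldest)
    oldest->ref = false;  // The oldest short-term reference falls out of the window.
}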
1293 | 849 |
1294 bool VaapiH264Decoder::FinishPicture() { | 850 bool H264Decoder::FinishPicture() { |
1295 DCHECK(curr_pic_.get()); | 851 DCHECK(curr_pic_.get()); |
1296 | 852 |
1297 // Finish processing previous picture. | 853 // Finish processing previous picture. |
1298 // Start by storing previous reference picture data for later use, | 854 // Start by storing previous reference picture data for later use, |
1299 // if picture being finished is a reference picture. | 855 // if picture being finished is a reference picture. |
1300 if (curr_pic_->ref) { | 856 if (curr_pic_->ref) { |
1301 ReferencePictureMarking(); | 857 ReferencePictureMarking(); |
1302 prev_ref_has_memmgmnt5_ = curr_pic_->mem_mgmt_5; | 858 prev_ref_has_memmgmnt5_ = curr_pic_->mem_mgmt_5; |
1303 prev_ref_top_field_order_cnt_ = curr_pic_->top_field_order_cnt; | 859 prev_ref_top_field_order_cnt_ = curr_pic_->top_field_order_cnt; |
1304 prev_ref_pic_order_cnt_msb_ = curr_pic_->pic_order_cnt_msb; | 860 prev_ref_pic_order_cnt_msb_ = curr_pic_->pic_order_cnt_msb; |
1305 prev_ref_pic_order_cnt_lsb_ = curr_pic_->pic_order_cnt_lsb; | 861 prev_ref_pic_order_cnt_lsb_ = curr_pic_->pic_order_cnt_lsb; |
1306 prev_ref_field_ = curr_pic_->field; | 862 prev_ref_field_ = curr_pic_->field; |
1307 } | 863 } |
1308 prev_has_memmgmnt5_ = curr_pic_->mem_mgmt_5; | 864 prev_has_memmgmnt5_ = curr_pic_->mem_mgmt_5; |
1309 prev_frame_num_offset_ = curr_pic_->frame_num_offset; | 865 prev_frame_num_offset_ = curr_pic_->frame_num_offset; |
1310 | 866 |
1311 // Remove unused (for reference or later output) pictures from DPB, marking | 867 // Remove unused (for reference or later output) pictures from DPB, marking |
1312 // them as such. | 868 // them as such. |
1313 for (H264DPB::Pictures::iterator it = dpb_.begin(); it != dpb_.end(); ++it) { | |
1314 if ((*it)->outputted && !(*it)->ref) | |
1315 UnassignSurfaceFromPoC((*it)->pic_order_cnt); | |
1316 } | |
1317 dpb_.DeleteUnused(); | 869 dpb_.DeleteUnused(); |
1318 | 870 |
1319 DVLOG(4) << "Finishing picture, entries in DPB: " << dpb_.size(); | 871 DVLOG(4) << "Finishing picture, entries in DPB: " << dpb_.size(); |
1320 | 872 |
1321 // Whatever happens below, curr_pic_ will stop managing the pointer to the | 873 // Whatever happens below, curr_pic_ will stop managing the pointer to the |
1322 // picture after this function returns. The ownership will either be | 874 // picture after this. The ownership will either be transferred to DPB, if |
1323 // transferred to DPB, if the image is still needed (for output and/or | 875 // the image is still needed (for output and/or reference), or the memory |
1324 // reference), or the memory will be released if we manage to output it here | 876 // will be released if we manage to output it here without having to store |
1325 // without having to store it for future reference. | 877 // it for future reference. |
1326 scoped_ptr<H264Picture> pic(curr_pic_.release()); | 878 scoped_refptr<H264Picture> pic = curr_pic_; |
| 879 curr_pic_ = nullptr; |
1327 | 880 |
1328 // Get all pictures that haven't been outputted yet. | 881 // Get all pictures that haven't been outputted yet. |
1329 H264Picture::PtrVector not_outputted; | 882 H264Picture::Vector not_outputted; |
1330 // TODO(posciak): pass as pointer, not reference (violates coding style). | 883 dpb_.GetNotOutputtedPicsAppending(¬_outputted); |
1331 dpb_.GetNotOutputtedPicsAppending(not_outputted); | |
1332 // Include the one we've just decoded. | 884 // Include the one we've just decoded. |
1333 not_outputted.push_back(pic.get()); | 885 not_outputted.push_back(pic); |
1334 | 886 |
1335 // Sort in output order. | 887 // Sort in output order. |
1336 std::sort(not_outputted.begin(), not_outputted.end(), POCAscCompare()); | 888 std::sort(not_outputted.begin(), not_outputted.end(), POCAscCompare()); |
1337 | 889 |
1338 // Try to output as many pictures as we can. A picture can be output, | 890 // Try to output as many pictures as we can. A picture can be output, |
1339 // if the number of decoded and not yet outputted pictures that would remain | 891 // if the number of decoded and not yet outputted pictures that would remain |
1340 // in DPB afterwards would at least be equal to max_num_reorder_frames. | 892 // in DPB afterwards would at least be equal to max_num_reorder_frames. |
1341 // If the outputted picture is not a reference picture, it doesn't have | 893 // If the outputted picture is not a reference picture, it doesn't have |
1342 // to remain in the DPB and can be removed. | 894 // to remain in the DPB and can be removed. |
1343 H264Picture::PtrVector::iterator output_candidate = not_outputted.begin(); | 895 H264Picture::Vector::iterator output_candidate = not_outputted.begin(); |
1344 size_t num_remaining = not_outputted.size(); | 896 size_t num_remaining = not_outputted.size(); |
1345 while (num_remaining > max_num_reorder_frames_) { | 897 while (num_remaining > max_num_reorder_frames_) { |
1346 int poc = (*output_candidate)->pic_order_cnt; | 898 int poc = (*output_candidate)->pic_order_cnt; |
1347 DCHECK_GE(poc, last_output_poc_); | 899 DCHECK_GE(poc, last_output_poc_); |
1348 if (!OutputPic(*output_candidate)) | 900 OutputPic(*output_candidate); |
1349 return false; | |
1350 | 901 |
1351 if (!(*output_candidate)->ref) { | 902 if (!(*output_candidate)->ref) { |
1352 // Current picture hasn't been inserted into DPB yet, so don't remove it | 903 // Current picture hasn't been inserted into DPB yet, so don't remove it |
1353 // if we managed to output it immediately. | 904 // if we managed to output it immediately. |
1354 if (*output_candidate != pic) | 905 if ((*output_candidate)->pic_order_cnt != pic->pic_order_cnt) |
1355 dpb_.DeleteByPOC(poc); | 906 dpb_.DeleteByPOC(poc); |
1356 // Mark as unused. | |
1357 UnassignSurfaceFromPoC(poc); | |
1358 } | 907 } |
1359 | 908 |
1360 ++output_candidate; | 909 ++output_candidate; |
1361 --num_remaining; | 910 --num_remaining; |
1362 } | 911 } |
1363 | 912 |
1364 // If we haven't managed to output the picture that we just decoded, or if | 913 // If we haven't managed to output the picture that we just decoded, or if |
1365 // it's a reference picture, we have to store it in DPB. | 914 // it's a reference picture, we have to store it in DPB. |
1366 if (!pic->outputted || pic->ref) { | 915 if (!pic->outputted || pic->ref) { |
1367 if (dpb_.IsFull()) { | 916 if (dpb_.IsFull()) { |
1368 // If we haven't managed to output anything to free up space in DPB | 917 // If we haven't managed to output anything to free up space in DPB |
1369 // to store this picture, it's an error in the stream. | 918 // to store this picture, it's an error in the stream. |
1370 DVLOG(1) << "Could not free up space in DPB!"; | 919 DVLOG(1) << "Could not free up space in DPB!"; |
1371 return false; | 920 return false; |
1372 } | 921 } |
1373 | 922 |
1374 dpb_.StorePic(pic.release()); | 923 dpb_.StorePic(pic); |
1375 } | 924 } |
1376 | 925 |
1377 return true; | 926 return true; |
1378 } | 927 } |
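The output step in FinishPicture() reduces to: sort everything not yet output by POC ascending, then output from the front while more than max_num_reorder_frames pictures would still remain for reordering. A rough sketch under those assumptions, with printf standing in for OutputPic() and the reference-picture and DPB bookkeeping omitted:

#include <algorithm>
#include <cstdio>
#include <vector>

struct PendingPic {
  int pic_order_cnt = 0;
  bool outputted = false;
};

void BumpPictures(std::vector<PendingPic>* not_outputted,
                  size_t max_num_reorder_frames) {
  std::sort(not_outputted->begin(), not_outputted->end(),
            [](const PendingPic& a, const PendingPic& b) {
              return a.pic_order_cnt < b.pic_order_cnt;  // As POCAscCompare does.
            });

  size_t num_remaining = not_outputted->size();
  for (PendingPic& pic : *not_outputted) {
    if (num_remaining <= max_num_reorder_frames)
      break;  // Keep at least max_num_reorder_frames pictures for reordering.
    std::printf("output POC %d\n", pic.pic_order_cnt);
    pic.outputted = true;
    --num_remaining;
  }
}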
1379 | 928 |
1380 static int LevelToMaxDpbMbs(int level) { | 929 static int LevelToMaxDpbMbs(int level) { |
1381 // See table A-1 in spec. | 930 // See table A-1 in spec. |
1382 switch (level) { | 931 switch (level) { |
1383 case 10: return 396; | 932 case 10: return 396; |
1384 case 11: return 900; | 933 case 11: return 900; |
(...skipping 10 matching lines...)
1395 case 42: return 34816; | 944 case 42: return 34816; |
1396 case 50: return 110400; | 945 case 50: return 110400; |
1397 case 51: // fallthrough | 946 case 51: // fallthrough |
1398 case 52: return 184320; | 947 case 52: return 184320; |
1399 default: | 948 default: |
1400 DVLOG(1) << "Invalid codec level (" << level << ")"; | 949 DVLOG(1) << "Invalid codec level (" << level << ")"; |
1401 return 0; | 950 return 0; |
1402 } | 951 } |
1403 } | 952 } |
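LevelToMaxDpbMbs() feeds the DPB size computed in ProcessSPS() (in the lines elided further below); per spec A.3.1 that is usually min(MaxDpbMbs / (width_mb * height_mb), 16). A hedged illustration of that derivation, not the decoder's literal code:

#include <algorithm>

// Assumes the standard spec A.3.1 derivation; the decoder's exact clamping
// lives in the elided part of ProcessSPS().
int MaxDpbFramesFromLevel(int max_dpb_mbs, int width_mb, int height_mb) {
  if (max_dpb_mbs <= 0 || width_mb <= 0 || height_mb <= 0)
    return 0;  // Unknown level or invalid picture size.
  return std::min(max_dpb_mbs / (width_mb * height_mb), 16);
}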
1404 | 953 |
1405 bool VaapiH264Decoder::UpdateMaxNumReorderFrames(const media::H264SPS* sps) { | 954 bool H264Decoder::UpdateMaxNumReorderFrames(const media::H264SPS* sps) { |
1406 if (sps->vui_parameters_present_flag && sps->bitstream_restriction_flag) { | 955 if (sps->vui_parameters_present_flag && sps->bitstream_restriction_flag) { |
1407 max_num_reorder_frames_ = | 956 max_num_reorder_frames_ = |
1408 base::checked_cast<size_t>(sps->max_num_reorder_frames); | 957 base::checked_cast<size_t>(sps->max_num_reorder_frames); |
1409 if (max_num_reorder_frames_ > dpb_.max_num_pics()) { | 958 if (max_num_reorder_frames_ > dpb_.max_num_pics()) { |
1410 DVLOG(1) | 959 DVLOG(1) |
1411 << "max_num_reorder_frames present, but larger than MaxDpbFrames (" | 960 << "max_num_reorder_frames present, but larger than MaxDpbFrames (" |
1412 << max_num_reorder_frames_ << " > " << dpb_.max_num_pics() << ")"; | 961 << max_num_reorder_frames_ << " > " << dpb_.max_num_pics() << ")"; |
1413 max_num_reorder_frames_ = 0; | 962 max_num_reorder_frames_ = 0; |
1414 return false; | 963 return false; |
1415 } | 964 } |
(...skipping 16 matching lines...)
1432 max_num_reorder_frames_ = dpb_.max_num_pics(); | 981 max_num_reorder_frames_ = dpb_.max_num_pics(); |
1433 break; | 982 break; |
1434 } | 983 } |
1435 } else { | 984 } else { |
1436 max_num_reorder_frames_ = dpb_.max_num_pics(); | 985 max_num_reorder_frames_ = dpb_.max_num_pics(); |
1437 } | 986 } |
1438 | 987 |
1439 return true; | 988 return true; |
1440 } | 989 } |
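When no VUI bitstream restriction is present, the tail of UpdateMaxNumReorderFrames() above falls back to the conservative default of allowing reordering across the whole DPB (the profile-specific special cases live in the elided lines). A compact sketch of that policy, with parameter names chosen here purely for illustration; note the real code treats an oversized stream value as an error rather than clamping it:

#include <cstddef>

size_t PickMaxNumReorderFrames(bool vui_present,
                               bool bitstream_restriction,
                               size_t vui_max_num_reorder_frames,
                               size_t max_dpb_frames) {
  if (vui_present && bitstream_restriction &&
      vui_max_num_reorder_frames <= max_dpb_frames)
    return vui_max_num_reorder_frames;  // Stream-provided reorder bound.
  return max_dpb_frames;  // Worst case: any picture may be reordered.
}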
1441 | 990 |
1442 bool VaapiH264Decoder::ProcessSPS(int sps_id, bool* need_new_buffers) { | 991 bool H264Decoder::ProcessSPS(int sps_id, bool* need_new_buffers) { |
1443 const media::H264SPS* sps = parser_.GetSPS(sps_id); | 992 const media::H264SPS* sps = parser_.GetSPS(sps_id); |
1444 DCHECK(sps); | 993 DCHECK(sps); |
1445 DVLOG(4) << "Processing SPS"; | 994 DVLOG(4) << "Processing SPS"; |
1446 | 995 |
1447 *need_new_buffers = false; | 996 *need_new_buffers = false; |
1448 | 997 |
1449 if (sps->frame_mbs_only_flag == 0) { | 998 if (sps->frame_mbs_only_flag == 0) { |
1450 DVLOG(1) << "frame_mbs_only_flag != 1 not supported"; | 999 DVLOG(1) << "frame_mbs_only_flag != 1 not supported"; |
1451 report_error_to_uma_cb_.Run(FRAME_MBS_ONLY_FLAG_NOT_ONE); | |
1452 return false; | 1000 return false; |
1453 } | 1001 } |
1454 | 1002 |
1455 if (sps->gaps_in_frame_num_value_allowed_flag) { | 1003 if (sps->gaps_in_frame_num_value_allowed_flag) { |
1456 DVLOG(1) << "Gaps in frame numbers not supported"; | 1004 DVLOG(1) << "Gaps in frame numbers not supported"; |
1457 report_error_to_uma_cb_.Run(GAPS_IN_FRAME_NUM); | |
1458 return false; | 1005 return false; |
1459 } | 1006 } |
1460 | 1007 |
1461 curr_sps_id_ = sps->seq_parameter_set_id; | 1008 curr_sps_id_ = sps->seq_parameter_set_id; |
1462 | 1009 |
1463 // Calculate picture height/width in macroblocks and pixels | 1010 // Calculate picture height/width in macroblocks and pixels |
1464 // (spec 7.4.2.1.1, 7.4.3). | 1011 // (spec 7.4.2.1.1, 7.4.3). |
1465 int width_mb = sps->pic_width_in_mbs_minus1 + 1; | 1012 int width_mb = sps->pic_width_in_mbs_minus1 + 1; |
1466 int height_mb = (2 - sps->frame_mbs_only_flag) * | 1013 int height_mb = (2 - sps->frame_mbs_only_flag) * |
1467 (sps->pic_height_in_map_units_minus1 + 1); | 1014 (sps->pic_height_in_map_units_minus1 + 1); |
1468 | 1015 |
1469 gfx::Size new_pic_size(16 * width_mb, 16 * height_mb); | 1016 gfx::Size new_pic_size(16 * width_mb, 16 * height_mb); |
1470 if (new_pic_size.IsEmpty()) { | 1017 if (new_pic_size.IsEmpty()) { |
1471 DVLOG(1) << "Invalid picture size: " << new_pic_size.ToString(); | 1018 DVLOG(1) << "Invalid picture size: " << new_pic_size.ToString(); |
1472 return false; | 1019 return false; |
1473 } | 1020 } |
1474 | 1021 |
1475 if (!pic_size_.IsEmpty() && new_pic_size == pic_size_) { | 1022 if (!pic_size_.IsEmpty() && new_pic_size == pic_size_) { |
1476 // Already have surfaces and this SPS keeps the same resolution, | 1023 // Already have surfaces and this SPS keeps the same resolution, |
1477 // no need to request a new set. | 1024 // no need to request a new set. |
(...skipping 22 matching lines...)
1500 dpb_.set_max_num_pics(max_dpb_size); | 1047 dpb_.set_max_num_pics(max_dpb_size); |
1501 | 1048 |
1502 if (!UpdateMaxNumReorderFrames(sps)) | 1049 if (!UpdateMaxNumReorderFrames(sps)) |
1503 return false; | 1050 return false; |
1504 DVLOG(1) << "max_num_reorder_frames: " << max_num_reorder_frames_; | 1051 DVLOG(1) << "max_num_reorder_frames: " << max_num_reorder_frames_; |
1505 | 1052 |
1506 *need_new_buffers = true; | 1053 *need_new_buffers = true; |
1507 return true; | 1054 return true; |
1508 } | 1055 } |
1509 | 1056 |
1510 bool VaapiH264Decoder::ProcessPPS(int pps_id) { | 1057 bool H264Decoder::ProcessPPS(int pps_id) { |
1511 const media::H264PPS* pps = parser_.GetPPS(pps_id); | 1058 const media::H264PPS* pps = parser_.GetPPS(pps_id); |
1512 DCHECK(pps); | 1059 DCHECK(pps); |
1513 | 1060 |
1514 curr_pps_id_ = pps->pic_parameter_set_id; | 1061 curr_pps_id_ = pps->pic_parameter_set_id; |
1515 | 1062 |
1516 return true; | 1063 return true; |
1517 } | 1064 } |
1518 | 1065 |
1519 bool VaapiH264Decoder::FinishPrevFrameIfPresent() { | 1066 bool H264Decoder::FinishPrevFrameIfPresent() { |
1520 // If we already have a frame waiting to be decoded, decode it and finish. | 1067 // If we already have a frame waiting to be decoded, decode it and finish. |
1521 if (curr_pic_ != NULL) { | 1068 if (curr_pic_ != NULL) { |
1522 if (!DecodePicture()) | 1069 if (!DecodePicture()) |
1523 return false; | 1070 return false; |
1524 return FinishPicture(); | 1071 return FinishPicture(); |
1525 } | 1072 } |
1526 | 1073 |
1527 return true; | 1074 return true; |
1528 } | 1075 } |
1529 | 1076 |
1530 bool VaapiH264Decoder::ProcessSlice(media::H264SliceHeader* slice_hdr) { | 1077 bool H264Decoder::PreprocessSlice(media::H264SliceHeader* slice_hdr) { |
1531 prev_frame_num_ = frame_num_; | 1078 prev_frame_num_ = frame_num_; |
1532 frame_num_ = slice_hdr->frame_num; | 1079 frame_num_ = slice_hdr->frame_num; |
1533 | 1080 |
1534 if (prev_frame_num_ > 0 && prev_frame_num_ < frame_num_ - 1) { | 1081 if (prev_frame_num_ > 0 && prev_frame_num_ < frame_num_ - 1) { |
1535 DVLOG(1) << "Gap in frame_num!"; | 1082 DVLOG(1) << "Gap in frame_num!"; |
1536 report_error_to_uma_cb_.Run(GAPS_IN_FRAME_NUM); | |
1537 return false; | 1083 return false; |
1538 } | 1084 } |
1539 | 1085 |
1540 if (slice_hdr->field_pic_flag == 0) | 1086 if (slice_hdr->field_pic_flag == 0) |
1541 max_pic_num_ = max_frame_num_; | 1087 max_pic_num_ = max_frame_num_; |
1542 else | 1088 else |
1543 max_pic_num_ = 2 * max_frame_num_; | 1089 max_pic_num_ = 2 * max_frame_num_; |
1544 | 1090 |
1545 // TODO(posciak): switch to new picture detection per 7.4.1.2.4. | 1091 // TODO(posciak): switch to new picture detection per 7.4.1.2.4. |
1546 if (curr_pic_ != NULL && slice_hdr->first_mb_in_slice != 0) { | 1092 if (curr_pic_ != NULL && slice_hdr->first_mb_in_slice != 0) { |
1547 // This is just some more slice data of the current picture, so | 1093 // More slice data of the current picture. |
1548 // just queue it and return. | |
1549 QueueSlice(slice_hdr); | |
1550 return true; | 1094 return true; |
1551 } else { | 1095 } else { |
1552 // A new frame, so first finish the previous one before processing it... | 1096 // A new frame, so first finish the previous one before processing it... |
1553 if (!FinishPrevFrameIfPresent()) | 1097 if (!FinishPrevFrameIfPresent()) |
1554 return false; | 1098 return false; |
| 1099 } |
1555 | 1100 |
1556 // and then start a new one. | 1101 // If the new frame is an IDR, output what's left to output and clear DPB |
1557 return StartNewFrame(slice_hdr); | 1102 if (slice_hdr->idr_pic_flag) { |
| 1103 // (unless we are explicitly instructed not to do so). |
| 1104 if (!slice_hdr->no_output_of_prior_pics_flag) { |
| 1105 // Output DPB contents. |
| 1106 if (!Flush()) |
| 1107 return false; |
| 1108 } |
| 1109 dpb_.Clear(); |
| 1110 last_output_poc_ = std::numeric_limits<int>::min(); |
1558 } | 1111 } |
| 1112 |
| 1113 return true; |
1559 } | 1114 } |
1560 | 1115 |
1561 #define SET_ERROR_AND_RETURN() \ | 1116 bool H264Decoder::ProcessSlice(media::H264SliceHeader* slice_hdr) { |
1562 do { \ | 1117 DCHECK(curr_pic_.get()); |
1563 DVLOG(1) << "Error during decode"; \ | 1118 H264Picture::Vector ref_pic_list0, ref_pic_list1; |
1564 state_ = kError; \ | 1119 |
1565 return VaapiH264Decoder::kDecodeError; \ | 1120 if (!ModifyReferencePicLists(slice_hdr, &ref_pic_list0, &ref_pic_list1)) |
| 1121 return false; |
| 1122 |
| 1123 const media::H264PPS* pps = parser_.GetPPS(slice_hdr->pic_parameter_set_id); |
| 1124 DCHECK(pps); |
| 1125 |
| 1126 if (!accelerator_->SubmitSlice(pps, slice_hdr, ref_pic_list0, ref_pic_list1, |
| 1127 curr_pic_.get(), slice_hdr->nalu_data, |
| 1128 slice_hdr->nalu_size)) |
| 1129 return false; |
| 1130 |
| 1131 curr_slice_hdr_.reset(); |
| 1132 return true; |
| 1133 } |
| 1134 |
| 1135 #define SET_ERROR_AND_RETURN() \ |
| 1136 do { \ |
| 1137 DVLOG(1) << "Error during decode"; \ |
| 1138 state_ = kError; \ |
| 1139 return H264Decoder::kDecodeError; \ |
1566 } while (0) | 1140 } while (0) |
1567 | 1141 |
1568 void VaapiH264Decoder::SetStream(const uint8* ptr, | 1142 void H264Decoder::SetStream(const uint8_t* ptr, size_t size) { |
1569 size_t size, | |
1570 int32 input_id) { | |
1571 DCHECK(ptr); | 1143 DCHECK(ptr); |
1572 DCHECK(size); | 1144 DCHECK(size); |
1573 | 1145 |
1574 // Got new input stream data from the client. | 1146 DVLOG(4) << "New input stream at: " << (void*)ptr << " size: " << size; |
1575 DVLOG(4) << "New input stream id: " << input_id << " at: " << (void*) ptr | |
1576 << " size: " << size; | |
1577 parser_.SetStream(ptr, size); | 1147 parser_.SetStream(ptr, size); |
1578 curr_input_id_ = input_id; | |
1579 } | 1148 } |
1580 | 1149 |
1581 VaapiH264Decoder::DecResult VaapiH264Decoder::Decode() { | 1150 H264Decoder::DecodeResult H264Decoder::Decode() { |
1582 media::H264Parser::Result par_res; | |
1583 media::H264NALU nalu; | |
1584 DCHECK_NE(state_, kError); | 1151 DCHECK_NE(state_, kError); |
1585 | 1152 |
1586 while (1) { | 1153 while (1) { |
1587 // If we've already decoded some of the stream (after reset, i.e. we are | 1154 media::H264Parser::Result par_res; |
1588 // not in kNeedStreamMetadata state), we may be able to go back into | 1155 |
1589 // decoding state not only starting at/resuming from an SPS, but also from | 1156 if (!curr_nalu_) { |
1590 // other resume points, such as IDRs. In the latter case we need an output | 1157 curr_nalu_.reset(new media::H264NALU()); |
1591 // surface, because we will end up decoding that IDR in the process. | 1158 par_res = parser_.AdvanceToNextNALU(curr_nalu_.get()); |
1592 // Otherwise we just look for an SPS and don't produce any output frames. | 1159 if (par_res == media::H264Parser::kEOStream) |
1593 if (state_ != kNeedStreamMetadata && available_va_surfaces_.empty()) { | 1160 return kRanOutOfStreamData; |
1594 DVLOG(4) << "No output surfaces available"; | 1161 else if (par_res != media::H264Parser::kOk) |
1595 return kRanOutOfSurfaces; | 1162 SET_ERROR_AND_RETURN(); |
1596 } | 1163 } |
1597 | 1164 |
1598 par_res = parser_.AdvanceToNextNALU(&nalu); | 1165 DVLOG(4) << "NALU found: " << static_cast<int>(curr_nalu_->nal_unit_type); |
1599 if (par_res == media::H264Parser::kEOStream) | |
1600 return kRanOutOfStreamData; | |
1601 else if (par_res != media::H264Parser::kOk) | |
1602 SET_ERROR_AND_RETURN(); | |
1603 | 1166 |
1604 DVLOG(4) << "NALU found: " << static_cast<int>(nalu.nal_unit_type); | 1167 switch (curr_nalu_->nal_unit_type) { |
1605 | |
1606 switch (nalu.nal_unit_type) { | |
1607 case media::H264NALU::kNonIDRSlice: | 1168 case media::H264NALU::kNonIDRSlice: |
1608 // We can't resume from a non-IDR slice. | 1169 // We can't resume from a non-IDR slice. |
1609 if (state_ != kDecoding) | 1170 if (state_ != kDecoding) |
1610 break; | 1171 break; |
1611 // else fallthrough | 1172 // else fallthrough |
1612 case media::H264NALU::kIDRSlice: { | 1173 case media::H264NALU::kIDRSlice: { |
1613 // TODO(posciak): the IDR may require an SPS that we don't have | 1174 // TODO(posciak): the IDR may require an SPS that we don't have |
1614 // available. For now we'd fail if that happens, but ideally we'd like | 1175 // available. For now we'd fail if that happens, but ideally we'd like |
1615 // to keep going until the next SPS in the stream. | 1176 // to keep going until the next SPS in the stream. |
1616 if (state_ == kNeedStreamMetadata) { | 1177 if (state_ == kNeedStreamMetadata) { |
1617 // We need an SPS, skip this IDR and keep looking. | 1178 // We need an SPS, skip this IDR and keep looking. |
1618 break; | 1179 break; |
1619 } | 1180 } |
1620 | 1181 |
1621 // If after reset, we should be able to recover from an IDR. | 1182 // If after reset, we should be able to recover from an IDR. |
1622 media::H264SliceHeader slice_hdr; | 1183 if (!curr_slice_hdr_) { |
| 1184 curr_slice_hdr_.reset(new media::H264SliceHeader()); |
| 1185 par_res = |
| 1186 parser_.ParseSliceHeader(*curr_nalu_, curr_slice_hdr_.get()); |
| 1187 if (par_res != media::H264Parser::kOk) |
| 1188 SET_ERROR_AND_RETURN(); |
1623 | 1189 |
1624 par_res = parser_.ParseSliceHeader(nalu, &slice_hdr); | 1190 if (!PreprocessSlice(curr_slice_hdr_.get())) |
1625 if (par_res != media::H264Parser::kOk) | 1191 SET_ERROR_AND_RETURN(); |
1626 SET_ERROR_AND_RETURN(); | 1192 } |
1627 | 1193 |
1628 if (!ProcessSlice(&slice_hdr)) | 1194 if (!curr_pic_) { |
| 1195 // New picture/finished previous one, try to start a new one |
| 1196 // or tell the client we need more surfaces. |
| 1197 curr_pic_ = accelerator_->CreateH264Picture(); |
| 1198 if (!curr_pic_) |
| 1199 return kRanOutOfSurfaces; |
| 1200 |
| 1201 if (!StartNewFrame(curr_slice_hdr_.get())) |
| 1202 SET_ERROR_AND_RETURN(); |
| 1203 } |
| 1204 |
| 1205 if (!ProcessSlice(curr_slice_hdr_.get())) |
1629 SET_ERROR_AND_RETURN(); | 1206 SET_ERROR_AND_RETURN(); |
1630 | 1207 |
1631 state_ = kDecoding; | 1208 state_ = kDecoding; |
1632 break; | 1209 break; |
1633 } | 1210 } |
1634 | 1211 |
1635 case media::H264NALU::kSPS: { | 1212 case media::H264NALU::kSPS: { |
1636 int sps_id; | 1213 int sps_id; |
1637 | 1214 |
1638 if (!FinishPrevFrameIfPresent()) | 1215 if (!FinishPrevFrameIfPresent()) |
1639 SET_ERROR_AND_RETURN(); | 1216 SET_ERROR_AND_RETURN(); |
1640 | 1217 |
1641 par_res = parser_.ParseSPS(&sps_id); | 1218 par_res = parser_.ParseSPS(&sps_id); |
1642 if (par_res != media::H264Parser::kOk) | 1219 if (par_res != media::H264Parser::kOk) |
1643 SET_ERROR_AND_RETURN(); | 1220 SET_ERROR_AND_RETURN(); |
1644 | 1221 |
1645 bool need_new_buffers = false; | 1222 bool need_new_buffers = false; |
1646 if (!ProcessSPS(sps_id, &need_new_buffers)) | 1223 if (!ProcessSPS(sps_id, &need_new_buffers)) |
1647 SET_ERROR_AND_RETURN(); | 1224 SET_ERROR_AND_RETURN(); |
1648 | 1225 |
1649 state_ = kDecoding; | 1226 state_ = kDecoding; |
1650 | 1227 |
1651 if (need_new_buffers) { | 1228 if (need_new_buffers) { |
1652 if (!Flush()) | 1229 if (!Flush()) |
1653 return kDecodeError; | 1230 return kDecodeError; |
1654 | 1231 |
1655 available_va_surfaces_.clear(); | 1232 curr_pic_ = nullptr; |
| 1233 curr_nalu_ = nullptr; |
| 1234 ref_pic_list_p0_.clear(); |
| 1235 ref_pic_list_b0_.clear(); |
| 1236 ref_pic_list_b1_.clear(); |
| 1237 |
1656 return kAllocateNewSurfaces; | 1238 return kAllocateNewSurfaces; |
1657 } | 1239 } |
1658 break; | 1240 break; |
1659 } | 1241 } |
1660 | 1242 |
1661 case media::H264NALU::kPPS: { | 1243 case media::H264NALU::kPPS: { |
1662 if (state_ != kDecoding) | 1244 if (state_ != kDecoding) |
1663 break; | 1245 break; |
1664 | 1246 |
1665 int pps_id; | 1247 int pps_id; |
1666 | 1248 |
1667 if (!FinishPrevFrameIfPresent()) | 1249 if (!FinishPrevFrameIfPresent()) |
1668 SET_ERROR_AND_RETURN(); | 1250 SET_ERROR_AND_RETURN(); |
1669 | 1251 |
1670 par_res = parser_.ParsePPS(&pps_id); | 1252 par_res = parser_.ParsePPS(&pps_id); |
1671 if (par_res != media::H264Parser::kOk) | 1253 if (par_res != media::H264Parser::kOk) |
1672 SET_ERROR_AND_RETURN(); | 1254 SET_ERROR_AND_RETURN(); |
1673 | 1255 |
1674 if (!ProcessPPS(pps_id)) | 1256 if (!ProcessPPS(pps_id)) |
1675 SET_ERROR_AND_RETURN(); | 1257 SET_ERROR_AND_RETURN(); |
1676 break; | 1258 break; |
1677 } | 1259 } |
1678 | 1260 |
1679 case media::H264NALU::kAUD: | |
1680 case media::H264NALU::kEOSeq: | |
1681 case media::H264NALU::kEOStream: | |
1682 if (state_ != kDecoding) | |
1683 break; | |
1684 if (!FinishPrevFrameIfPresent()) | |
1685 SET_ERROR_AND_RETURN(); | |
1686 | |
1687 break; | |
1688 | |
1689 default: | 1261 default: |
1690 DVLOG(4) << "Skipping NALU type: " << nalu.nal_unit_type; | 1262 DVLOG(4) << "Skipping NALU type: " << curr_nalu_->nal_unit_type; |
1691 break; | 1263 break; |
1692 } | 1264 } |
| 1265 |
| 1266 DVLOG(4) << "Dropping nalu"; |
| 1267 curr_nalu_.reset(); |
1693 } | 1268 } |
1694 } | 1269 } |
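For context, a client drives Decode() in a loop and reacts to the DecodeResult values used in this file. The sketch below is hypothetical client code, not part of this change: AllocatePictures() is an assumed stand-in for however the accelerator creates output pictures, and the H264Decoder header introduced by this change is assumed to be included.

#include <cstddef>
#include <cstdint>

// Hypothetical hook: create |count| output pictures for the accelerator.
void AllocatePictures(size_t /*count*/) {}

void DecodeBuffer(content::H264Decoder* decoder,
                  const uint8_t* data,
                  size_t size) {
  decoder->SetStream(data, size);
  bool done = false;
  while (!done) {
    switch (decoder->Decode()) {
      case content::H264Decoder::kRanOutOfStreamData:
        done = true;  // Buffer fully parsed; feed the next one via SetStream().
        break;
      case content::H264Decoder::kRanOutOfSurfaces:
        done = true;  // Resume once the client returns output pictures.
        break;
      case content::H264Decoder::kAllocateNewSurfaces:
        // A new SPS changed the stream requirements; reallocate and continue.
        AllocatePictures(decoder->GetRequiredNumOfPictures());
        break;
      case content::H264Decoder::kDecodeError:
        done = true;  // Unrecoverable error; reset or drop the decoder.
        break;
      default:
        done = true;  // Any result not shown in this diff.
        break;
    }
  }
}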
1695 | 1270 |
1696 size_t VaapiH264Decoder::GetRequiredNumOfPictures() { | 1271 size_t H264Decoder::GetRequiredNumOfPictures() const { |
1697 return dpb_.max_num_pics() + kPicsInPipeline; | 1272 return dpb_.max_num_pics() + kPicsInPipeline; |
1698 } | 1273 } |
1699 | 1274 |
1700 } // namespace content | 1275 } // namespace content |