OLD | NEW |
---|---|
(Empty) | |
1 // Copyright (c) 2014 The Chromium Authors. All rights reserved. | |
2 // Use of this source code is governed by a BSD-style license that can be | |
3 // found in the LICENSE file. | |
4 | |
5 #include "base/bind.h" | |
6 #include "base/callback.h" | |
7 #include "base/message_loop/message_loop.h" | |
wuchengli
2014/06/18 03:34:13
message_loop_proxy.h
Pawel Osciak
2014/06/18 07:05:08
Done.
| |
8 #include "base/metrics/histogram.h" | |
9 #include "base/numerics/safe_conversions.h" | |
10 #include "cc/base/util.h" | |
11 #include "content/common/gpu/media/h264_dpb.h" | |
12 #include "content/common/gpu/media/vaapi_video_encode_accelerator.h" | |
wuchengli
2014/06/18 03:34:12
This should be the first include. http://google-st
Pawel Osciak
2014/06/18 07:05:07
There was a discussion about this and iirc the con
| |
13 #include "media/base/bind_to_current_loop.h" | |
14 #include "third_party/libva/va/va_enc_h264.h" | |
15 | |
// Logging helper that prefixes each message with the calling function's name.
#define DVLOGF(level) DVLOG(level) << __FUNCTION__ << "(): "

// Puts the encoder into the error state, logs |msg| and reports |error| to
// the client via NotifyError(). The do/while(0) wrapper makes the macro
// behave as a single statement (safe inside unbraced if/else).
#define NOTIFY_ERROR(error, msg)                         \
  do {                                                   \
    SetState(kError);                                    \
    DVLOGF(1) << msg;                                    \
    DVLOGF(1) << "Calling NotifyError(" << error << ")"; \
    NotifyError(error);                                  \
  } while (0)
25 | |
26 namespace content { | |
27 | |
// Records an encoder failure of the given kind in the Media.VAVEA.EncoderFailure
// UMA histogram.
static void ReportToUMA(
    VaapiVideoEncodeAccelerator::VAVEAEncoderFailure failure) {
  UMA_HISTOGRAM_ENUMERATION(
      "Media.VAVEA.EncoderFailure",
      failure,
      VaapiVideoEncodeAccelerator::VAVEA_ENCODER_FAILURES_MAX);
}
35 | |
// A client-provided input frame queued for encode, together with the
// client's request to force this frame to be encoded as a keyframe.
struct VaapiVideoEncodeAccelerator::InputFrameRef {
  InputFrameRef(const scoped_refptr<media::VideoFrame>& frame,
                bool force_keyframe)
      : frame(frame), force_keyframe(force_keyframe) {}
  const scoped_refptr<media::VideoFrame> frame;
  const bool force_keyframe;
};
43 | |
// A client-provided output bitstream buffer: its client-assigned id, the
// mapped shared memory backing it (owned here), and its usable size in bytes.
struct VaapiVideoEncodeAccelerator::BitstreamBufferRef {
  BitstreamBufferRef(int32 id, scoped_ptr<base::SharedMemory> shm, size_t size)
      : id(id), shm(shm.Pass()), size(size) {}
  const int32 id;
  const scoped_ptr<base::SharedMemory> shm;
  const size_t size;
};
51 | |
52 // static | |
53 std::vector<media::VideoEncodeAccelerator::SupportedProfile> | |
54 VaapiVideoEncodeAccelerator::GetSupportedProfiles() { | |
55 std::vector<SupportedProfile> profiles; | |
56 SupportedProfile profile; | |
57 | |
58 profile.profile = media::H264PROFILE_MAIN; | |
59 profile.max_resolution.SetSize(1920, 1088); | |
wuchengli
2014/06/18 03:34:12
define these as constants
Pawel Osciak
2014/06/18 07:05:07
I prefer to leave it here to emphasize that we nee
| |
60 profile.max_framerate.numerator = kDefaultFramerate; | |
61 profile.max_framerate.denominator = 1; | |
62 profiles.push_back(profile); | |
63 | |
64 // This is actually only constrained (see crbug.com/345569). | |
65 profile.profile = media::H264PROFILE_BASELINE; | |
66 profiles.push_back(profile); | |
67 | |
68 profile.profile = media::H264PROFILE_HIGH; | |
69 profiles.push_back(profile); | |
70 | |
71 return profiles; | |
72 } | |
73 | |
74 static unsigned int Log2OfPowerOf2(unsigned int x) { | |
75 CHECK_GT(x, 0); | |
76 DCHECK_EQ(x & (x - 1), 0); | |
77 | |
78 int log = 0; | |
79 while (x) { | |
80 x >>= 1; | |
81 ++log; | |
82 } | |
83 return log; | |
84 } | |
85 | |
VaapiVideoEncodeAccelerator::VaapiVideoEncodeAccelerator(Display* x_display)
    : profile_(media::VIDEO_CODEC_PROFILE_UNKNOWN),
      mb_width_(0),
      mb_height_(0),
      output_buffer_byte_size_(0),
      x_display_(x_display),
      state_(kUninitialized),
      frame_num_(0),
      last_idr_frame_num_(0),
      bitrate_(0),
      framerate_(0),
      cpb_size_(0),
      encoding_parameters_changed_(false),
      encoder_thread_("VAVEAEncoderThread"),
      child_message_loop_proxy_(base::MessageLoopProxy::current()),
      weak_this_ptr_factory_(this) {
  // Assigned in the body intentionally: GetWeakPtr() must not be called
  // before the factory is fully constructed.
  weak_this_ = weak_this_ptr_factory_.GetWeakPtr();

  // Encoding parameter defaults. |max_ref_idx_l0_size_| currently equals
  // kMaxNumReferenceFrames, but is kept as a separate member so the L0
  // reference list size can diverge from the DPB size later.
  max_ref_idx_l0_size_ = kMaxNumReferenceFrames;
  qp_ = kDefaultQP;
  idr_period_ = kIDRPeriod;
  i_period_ = kIPeriod;
  ip_period_ = kIPPeriod;
}
110 | |
// Must run on the child (creation) thread, after Destroy() has stopped the
// encoder thread.
VaapiVideoEncodeAccelerator::~VaapiVideoEncodeAccelerator() {
  DVLOGF(4);
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  DCHECK(!encoder_thread_.IsRunning());
}
116 | |
// Synchronous part of initialization, run on the child thread: validates
// arguments, computes macroblock-aligned coded size, creates the VAAPI
// wrapper and starts the encoder thread. Returns false on any failure;
// the remaining (asynchronous) setup happens in InitializeTask().
bool VaapiVideoEncodeAccelerator::Initialize(
    media::VideoFrame::Format format,
    const gfx::Size& input_visible_size,
    media::VideoCodecProfile output_profile,
    uint32 initial_bitrate,
    Client* client) {
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  DCHECK(!encoder_thread_.IsRunning());
  DCHECK_EQ(state_, kUninitialized);

  DVLOG(3) << "Initializing VAVEA, input_format: "
           << media::VideoFrame::FormatToString(format)
           << ", input_visible_size: " << input_visible_size.ToString()
           << ", output_profile: " << output_profile
           << ", initial_bitrate: " << initial_bitrate;

  client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client));
  client_ = client_ptr_factory_->GetWeakPtr();

  // Only H.264 baseline..main are accepted here (high is reported in
  // GetSupportedProfiles() but rejected by this range check — NOTE(review):
  // confirm whether that mismatch is intended).
  if (output_profile < media::H264PROFILE_BASELINE ||
      output_profile > media::H264PROFILE_MAIN) {
    DVLOGF(1) << "Unsupported output profile";
    return false;
  }

  if (format != media::VideoFrame::I420) {
    DVLOGF(1) << "Unsupported input format";
    return false;
  }

  profile_ = output_profile;
  visible_size_ = input_visible_size;
  // 4:2:0 format has to be 2-aligned.
  DCHECK_EQ(visible_size_.width() % 2, 0);
  DCHECK_EQ(visible_size_.height() % 2, 0);
  // Round the visible size up to whole 16x16 macroblocks; the coded size is
  // derived from the MB counts (order intentional, avoids casts).
  mb_width_ = cc::RoundUp(visible_size_.width(), 16) / 16;
  mb_height_ = cc::RoundUp(visible_size_.height(), 16) / 16;
  coded_size_ = gfx::Size(mb_width_ * 16, mb_height_ * 16);
  output_buffer_byte_size_ = coded_size_.GetArea();

  UpdateRates(initial_bitrate, kDefaultFramerate);

  vaapi_wrapper_ = VaapiWrapper::Create(VaapiWrapper::kEncode,
                                        output_profile,
                                        x_display_,
                                        base::Bind(&ReportToUMA, VAAPI_ERROR));
  if (!vaapi_wrapper_.get()) {
    DVLOG(1) << "Failed initializing VAAPI";
    return false;
  }

  // Start the encoder thread only after the wrapper exists, so no encoder
  // task can ever observe a null wrapper.
  if (!encoder_thread_.Start()) {
    DVLOGF(1) << "Failed to start encoder thread";
    return false;
  }
  encoder_thread_proxy_ = encoder_thread_.message_loop_proxy();

  // Finish the remaining initialization on the encoder thread.
  encoder_thread_proxy_->PostTask(
      FROM_HERE,
      base::Bind(&VaapiVideoEncodeAccelerator::InitializeTask,
                 base::Unretained(this)));

  return true;
}
182 | |
// Encoder-thread half of initialization: allocates VA surfaces, builds the
// initial SPS/PPS (raw and packed forms), asks the client for input/output
// buffers and moves to the kEncoding state. Errors are reported via
// NOTIFY_ERROR.
void VaapiVideoEncodeAccelerator::InitializeTask() {
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());
  DCHECK_EQ(state_, kUninitialized);

  // Surface recycling must happen on the encoder thread, regardless of
  // which thread drops the last VASurface reference.
  va_surface_release_cb_ = media::BindToCurrentLoop(
      base::Bind(&VaapiVideoEncodeAccelerator::RecycleVASurfaceID, weak_this_));

  if (!vaapi_wrapper_->CreateSurfaces(
          coded_size_, kNumSurfaces, &available_va_surface_ids_)) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed creating VASurfaces");
    return;
  }

  if (!UpdateSPS() || !GeneratePackedSPS()) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed creating SPS");
    return;
  }

  if (!UpdatePPS() || !GeneratePackedPPS()) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed creating PPS");
    return;
  }

  // Safe to request buffers before SetState() below: any Encode()/
  // UseOutputBitstreamBuffer() calls triggered by this land as tasks on the
  // encoder thread, i.e. after this task completes.
  child_message_loop_proxy_->PostTask(
      FROM_HERE,
      base::Bind(&Client::RequireBitstreamBuffers,
                 client_,
                 kNumInputBuffers,
                 coded_size_,
                 output_buffer_byte_size_));

  SetState(kEncoding);
}
216 | |
// Returns a released VA surface to the free pool and kicks the encode loop,
// since a pending frame may have been waiting for free surfaces.
void VaapiVideoEncodeAccelerator::RecycleVASurfaceID(
    VASurfaceID va_surface_id) {
  DVLOGF(4) << "va_surface_id: " << va_surface_id;
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());

  available_va_surface_ids_.push_back(va_surface_id);
  EncodeFrameTask();
}
225 | |
// Sets up |curr_pic_| for the next frame: assigns frame_num (wrapping at the
// IDR period), chooses slice type (I on i_period boundaries or when forced,
// P otherwise), marks IDR frames and derives POC values.
void VaapiVideoEncodeAccelerator::BeginFrame(bool force_keyframe) {
  memset(&curr_pic_, 0, sizeof(curr_pic_));

  curr_pic_.frame_num = frame_num_++;
  frame_num_ %= idr_period_;

  if (curr_pic_.frame_num % i_period_ == 0 || force_keyframe)
    curr_pic_.type = media::H264SliceHeader::kISlice;
  else
    curr_pic_.type = media::H264SliceHeader::kPSlice;

  // IDR frames reset the reference picture list.
  if (curr_pic_.frame_num % idr_period_ == 0) {
    curr_pic_.idr = true;
    last_idr_frame_num_ = curr_pic_.frame_num;
    ref_pic_list0_.clear();
  }

  // All non-B frames are used as references (no B frames are produced).
  if (curr_pic_.type != media::H264SliceHeader::kBSlice)
    curr_pic_.ref = true;

  // POC in frame (not field) units, hence the factor of 2.
  curr_pic_.pic_order_cnt = curr_pic_.frame_num * 2;
  curr_pic_.top_field_order_cnt = curr_pic_.pic_order_cnt;
  curr_pic_.pic_order_cnt_lsb = curr_pic_.pic_order_cnt;

  curr_encode_job_->keyframe =
      (curr_pic_.type == media::H264SliceHeader::kISlice);

  DVLOG(4) << "Starting a new frame, type: " << curr_pic_.type
           << (force_keyframe ? " (forced keyframe)" : "")
           << " frame_num: " << curr_pic_.frame_num
           << " POC: " << curr_pic_.pic_order_cnt;
}
258 | |
// Finalizes the current frame: updates the reference list with its
// reconstructed surface and hands the job over to the submitted queue.
void VaapiVideoEncodeAccelerator::EndFrame() {
  // Store the picture on the list of reference pictures and keep the list
  // below maximum size, dropping oldest references.
  if (curr_pic_.ref)
    ref_pic_list0_.push_front(curr_encode_job_->recon_surface);
  size_t max_num_ref_frames =
      base::checked_cast<size_t>(curr_sps_.max_num_ref_frames);
  while (ref_pic_list0_.size() > max_num_ref_frames)
    ref_pic_list0_.pop_back();

  submitted_encode_jobs_.push(make_linked_ptr(curr_encode_job_.release()));
}
271 | |
// Resets |va_pic| to the libva "unused entry" state: zeroed, with an invalid
// picture id and the INVALID flag set.
static void InitVAPicture(VAPictureH264* va_pic) {
  memset(va_pic, 0, sizeof(*va_pic));
  va_pic->picture_id = VA_INVALID_ID;
  va_pic->flags = VA_PICTURE_H264_INVALID;
}
277 | |
// Fills and submits all per-frame libva parameter buffers for the current
// encode job: sequence, picture and slice parameters, plus rate-control,
// framerate and HRD misc parameters. Returns false if any submission fails.
// The SPS_TO_SP* helper macros copy identically-named fields from |curr_sps_|
// into the corresponding seq_param (sub)structures.
bool VaapiVideoEncodeAccelerator::SubmitFrameParameters() {
  VAEncSequenceParameterBufferH264 seq_param;
  memset(&seq_param, 0, sizeof(seq_param));

#define SPS_TO_SP(a) seq_param.a = curr_sps_.a;
  SPS_TO_SP(seq_parameter_set_id);
  SPS_TO_SP(level_idc);

  seq_param.intra_period = i_period_;
  seq_param.intra_idr_period = idr_period_;
  seq_param.ip_period = ip_period_;
  seq_param.bits_per_second = bitrate_;

  SPS_TO_SP(max_num_ref_frames);
  seq_param.picture_width_in_mbs = mb_width_;
  seq_param.picture_height_in_mbs = mb_height_;

#define SPS_TO_SP_FS(a) seq_param.seq_fields.bits.a = curr_sps_.a;
  SPS_TO_SP_FS(chroma_format_idc);
  SPS_TO_SP_FS(frame_mbs_only_flag);
  SPS_TO_SP_FS(log2_max_frame_num_minus4);
  SPS_TO_SP_FS(pic_order_cnt_type);
  SPS_TO_SP_FS(log2_max_pic_order_cnt_lsb_minus4);
#undef SPS_TO_SP_FS

  SPS_TO_SP(bit_depth_luma_minus8);
  SPS_TO_SP(bit_depth_chroma_minus8);

  SPS_TO_SP(frame_cropping_flag);
  if (curr_sps_.frame_cropping_flag) {
    SPS_TO_SP(frame_crop_left_offset);
    SPS_TO_SP(frame_crop_right_offset);
    SPS_TO_SP(frame_crop_top_offset);
    SPS_TO_SP(frame_crop_bottom_offset);
  }

  SPS_TO_SP(vui_parameters_present_flag);
#define SPS_TO_SP_VF(a) seq_param.vui_fields.bits.a = curr_sps_.a;
  SPS_TO_SP_VF(timing_info_present_flag);
#undef SPS_TO_SP_VF
  SPS_TO_SP(num_units_in_tick);
  SPS_TO_SP(time_scale);
#undef SPS_TO_SP

  if (!vaapi_wrapper_->SubmitBuffer(VAEncSequenceParameterBufferType,
                                    sizeof(seq_param),
                                    &seq_param))
    return false;

  VAEncPictureParameterBufferH264 pic_param;
  memset(&pic_param, 0, sizeof(pic_param));

  pic_param.CurrPic.picture_id = curr_encode_job_->recon_surface->id();
  pic_param.CurrPic.TopFieldOrderCnt = curr_pic_.top_field_order_cnt;
  pic_param.CurrPic.BottomFieldOrderCnt = curr_pic_.bottom_field_order_cnt;
  pic_param.CurrPic.flags = 0;

  // Mark all reference slots invalid first, then fill in the active ones
  // from ref_pic_list0_.
  for (size_t i = 0; i < arraysize(pic_param.ReferenceFrames); ++i)
    InitVAPicture(&pic_param.ReferenceFrames[i]);

  DCHECK_LE(ref_pic_list0_.size(), arraysize(pic_param.ReferenceFrames));
  RefPicList::const_iterator iter = ref_pic_list0_.begin();
  for (size_t i = 0;
       i < arraysize(pic_param.ReferenceFrames) && iter != ref_pic_list0_.end();
       ++iter, ++i) {
    pic_param.ReferenceFrames[i].picture_id = (*iter)->id();
    pic_param.ReferenceFrames[i].flags = 0;
  }

  pic_param.coded_buf = curr_encode_job_->coded_buffer;
  pic_param.pic_parameter_set_id = curr_pps_.pic_parameter_set_id;
  pic_param.seq_parameter_set_id = curr_pps_.seq_parameter_set_id;
  pic_param.frame_num = curr_pic_.frame_num;
  pic_param.pic_init_qp = qp_;
  pic_param.num_ref_idx_l0_active_minus1 = max_ref_idx_l0_size_ - 1;
  pic_param.pic_fields.bits.idr_pic_flag = curr_pic_.idr;
  pic_param.pic_fields.bits.reference_pic_flag = curr_pic_.ref;
#define PPS_TO_PP_PF(a) pic_param.pic_fields.bits.a = curr_pps_.a;
  PPS_TO_PP_PF(entropy_coding_mode_flag);
  PPS_TO_PP_PF(transform_8x8_mode_flag);
  PPS_TO_PP_PF(deblocking_filter_control_present_flag);
#undef PPS_TO_PP_PF

  if (!vaapi_wrapper_->SubmitBuffer(VAEncPictureParameterBufferType,
                                    sizeof(pic_param),
                                    &pic_param))
    return false;

  VAEncSliceParameterBufferH264 slice_param;
  memset(&slice_param, 0, sizeof(slice_param));

  // One slice per frame.
  slice_param.num_macroblocks = mb_width_ * mb_height_;
  slice_param.macroblock_info = VA_INVALID_ID;
  slice_param.slice_type = curr_pic_.type;
  slice_param.pic_parameter_set_id = curr_pps_.pic_parameter_set_id;
  slice_param.idr_pic_id = last_idr_frame_num_;
  slice_param.pic_order_cnt_lsb = curr_pic_.pic_order_cnt_lsb;
  slice_param.num_ref_idx_active_override_flag = true;

  for (size_t i = 0; i < arraysize(slice_param.RefPicList0); ++i)
    InitVAPicture(&slice_param.RefPicList0[i]);

  for (size_t i = 0; i < arraysize(slice_param.RefPicList1); ++i)
    InitVAPicture(&slice_param.RefPicList1[i]);

  DCHECK_LE(ref_pic_list0_.size(), arraysize(slice_param.RefPicList0));
  iter = ref_pic_list0_.begin();
  for (size_t i = 0;
       i < arraysize(slice_param.RefPicList0) && iter != ref_pic_list0_.end();
       ++iter, ++i) {
    InitVAPicture(&slice_param.RefPicList0[i]);
    slice_param.RefPicList0[i].picture_id = (*iter)->id();
    slice_param.RefPicList0[i].flags = 0;
  }

  if (!vaapi_wrapper_->SubmitBuffer(VAEncSliceParameterBufferType,
                                    sizeof(slice_param),
                                    &slice_param))
    return false;

  VAEncMiscParameterRateControl rate_control_param;
  memset(&rate_control_param, 0, sizeof(rate_control_param));
  rate_control_param.bits_per_second = bitrate_;
  // Target 90% of the peak bitrate; frame skipping is disabled so every
  // input frame produces output.
  rate_control_param.target_percentage = 90;
  rate_control_param.window_size = kCPBWindowSizeMs;
  rate_control_param.initial_qp = qp_;
  rate_control_param.rc_flags.bits.disable_frame_skip = true;

  if (!vaapi_wrapper_->SubmitVAEncMiscParamBuffer(
          VAEncMiscParameterTypeRateControl,
          sizeof(rate_control_param),
          &rate_control_param))
    return false;

  VAEncMiscParameterFrameRate framerate_param;
  memset(&framerate_param, 0, sizeof(framerate_param));
  framerate_param.framerate = framerate_;
  if (!vaapi_wrapper_->SubmitVAEncMiscParamBuffer(
          VAEncMiscParameterTypeFrameRate,
          sizeof(framerate_param),
          &framerate_param))
    return false;

  VAEncMiscParameterHRD hrd_param;
  memset(&hrd_param, 0, sizeof(hrd_param));
  hrd_param.buffer_size = cpb_size_;
  hrd_param.initial_buffer_fullness = cpb_size_ / 2;
  if (!vaapi_wrapper_->SubmitVAEncMiscParamBuffer(VAEncMiscParameterTypeHRD,
                                                  sizeof(hrd_param),
                                                  &hrd_param))
    return false;

  return true;
}
432 | |
// Submits packed SPS and PPS headers before I-slices, so every keyframe in
// the output stream is self-contained. No-op for non-I frames.
bool VaapiVideoEncodeAccelerator::SubmitHeadersIfNeeded() {
  if (curr_pic_.type != media::H264SliceHeader::kISlice)
    return true;

  // Submit SPS.
  VAEncPackedHeaderParameterBuffer par_buffer;
  memset(&par_buffer, 0, sizeof(par_buffer));
  par_buffer.type = VAEncPackedHeaderSequence;
  par_buffer.bit_length = packed_sps_.BytesInBuffer() * 8;

  if (!vaapi_wrapper_->SubmitBuffer(VAEncPackedHeaderParameterBufferType,
                                    sizeof(par_buffer),
                                    &par_buffer))
    return false;

  if (!vaapi_wrapper_->SubmitBuffer(VAEncPackedHeaderDataBufferType,
                                    packed_sps_.BytesInBuffer(),
                                    packed_sps_.data()))
    return false;

  // Submit PPS.
  memset(&par_buffer, 0, sizeof(par_buffer));
  par_buffer.type = VAEncPackedHeaderPicture;
  par_buffer.bit_length = packed_pps_.BytesInBuffer() * 8;

  if (!vaapi_wrapper_->SubmitBuffer(VAEncPackedHeaderParameterBufferType,
                                    sizeof(par_buffer),
                                    &par_buffer))
    return false;

  if (!vaapi_wrapper_->SubmitBuffer(VAEncPackedHeaderDataBufferType,
                                    packed_pps_.BytesInBuffer(),
                                    packed_pps_.data()))
    return false;

  return true;
}
470 | |
// Kicks off hardware encode of the current job, consuming all parameter
// buffers submitted so far.
bool VaapiVideoEncodeAccelerator::ExecuteEncode() {
  DVLOGF(3) << "Encoding frame_num: " << curr_pic_.frame_num;
  return vaapi_wrapper_->ExecuteAndDestroyPendingBuffers(
      curr_encode_job_->input_surface->id());
}
476 | |
// Copies the client's input |frame| into the current job's VA input surface.
bool VaapiVideoEncodeAccelerator::UploadFrame(
    const scoped_refptr<media::VideoFrame>& frame) {
  return vaapi_wrapper_->UploadVideoFrameToSurface(
      frame, curr_encode_job_->input_surface->id());
}
482 | |
// If both a finished encode job and a free client bitstream buffer are
// available, downloads the coded data into the buffer and notifies the
// client on its thread. Otherwise returns without doing anything.
void VaapiVideoEncodeAccelerator::TryToReturnBitstreamBuffer() {
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());

  if (state_ != kEncoding)
    return;

  if (submitted_encode_jobs_.empty() || available_bitstream_buffers_.empty())
    return;

  linked_ptr<BitstreamBufferRef> buffer = available_bitstream_buffers_.front();
  available_bitstream_buffers_.pop();

  uint8* target_data = reinterpret_cast<uint8*>(buffer->shm->memory());

  linked_ptr<EncodeJob> encode_job = submitted_encode_jobs_.front();
  submitted_encode_jobs_.pop();

  // Blocks until the job has finished, then copies the coded data out and
  // destroys the coded buffer.
  size_t data_size = 0;
  if (!vaapi_wrapper_->DownloadAndDestroyCodedBuffer(
          encode_job->coded_buffer,
          encode_job->input_surface->id(),
          target_data,
          buffer->size,
          &data_size)) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed downloading coded buffer");
    return;
  }

  DVLOG(3) << "Returning bitstream buffer "
           << (encode_job->keyframe ? "(keyframe)" : "")
           << " id: " << buffer->id << " size: " << data_size;

  child_message_loop_proxy_->PostTask(FROM_HERE,
                                      base::Bind(&Client::BitstreamBufferReady,
                                                 client_,
                                                 buffer->id,
                                                 data_size,
                                                 encode_job->keyframe));
}
522 | |
// Client entry point (child thread); trampolines to EncodeTask() on the
// encoder thread. base::Unretained is safe: the encoder thread is stopped
// before |this| is destroyed.
void VaapiVideoEncodeAccelerator::Encode(
    const scoped_refptr<media::VideoFrame>& frame,
    bool force_keyframe) {
  DVLOGF(3) << "Frame timestamp: " << frame->timestamp().InMilliseconds()
            << " force_keyframe: " << force_keyframe;
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());

  encoder_thread_proxy_->PostTask(
      FROM_HERE,
      base::Bind(&VaapiVideoEncodeAccelerator::EncodeTask,
                 base::Unretained(this),
                 frame,
                 force_keyframe));
}
537 | |
// Allocates the resources for the next encode job (coded buffer, input and
// reconstructed surfaces, snapshot of current references). Returns false if
// not enough free surfaces are available yet; the caller retries when
// surfaces are recycled. Runs on the encoder thread (called only from
// EncodeFrameTask, which asserts it).
bool VaapiVideoEncodeAccelerator::PrepareNextJob() {
  // Need one input plus one reconstructed surface, with some slack.
  if (available_va_surface_ids_.size() < kMinSurfacesToEncode)
    return false;

  DCHECK(!curr_encode_job_.get());
  curr_encode_job_.reset(new EncodeJob());

  if (!vaapi_wrapper_->CreateCodedBuffer(output_buffer_byte_size_,
                                         &curr_encode_job_->coded_buffer)) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed creating coded buffer");
    return false;
  }

  curr_encode_job_->input_surface =
      new VASurface(available_va_surface_ids_.back(), va_surface_release_cb_);
  available_va_surface_ids_.pop_back();

  curr_encode_job_->recon_surface =
      new VASurface(available_va_surface_ids_.back(), va_surface_release_cb_);
  available_va_surface_ids_.pop_back();

  // Reference surfaces are needed until the job is done, but they get
  // removed from ref_pic_list0_ when it's full at the end of job submission.
  // Keep refs to them along with the job and only release after sync.
  RefPicList::const_iterator iter = ref_pic_list0_.begin();
  for (; iter != ref_pic_list0_.end(); ++iter)
    curr_encode_job_->reference_surfaces.push_back(*iter);

  return true;
}
568 | |
// Encoder-thread counterpart of Encode(): queues the frame and tries to
// encode immediately.
void VaapiVideoEncodeAccelerator::EncodeTask(
    const scoped_refptr<media::VideoFrame>& frame,
    bool force_keyframe) {
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());
  DCHECK_NE(state_, kUninitialized);

  encoder_input_queue_.push(
      make_linked_ptr(new InputFrameRef(frame, force_keyframe)));
  EncodeFrameTask();
}
579 | |
// Drives one full encode of the oldest queued frame, if the encoder is in
// the kEncoding state and enough resources are available: upload, frame
// setup, parameter/header submission, HW kick-off, bookkeeping, and finally
// an attempt to return finished output. Any failure flips to the error state
// via NOTIFY_ERROR.
void VaapiVideoEncodeAccelerator::EncodeFrameTask() {
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());

  if (state_ != kEncoding || encoder_input_queue_.empty())
    return;

  if (!PrepareNextJob()) {
    DVLOGF(4) << "Not ready for next frame yet";
    return;
  }

  linked_ptr<InputFrameRef> frame_ref = encoder_input_queue_.front();
  encoder_input_queue_.pop();

  if (!UploadFrame(frame_ref->frame)) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed uploading source frame to HW.");
    return;
  }

  // A pending parameter change forces a keyframe so the new SPS takes effect.
  BeginFrame(frame_ref->force_keyframe || encoding_parameters_changed_);
  encoding_parameters_changed_ = false;

  if (!SubmitFrameParameters()) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed submitting frame parameters.");
    return;
  }

  if (!SubmitHeadersIfNeeded()) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed submitting frame headers.");
    return;
  }

  if (!ExecuteEncode()) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed submitting encode job to HW.");
    return;
  }

  EndFrame();
  TryToReturnBitstreamBuffer();
}
620 | |
// Client entry point (child thread): validates and maps the client's output
// buffer, then hands ownership to the encoder thread.
void VaapiVideoEncodeAccelerator::UseOutputBitstreamBuffer(
    const media::BitstreamBuffer& buffer) {
  DVLOGF(4) << "id: " << buffer.id();
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());

  if (buffer.size() < output_buffer_byte_size_) {
    NOTIFY_ERROR(kInvalidArgumentError, "Provided bitstream buffer too small");
    return;
  }

  scoped_ptr<base::SharedMemory> shm(
      new base::SharedMemory(buffer.handle(), false));
  if (!shm->Map(buffer.size())) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed mapping shared memory.");
    return;
  }

  scoped_ptr<BitstreamBufferRef> buffer_ref(
      new BitstreamBufferRef(buffer.id(), shm.Pass(), buffer.size()));

  encoder_thread_proxy_->PostTask(
      FROM_HERE,
      base::Bind(&VaapiVideoEncodeAccelerator::UseOutputBitstreamBufferTask,
                 base::Unretained(this),
                 base::Passed(&buffer_ref)));
}
647 | |
// Encoder-thread counterpart of UseOutputBitstreamBuffer(): queues the
// mapped buffer and tries to return any already-finished output.
void VaapiVideoEncodeAccelerator::UseOutputBitstreamBufferTask(
    scoped_ptr<BitstreamBufferRef> buffer_ref) {
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());
  DCHECK_NE(state_, kUninitialized);

  available_bitstream_buffers_.push(make_linked_ptr(buffer_ref.release()));
  TryToReturnBitstreamBuffer();
}
656 | |
// Client entry point (child thread); trampolines the bitrate/framerate
// change to the encoder thread.
void VaapiVideoEncodeAccelerator::RequestEncodingParametersChange(
    uint32 bitrate,
    uint32 framerate) {
  DVLOGF(2) << "bitrate: " << bitrate << " framerate: " << framerate;
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());

  encoder_thread_proxy_->PostTask(
      FROM_HERE,
      base::Bind(
          &VaapiVideoEncodeAccelerator::RequestEncodingParametersChangeTask,
          base::Unretained(this),
          bitrate,
          framerate));
}
671 | |
// Updates cached bitrate/framerate and the derived CPB size. Called once
// from Initialize() on the child thread (before the encoder thread starts),
// and afterwards only on the encoder thread — hence the conditional DCHECK.
void VaapiVideoEncodeAccelerator::UpdateRates(uint32 bitrate,
                                              uint32 framerate) {
  if (encoder_thread_.IsRunning())
    DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());
  DCHECK_NE(bitrate, 0);
  DCHECK_NE(framerate, 0);
  bitrate_ = base::checked_cast<unsigned int>(bitrate);
  framerate_ = base::checked_cast<unsigned int>(framerate);
  // CPB sized to hold kCPBWindowSizeMs worth of stream at the target bitrate.
  cpb_size_ = bitrate_ * kCPBWindowSizeMs / 1000;
}
682 | |
// Encoder-thread half of a parameter change: recomputes rates, regenerates
// the SPS, and flags the change so the next encoded frame is a keyframe
// carrying the new parameters.
void VaapiVideoEncodeAccelerator::RequestEncodingParametersChangeTask(
    uint32 bitrate,
    uint32 framerate) {
  DVLOGF(2) << "bitrate: " << bitrate << " framerate: " << framerate;
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());
  DCHECK_NE(state_, kUninitialized);

  UpdateRates(bitrate, framerate);

  if (!UpdateSPS() || !GeneratePackedSPS()) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed creating SPS");
    return;
  }

  // Submit new parameters along with next frame that will be processed.
  encoding_parameters_changed_ = true;
}
700 | |
// Client entry point (child thread): invalidates client weak pointers so no
// further client callbacks fire, then stops the encoder thread.
// NOTE(review): the VideoEncodeAccelerator API expects Destroy() to also
// "delete this" before returning — confirm this is done in the final
// version (the reviewer marked it Done in a later patchset).
void VaapiVideoEncodeAccelerator::Destroy() {
  DVLOGF(1);
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());

  // Can't call client anymore after Destroy() returns.
  client_ptr_factory_.reset();

  // Early-exit encoder tasks if they are running and join the thread.
  if (encoder_thread_.IsRunning()) {
    encoder_thread_.message_loop()->PostTask(
        FROM_HERE,
        base::Bind(&VaapiVideoEncodeAccelerator::DestroyTask,
                   base::Unretained(this)));
    encoder_thread_.Stop();
  }
}
717 | |
// Runs on the encoder thread during shutdown; switching to kError makes all
// subsequently-run encoder tasks early-exit.
void VaapiVideoEncodeAccelerator::DestroyTask() {
  DVLOGF(2);
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());
  SetState(kError);
}
723 | |
// Rebuilds |curr_sps_| from the current encoder configuration (profile,
// sizes, periods, bitrate). Returns false only for an unsupported profile.
bool VaapiVideoEncodeAccelerator::UpdateSPS() {
  memset(&curr_sps_, 0, sizeof(media::H264SPS));

  // Spec A.2 and A.3.
  switch (profile_) {
    case media::H264PROFILE_BASELINE:
      // Due to crbug.com/345569, we don't distinguish between constrained
      // and non-constrained baseline profiles. Since many codecs can't do
      // non-constrained, and constrained is usually what we mean (and it's a
      // subset of non-constrained), default to it.
      curr_sps_.profile_idc = media::H264SPS::kProfileIDCBaseline;
      curr_sps_.constraint_set0_flag = true;
      break;
    case media::H264PROFILE_MAIN:
      curr_sps_.profile_idc = media::H264SPS::kProfileIDCMain;
      curr_sps_.constraint_set1_flag = true;
      break;
    case media::H264PROFILE_HIGH:
      curr_sps_.profile_idc = media::H264SPS::kProfileIDCHigh;
      break;
    default:
      NOTIMPLEMENTED();
      return false;
  }

  curr_sps_.level_idc = kDefaultLevelIDC;
  curr_sps_.seq_parameter_set_id = 0;
  curr_sps_.chroma_format_idc = kChromaFormatIDC;

  // frame_num wraps at idr_period_, so max_frame_num must cover it; the
  // spec requires log2_max_frame_num >= 4, hence the check.
  DCHECK_GE(idr_period_, 1 << 4);
  curr_sps_.log2_max_frame_num_minus4 = Log2OfPowerOf2(idr_period_) - 4;
  curr_sps_.pic_order_cnt_type = 0;
  // POC advances by 2 per frame (frame units), so it needs one extra bit.
  curr_sps_.log2_max_pic_order_cnt_lsb_minus4 =
      Log2OfPowerOf2(idr_period_ * 2) - 4;
  curr_sps_.max_num_ref_frames = max_ref_idx_l0_size_;

  curr_sps_.frame_mbs_only_flag = true;

  DCHECK_GT(mb_width_, 0);
  DCHECK_GT(mb_height_, 0);
  curr_sps_.pic_width_in_mbs_minus1 = mb_width_ - 1;
  DCHECK(curr_sps_.frame_mbs_only_flag);
  curr_sps_.pic_height_in_map_units_minus1 = mb_height_ - 1;

  if (visible_size_ != coded_size_) {
    // Visible size differs from coded size, fill crop information.
    curr_sps_.frame_cropping_flag = true;
    DCHECK(!curr_sps_.separate_colour_plane_flag);
    // Spec table 6-1. Only 4:2:0 for now.
    DCHECK_EQ(curr_sps_.chroma_format_idc, 1);
    // Spec 7.4.2.1.1. Crop is in crop units, which is 2 pixels for 4:2:0.
    const unsigned int crop_unit_x = 2;
    const unsigned int crop_unit_y = 2 * (2 - curr_sps_.frame_mbs_only_flag);
    curr_sps_.frame_crop_left_offset = 0;
    curr_sps_.frame_crop_right_offset =
        (coded_size_.width() - visible_size_.width()) / crop_unit_x;
    curr_sps_.frame_crop_top_offset = 0;
    curr_sps_.frame_crop_bottom_offset =
        (coded_size_.height() - visible_size_.height()) / crop_unit_y;
  }

  curr_sps_.vui_parameters_present_flag = true;
  curr_sps_.timing_info_present_flag = true;
  curr_sps_.num_units_in_tick = 1;
  curr_sps_.time_scale = framerate_ * 2;  // See equation D-2 in spec.
  curr_sps_.fixed_frame_rate_flag = true;

  curr_sps_.nal_hrd_parameters_present_flag = true;
  // H.264 spec ch. E.2.2.
  curr_sps_.cpb_cnt_minus1 = 0;
  curr_sps_.bit_rate_scale = kBitRateScale;
  curr_sps_.cpb_size_scale = kCPBSizeScale;
  curr_sps_.bit_rate_value_minus1[0] =
      (bitrate_ >>
       (kBitRateScale + media::H264SPS::kBitRateScaleConstantTerm)) - 1;
  curr_sps_.cpb_size_value_minus1[0] =
      (cpb_size_ >>
       (kCPBSizeScale + media::H264SPS::kCPBSizeScaleConstantTerm)) - 1;
  curr_sps_.cbr_flag[0] = true;
  curr_sps_.initial_cpb_removal_delay_length_minus_1 =
      media::H264SPS::kDefaultInitialCPBRemovalDelayLength - 1;
  curr_sps_.cpb_removal_delay_length_minus1 =
      media::H264SPS::kDefaultInitialCPBRemovalDelayLength - 1;
  curr_sps_.dpb_output_delay_length_minus1 =
      media::H264SPS::kDefaultDPBOutputDelayLength - 1;
  curr_sps_.time_offset_length = media::H264SPS::kDefaultTimeOffsetLength;
  curr_sps_.low_delay_hrd_flag = false;

  return true;
}
814 | |
815 bool VaapiVideoEncodeAccelerator::GeneratePackedSPS() { | |
wuchengli
2014/06/18 03:34:13
This always return true. Remove bool return value.
Pawel Osciak
2014/06/18 07:05:08
Done.
| |
816 packed_sps_.Reset(); | |
817 | |
818 packed_sps_.BeginNALU(media::H264NALU::kSPS, 3); | |
819 | |
820 packed_sps_.AppendBits(8, curr_sps_.profile_idc); | |
821 packed_sps_.AppendBool(curr_sps_.constraint_set0_flag); | |
822 packed_sps_.AppendBool(curr_sps_.constraint_set1_flag); | |
823 packed_sps_.AppendBool(curr_sps_.constraint_set2_flag); | |
824 packed_sps_.AppendBool(curr_sps_.constraint_set3_flag); | |
825 packed_sps_.AppendBool(curr_sps_.constraint_set4_flag); | |
826 packed_sps_.AppendBool(curr_sps_.constraint_set5_flag); | |
827 packed_sps_.AppendBits(2, 0); // reserved_zero_2bits | |
828 packed_sps_.AppendBits(8, curr_sps_.level_idc); | |
829 packed_sps_.AppendUE(curr_sps_.seq_parameter_set_id); | |
830 | |
831 if (curr_sps_.profile_idc == media::H264SPS::kProfileIDCHigh) { | |
832 packed_sps_.AppendUE(curr_sps_.chroma_format_idc); | |
833 if (curr_sps_.chroma_format_idc == 3) | |
834 packed_sps_.AppendBool(curr_sps_.separate_colour_plane_flag); | |
835 packed_sps_.AppendUE(curr_sps_.bit_depth_luma_minus8); | |
836 packed_sps_.AppendUE(curr_sps_.bit_depth_chroma_minus8); | |
837 packed_sps_.AppendBool(curr_sps_.qpprime_y_zero_transform_bypass_flag); | |
838 packed_sps_.AppendBool(curr_sps_.seq_scaling_matrix_present_flag); | |
839 CHECK(!curr_sps_.seq_scaling_matrix_present_flag); | |
840 } | |
841 | |
842 packed_sps_.AppendUE(curr_sps_.log2_max_frame_num_minus4); | |
843 packed_sps_.AppendUE(curr_sps_.pic_order_cnt_type); | |
844 if (curr_sps_.pic_order_cnt_type == 0) | |
845 packed_sps_.AppendUE(curr_sps_.log2_max_pic_order_cnt_lsb_minus4); | |
846 else if (curr_sps_.pic_order_cnt_type == 1) { | |
847 CHECK(1); | |
848 } | |
849 | |
850 packed_sps_.AppendUE(curr_sps_.max_num_ref_frames); | |
851 packed_sps_.AppendBool(curr_sps_.gaps_in_frame_num_value_allowed_flag); | |
852 packed_sps_.AppendUE(curr_sps_.pic_width_in_mbs_minus1); | |
853 packed_sps_.AppendUE(curr_sps_.pic_height_in_map_units_minus1); | |
854 | |
855 packed_sps_.AppendBool(curr_sps_.frame_mbs_only_flag); | |
856 if (!curr_sps_.frame_mbs_only_flag) | |
857 packed_sps_.AppendBool(curr_sps_.mb_adaptive_frame_field_flag); | |
858 | |
859 packed_sps_.AppendBool(curr_sps_.direct_8x8_inference_flag); | |
860 | |
861 packed_sps_.AppendBool(curr_sps_.frame_cropping_flag); | |
862 if (curr_sps_.frame_cropping_flag) { | |
863 packed_sps_.AppendUE(curr_sps_.frame_crop_left_offset); | |
864 packed_sps_.AppendUE(curr_sps_.frame_crop_right_offset); | |
865 packed_sps_.AppendUE(curr_sps_.frame_crop_top_offset); | |
866 packed_sps_.AppendUE(curr_sps_.frame_crop_bottom_offset); | |
867 } | |
868 | |
869 packed_sps_.AppendBool(curr_sps_.vui_parameters_present_flag); | |
870 if (curr_sps_.vui_parameters_present_flag) { | |
871 packed_sps_.AppendBool(false); // aspect_ratio_info_present_flag | |
872 packed_sps_.AppendBool(false); // overscan_info_present_flag | |
873 packed_sps_.AppendBool(false); // video_signal_type_present_flag | |
874 packed_sps_.AppendBool(false); // chroma_loc_info_present_flag | |
875 | |
876 packed_sps_.AppendBool(curr_sps_.timing_info_present_flag); | |
877 if (curr_sps_.timing_info_present_flag) { | |
878 packed_sps_.AppendBits(32, curr_sps_.num_units_in_tick); | |
879 packed_sps_.AppendBits(32, curr_sps_.time_scale); | |
880 packed_sps_.AppendBool(curr_sps_.fixed_frame_rate_flag); | |
881 } | |
882 | |
883 packed_sps_.AppendBool(curr_sps_.nal_hrd_parameters_present_flag); | |
884 if (curr_sps_.nal_hrd_parameters_present_flag) { | |
885 packed_sps_.AppendUE(curr_sps_.cpb_cnt_minus1); | |
886 packed_sps_.AppendBits(4, curr_sps_.bit_rate_scale); | |
887 packed_sps_.AppendBits(4, curr_sps_.cpb_size_scale); | |
888 CHECK_LT(base::checked_cast<size_t>(curr_sps_.cpb_cnt_minus1), | |
889 arraysize(curr_sps_.bit_rate_value_minus1)); | |
890 for (int i = 0; i <= curr_sps_.cpb_cnt_minus1; ++i) { | |
891 packed_sps_.AppendUE(curr_sps_.bit_rate_value_minus1[i]); | |
892 packed_sps_.AppendUE(curr_sps_.cpb_size_value_minus1[i]); | |
893 packed_sps_.AppendBool(curr_sps_.cbr_flag[i]); | |
894 } | |
895 packed_sps_.AppendBits( | |
896 5, curr_sps_.initial_cpb_removal_delay_length_minus_1); | |
897 packed_sps_.AppendBits(5, curr_sps_.cpb_removal_delay_length_minus1); | |
898 packed_sps_.AppendBits(5, curr_sps_.dpb_output_delay_length_minus1); | |
899 packed_sps_.AppendBits(5, curr_sps_.time_offset_length); | |
900 } | |
901 | |
902 packed_sps_.AppendBool(false); // vcl_hrd_parameters_flag | |
903 if (curr_sps_.nal_hrd_parameters_present_flag) | |
904 packed_sps_.AppendBool(curr_sps_.low_delay_hrd_flag); | |
905 | |
906 packed_sps_.AppendBool(false); // pic_struct_present_flag | |
907 packed_sps_.AppendBool(false); // bitstream_restriction_flag | |
908 } | |
909 | |
910 packed_sps_.FinishNALU(); | |
911 return true; | |
912 } | |
913 | |
914 bool VaapiVideoEncodeAccelerator::UpdatePPS() { | |
915 memset(&curr_pps_, 0, sizeof(media::H264PPS)); | |
916 | |
917 curr_pps_.seq_parameter_set_id = curr_sps_.seq_parameter_set_id; | |
918 curr_pps_.pic_parameter_set_id = 0; | |
919 | |
920 curr_pps_.entropy_coding_mode_flag = | |
921 curr_sps_.profile_idc >= media::H264SPS::kProfileIDCMain; | |
922 | |
923 CHECK_GT(max_ref_idx_l0_size_, 0); | |
924 curr_pps_.num_ref_idx_l0_default_active_minus1 = max_ref_idx_l0_size_ - 1; | |
925 curr_pps_.num_ref_idx_l1_default_active_minus1 = 0; | |
926 DCHECK_LE(qp_, 51); | |
927 curr_pps_.pic_init_qp_minus26 = qp_ - 26; | |
928 curr_pps_.deblocking_filter_control_present_flag = true; | |
929 curr_pps_.transform_8x8_mode_flag = | |
930 (curr_sps_.profile_idc == media::H264SPS::kProfileIDCHigh); | |
931 | |
932 return true; | |
933 } | |
934 | |
935 bool VaapiVideoEncodeAccelerator::GeneratePackedPPS() { | |
936 packed_pps_.Reset(); | |
937 | |
938 packed_pps_.BeginNALU(media::H264NALU::kPPS, 3); | |
939 | |
940 packed_pps_.AppendUE(curr_pps_.pic_parameter_set_id); | |
941 packed_pps_.AppendUE(curr_pps_.seq_parameter_set_id); | |
942 packed_pps_.AppendBool(curr_pps_.entropy_coding_mode_flag); | |
943 packed_pps_.AppendBool( | |
944 curr_pps_.bottom_field_pic_order_in_frame_present_flag); | |
945 CHECK_EQ(curr_pps_.num_slice_groups_minus1, 0); | |
946 packed_pps_.AppendUE(curr_pps_.num_slice_groups_minus1); | |
947 | |
948 packed_pps_.AppendUE(curr_pps_.num_ref_idx_l0_default_active_minus1); | |
949 packed_pps_.AppendUE(curr_pps_.num_ref_idx_l1_default_active_minus1); | |
950 | |
951 packed_pps_.AppendBool(curr_pps_.weighted_pred_flag); | |
952 packed_pps_.AppendBits(2, curr_pps_.weighted_bipred_idc); | |
953 | |
954 packed_pps_.AppendSE(curr_pps_.pic_init_qp_minus26); | |
955 packed_pps_.AppendSE(curr_pps_.pic_init_qs_minus26); | |
956 packed_pps_.AppendSE(curr_pps_.chroma_qp_index_offset); | |
957 | |
958 packed_pps_.AppendBool(curr_pps_.deblocking_filter_control_present_flag); | |
959 packed_pps_.AppendBool(curr_pps_.constrained_intra_pred_flag); | |
960 packed_pps_.AppendBool(curr_pps_.redundant_pic_cnt_present_flag); | |
961 | |
962 packed_pps_.AppendBool(curr_pps_.transform_8x8_mode_flag); | |
963 packed_pps_.AppendBool(curr_pps_.pic_scaling_matrix_present_flag); | |
964 DCHECK(!curr_pps_.pic_scaling_matrix_present_flag); | |
965 packed_pps_.AppendSE(curr_pps_.second_chroma_qp_index_offset); | |
966 | |
967 packed_pps_.FinishNALU(); | |
968 | |
969 return true; | |
970 } | |
971 | |
972 void VaapiVideoEncodeAccelerator::SetState(State state) { | |
973 // Only touch state on encoder thread, unless it's not running. | |
wuchengli
2014/06/18 03:34:13
This is alloying. Any way to get ride of this? I c
Pawel Osciak
2014/06/18 07:05:07
This is a very widely used pattern in Chrome. I do
| |
974 if (encoder_thread_.IsRunning() && | |
975 !encoder_thread_proxy_->BelongsToCurrentThread()) { | |
976 encoder_thread_proxy_->PostTask( | |
977 FROM_HERE, | |
978 base::Bind(&VaapiVideoEncodeAccelerator::SetState, | |
979 base::Unretained(this), | |
980 state)); | |
981 return; | |
982 } | |
983 | |
984 DVLOGF(1) << "setting state to: " << state; | |
985 state_ = state; | |
986 } | |
987 | |
988 void VaapiVideoEncodeAccelerator::NotifyError(Error error) { | |
989 if (!child_message_loop_proxy_->BelongsToCurrentThread()) { | |
990 child_message_loop_proxy_->PostTask( | |
991 FROM_HERE, | |
992 base::Bind( | |
993 &VaapiVideoEncodeAccelerator::NotifyError, weak_this_, error)); | |
994 return; | |
995 } | |
996 | |
997 if (client_) { | |
998 client_->NotifyError(error); | |
999 client_ptr_factory_.reset(); | |
1000 } | |
1001 } | |
1002 | |
// A freshly-created EncodeJob has no VA coded buffer allocated yet
// (VA_INVALID_ID) and defaults to a non-keyframe.
VaapiVideoEncodeAccelerator::EncodeJob::EncodeJob()
    : coded_buffer(VA_INVALID_ID), keyframe(false) {
}
1006 | |
1007 } // namespace content | |
OLD | NEW |