OLD | NEW |
(Empty) | |
| 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. |
| 4 |
| 5 #include "content/common/gpu/media/vaapi_video_encode_accelerator.h" |
| 6 |
| 7 #include "base/bind.h" |
| 8 #include "base/callback.h" |
| 9 #include "base/command_line.h" |
| 10 #include "base/message_loop/message_loop_proxy.h" |
| 11 #include "base/metrics/histogram.h" |
| 12 #include "base/numerics/safe_conversions.h" |
| 13 #include "content/common/gpu/media/h264_dpb.h" |
| 14 #include "content/public/common/content_switches.h" |
| 15 #include "media/base/bind_to_current_loop.h" |
| 16 #include "third_party/libva/va/va_enc_h264.h" |
| 17 |
| 18 #define DVLOGF(level) DVLOG(level) << __FUNCTION__ << "(): " |
| 19 |
| 20 #define NOTIFY_ERROR(error, msg) \ |
| 21 do { \ |
| 22 SetState(kError); \ |
| 23 DVLOGF(1) << msg; \ |
| 24 DVLOGF(1) << "Calling NotifyError(" << error << ")"; \ |
| 25 NotifyError(error); \ |
| 26 } while (0) |
| 27 |
| 28 namespace content { |
| 29 |
namespace {
// Need 2 surfaces for each frame: one for input data and one for
// reconstructed picture, which is later used for reference.
const int kMinSurfacesToEncode = 2;

// Subjectively chosen.
const int kNumInputBuffers = 4;
const int kMaxNumReferenceFrames = 4;

// We need up to kMaxNumReferenceFrames surfaces for reference, plus one
// for input and one for encode (which will be added to the set of reference
// frames for subsequent frames). Actual execution of HW encode is done
// in parallel, and we want to process more frames in the meantime.
// To have kNumInputBuffers in flight, we need a full set of reference +
// encode surfaces (i.e. kMaxNumReferenceFrames + kMinSurfacesToEncode), and
// (kNumInputBuffers - 1) of kMinSurfacesToEncode for the remaining frames
// in flight.
const int kNumSurfaces = kMaxNumReferenceFrames + kMinSurfacesToEncode +
                         kMinSurfacesToEncode * (kNumInputBuffers - 1);

// An IDR every 128 frames, an I frame every 30 and no B frames.
const int kIDRPeriod = 128;
const int kIPeriod = 30;
const int kIPPeriod = 1;

const int kDefaultFramerate = 30;

// HRD parameters (ch. E.2.2 in spec).
const int kBitRateScale = 0;  // bit_rate_scale for SPS HRD parameters.
const int kCPBSizeScale = 0;  // cpb_size_scale for SPS HRD parameters.

// Fixed quantization parameter used for all frames.
const int kDefaultQP = 26;
// All Intel codecs can do at least 4.1.
const int kDefaultLevelIDC = 41;
const int kChromaFormatIDC = 1;  // 4:2:0

// Arbitrarily chosen bitrate window size for rate control, in ms.
const int kCPBWindowSizeMs = 1500;
}  // namespace
| 69 |
| 70 // Round |value| up to |alignment|, which must be a power of 2. |
| 71 static inline size_t RoundUpToPowerOf2(size_t value, size_t alignment) { |
| 72 // Check that |alignment| is a power of 2. |
| 73 DCHECK((alignment + (alignment - 1)) == (alignment | (alignment - 1))); |
| 74 return ((value + (alignment - 1)) & ~(alignment - 1)); |
| 75 } |
| 76 |
// Records |failure| in the Media.VAVEA.EncoderFailure UMA histogram so
// encoder failures can be tracked in the field.
static void ReportToUMA(
    VaapiVideoEncodeAccelerator::VAVEAEncoderFailure failure) {
  UMA_HISTOGRAM_ENUMERATION(
      "Media.VAVEA.EncoderFailure",
      failure,
      VaapiVideoEncodeAccelerator::VAVEA_ENCODER_FAILURES_MAX);
}
| 84 |
// A single input frame queued for encode, paired with the client's
// keyframe request for it.
struct VaapiVideoEncodeAccelerator::InputFrameRef {
  InputFrameRef(const scoped_refptr<media::VideoFrame>& frame,
                bool force_keyframe)
      : frame(frame), force_keyframe(force_keyframe) {}
  const scoped_refptr<media::VideoFrame> frame;  // Source frame to encode.
  const bool force_keyframe;  // True if the client asked for a keyframe.
};
| 92 |
// A client-provided output bitstream buffer, already mapped into this
// process via shared memory.
struct VaapiVideoEncodeAccelerator::BitstreamBufferRef {
  BitstreamBufferRef(int32 id, scoped_ptr<base::SharedMemory> shm, size_t size)
      : id(id), shm(shm.Pass()), size(size) {}
  const int32 id;  // Client-assigned id, echoed back in BitstreamBufferReady.
  const scoped_ptr<base::SharedMemory> shm;  // Mapped destination memory.
  const size_t size;  // Usable size of |shm| in bytes.
};
| 100 |
| 101 // static |
| 102 std::vector<media::VideoEncodeAccelerator::SupportedProfile> |
| 103 VaapiVideoEncodeAccelerator::GetSupportedProfiles() { |
| 104 std::vector<SupportedProfile> profiles; |
| 105 |
| 106 const CommandLine* cmd_line = CommandLine::ForCurrentProcess(); |
| 107 if (!cmd_line->HasSwitch(switches::kEnableVaapiAcceleratedVideoEncode)) |
| 108 return profiles; |
| 109 |
| 110 SupportedProfile profile; |
| 111 profile.profile = media::H264PROFILE_MAIN; |
| 112 profile.max_resolution.SetSize(1920, 1088); |
| 113 profile.max_framerate.numerator = kDefaultFramerate; |
| 114 profile.max_framerate.denominator = 1; |
| 115 profiles.push_back(profile); |
| 116 |
| 117 // This is actually only constrained (see crbug.com/345569). |
| 118 profile.profile = media::H264PROFILE_BASELINE; |
| 119 profiles.push_back(profile); |
| 120 |
| 121 profile.profile = media::H264PROFILE_HIGH; |
| 122 profiles.push_back(profile); |
| 123 |
| 124 return profiles; |
| 125 } |
| 126 |
| 127 static unsigned int Log2OfPowerOf2(unsigned int x) { |
| 128 CHECK_GT(x, 0); |
| 129 DCHECK_EQ(x & (x - 1), 0); |
| 130 |
| 131 int log = 0; |
| 132 while (x) { |
| 133 x >>= 1; |
| 134 ++log; |
| 135 } |
| 136 return log; |
| 137 } |
| 138 |
// Constructed on the child (client) thread; the encoder thread is started
// later in Initialize().
VaapiVideoEncodeAccelerator::VaapiVideoEncodeAccelerator(Display* x_display)
    : profile_(media::VIDEO_CODEC_PROFILE_UNKNOWN),
      mb_width_(0),
      mb_height_(0),
      output_buffer_byte_size_(0),
      x_display_(x_display),
      state_(kUninitialized),
      frame_num_(0),
      last_idr_frame_num_(0),
      bitrate_(0),
      framerate_(0),
      cpb_size_(0),
      encoding_parameters_changed_(false),
      encoder_thread_("VAVEAEncoderThread"),
      child_message_loop_proxy_(base::MessageLoopProxy::current()),
      weak_this_ptr_factory_(this) {
  DVLOGF(4);
  weak_this_ = weak_this_ptr_factory_.GetWeakPtr();

  // Fixed encoding parameters; see the constants at the top of this file.
  max_ref_idx_l0_size_ = kMaxNumReferenceFrames;
  qp_ = kDefaultQP;
  idr_period_ = kIDRPeriod;
  i_period_ = kIPeriod;
  ip_period_ = kIPPeriod;
}
| 164 |
VaapiVideoEncodeAccelerator::~VaapiVideoEncodeAccelerator() {
  DVLOGF(4);
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  // Destroy() must have joined the encoder thread before |this| is deleted.
  DCHECK(!encoder_thread_.IsRunning());
}
| 170 |
// Validates the requested configuration, sets up VAAPI and the encoder
// thread, and posts the remaining initialization to that thread. Returns
// false on any synchronous failure; asynchronous failures are reported via
// NotifyError().
bool VaapiVideoEncodeAccelerator::Initialize(
    media::VideoFrame::Format format,
    const gfx::Size& input_visible_size,
    media::VideoCodecProfile output_profile,
    uint32 initial_bitrate,
    Client* client) {
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  DCHECK(!encoder_thread_.IsRunning());
  DCHECK_EQ(state_, kUninitialized);

  DVLOGF(1) << "Initializing VAVEA, input_format: "
            << media::VideoFrame::FormatToString(format)
            << ", input_visible_size: " << input_visible_size.ToString()
            << ", output_profile: " << output_profile
            << ", initial_bitrate: " << initial_bitrate;

  client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client));
  client_ = client_ptr_factory_->GetWeakPtr();

  // NOTE(review): only baseline..main are accepted here, although
  // GetSupportedProfiles() also advertises high — confirm this is intended.
  if (output_profile < media::H264PROFILE_BASELINE ||
      output_profile > media::H264PROFILE_MAIN) {
    DVLOGF(1) << "Unsupported output profile: " << output_profile;
    return false;
  }

  if (format != media::VideoFrame::I420) {
    DVLOGF(1) << "Unsupported input format: "
              << media::VideoFrame::FormatToString(format);
    return false;
  }

  profile_ = output_profile;
  visible_size_ = input_visible_size;
  // 4:2:0 format has to be 2-aligned.
  DCHECK_EQ(visible_size_.width() % 2, 0);
  DCHECK_EQ(visible_size_.height() % 2, 0);
  // Coded size is the visible size rounded up to whole 16x16 macroblocks.
  coded_size_ = gfx::Size(RoundUpToPowerOf2(visible_size_.width(), 16),
                          RoundUpToPowerOf2(visible_size_.height(), 16));
  mb_width_ = coded_size_.width() / 16;
  mb_height_ = coded_size_.height() / 16;
  output_buffer_byte_size_ = coded_size_.GetArea();

  UpdateRates(initial_bitrate, kDefaultFramerate);

  vaapi_wrapper_ = VaapiWrapper::Create(VaapiWrapper::kEncode,
                                        output_profile,
                                        x_display_,
                                        base::Bind(&ReportToUMA, VAAPI_ERROR));
  if (!vaapi_wrapper_) {
    DVLOGF(1) << "Failed initializing VAAPI";
    return false;
  }

  if (!encoder_thread_.Start()) {
    DVLOGF(1) << "Failed to start encoder thread";
    return false;
  }
  encoder_thread_proxy_ = encoder_thread_.message_loop_proxy();

  // Finish the remaining initialization on the encoder thread.
  encoder_thread_proxy_->PostTask(
      FROM_HERE,
      base::Bind(&VaapiVideoEncodeAccelerator::InitializeTask,
                 base::Unretained(this)));

  return true;
}
| 238 |
// Second half of Initialize(), run on the encoder thread: allocates VA
// surfaces, builds the initial SPS/PPS, asks the client for output buffers
// and transitions to kEncoding.
void VaapiVideoEncodeAccelerator::InitializeTask() {
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());
  DCHECK_EQ(state_, kUninitialized);
  DVLOGF(4);

  // Surfaces released anywhere are recycled back on the encoder thread.
  va_surface_release_cb_ = media::BindToCurrentLoop(
      base::Bind(&VaapiVideoEncodeAccelerator::RecycleVASurfaceID,
                 base::Unretained(this)));

  if (!vaapi_wrapper_->CreateSurfaces(
          coded_size_, kNumSurfaces, &available_va_surface_ids_)) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed creating VASurfaces");
    return;
  }

  UpdateSPS();
  GeneratePackedSPS();

  UpdatePPS();
  GeneratePackedPPS();

  child_message_loop_proxy_->PostTask(
      FROM_HERE,
      base::Bind(&Client::RequireBitstreamBuffers,
                 client_,
                 kNumInputBuffers,
                 coded_size_,
                 output_buffer_byte_size_));

  SetState(kEncoding);
}
| 270 |
// Returns |va_surface_id| to the free pool and kicks the encoder, since a
// pending frame may have been waiting for a surface.
void VaapiVideoEncodeAccelerator::RecycleVASurfaceID(
    VASurfaceID va_surface_id) {
  DVLOGF(4) << "va_surface_id: " << va_surface_id;
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());

  available_va_surface_ids_.push_back(va_surface_id);
  EncodeFrameTask();
}
| 279 |
// Sets up |current_pic_| for the next frame: assigns frame_num, chooses the
// slice type (I/P, IDR every |idr_period_| frames), and derives POC values.
void VaapiVideoEncodeAccelerator::BeginFrame(bool force_keyframe) {
  memset(&current_pic_, 0, sizeof(current_pic_));

  // frame_num_ wraps at idr_period_, so frame_num 0 always coincides with
  // an IDR below.
  current_pic_.frame_num = frame_num_++;
  frame_num_ %= idr_period_;

  if (current_pic_.frame_num % i_period_ == 0 || force_keyframe)
    current_pic_.type = media::H264SliceHeader::kISlice;
  else
    current_pic_.type = media::H264SliceHeader::kPSlice;

  if (current_pic_.frame_num % idr_period_ == 0) {
    current_pic_.idr = true;
    last_idr_frame_num_ = current_pic_.frame_num;
    // An IDR invalidates all previous references.
    ref_pic_list0_.clear();
  }

  // With no B frames (kIPPeriod == 1) every frame is a reference.
  if (current_pic_.type != media::H264SliceHeader::kBSlice)
    current_pic_.ref = true;

  // POC type 0 with no field pictures: POC is simply 2 * frame_num.
  current_pic_.pic_order_cnt = current_pic_.frame_num * 2;
  current_pic_.top_field_order_cnt = current_pic_.pic_order_cnt;
  current_pic_.pic_order_cnt_lsb = current_pic_.pic_order_cnt;

  current_encode_job_->keyframe =
      (current_pic_.type == media::H264SliceHeader::kISlice);

  DVLOGF(4) << "Starting a new frame, type: " << current_pic_.type
            << (force_keyframe ? " (forced keyframe)" : "")
            << " frame_num: " << current_pic_.frame_num
            << " POC: " << current_pic_.pic_order_cnt;
}
| 312 |
// Finalizes the current job: updates the reference list with the newly
// reconstructed picture and queues the job for bitstream retrieval.
void VaapiVideoEncodeAccelerator::EndFrame() {
  // Store the picture on the list of reference pictures and keep the list
  // below maximum size, dropping oldest references.
  if (current_pic_.ref)
    ref_pic_list0_.push_front(current_encode_job_->recon_surface);
  size_t max_num_ref_frames =
      base::checked_cast<size_t>(current_sps_.max_num_ref_frames);
  while (ref_pic_list0_.size() > max_num_ref_frames)
    ref_pic_list0_.pop_back();

  submitted_encode_jobs_.push(make_linked_ptr(current_encode_job_.release()));
}
| 325 |
| 326 static void InitVAPicture(VAPictureH264* va_pic) { |
| 327 memset(va_pic, 0, sizeof(*va_pic)); |
| 328 va_pic->picture_id = VA_INVALID_ID; |
| 329 va_pic->flags = VA_PICTURE_H264_INVALID; |
| 330 } |
| 331 |
// Fills and submits all per-frame parameter buffers to libva: sequence,
// picture and slice parameters, plus the rate control, framerate and HRD
// misc parameters. Returns false if any submission fails.
bool VaapiVideoEncodeAccelerator::SubmitFrameParameters() {
  VAEncSequenceParameterBufferH264 seq_param;
  memset(&seq_param, 0, sizeof(seq_param));

// Copies the identically-named field from |current_sps_| into |seq_param|.
#define SPS_TO_SP(a) seq_param.a = current_sps_.a;
  SPS_TO_SP(seq_parameter_set_id);
  SPS_TO_SP(level_idc);

  seq_param.intra_period = i_period_;
  seq_param.intra_idr_period = idr_period_;
  seq_param.ip_period = ip_period_;
  seq_param.bits_per_second = bitrate_;

  SPS_TO_SP(max_num_ref_frames);
  seq_param.picture_width_in_mbs = mb_width_;
  seq_param.picture_height_in_mbs = mb_height_;

// Same, for fields living in seq_param.seq_fields.bits.
#define SPS_TO_SP_FS(a) seq_param.seq_fields.bits.a = current_sps_.a;
  SPS_TO_SP_FS(chroma_format_idc);
  SPS_TO_SP_FS(frame_mbs_only_flag);
  SPS_TO_SP_FS(log2_max_frame_num_minus4);
  SPS_TO_SP_FS(pic_order_cnt_type);
  SPS_TO_SP_FS(log2_max_pic_order_cnt_lsb_minus4);
#undef SPS_TO_SP_FS

  SPS_TO_SP(bit_depth_luma_minus8);
  SPS_TO_SP(bit_depth_chroma_minus8);

  SPS_TO_SP(frame_cropping_flag);
  if (current_sps_.frame_cropping_flag) {
    SPS_TO_SP(frame_crop_left_offset);
    SPS_TO_SP(frame_crop_right_offset);
    SPS_TO_SP(frame_crop_top_offset);
    SPS_TO_SP(frame_crop_bottom_offset);
  }

  SPS_TO_SP(vui_parameters_present_flag);
// Same, for fields living in seq_param.vui_fields.bits.
#define SPS_TO_SP_VF(a) seq_param.vui_fields.bits.a = current_sps_.a;
  SPS_TO_SP_VF(timing_info_present_flag);
#undef SPS_TO_SP_VF
  SPS_TO_SP(num_units_in_tick);
  SPS_TO_SP(time_scale);
#undef SPS_TO_SP

  if (!vaapi_wrapper_->SubmitBuffer(VAEncSequenceParameterBufferType,
                                    sizeof(seq_param),
                                    &seq_param))
    return false;

  VAEncPictureParameterBufferH264 pic_param;
  memset(&pic_param, 0, sizeof(pic_param));

  pic_param.CurrPic.picture_id = current_encode_job_->recon_surface->id();
  pic_param.CurrPic.TopFieldOrderCnt = current_pic_.top_field_order_cnt;
  pic_param.CurrPic.BottomFieldOrderCnt = current_pic_.bottom_field_order_cnt;
  pic_param.CurrPic.flags = 0;

  // Mark all reference slots invalid, then fill in the active ones below.
  for (size_t i = 0; i < arraysize(pic_param.ReferenceFrames); ++i)
    InitVAPicture(&pic_param.ReferenceFrames[i]);

  DCHECK_LE(ref_pic_list0_.size(), arraysize(pic_param.ReferenceFrames));
  RefPicList::const_iterator iter = ref_pic_list0_.begin();
  for (size_t i = 0;
       i < arraysize(pic_param.ReferenceFrames) && iter != ref_pic_list0_.end();
       ++iter, ++i) {
    pic_param.ReferenceFrames[i].picture_id = (*iter)->id();
    pic_param.ReferenceFrames[i].flags = 0;
  }

  pic_param.coded_buf = current_encode_job_->coded_buffer;
  pic_param.pic_parameter_set_id = current_pps_.pic_parameter_set_id;
  pic_param.seq_parameter_set_id = current_pps_.seq_parameter_set_id;
  pic_param.frame_num = current_pic_.frame_num;
  pic_param.pic_init_qp = qp_;
  pic_param.num_ref_idx_l0_active_minus1 = max_ref_idx_l0_size_ - 1;
  pic_param.pic_fields.bits.idr_pic_flag = current_pic_.idr;
  pic_param.pic_fields.bits.reference_pic_flag = current_pic_.ref;
// Copies the identically-named PPS field into pic_param.pic_fields.bits.
#define PPS_TO_PP_PF(a) pic_param.pic_fields.bits.a = current_pps_.a;
  PPS_TO_PP_PF(entropy_coding_mode_flag);
  PPS_TO_PP_PF(transform_8x8_mode_flag);
  PPS_TO_PP_PF(deblocking_filter_control_present_flag);
#undef PPS_TO_PP_PF

  if (!vaapi_wrapper_->SubmitBuffer(VAEncPictureParameterBufferType,
                                    sizeof(pic_param),
                                    &pic_param))
    return false;

  VAEncSliceParameterBufferH264 slice_param;
  memset(&slice_param, 0, sizeof(slice_param));

  // One slice per frame.
  slice_param.num_macroblocks = mb_width_ * mb_height_;
  slice_param.macroblock_info = VA_INVALID_ID;
  slice_param.slice_type = current_pic_.type;
  slice_param.pic_parameter_set_id = current_pps_.pic_parameter_set_id;
  slice_param.idr_pic_id = last_idr_frame_num_;
  slice_param.pic_order_cnt_lsb = current_pic_.pic_order_cnt_lsb;
  slice_param.num_ref_idx_active_override_flag = true;

  for (size_t i = 0; i < arraysize(slice_param.RefPicList0); ++i)
    InitVAPicture(&slice_param.RefPicList0[i]);

  for (size_t i = 0; i < arraysize(slice_param.RefPicList1); ++i)
    InitVAPicture(&slice_param.RefPicList1[i]);

  DCHECK_LE(ref_pic_list0_.size(), arraysize(slice_param.RefPicList0));
  iter = ref_pic_list0_.begin();
  for (size_t i = 0;
       i < arraysize(slice_param.RefPicList0) && iter != ref_pic_list0_.end();
       ++iter, ++i) {
    InitVAPicture(&slice_param.RefPicList0[i]);
    slice_param.RefPicList0[i].picture_id = (*iter)->id();
    slice_param.RefPicList0[i].flags = 0;
  }

  if (!vaapi_wrapper_->SubmitBuffer(VAEncSliceParameterBufferType,
                                    sizeof(slice_param),
                                    &slice_param))
    return false;

  VAEncMiscParameterRateControl rate_control_param;
  memset(&rate_control_param, 0, sizeof(rate_control_param));
  rate_control_param.bits_per_second = bitrate_;
  rate_control_param.target_percentage = 90;
  rate_control_param.window_size = kCPBWindowSizeMs;
  rate_control_param.initial_qp = qp_;
  rate_control_param.rc_flags.bits.disable_frame_skip = true;

  if (!vaapi_wrapper_->SubmitVAEncMiscParamBuffer(
          VAEncMiscParameterTypeRateControl,
          sizeof(rate_control_param),
          &rate_control_param))
    return false;

  VAEncMiscParameterFrameRate framerate_param;
  memset(&framerate_param, 0, sizeof(framerate_param));
  framerate_param.framerate = framerate_;
  if (!vaapi_wrapper_->SubmitVAEncMiscParamBuffer(
          VAEncMiscParameterTypeFrameRate,
          sizeof(framerate_param),
          &framerate_param))
    return false;

  VAEncMiscParameterHRD hrd_param;
  memset(&hrd_param, 0, sizeof(hrd_param));
  hrd_param.buffer_size = cpb_size_;
  hrd_param.initial_buffer_fullness = cpb_size_ / 2;
  if (!vaapi_wrapper_->SubmitVAEncMiscParamBuffer(VAEncMiscParameterTypeHRD,
                                                  sizeof(hrd_param),
                                                  &hrd_param))
    return false;

  return true;
}
| 486 |
// Submits the packed SPS and PPS headers to be emitted in the bitstream
// ahead of the current frame. Only I-frames carry headers; other frame
// types return early with success.
bool VaapiVideoEncodeAccelerator::SubmitHeadersIfNeeded() {
  if (current_pic_.type != media::H264SliceHeader::kISlice)
    return true;

  // Submit SPS.
  VAEncPackedHeaderParameterBuffer par_buffer;
  memset(&par_buffer, 0, sizeof(par_buffer));
  par_buffer.type = VAEncPackedHeaderSequence;
  par_buffer.bit_length = packed_sps_.BytesInBuffer() * 8;

  if (!vaapi_wrapper_->SubmitBuffer(VAEncPackedHeaderParameterBufferType,
                                    sizeof(par_buffer),
                                    &par_buffer))
    return false;

  if (!vaapi_wrapper_->SubmitBuffer(VAEncPackedHeaderDataBufferType,
                                    packed_sps_.BytesInBuffer(),
                                    packed_sps_.data()))
    return false;

  // Submit PPS.
  memset(&par_buffer, 0, sizeof(par_buffer));
  par_buffer.type = VAEncPackedHeaderPicture;
  par_buffer.bit_length = packed_pps_.BytesInBuffer() * 8;

  if (!vaapi_wrapper_->SubmitBuffer(VAEncPackedHeaderParameterBufferType,
                                    sizeof(par_buffer),
                                    &par_buffer))
    return false;

  if (!vaapi_wrapper_->SubmitBuffer(VAEncPackedHeaderDataBufferType,
                                    packed_pps_.BytesInBuffer(),
                                    packed_pps_.data()))
    return false;

  return true;
}
| 524 |
// Kicks off the HW encode of all buffers submitted for the current frame.
bool VaapiVideoEncodeAccelerator::ExecuteEncode() {
  DVLOGF(3) << "Encoding frame_num: " << current_pic_.frame_num;
  return vaapi_wrapper_->ExecuteAndDestroyPendingBuffers(
      current_encode_job_->input_surface->id());
}
| 530 |
// Copies |frame|'s pixel data into the current job's input VA surface.
bool VaapiVideoEncodeAccelerator::UploadFrame(
    const scoped_refptr<media::VideoFrame>& frame) {
  return vaapi_wrapper_->UploadVideoFrameToSurface(
      frame, current_encode_job_->input_surface->id());
}
| 536 |
// If both a finished encode job and a free client buffer are available,
// downloads the coded data into the buffer (blocking on the HW encode if it
// has not finished) and notifies the client.
void VaapiVideoEncodeAccelerator::TryToReturnBitstreamBuffer() {
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());

  if (state_ != kEncoding)
    return;

  if (submitted_encode_jobs_.empty() || available_bitstream_buffers_.empty())
    return;

  linked_ptr<BitstreamBufferRef> buffer = available_bitstream_buffers_.front();
  available_bitstream_buffers_.pop();

  uint8* target_data = reinterpret_cast<uint8*>(buffer->shm->memory());

  linked_ptr<EncodeJob> encode_job = submitted_encode_jobs_.front();
  submitted_encode_jobs_.pop();

  size_t data_size = 0;
  if (!vaapi_wrapper_->DownloadAndDestroyCodedBuffer(
          encode_job->coded_buffer,
          encode_job->input_surface->id(),
          target_data,
          buffer->size,
          &data_size)) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed downloading coded buffer");
    return;
  }

  DVLOGF(3) << "Returning bitstream buffer "
            << (encode_job->keyframe ? "(keyframe)" : "")
            << " id: " << buffer->id << " size: " << data_size;

  child_message_loop_proxy_->PostTask(FROM_HERE,
                                      base::Bind(&Client::BitstreamBufferReady,
                                                 client_,
                                                 buffer->id,
                                                 data_size,
                                                 encode_job->keyframe));
}
| 576 |
// Client entry point (child thread): hands |frame| off to the encoder
// thread. |this| outlives the encoder thread, so Unretained is safe.
void VaapiVideoEncodeAccelerator::Encode(
    const scoped_refptr<media::VideoFrame>& frame,
    bool force_keyframe) {
  DVLOGF(3) << "Frame timestamp: " << frame->timestamp().InMilliseconds()
            << " force_keyframe: " << force_keyframe;
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());

  encoder_thread_proxy_->PostTask(
      FROM_HERE,
      base::Bind(&VaapiVideoEncodeAccelerator::EncodeTask,
                 base::Unretained(this),
                 frame,
                 force_keyframe));
}
| 591 |
// Allocates the resources for one encode job (coded buffer, input and
// reconstructed surfaces). Returns false — without error — when not enough
// free surfaces are available yet; the caller retries when one is recycled.
bool VaapiVideoEncodeAccelerator::PrepareNextJob() {
  if (available_va_surface_ids_.size() < kMinSurfacesToEncode)
    return false;

  DCHECK(!current_encode_job_);
  current_encode_job_.reset(new EncodeJob());

  if (!vaapi_wrapper_->CreateCodedBuffer(output_buffer_byte_size_,
                                         &current_encode_job_->coded_buffer)) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed creating coded buffer");
    return false;
  }

  current_encode_job_->input_surface =
      new VASurface(available_va_surface_ids_.back(), va_surface_release_cb_);
  available_va_surface_ids_.pop_back();

  current_encode_job_->recon_surface =
      new VASurface(available_va_surface_ids_.back(), va_surface_release_cb_);
  available_va_surface_ids_.pop_back();

  // Reference surfaces are needed until the job is done, but they get
  // removed from ref_pic_list0_ when it's full at the end of job submission.
  // Keep refs to them along with the job and only release after sync.
  current_encode_job_->reference_surfaces = ref_pic_list0_;

  return true;
}
| 620 |
// Encoder-thread side of Encode(): queues the frame and attempts to start
// encoding immediately.
void VaapiVideoEncodeAccelerator::EncodeTask(
    const scoped_refptr<media::VideoFrame>& frame,
    bool force_keyframe) {
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());
  DCHECK_NE(state_, kUninitialized);

  encoder_input_queue_.push(
      make_linked_ptr(new InputFrameRef(frame, force_keyframe)));
  EncodeFrameTask();
}
| 631 |
// Encodes the frame at the head of the input queue, if resources allow:
// uploads it to HW, submits all parameters/headers, kicks the encode and
// tries to return any finished output. No-op if idle or not ready.
void VaapiVideoEncodeAccelerator::EncodeFrameTask() {
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());

  if (state_ != kEncoding || encoder_input_queue_.empty())
    return;

  if (!PrepareNextJob()) {
    DVLOGF(4) << "Not ready for next frame yet";
    return;
  }

  linked_ptr<InputFrameRef> frame_ref = encoder_input_queue_.front();
  encoder_input_queue_.pop();

  if (!UploadFrame(frame_ref->frame)) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed uploading source frame to HW.");
    return;
  }

  // A pending parameter change forces a keyframe so new SPS/PPS take effect.
  BeginFrame(frame_ref->force_keyframe || encoding_parameters_changed_);
  encoding_parameters_changed_ = false;

  if (!SubmitFrameParameters()) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed submitting frame parameters.");
    return;
  }

  if (!SubmitHeadersIfNeeded()) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed submitting frame headers.");
    return;
  }

  if (!ExecuteEncode()) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed submitting encode job to HW.");
    return;
  }

  EndFrame();
  TryToReturnBitstreamBuffer();
}
| 672 |
// Client entry point (child thread): validates and maps the client's output
// buffer, then hands it to the encoder thread.
void VaapiVideoEncodeAccelerator::UseOutputBitstreamBuffer(
    const media::BitstreamBuffer& buffer) {
  DVLOGF(4) << "id: " << buffer.id();
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());

  if (buffer.size() < output_buffer_byte_size_) {
    NOTIFY_ERROR(kInvalidArgumentError, "Provided bitstream buffer too small");
    return;
  }

  scoped_ptr<base::SharedMemory> shm(
      new base::SharedMemory(buffer.handle(), false));
  if (!shm->Map(buffer.size())) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed mapping shared memory.");
    return;
  }

  scoped_ptr<BitstreamBufferRef> buffer_ref(
      new BitstreamBufferRef(buffer.id(), shm.Pass(), buffer.size()));

  encoder_thread_proxy_->PostTask(
      FROM_HERE,
      base::Bind(&VaapiVideoEncodeAccelerator::UseOutputBitstreamBufferTask,
                 base::Unretained(this),
                 base::Passed(&buffer_ref)));
}
| 699 |
// Encoder-thread side: queues the mapped buffer and tries to return any
// already-finished output in it.
void VaapiVideoEncodeAccelerator::UseOutputBitstreamBufferTask(
    scoped_ptr<BitstreamBufferRef> buffer_ref) {
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());
  DCHECK_NE(state_, kUninitialized);

  available_bitstream_buffers_.push(make_linked_ptr(buffer_ref.release()));
  TryToReturnBitstreamBuffer();
}
| 708 |
// Client entry point (child thread): forwards the new bitrate/framerate to
// the encoder thread.
void VaapiVideoEncodeAccelerator::RequestEncodingParametersChange(
    uint32 bitrate,
    uint32 framerate) {
  DVLOGF(2) << "bitrate: " << bitrate << " framerate: " << framerate;
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());

  encoder_thread_proxy_->PostTask(
      FROM_HERE,
      base::Bind(
          &VaapiVideoEncodeAccelerator::RequestEncodingParametersChangeTask,
          base::Unretained(this),
          bitrate,
          framerate));
}
| 723 |
// Stores the new rates and derives the CPB size from the bitrate over the
// rate control window. Called once during Initialize() (before the encoder
// thread exists) and afterwards only on the encoder thread.
void VaapiVideoEncodeAccelerator::UpdateRates(uint32 bitrate,
                                              uint32 framerate) {
  if (encoder_thread_.IsRunning())
    DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());
  DCHECK_NE(bitrate, 0);
  DCHECK_NE(framerate, 0);
  bitrate_ = bitrate;
  framerate_ = framerate;
  cpb_size_ = bitrate_ * kCPBWindowSizeMs / 1000;
}
| 734 |
// Encoder-thread side: applies the new rates and regenerates the SPS; the
// updated parameters are submitted with the next (forced-keyframe) frame.
void VaapiVideoEncodeAccelerator::RequestEncodingParametersChangeTask(
    uint32 bitrate,
    uint32 framerate) {
  DVLOGF(2) << "bitrate: " << bitrate << " framerate: " << framerate;
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());
  DCHECK_NE(state_, kUninitialized);

  UpdateRates(bitrate, framerate);

  UpdateSPS();
  GeneratePackedSPS();

  // Submit new parameters along with next frame that will be processed.
  encoding_parameters_changed_ = true;
}
| 750 |
// Tears down the encoder. After this returns the client must not be called
// again; the object deletes itself once the encoder thread has joined.
void VaapiVideoEncodeAccelerator::Destroy() {
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());

  // Can't call client anymore after Destroy() returns.
  client_ptr_factory_.reset();
  weak_this_ptr_factory_.InvalidateWeakPtrs();

  // Early-exit encoder tasks if they are running and join the thread.
  if (encoder_thread_.IsRunning()) {
    encoder_thread_.message_loop()->PostTask(
        FROM_HERE,
        base::Bind(&VaapiVideoEncodeAccelerator::DestroyTask,
                   base::Unretained(this)));
    encoder_thread_.Stop();
  }

  delete this;
}
| 769 |
// Runs on the encoder thread during shutdown; flipping to kError makes any
// remaining queued tasks bail out early.
void VaapiVideoEncodeAccelerator::DestroyTask() {
  DVLOGF(2);
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());
  SetState(kError);
}
| 775 |
// Rebuilds |current_sps_| from the current profile, size, bitrate and
// framerate. Section references are to the H.264/AVC specification.
void VaapiVideoEncodeAccelerator::UpdateSPS() {
  memset(&current_sps_, 0, sizeof(media::H264SPS));

  // Spec A.2 and A.3.
  switch (profile_) {
    case media::H264PROFILE_BASELINE:
      // Due to crbug.com/345569, we don't distinguish between constrained
      // and non-constrained baseline profiles. Since many codecs can't do
      // non-constrained, and constrained is usually what we mean (and it's a
      // subset of non-constrained), default to it.
      current_sps_.profile_idc = media::H264SPS::kProfileIDCBaseline;
      current_sps_.constraint_set0_flag = true;
      break;
    case media::H264PROFILE_MAIN:
      current_sps_.profile_idc = media::H264SPS::kProfileIDCMain;
      current_sps_.constraint_set1_flag = true;
      break;
    case media::H264PROFILE_HIGH:
      current_sps_.profile_idc = media::H264SPS::kProfileIDCHigh;
      break;
    default:
      NOTIMPLEMENTED();
      return;
  }

  current_sps_.level_idc = kDefaultLevelIDC;
  current_sps_.seq_parameter_set_id = 0;
  current_sps_.chroma_format_idc = kChromaFormatIDC;

  // frame_num must be able to count a full IDR period without wrapping.
  DCHECK_GE(idr_period_, 1 << 4);
  current_sps_.log2_max_frame_num_minus4 = Log2OfPowerOf2(idr_period_) - 4;
  current_sps_.pic_order_cnt_type = 0;
  current_sps_.log2_max_pic_order_cnt_lsb_minus4 =
      Log2OfPowerOf2(idr_period_ * 2) - 4;
  current_sps_.max_num_ref_frames = max_ref_idx_l0_size_;

  current_sps_.frame_mbs_only_flag = true;

  DCHECK_GT(mb_width_, 0);
  DCHECK_GT(mb_height_, 0);
  current_sps_.pic_width_in_mbs_minus1 = mb_width_ - 1;
  DCHECK(current_sps_.frame_mbs_only_flag);
  current_sps_.pic_height_in_map_units_minus1 = mb_height_ - 1;

  if (visible_size_ != coded_size_) {
    // Visible size differs from coded size, fill crop information.
    current_sps_.frame_cropping_flag = true;
    DCHECK(!current_sps_.separate_colour_plane_flag);
    // Spec table 6-1. Only 4:2:0 for now.
    DCHECK_EQ(current_sps_.chroma_format_idc, 1);
    // Spec 7.4.2.1.1. Crop is in crop units, which is 2 pixels for 4:2:0.
    const unsigned int crop_unit_x = 2;
    const unsigned int crop_unit_y = 2 * (2 - current_sps_.frame_mbs_only_flag);
    current_sps_.frame_crop_left_offset = 0;
    current_sps_.frame_crop_right_offset =
        (coded_size_.width() - visible_size_.width()) / crop_unit_x;
    current_sps_.frame_crop_top_offset = 0;
    current_sps_.frame_crop_bottom_offset =
        (coded_size_.height() - visible_size_.height()) / crop_unit_y;
  }

  current_sps_.vui_parameters_present_flag = true;
  current_sps_.timing_info_present_flag = true;
  current_sps_.num_units_in_tick = 1;
  current_sps_.time_scale = framerate_ * 2;  // See equation D-2 in spec.
  current_sps_.fixed_frame_rate_flag = true;

  current_sps_.nal_hrd_parameters_present_flag = true;
  // H.264 spec ch. E.2.2.
  current_sps_.cpb_cnt_minus1 = 0;
  current_sps_.bit_rate_scale = kBitRateScale;
  current_sps_.cpb_size_scale = kCPBSizeScale;
  current_sps_.bit_rate_value_minus1[0] =
      (bitrate_ >>
       (kBitRateScale + media::H264SPS::kBitRateScaleConstantTerm)) - 1;
  current_sps_.cpb_size_value_minus1[0] =
      (cpb_size_ >>
       (kCPBSizeScale + media::H264SPS::kCPBSizeScaleConstantTerm)) - 1;
  current_sps_.cbr_flag[0] = true;
  current_sps_.initial_cpb_removal_delay_length_minus_1 =
      media::H264SPS::kDefaultInitialCPBRemovalDelayLength - 1;
  current_sps_.cpb_removal_delay_length_minus1 =
      media::H264SPS::kDefaultInitialCPBRemovalDelayLength - 1;
  current_sps_.dpb_output_delay_length_minus1 =
      media::H264SPS::kDefaultDPBOutputDelayLength - 1;
  current_sps_.time_offset_length = media::H264SPS::kDefaultTimeOffsetLength;
  current_sps_.low_delay_hrd_flag = false;
}
| 864 |
| 865 void VaapiVideoEncodeAccelerator::GeneratePackedSPS() { |
| 866 packed_sps_.Reset(); |
| 867 |
| 868 packed_sps_.BeginNALU(media::H264NALU::kSPS, 3); |
| 869 |
| 870 packed_sps_.AppendBits(8, current_sps_.profile_idc); |
| 871 packed_sps_.AppendBool(current_sps_.constraint_set0_flag); |
| 872 packed_sps_.AppendBool(current_sps_.constraint_set1_flag); |
| 873 packed_sps_.AppendBool(current_sps_.constraint_set2_flag); |
| 874 packed_sps_.AppendBool(current_sps_.constraint_set3_flag); |
| 875 packed_sps_.AppendBool(current_sps_.constraint_set4_flag); |
| 876 packed_sps_.AppendBool(current_sps_.constraint_set5_flag); |
| 877 packed_sps_.AppendBits(2, 0); // reserved_zero_2bits |
| 878 packed_sps_.AppendBits(8, current_sps_.level_idc); |
| 879 packed_sps_.AppendUE(current_sps_.seq_parameter_set_id); |
| 880 |
| 881 if (current_sps_.profile_idc == media::H264SPS::kProfileIDCHigh) { |
| 882 packed_sps_.AppendUE(current_sps_.chroma_format_idc); |
| 883 if (current_sps_.chroma_format_idc == 3) |
| 884 packed_sps_.AppendBool(current_sps_.separate_colour_plane_flag); |
| 885 packed_sps_.AppendUE(current_sps_.bit_depth_luma_minus8); |
| 886 packed_sps_.AppendUE(current_sps_.bit_depth_chroma_minus8); |
| 887 packed_sps_.AppendBool(current_sps_.qpprime_y_zero_transform_bypass_flag); |
| 888 packed_sps_.AppendBool(current_sps_.seq_scaling_matrix_present_flag); |
| 889 CHECK(!current_sps_.seq_scaling_matrix_present_flag); |
| 890 } |
| 891 |
| 892 packed_sps_.AppendUE(current_sps_.log2_max_frame_num_minus4); |
| 893 packed_sps_.AppendUE(current_sps_.pic_order_cnt_type); |
| 894 if (current_sps_.pic_order_cnt_type == 0) |
| 895 packed_sps_.AppendUE(current_sps_.log2_max_pic_order_cnt_lsb_minus4); |
| 896 else if (current_sps_.pic_order_cnt_type == 1) { |
| 897 CHECK(1); |
| 898 } |
| 899 |
| 900 packed_sps_.AppendUE(current_sps_.max_num_ref_frames); |
| 901 packed_sps_.AppendBool(current_sps_.gaps_in_frame_num_value_allowed_flag); |
| 902 packed_sps_.AppendUE(current_sps_.pic_width_in_mbs_minus1); |
| 903 packed_sps_.AppendUE(current_sps_.pic_height_in_map_units_minus1); |
| 904 |
| 905 packed_sps_.AppendBool(current_sps_.frame_mbs_only_flag); |
| 906 if (!current_sps_.frame_mbs_only_flag) |
| 907 packed_sps_.AppendBool(current_sps_.mb_adaptive_frame_field_flag); |
| 908 |
| 909 packed_sps_.AppendBool(current_sps_.direct_8x8_inference_flag); |
| 910 |
| 911 packed_sps_.AppendBool(current_sps_.frame_cropping_flag); |
| 912 if (current_sps_.frame_cropping_flag) { |
| 913 packed_sps_.AppendUE(current_sps_.frame_crop_left_offset); |
| 914 packed_sps_.AppendUE(current_sps_.frame_crop_right_offset); |
| 915 packed_sps_.AppendUE(current_sps_.frame_crop_top_offset); |
| 916 packed_sps_.AppendUE(current_sps_.frame_crop_bottom_offset); |
| 917 } |
| 918 |
| 919 packed_sps_.AppendBool(current_sps_.vui_parameters_present_flag); |
| 920 if (current_sps_.vui_parameters_present_flag) { |
| 921 packed_sps_.AppendBool(false); // aspect_ratio_info_present_flag |
| 922 packed_sps_.AppendBool(false); // overscan_info_present_flag |
| 923 packed_sps_.AppendBool(false); // video_signal_type_present_flag |
| 924 packed_sps_.AppendBool(false); // chroma_loc_info_present_flag |
| 925 |
| 926 packed_sps_.AppendBool(current_sps_.timing_info_present_flag); |
| 927 if (current_sps_.timing_info_present_flag) { |
| 928 packed_sps_.AppendBits(32, current_sps_.num_units_in_tick); |
| 929 packed_sps_.AppendBits(32, current_sps_.time_scale); |
| 930 packed_sps_.AppendBool(current_sps_.fixed_frame_rate_flag); |
| 931 } |
| 932 |
| 933 packed_sps_.AppendBool(current_sps_.nal_hrd_parameters_present_flag); |
| 934 if (current_sps_.nal_hrd_parameters_present_flag) { |
| 935 packed_sps_.AppendUE(current_sps_.cpb_cnt_minus1); |
| 936 packed_sps_.AppendBits(4, current_sps_.bit_rate_scale); |
| 937 packed_sps_.AppendBits(4, current_sps_.cpb_size_scale); |
| 938 CHECK_LT(base::checked_cast<size_t>(current_sps_.cpb_cnt_minus1), |
| 939 arraysize(current_sps_.bit_rate_value_minus1)); |
| 940 for (int i = 0; i <= current_sps_.cpb_cnt_minus1; ++i) { |
| 941 packed_sps_.AppendUE(current_sps_.bit_rate_value_minus1[i]); |
| 942 packed_sps_.AppendUE(current_sps_.cpb_size_value_minus1[i]); |
| 943 packed_sps_.AppendBool(current_sps_.cbr_flag[i]); |
| 944 } |
| 945 packed_sps_.AppendBits( |
| 946 5, current_sps_.initial_cpb_removal_delay_length_minus_1); |
| 947 packed_sps_.AppendBits(5, current_sps_.cpb_removal_delay_length_minus1); |
| 948 packed_sps_.AppendBits(5, current_sps_.dpb_output_delay_length_minus1); |
| 949 packed_sps_.AppendBits(5, current_sps_.time_offset_length); |
| 950 } |
| 951 |
| 952 packed_sps_.AppendBool(false); // vcl_hrd_parameters_flag |
| 953 if (current_sps_.nal_hrd_parameters_present_flag) |
| 954 packed_sps_.AppendBool(current_sps_.low_delay_hrd_flag); |
| 955 |
| 956 packed_sps_.AppendBool(false); // pic_struct_present_flag |
| 957 packed_sps_.AppendBool(false); // bitstream_restriction_flag |
| 958 } |
| 959 |
| 960 packed_sps_.FinishNALU(); |
| 961 } |
| 962 |
| 963 void VaapiVideoEncodeAccelerator::UpdatePPS() { |
| 964 memset(¤t_pps_, 0, sizeof(media::H264PPS)); |
| 965 |
| 966 current_pps_.seq_parameter_set_id = current_sps_.seq_parameter_set_id; |
| 967 current_pps_.pic_parameter_set_id = 0; |
| 968 |
| 969 current_pps_.entropy_coding_mode_flag = |
| 970 current_sps_.profile_idc >= media::H264SPS::kProfileIDCMain; |
| 971 |
| 972 CHECK_GT(max_ref_idx_l0_size_, 0); |
| 973 current_pps_.num_ref_idx_l0_default_active_minus1 = max_ref_idx_l0_size_ - 1; |
| 974 current_pps_.num_ref_idx_l1_default_active_minus1 = 0; |
| 975 DCHECK_LE(qp_, 51); |
| 976 current_pps_.pic_init_qp_minus26 = qp_ - 26; |
| 977 current_pps_.deblocking_filter_control_present_flag = true; |
| 978 current_pps_.transform_8x8_mode_flag = |
| 979 (current_sps_.profile_idc == media::H264SPS::kProfileIDCHigh); |
| 980 } |
| 981 |
// Serializes |current_pps_| into |packed_pps_| as a packed PPS NALU,
// following the pic_parameter_set_rbsp() syntax of the H.264 spec
// (ch. 7.3.2.2). The order of Append* calls below is the bitstream layout
// and must not be changed.
void VaapiVideoEncodeAccelerator::GeneratePackedPPS() {
  packed_pps_.Reset();

  // nal_ref_idc = 3 (highest): the PPS is required for decoding frames
  // that refer to it.
  packed_pps_.BeginNALU(media::H264NALU::kPPS, 3);

  packed_pps_.AppendUE(current_pps_.pic_parameter_set_id);
  packed_pps_.AppendUE(current_pps_.seq_parameter_set_id);
  packed_pps_.AppendBool(current_pps_.entropy_coding_mode_flag);
  packed_pps_.AppendBool(
      current_pps_.bottom_field_pic_order_in_frame_present_flag);
  // Slice groups (FMO) are not supported; the slice-group syntax that
  // would follow a nonzero value is not emitted.
  CHECK_EQ(current_pps_.num_slice_groups_minus1, 0);
  packed_pps_.AppendUE(current_pps_.num_slice_groups_minus1);

  packed_pps_.AppendUE(current_pps_.num_ref_idx_l0_default_active_minus1);
  packed_pps_.AppendUE(current_pps_.num_ref_idx_l1_default_active_minus1);

  packed_pps_.AppendBool(current_pps_.weighted_pred_flag);
  packed_pps_.AppendBits(2, current_pps_.weighted_bipred_idc);

  packed_pps_.AppendSE(current_pps_.pic_init_qp_minus26);
  packed_pps_.AppendSE(current_pps_.pic_init_qs_minus26);
  packed_pps_.AppendSE(current_pps_.chroma_qp_index_offset);

  packed_pps_.AppendBool(current_pps_.deblocking_filter_control_present_flag);
  packed_pps_.AppendBool(current_pps_.constrained_intra_pred_flag);
  packed_pps_.AppendBool(current_pps_.redundant_pic_cnt_present_flag);

  // Optional PPS extension fields (present when more_rbsp_data()); always
  // emitted here, which is legal syntax for any profile.
  packed_pps_.AppendBool(current_pps_.transform_8x8_mode_flag);
  packed_pps_.AppendBool(current_pps_.pic_scaling_matrix_present_flag);
  // Emitting picture scaling lists is not supported.
  DCHECK(!current_pps_.pic_scaling_matrix_present_flag);
  packed_pps_.AppendSE(current_pps_.second_chroma_qp_index_offset);

  packed_pps_.FinishNALU();
}
| 1016 |
| 1017 void VaapiVideoEncodeAccelerator::SetState(State state) { |
| 1018 // Only touch state on encoder thread, unless it's not running. |
| 1019 if (encoder_thread_.IsRunning() && |
| 1020 !encoder_thread_proxy_->BelongsToCurrentThread()) { |
| 1021 encoder_thread_proxy_->PostTask( |
| 1022 FROM_HERE, |
| 1023 base::Bind(&VaapiVideoEncodeAccelerator::SetState, |
| 1024 base::Unretained(this), |
| 1025 state)); |
| 1026 return; |
| 1027 } |
| 1028 |
| 1029 DVLOGF(1) << "setting state to: " << state; |
| 1030 state_ = state; |
| 1031 } |
| 1032 |
| 1033 void VaapiVideoEncodeAccelerator::NotifyError(Error error) { |
| 1034 if (!child_message_loop_proxy_->BelongsToCurrentThread()) { |
| 1035 child_message_loop_proxy_->PostTask( |
| 1036 FROM_HERE, |
| 1037 base::Bind( |
| 1038 &VaapiVideoEncodeAccelerator::NotifyError, weak_this_, error)); |
| 1039 return; |
| 1040 } |
| 1041 |
| 1042 if (client_) { |
| 1043 client_->NotifyError(error); |
| 1044 client_ptr_factory_.reset(); |
| 1045 } |
| 1046 } |
| 1047 |
| 1048 VaapiVideoEncodeAccelerator::EncodeJob::EncodeJob() |
| 1049 : coded_buffer(VA_INVALID_ID), keyframe(false) { |
| 1050 } |
| 1051 |
| 1052 } // namespace content |
OLD | NEW |