OLD | NEW |
(Empty) | |
| 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. |
| 4 |
| 5 #include "content/common/gpu/media/vaapi_video_encode_accelerator.h" |
| 6 |
| 7 #include "base/bind.h" |
| 8 #include "base/callback.h" |
| 9 #include "base/command_line.h" |
| 10 #include "base/message_loop/message_loop_proxy.h" |
| 11 #include "base/metrics/histogram.h" |
| 12 #include "base/numerics/safe_conversions.h" |
| 13 #include "content/common/gpu/media/h264_dpb.h" |
| 14 #include "content/public/common/content_switches.h" |
| 15 #include "media/base/bind_to_current_loop.h" |
| 16 #include "third_party/libva/va/va_enc_h264.h" |
| 17 |
// Logging helper: prefixes the message with the calling function's name.
#define DVLOGF(level) DVLOG(level) << __FUNCTION__ << "(): "

// Transitions the encoder to the error state, logs |msg| and reports |error|
// to the client via NotifyError(). Wrapped in do/while(0) so it behaves as a
// single statement when used inside unbraced if/else.
#define NOTIFY_ERROR(error, msg)                         \
  do {                                                   \
    SetState(kError);                                    \
    DVLOGF(1) << msg;                                    \
    DVLOGF(1) << "Calling NotifyError(" << error << ")"; \
    NotifyError(error);                                  \
  } while (0)
| 27 |
| 28 namespace content { |
| 29 |
namespace {
// Need 2 surfaces for each frame: one for input data and one for
// reconstructed picture, which is later used for reference.
const size_t kMinSurfacesToEncode = 2;

// Subjectively chosen.
const size_t kNumInputBuffers = 4;
const size_t kMaxNumReferenceFrames = 4;

// We need up to kMaxNumReferenceFrames surfaces for reference, plus one
// for input and one for encode (which will be added to the set of reference
// frames for subsequent frames). Actual execution of HW encode is done
// in parallel, and we want to process more frames in the meantime.
// To have kNumInputBuffers in flight, we need a full set of reference +
// encode surfaces (i.e. kMaxNumReferenceFrames + kMinSurfacesToEncode), and
// (kNumInputBuffers - 1) of kMinSurfacesToEncode for the remaining frames
// in flight.
const size_t kNumSurfaces = kMaxNumReferenceFrames + kMinSurfacesToEncode +
                            kMinSurfacesToEncode * (kNumInputBuffers - 1);

// An IDR every 128 frames, an I frame every 30 and no B frames.
const int kIDRPeriod = 128;
const int kIPeriod = 30;
const int kIPPeriod = 1;

// Used both as the advertised maximum framerate and the initial framerate
// until the client requests a change.
const int kDefaultFramerate = 30;

// HRD parameters (ch. E.2.2 in spec).
const int kBitRateScale = 0;  // bit_rate_scale for SPS HRD parameters.
const int kCPBSizeScale = 0;  // cpb_size_scale for SPS HRD parameters.

// Quantization parameter used for all encodes (constant-QP-style init).
const int kDefaultQP = 26;
// All Intel codecs can do at least 4.1.
const int kDefaultLevelIDC = 41;
const int kChromaFormatIDC = 1;  // 4:2:0

// Arbitrarily chosen bitrate window size for rate control, in ms.
const int kCPBWindowSizeMs = 1500;

// UMA errors that the VaapiVideoEncodeAccelerator class reports.
enum VAVEAEncoderFailure {
  VAAPI_ERROR = 0,
  VAVEA_ENCODER_FAILURES_MAX,
};

}  // namespace
| 76 |
| 77 // Round |value| up to |alignment|, which must be a power of 2. |
| 78 static inline size_t RoundUpToPowerOf2(size_t value, size_t alignment) { |
| 79 // Check that |alignment| is a power of 2. |
| 80 DCHECK((alignment + (alignment - 1)) == (alignment | (alignment - 1))); |
| 81 return ((value + (alignment - 1)) & ~(alignment - 1)); |
| 82 } |
| 83 |
// Records |failure| into the Media.VAVEA.EncoderFailure UMA histogram.
static void ReportToUMA(VAVEAEncoderFailure failure) {
  UMA_HISTOGRAM_ENUMERATION(
      "Media.VAVEA.EncoderFailure",
      failure,
      VAVEA_ENCODER_FAILURES_MAX);
}
| 90 |
// An input video frame queued for encoding, together with the client's
// request to force a keyframe for it.
struct VaapiVideoEncodeAccelerator::InputFrameRef {
  InputFrameRef(const scoped_refptr<media::VideoFrame>& frame,
                bool force_keyframe)
      : frame(frame), force_keyframe(force_keyframe) {}
  const scoped_refptr<media::VideoFrame> frame;
  const bool force_keyframe;
};
| 98 |
// A client-provided output bitstream buffer: its id (echoed back in
// BitstreamBufferReady), the mapped shared memory, and its usable size.
// Takes ownership of |shm|.
struct VaapiVideoEncodeAccelerator::BitstreamBufferRef {
  BitstreamBufferRef(int32 id, scoped_ptr<base::SharedMemory> shm, size_t size)
      : id(id), shm(shm.Pass()), size(size) {}
  const int32 id;
  const scoped_ptr<base::SharedMemory> shm;
  const size_t size;
};
| 106 |
| 107 // static |
| 108 std::vector<media::VideoEncodeAccelerator::SupportedProfile> |
| 109 VaapiVideoEncodeAccelerator::GetSupportedProfiles() { |
| 110 std::vector<SupportedProfile> profiles; |
| 111 |
| 112 const CommandLine* cmd_line = CommandLine::ForCurrentProcess(); |
| 113 if (!cmd_line->HasSwitch(switches::kEnableVaapiAcceleratedVideoEncode)) |
| 114 return profiles; |
| 115 |
| 116 SupportedProfile profile; |
| 117 profile.profile = media::H264PROFILE_MAIN; |
| 118 profile.max_resolution.SetSize(1920, 1088); |
| 119 profile.max_framerate.numerator = kDefaultFramerate; |
| 120 profile.max_framerate.denominator = 1; |
| 121 profiles.push_back(profile); |
| 122 |
| 123 // This is actually only constrained (see crbug.com/345569). |
| 124 profile.profile = media::H264PROFILE_BASELINE; |
| 125 profiles.push_back(profile); |
| 126 |
| 127 profile.profile = media::H264PROFILE_HIGH; |
| 128 profiles.push_back(profile); |
| 129 |
| 130 return profiles; |
| 131 } |
| 132 |
| 133 static unsigned int Log2OfPowerOf2(unsigned int x) { |
| 134 CHECK_GT(x, 0u); |
| 135 DCHECK_EQ(x & (x - 1), 0u); |
| 136 |
| 137 int log = 0; |
| 138 while (x) { |
| 139 x >>= 1; |
| 140 ++log; |
| 141 } |
| 142 return log; |
| 143 } |
| 144 |
// Constructed on the childs (GPU) message loop, which is captured here as
// |child_message_loop_proxy_|. Encoding parameters start from the file-level
// defaults; rates are set later in Initialize()/UpdateRates().
VaapiVideoEncodeAccelerator::VaapiVideoEncodeAccelerator(Display* x_display)
    : profile_(media::VIDEO_CODEC_PROFILE_UNKNOWN),
      mb_width_(0),
      mb_height_(0),
      output_buffer_byte_size_(0),
      x_display_(x_display),
      state_(kUninitialized),
      frame_num_(0),
      last_idr_frame_num_(0),
      bitrate_(0),
      framerate_(0),
      cpb_size_(0),
      encoding_parameters_changed_(false),
      encoder_thread_("VAVEAEncoderThread"),
      child_message_loop_proxy_(base::MessageLoopProxy::current()),
      weak_this_ptr_factory_(this) {
  DVLOGF(4);
  weak_this_ = weak_this_ptr_factory_.GetWeakPtr();

  max_ref_idx_l0_size_ = kMaxNumReferenceFrames;
  qp_ = kDefaultQP;
  idr_period_ = kIDRPeriod;
  i_period_ = kIPeriod;
  ip_period_ = kIPPeriod;
}
| 170 |
VaapiVideoEncodeAccelerator::~VaapiVideoEncodeAccelerator() {
  DVLOGF(4);
  // Destroy() must have run first: it stops |encoder_thread_| before
  // deleting |this| on the child thread.
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  DCHECK(!encoder_thread_.IsRunning());
}
| 176 |
// Synchronous part of initialization, run on the child thread: validates the
// requested profile/format, derives macroblock-aligned coded size, creates
// the VaapiWrapper and starts the encoder thread. The remaining setup is
// posted to the encoder thread (InitializeTask). Returns false on failure.
bool VaapiVideoEncodeAccelerator::Initialize(
    media::VideoFrame::Format format,
    const gfx::Size& input_visible_size,
    media::VideoCodecProfile output_profile,
    uint32 initial_bitrate,
    Client* client) {
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  DCHECK(!encoder_thread_.IsRunning());
  DCHECK_EQ(state_, kUninitialized);

  DVLOGF(1) << "Initializing VAVEA, input_format: "
            << media::VideoFrame::FormatToString(format)
            << ", input_visible_size: " << input_visible_size.ToString()
            << ", output_profile: " << output_profile
            << ", initial_bitrate: " << initial_bitrate;

  client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client));
  client_ = client_ptr_factory_->GetWeakPtr();

  // NOTE(review): this accepts only BASELINE..MAIN, while
  // GetSupportedProfiles() also advertises HIGH — confirm intended.
  if (output_profile < media::H264PROFILE_BASELINE ||
      output_profile > media::H264PROFILE_MAIN) {
    DVLOGF(1) << "Unsupported output profile: " << output_profile;
    return false;
  }

  if (format != media::VideoFrame::I420) {
    DVLOGF(1) << "Unsupported input format: "
              << media::VideoFrame::FormatToString(format);
    return false;
  }

  profile_ = output_profile;
  visible_size_ = input_visible_size;
  // 4:2:0 format has to be 2-aligned.
  DCHECK_EQ(visible_size_.width() % 2, 0);
  DCHECK_EQ(visible_size_.height() % 2, 0);
  // Coded size is the visible size rounded up to whole 16x16 macroblocks.
  coded_size_ = gfx::Size(RoundUpToPowerOf2(visible_size_.width(), 16),
                          RoundUpToPowerOf2(visible_size_.height(), 16));
  mb_width_ = coded_size_.width() / 16;
  mb_height_ = coded_size_.height() / 16;
  // One byte per pixel of coded area is used as the output buffer size.
  output_buffer_byte_size_ = coded_size_.GetArea();

  UpdateRates(initial_bitrate, kDefaultFramerate);

  vaapi_wrapper_ = VaapiWrapper::Create(VaapiWrapper::kEncode,
                                        output_profile,
                                        x_display_,
                                        base::Bind(&ReportToUMA, VAAPI_ERROR));
  if (!vaapi_wrapper_) {
    DVLOGF(1) << "Failed initializing VAAPI";
    return false;
  }

  if (!encoder_thread_.Start()) {
    DVLOGF(1) << "Failed to start encoder thread";
    return false;
  }
  encoder_thread_proxy_ = encoder_thread_.message_loop_proxy();

  // Finish the remaining initialization on the encoder thread.
  encoder_thread_proxy_->PostTask(
      FROM_HERE,
      base::Bind(&VaapiVideoEncodeAccelerator::InitializeTask,
                 base::Unretained(this)));

  return true;
}
| 244 |
// Encoder-thread half of initialization: allocates VA surfaces, builds the
// initial SPS/PPS (raw and packed), requests bitstream buffers from the
// client (on the child thread) and enters the kEncoding state.
void VaapiVideoEncodeAccelerator::InitializeTask() {
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());
  DCHECK_EQ(state_, kUninitialized);
  DVLOGF(4);

  // BindToCurrentLoop ensures recycled surfaces come back on this thread.
  va_surface_release_cb_ = media::BindToCurrentLoop(
      base::Bind(&VaapiVideoEncodeAccelerator::RecycleVASurfaceID,
                 base::Unretained(this)));

  if (!vaapi_wrapper_->CreateSurfaces(
          coded_size_, kNumSurfaces, &available_va_surface_ids_)) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed creating VASurfaces");
    return;
  }

  UpdateSPS();
  GeneratePackedSPS();

  UpdatePPS();
  GeneratePackedPPS();

  child_message_loop_proxy_->PostTask(
      FROM_HERE,
      base::Bind(&Client::RequireBitstreamBuffers,
                 client_,
                 kNumInputBuffers,
                 coded_size_,
                 output_buffer_byte_size_));

  SetState(kEncoding);
}
| 276 |
// Called (on the encoder thread) when a VASurface is released; returns the
// id to the available pool and retries encoding, which may have been blocked
// on surface availability.
void VaapiVideoEncodeAccelerator::RecycleVASurfaceID(
    VASurfaceID va_surface_id) {
  DVLOGF(4) << "va_surface_id: " << va_surface_id;
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());

  available_va_surface_ids_.push_back(va_surface_id);
  EncodeFrameTask();
}
| 285 |
// Sets up |current_pic_| for the next frame: assigns frame_num (wrapping at
// the IDR period), picks slice type from the I/IDR cadence or |force_keyframe|,
// and derives POC values. Also marks the current job as a keyframe for
// I slices.
void VaapiVideoEncodeAccelerator::BeginFrame(bool force_keyframe) {
  memset(&current_pic_, 0, sizeof(current_pic_));

  current_pic_.frame_num = frame_num_++;
  frame_num_ %= idr_period_;

  if (current_pic_.frame_num % i_period_ == 0 || force_keyframe)
    current_pic_.type = media::H264SliceHeader::kISlice;
  else
    current_pic_.type = media::H264SliceHeader::kPSlice;

  // frame_num wraps at idr_period_, so this triggers exactly at each wrap;
  // an IDR invalidates all previous references.
  if (current_pic_.frame_num % idr_period_ == 0) {
    current_pic_.idr = true;
    last_idr_frame_num_ = current_pic_.frame_num;
    ref_pic_list0_.clear();
  }

  // With no B frames (kIPPeriod == 1), every picture is a reference.
  if (current_pic_.type != media::H264SliceHeader::kBSlice)
    current_pic_.ref = true;

  // POC is derived directly from frame_num (pic_order_cnt_type 0, frames
  // only, display order == decode order).
  current_pic_.pic_order_cnt = current_pic_.frame_num * 2;
  current_pic_.top_field_order_cnt = current_pic_.pic_order_cnt;
  current_pic_.pic_order_cnt_lsb = current_pic_.pic_order_cnt;

  current_encode_job_->keyframe =
      (current_pic_.type == media::H264SliceHeader::kISlice);

  DVLOGF(4) << "Starting a new frame, type: " << current_pic_.type
            << (force_keyframe ? " (forced keyframe)" : "")
            << " frame_num: " << current_pic_.frame_num
            << " POC: " << current_pic_.pic_order_cnt;
}
| 318 |
// Finishes the current frame: updates the reference picture list with the
// just-encoded reconstructed surface and queues the job for output download.
void VaapiVideoEncodeAccelerator::EndFrame() {
  // Store the picture on the list of reference pictures and keep the list
  // below maximum size, dropping oldest references.
  if (current_pic_.ref)
    ref_pic_list0_.push_front(current_encode_job_->recon_surface);
  size_t max_num_ref_frames =
      base::checked_cast<size_t>(current_sps_.max_num_ref_frames);
  while (ref_pic_list0_.size() > max_num_ref_frames)
    ref_pic_list0_.pop_back();

  // Ownership of the job moves to the submitted queue until its bitstream
  // is downloaded in TryToReturnBitstreamBuffer().
  submitted_encode_jobs_.push(make_linked_ptr(current_encode_job_.release()));
}
| 331 |
// Resets |va_pic| to the "unused entry" state expected by libva (invalid id
// and the INVALID flag set).
static void InitVAPicture(VAPictureH264* va_pic) {
  memset(va_pic, 0, sizeof(*va_pic));
  va_pic->picture_id = VA_INVALID_ID;
  va_pic->flags = VA_PICTURE_H264_INVALID;
}
| 337 |
// Submits all per-frame parameter buffers for the current picture to libva:
// sequence, picture and slice parameters, followed by the rate control,
// framerate and HRD misc parameters. Buffers are queued in the wrapper and
// executed later by ExecuteEncode(). Returns false if any submission fails.
bool VaapiVideoEncodeAccelerator::SubmitFrameParameters() {
  VAEncSequenceParameterBufferH264 seq_param;
  memset(&seq_param, 0, sizeof(seq_param));

// Copies same-named fields from |current_sps_| into |seq_param|.
#define SPS_TO_SP(a) seq_param.a = current_sps_.a;
  SPS_TO_SP(seq_parameter_set_id);
  SPS_TO_SP(level_idc);

  seq_param.intra_period = i_period_;
  seq_param.intra_idr_period = idr_period_;
  seq_param.ip_period = ip_period_;
  seq_param.bits_per_second = bitrate_;

  SPS_TO_SP(max_num_ref_frames);
  seq_param.picture_width_in_mbs = mb_width_;
  seq_param.picture_height_in_mbs = mb_height_;

// Same, for the bitfields nested in seq_fields.
#define SPS_TO_SP_FS(a) seq_param.seq_fields.bits.a = current_sps_.a;
  SPS_TO_SP_FS(chroma_format_idc);
  SPS_TO_SP_FS(frame_mbs_only_flag);
  SPS_TO_SP_FS(log2_max_frame_num_minus4);
  SPS_TO_SP_FS(pic_order_cnt_type);
  SPS_TO_SP_FS(log2_max_pic_order_cnt_lsb_minus4);
#undef SPS_TO_SP_FS

  SPS_TO_SP(bit_depth_luma_minus8);
  SPS_TO_SP(bit_depth_chroma_minus8);

  SPS_TO_SP(frame_cropping_flag);
  if (current_sps_.frame_cropping_flag) {
    SPS_TO_SP(frame_crop_left_offset);
    SPS_TO_SP(frame_crop_right_offset);
    SPS_TO_SP(frame_crop_top_offset);
    SPS_TO_SP(frame_crop_bottom_offset);
  }

  SPS_TO_SP(vui_parameters_present_flag);
// Same, for the VUI bitfields.
#define SPS_TO_SP_VF(a) seq_param.vui_fields.bits.a = current_sps_.a;
  SPS_TO_SP_VF(timing_info_present_flag);
#undef SPS_TO_SP_VF
  SPS_TO_SP(num_units_in_tick);
  SPS_TO_SP(time_scale);
#undef SPS_TO_SP

  if (!vaapi_wrapper_->SubmitBuffer(VAEncSequenceParameterBufferType,
                                    sizeof(seq_param),
                                    &seq_param))
    return false;

  VAEncPictureParameterBufferH264 pic_param;
  memset(&pic_param, 0, sizeof(pic_param));

  // The reconstructed surface doubles as this frame's entry in CurrPic.
  pic_param.CurrPic.picture_id = current_encode_job_->recon_surface->id();
  pic_param.CurrPic.TopFieldOrderCnt = current_pic_.top_field_order_cnt;
  pic_param.CurrPic.BottomFieldOrderCnt = current_pic_.bottom_field_order_cnt;
  pic_param.CurrPic.flags = 0;

  // Mark all reference slots invalid, then fill from ref_pic_list0_.
  for (size_t i = 0; i < arraysize(pic_param.ReferenceFrames); ++i)
    InitVAPicture(&pic_param.ReferenceFrames[i]);

  DCHECK_LE(ref_pic_list0_.size(), arraysize(pic_param.ReferenceFrames));
  RefPicList::const_iterator iter = ref_pic_list0_.begin();
  for (size_t i = 0;
       i < arraysize(pic_param.ReferenceFrames) && iter != ref_pic_list0_.end();
       ++iter, ++i) {
    pic_param.ReferenceFrames[i].picture_id = (*iter)->id();
    pic_param.ReferenceFrames[i].flags = 0;
  }

  pic_param.coded_buf = current_encode_job_->coded_buffer;
  pic_param.pic_parameter_set_id = current_pps_.pic_parameter_set_id;
  pic_param.seq_parameter_set_id = current_pps_.seq_parameter_set_id;
  pic_param.frame_num = current_pic_.frame_num;
  pic_param.pic_init_qp = qp_;
  pic_param.num_ref_idx_l0_active_minus1 = max_ref_idx_l0_size_ - 1;
  pic_param.pic_fields.bits.idr_pic_flag = current_pic_.idr;
  pic_param.pic_fields.bits.reference_pic_flag = current_pic_.ref;
// Copies same-named bitfields from |current_pps_| into pic_fields.
#define PPS_TO_PP_PF(a) pic_param.pic_fields.bits.a = current_pps_.a;
  PPS_TO_PP_PF(entropy_coding_mode_flag);
  PPS_TO_PP_PF(transform_8x8_mode_flag);
  PPS_TO_PP_PF(deblocking_filter_control_present_flag);
#undef PPS_TO_PP_PF

  if (!vaapi_wrapper_->SubmitBuffer(VAEncPictureParameterBufferType,
                                    sizeof(pic_param),
                                    &pic_param))
    return false;

  VAEncSliceParameterBufferH264 slice_param;
  memset(&slice_param, 0, sizeof(slice_param));

  // One slice spanning the whole picture.
  slice_param.num_macroblocks = mb_width_ * mb_height_;
  slice_param.macroblock_info = VA_INVALID_ID;
  slice_param.slice_type = current_pic_.type;
  slice_param.pic_parameter_set_id = current_pps_.pic_parameter_set_id;
  slice_param.idr_pic_id = last_idr_frame_num_;
  slice_param.pic_order_cnt_lsb = current_pic_.pic_order_cnt_lsb;
  slice_param.num_ref_idx_active_override_flag = true;

  for (size_t i = 0; i < arraysize(slice_param.RefPicList0); ++i)
    InitVAPicture(&slice_param.RefPicList0[i]);

  // List1 stays all-invalid: no B frames are produced.
  for (size_t i = 0; i < arraysize(slice_param.RefPicList1); ++i)
    InitVAPicture(&slice_param.RefPicList1[i]);

  DCHECK_LE(ref_pic_list0_.size(), arraysize(slice_param.RefPicList0));
  iter = ref_pic_list0_.begin();
  for (size_t i = 0;
       i < arraysize(slice_param.RefPicList0) && iter != ref_pic_list0_.end();
       ++iter, ++i) {
    InitVAPicture(&slice_param.RefPicList0[i]);
    slice_param.RefPicList0[i].picture_id = (*iter)->id();
    slice_param.RefPicList0[i].flags = 0;
  }

  if (!vaapi_wrapper_->SubmitBuffer(VAEncSliceParameterBufferType,
                                    sizeof(slice_param),
                                    &slice_param))
    return false;

  VAEncMiscParameterRateControl rate_control_param;
  memset(&rate_control_param, 0, sizeof(rate_control_param));
  rate_control_param.bits_per_second = bitrate_;
  rate_control_param.target_percentage = 90;
  rate_control_param.window_size = kCPBWindowSizeMs;
  rate_control_param.initial_qp = qp_;
  rate_control_param.rc_flags.bits.disable_frame_skip = true;

  if (!vaapi_wrapper_->SubmitVAEncMiscParamBuffer(
          VAEncMiscParameterTypeRateControl,
          sizeof(rate_control_param),
          &rate_control_param))
    return false;

  VAEncMiscParameterFrameRate framerate_param;
  memset(&framerate_param, 0, sizeof(framerate_param));
  framerate_param.framerate = framerate_;
  if (!vaapi_wrapper_->SubmitVAEncMiscParamBuffer(
          VAEncMiscParameterTypeFrameRate,
          sizeof(framerate_param),
          &framerate_param))
    return false;

  VAEncMiscParameterHRD hrd_param;
  memset(&hrd_param, 0, sizeof(hrd_param));
  hrd_param.buffer_size = cpb_size_;
  hrd_param.initial_buffer_fullness = cpb_size_ / 2;
  if (!vaapi_wrapper_->SubmitVAEncMiscParamBuffer(VAEncMiscParameterTypeHRD,
                                                  sizeof(hrd_param),
                                                  &hrd_param))
    return false;

  return true;
}
| 492 |
// For I slices (which are always the start of an IDR/keyframe here), submits
// the pre-generated packed SPS and PPS headers so the stream is
// self-contained at each keyframe. No-op for other slice types.
bool VaapiVideoEncodeAccelerator::SubmitHeadersIfNeeded() {
  if (current_pic_.type != media::H264SliceHeader::kISlice)
    return true;

  // Submit SPS.
  VAEncPackedHeaderParameterBuffer par_buffer;
  memset(&par_buffer, 0, sizeof(par_buffer));
  par_buffer.type = VAEncPackedHeaderSequence;
  par_buffer.bit_length = packed_sps_.BytesInBuffer() * 8;

  if (!vaapi_wrapper_->SubmitBuffer(VAEncPackedHeaderParameterBufferType,
                                    sizeof(par_buffer),
                                    &par_buffer))
    return false;

  if (!vaapi_wrapper_->SubmitBuffer(VAEncPackedHeaderDataBufferType,
                                    packed_sps_.BytesInBuffer(),
                                    packed_sps_.data()))
    return false;

  // Submit PPS.
  memset(&par_buffer, 0, sizeof(par_buffer));
  par_buffer.type = VAEncPackedHeaderPicture;
  par_buffer.bit_length = packed_pps_.BytesInBuffer() * 8;

  if (!vaapi_wrapper_->SubmitBuffer(VAEncPackedHeaderParameterBufferType,
                                    sizeof(par_buffer),
                                    &par_buffer))
    return false;

  if (!vaapi_wrapper_->SubmitBuffer(VAEncPackedHeaderDataBufferType,
                                    packed_pps_.BytesInBuffer(),
                                    packed_pps_.data()))
    return false;

  return true;
}
| 530 |
// Kicks off HW encode of all previously submitted buffers for the current
// input surface; the buffers are destroyed by the wrapper afterwards.
bool VaapiVideoEncodeAccelerator::ExecuteEncode() {
  DVLOGF(3) << "Encoding frame_num: " << current_pic_.frame_num;
  return vaapi_wrapper_->ExecuteAndDestroyPendingBuffers(
      current_encode_job_->input_surface->id());
}
| 536 |
// Copies |frame| pixels into the current job's input VA surface.
bool VaapiVideoEncodeAccelerator::UploadFrame(
    const scoped_refptr<media::VideoFrame>& frame) {
  return vaapi_wrapper_->UploadVideoFrameToSurface(
      frame, current_encode_job_->input_surface->id());
}
| 542 |
// If both a finished encode job and a client bitstream buffer are available,
// downloads the coded data (blocking on the HW job if needed) into the
// buffer's shared memory and notifies the client on the child thread.
void VaapiVideoEncodeAccelerator::TryToReturnBitstreamBuffer() {
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());

  if (state_ != kEncoding)
    return;

  if (submitted_encode_jobs_.empty() || available_bitstream_buffers_.empty())
    return;

  linked_ptr<BitstreamBufferRef> buffer = available_bitstream_buffers_.front();
  available_bitstream_buffers_.pop();

  uint8* target_data = reinterpret_cast<uint8*>(buffer->shm->memory());

  linked_ptr<EncodeJob> encode_job = submitted_encode_jobs_.front();
  submitted_encode_jobs_.pop();

  size_t data_size = 0;
  // Also destroys the coded buffer; the job's surfaces are released when
  // |encode_job| goes out of scope at the end of this function.
  if (!vaapi_wrapper_->DownloadAndDestroyCodedBuffer(
          encode_job->coded_buffer,
          encode_job->input_surface->id(),
          target_data,
          buffer->size,
          &data_size)) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed downloading coded buffer");
    return;
  }

  DVLOGF(3) << "Returning bitstream buffer "
            << (encode_job->keyframe ? "(keyframe)" : "")
            << " id: " << buffer->id << " size: " << data_size;

  child_message_loop_proxy_->PostTask(FROM_HERE,
                                      base::Bind(&Client::BitstreamBufferReady,
                                                 client_,
                                                 buffer->id,
                                                 data_size,
                                                 encode_job->keyframe));
}
| 582 |
// Client entry point (child thread): hands the frame off to the encoder
// thread. base::Unretained is safe because Destroy() joins the encoder
// thread before |this| is deleted.
void VaapiVideoEncodeAccelerator::Encode(
    const scoped_refptr<media::VideoFrame>& frame,
    bool force_keyframe) {
  DVLOGF(3) << "Frame timestamp: " << frame->timestamp().InMilliseconds()
            << " force_keyframe: " << force_keyframe;
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());

  encoder_thread_proxy_->PostTask(
      FROM_HERE,
      base::Bind(&VaapiVideoEncodeAccelerator::EncodeTask,
                 base::Unretained(this),
                 frame,
                 force_keyframe));
}
| 597 |
// Sets up |current_encode_job_| for the next frame if enough resources are
// available. Returns false (without error) when out of surfaces, or notifies
// an error if coded buffer creation fails.
bool VaapiVideoEncodeAccelerator::PrepareNextJob() {
  // Need one surface for input and one for the reconstructed picture.
  if (available_va_surface_ids_.size() < kMinSurfacesToEncode)
    return false;

  DCHECK(!current_encode_job_);
  current_encode_job_.reset(new EncodeJob());

  if (!vaapi_wrapper_->CreateCodedBuffer(output_buffer_byte_size_,
                                         &current_encode_job_->coded_buffer)) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed creating coded buffer");
    return false;
  }

  // Surfaces return to |available_va_surface_ids_| via
  // |va_surface_release_cb_| when their last reference drops.
  current_encode_job_->input_surface =
      new VASurface(available_va_surface_ids_.back(), va_surface_release_cb_);
  available_va_surface_ids_.pop_back();

  current_encode_job_->recon_surface =
      new VASurface(available_va_surface_ids_.back(), va_surface_release_cb_);
  available_va_surface_ids_.pop_back();

  // Reference surfaces are needed until the job is done, but they get
  // removed from ref_pic_list0_ when it's full at the end of job submission.
  // Keep refs to them along with the job and only release after sync.
  current_encode_job_->reference_surfaces = ref_pic_list0_;

  return true;
}
| 626 |
// Encoder-thread side of Encode(): queues the frame and attempts to encode.
void VaapiVideoEncodeAccelerator::EncodeTask(
    const scoped_refptr<media::VideoFrame>& frame,
    bool force_keyframe) {
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());
  DCHECK_NE(state_, kUninitialized);

  encoder_input_queue_.push(
      make_linked_ptr(new InputFrameRef(frame, force_keyframe)));
  EncodeFrameTask();
}
| 637 |
// Attempts to encode the frame at the head of the input queue: acquires
// resources, uploads pixels, submits parameters/headers, kicks off HW encode
// and tries to return finished output. Silently returns if not in the
// encoding state, the queue is empty, or resources are not yet available
// (retried from RecycleVASurfaceID()).
void VaapiVideoEncodeAccelerator::EncodeFrameTask() {
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());

  if (state_ != kEncoding || encoder_input_queue_.empty())
    return;

  if (!PrepareNextJob()) {
    DVLOGF(4) << "Not ready for next frame yet";
    return;
  }

  linked_ptr<InputFrameRef> frame_ref = encoder_input_queue_.front();
  encoder_input_queue_.pop();

  if (!UploadFrame(frame_ref->frame)) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed uploading source frame to HW.");
    return;
  }

  // A parameter change also forces a keyframe so new SPS takes effect.
  BeginFrame(frame_ref->force_keyframe || encoding_parameters_changed_);
  encoding_parameters_changed_ = false;

  if (!SubmitFrameParameters()) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed submitting frame parameters.");
    return;
  }

  if (!SubmitHeadersIfNeeded()) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed submitting frame headers.");
    return;
  }

  if (!ExecuteEncode()) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed submitting encode job to HW.");
    return;
  }

  EndFrame();
  TryToReturnBitstreamBuffer();
}
| 678 |
// Client entry point (child thread): validates and maps the client's output
// buffer, then hands it to the encoder thread.
void VaapiVideoEncodeAccelerator::UseOutputBitstreamBuffer(
    const media::BitstreamBuffer& buffer) {
  DVLOGF(4) << "id: " << buffer.id();
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());

  if (buffer.size() < output_buffer_byte_size_) {
    NOTIFY_ERROR(kInvalidArgumentError, "Provided bitstream buffer too small");
    return;
  }

  scoped_ptr<base::SharedMemory> shm(
      new base::SharedMemory(buffer.handle(), false));
  if (!shm->Map(buffer.size())) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed mapping shared memory.");
    return;
  }

  scoped_ptr<BitstreamBufferRef> buffer_ref(
      new BitstreamBufferRef(buffer.id(), shm.Pass(), buffer.size()));

  encoder_thread_proxy_->PostTask(
      FROM_HERE,
      base::Bind(&VaapiVideoEncodeAccelerator::UseOutputBitstreamBufferTask,
                 base::Unretained(this),
                 base::Passed(&buffer_ref)));
}
| 705 |
// Encoder-thread side: queues the buffer and tries to flush any finished
// encode output into it right away.
void VaapiVideoEncodeAccelerator::UseOutputBitstreamBufferTask(
    scoped_ptr<BitstreamBufferRef> buffer_ref) {
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());
  DCHECK_NE(state_, kUninitialized);

  available_bitstream_buffers_.push(make_linked_ptr(buffer_ref.release()));
  TryToReturnBitstreamBuffer();
}
| 714 |
// Client entry point (child thread): forwards the rate change to the encoder
// thread.
void VaapiVideoEncodeAccelerator::RequestEncodingParametersChange(
    uint32 bitrate,
    uint32 framerate) {
  DVLOGF(2) << "bitrate: " << bitrate << " framerate: " << framerate;
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());

  encoder_thread_proxy_->PostTask(
      FROM_HERE,
      base::Bind(
          &VaapiVideoEncodeAccelerator::RequestEncodingParametersChangeTask,
          base::Unretained(this),
          bitrate,
          framerate));
}
| 729 |
// Stores the new bitrate/framerate and recomputes the CPB size from the
// rate-control window. Called from Initialize() (before the encoder thread
// starts) and from the encoder thread afterwards, hence the conditional
// thread check.
void VaapiVideoEncodeAccelerator::UpdateRates(uint32 bitrate,
                                              uint32 framerate) {
  if (encoder_thread_.IsRunning())
    DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());
  DCHECK_NE(bitrate, 0u);
  DCHECK_NE(framerate, 0u);
  bitrate_ = bitrate;
  framerate_ = framerate;
  // CPB sized to hold kCPBWindowSizeMs worth of stream at |bitrate_|.
  cpb_size_ = bitrate_ * kCPBWindowSizeMs / 1000;
}
| 740 |
// Encoder-thread side of the rate change: updates rates, regenerates the SPS
// (which embeds bitrate/framerate/HRD values) and flags the change so the
// next frame is forced to be a keyframe carrying the new parameters.
void VaapiVideoEncodeAccelerator::RequestEncodingParametersChangeTask(
    uint32 bitrate,
    uint32 framerate) {
  DVLOGF(2) << "bitrate: " << bitrate << " framerate: " << framerate;
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());
  DCHECK_NE(state_, kUninitialized);

  UpdateRates(bitrate, framerate);

  UpdateSPS();
  GeneratePackedSPS();

  // Submit new parameters along with next frame that will be processed.
  encoding_parameters_changed_ = true;
}
| 756 |
// Tears down the encoder (child thread): invalidates client/weak pointers,
// stops the encoder thread (joining it), then self-deletes. This is the only
// supported way to destroy the object, matching the DCHECKs in the dtor.
void VaapiVideoEncodeAccelerator::Destroy() {
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());

  // Can't call client anymore after Destroy() returns.
  client_ptr_factory_.reset();
  weak_this_ptr_factory_.InvalidateWeakPtrs();

  // Early-exit encoder tasks if they are running and join the thread.
  if (encoder_thread_.IsRunning()) {
    encoder_thread_.message_loop()->PostTask(
        FROM_HERE,
        base::Bind(&VaapiVideoEncodeAccelerator::DestroyTask,
                   base::Unretained(this)));
    // Stop() blocks until DestroyTask and all queued tasks have run.
    encoder_thread_.Stop();
  }

  delete this;
}
| 775 |
// Runs on the encoder thread during shutdown: flips the state to kError so
// any tasks still queued behind this one become no-ops.
void VaapiVideoEncodeAccelerator::DestroyTask() {
  DVLOGF(2);
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());
  SetState(kError);
}
| 781 |
// Rebuilds |current_sps_| from the current profile, sizes, rates and
// hardcoded defaults. Called at initialization and on every encoding
// parameter change; GeneratePackedSPS() must be re-run afterwards.
void VaapiVideoEncodeAccelerator::UpdateSPS() {
  memset(&current_sps_, 0, sizeof(media::H264SPS));

  // Spec A.2 and A.3.
  switch (profile_) {
    case media::H264PROFILE_BASELINE:
      // Due to crbug.com/345569, we don't distinguish between constrained
      // and non-constrained baseline profiles. Since many codecs can't do
      // non-constrained, and constrained is usually what we mean (and it's a
      // subset of non-constrained), default to it.
      current_sps_.profile_idc = media::H264SPS::kProfileIDCBaseline;
      current_sps_.constraint_set0_flag = true;
      break;
    case media::H264PROFILE_MAIN:
      current_sps_.profile_idc = media::H264SPS::kProfileIDCMain;
      current_sps_.constraint_set1_flag = true;
      break;
    case media::H264PROFILE_HIGH:
      current_sps_.profile_idc = media::H264SPS::kProfileIDCHigh;
      break;
    default:
      NOTIMPLEMENTED();
      return;
  }

  current_sps_.level_idc = kDefaultLevelIDC;
  current_sps_.seq_parameter_set_id = 0;
  current_sps_.chroma_format_idc = kChromaFormatIDC;

  // frame_num wraps at idr_period_ (see BeginFrame()), so MaxFrameNum must
  // equal idr_period_; it also must be a power of 2 >= 16 for the log2 math.
  DCHECK_GE(idr_period_, 1u << 4);
  current_sps_.log2_max_frame_num_minus4 = Log2OfPowerOf2(idr_period_) - 4;
  current_sps_.pic_order_cnt_type = 0;
  // POC is 2 * frame_num (see BeginFrame()), hence twice the range.
  current_sps_.log2_max_pic_order_cnt_lsb_minus4 =
      Log2OfPowerOf2(idr_period_ * 2) - 4;
  current_sps_.max_num_ref_frames = max_ref_idx_l0_size_;

  current_sps_.frame_mbs_only_flag = true;

  DCHECK_GT(mb_width_, 0u);
  DCHECK_GT(mb_height_, 0u);
  current_sps_.pic_width_in_mbs_minus1 = mb_width_ - 1;
  DCHECK(current_sps_.frame_mbs_only_flag);
  current_sps_.pic_height_in_map_units_minus1 = mb_height_ - 1;

  if (visible_size_ != coded_size_) {
    // Visible size differs from coded size, fill crop information.
    current_sps_.frame_cropping_flag = true;
    DCHECK(!current_sps_.separate_colour_plane_flag);
    // Spec table 6-1. Only 4:2:0 for now.
    DCHECK_EQ(current_sps_.chroma_format_idc, 1);
    // Spec 7.4.2.1.1. Crop is in crop units, which is 2 pixels for 4:2:0.
    const unsigned int crop_unit_x = 2;
    const unsigned int crop_unit_y = 2 * (2 - current_sps_.frame_mbs_only_flag);
    current_sps_.frame_crop_left_offset = 0;
    current_sps_.frame_crop_right_offset =
        (coded_size_.width() - visible_size_.width()) / crop_unit_x;
    current_sps_.frame_crop_top_offset = 0;
    current_sps_.frame_crop_bottom_offset =
        (coded_size_.height() - visible_size_.height()) / crop_unit_y;
  }

  current_sps_.vui_parameters_present_flag = true;
  current_sps_.timing_info_present_flag = true;
  current_sps_.num_units_in_tick = 1;
  current_sps_.time_scale = framerate_ * 2;  // See equation D-2 in spec.
  current_sps_.fixed_frame_rate_flag = true;

  current_sps_.nal_hrd_parameters_present_flag = true;
  // H.264 spec ch. E.2.2.
  current_sps_.cpb_cnt_minus1 = 0;
  current_sps_.bit_rate_scale = kBitRateScale;
  current_sps_.cpb_size_scale = kCPBSizeScale;
  current_sps_.bit_rate_value_minus1[0] =
      (bitrate_ >>
       (kBitRateScale + media::H264SPS::kBitRateScaleConstantTerm)) - 1;
  current_sps_.cpb_size_value_minus1[0] =
      (cpb_size_ >>
       (kCPBSizeScale + media::H264SPS::kCPBSizeScaleConstantTerm)) - 1;
  current_sps_.cbr_flag[0] = true;
  current_sps_.initial_cpb_removal_delay_length_minus_1 =
      media::H264SPS::kDefaultInitialCPBRemovalDelayLength - 1;
  current_sps_.cpb_removal_delay_length_minus1 =
      media::H264SPS::kDefaultInitialCPBRemovalDelayLength - 1;
  current_sps_.dpb_output_delay_length_minus1 =
      media::H264SPS::kDefaultDPBOutputDelayLength - 1;
  current_sps_.time_offset_length = media::H264SPS::kDefaultTimeOffsetLength;
  current_sps_.low_delay_hrd_flag = false;
}
| 870 |
| 871 void VaapiVideoEncodeAccelerator::GeneratePackedSPS() { |
| 872 packed_sps_.Reset(); |
| 873 |
| 874 packed_sps_.BeginNALU(media::H264NALU::kSPS, 3); |
| 875 |
| 876 packed_sps_.AppendBits(8, current_sps_.profile_idc); |
| 877 packed_sps_.AppendBool(current_sps_.constraint_set0_flag); |
| 878 packed_sps_.AppendBool(current_sps_.constraint_set1_flag); |
| 879 packed_sps_.AppendBool(current_sps_.constraint_set2_flag); |
| 880 packed_sps_.AppendBool(current_sps_.constraint_set3_flag); |
| 881 packed_sps_.AppendBool(current_sps_.constraint_set4_flag); |
| 882 packed_sps_.AppendBool(current_sps_.constraint_set5_flag); |
| 883 packed_sps_.AppendBits(2, 0); // reserved_zero_2bits |
| 884 packed_sps_.AppendBits(8, current_sps_.level_idc); |
| 885 packed_sps_.AppendUE(current_sps_.seq_parameter_set_id); |
| 886 |
| 887 if (current_sps_.profile_idc == media::H264SPS::kProfileIDCHigh) { |
| 888 packed_sps_.AppendUE(current_sps_.chroma_format_idc); |
| 889 if (current_sps_.chroma_format_idc == 3) |
| 890 packed_sps_.AppendBool(current_sps_.separate_colour_plane_flag); |
| 891 packed_sps_.AppendUE(current_sps_.bit_depth_luma_minus8); |
| 892 packed_sps_.AppendUE(current_sps_.bit_depth_chroma_minus8); |
| 893 packed_sps_.AppendBool(current_sps_.qpprime_y_zero_transform_bypass_flag); |
| 894 packed_sps_.AppendBool(current_sps_.seq_scaling_matrix_present_flag); |
| 895 CHECK(!current_sps_.seq_scaling_matrix_present_flag); |
| 896 } |
| 897 |
| 898 packed_sps_.AppendUE(current_sps_.log2_max_frame_num_minus4); |
| 899 packed_sps_.AppendUE(current_sps_.pic_order_cnt_type); |
| 900 if (current_sps_.pic_order_cnt_type == 0) |
| 901 packed_sps_.AppendUE(current_sps_.log2_max_pic_order_cnt_lsb_minus4); |
| 902 else if (current_sps_.pic_order_cnt_type == 1) { |
| 903 CHECK(1); |
| 904 } |
| 905 |
| 906 packed_sps_.AppendUE(current_sps_.max_num_ref_frames); |
| 907 packed_sps_.AppendBool(current_sps_.gaps_in_frame_num_value_allowed_flag); |
| 908 packed_sps_.AppendUE(current_sps_.pic_width_in_mbs_minus1); |
| 909 packed_sps_.AppendUE(current_sps_.pic_height_in_map_units_minus1); |
| 910 |
| 911 packed_sps_.AppendBool(current_sps_.frame_mbs_only_flag); |
| 912 if (!current_sps_.frame_mbs_only_flag) |
| 913 packed_sps_.AppendBool(current_sps_.mb_adaptive_frame_field_flag); |
| 914 |
| 915 packed_sps_.AppendBool(current_sps_.direct_8x8_inference_flag); |
| 916 |
| 917 packed_sps_.AppendBool(current_sps_.frame_cropping_flag); |
| 918 if (current_sps_.frame_cropping_flag) { |
| 919 packed_sps_.AppendUE(current_sps_.frame_crop_left_offset); |
| 920 packed_sps_.AppendUE(current_sps_.frame_crop_right_offset); |
| 921 packed_sps_.AppendUE(current_sps_.frame_crop_top_offset); |
| 922 packed_sps_.AppendUE(current_sps_.frame_crop_bottom_offset); |
| 923 } |
| 924 |
| 925 packed_sps_.AppendBool(current_sps_.vui_parameters_present_flag); |
| 926 if (current_sps_.vui_parameters_present_flag) { |
| 927 packed_sps_.AppendBool(false); // aspect_ratio_info_present_flag |
| 928 packed_sps_.AppendBool(false); // overscan_info_present_flag |
| 929 packed_sps_.AppendBool(false); // video_signal_type_present_flag |
| 930 packed_sps_.AppendBool(false); // chroma_loc_info_present_flag |
| 931 |
| 932 packed_sps_.AppendBool(current_sps_.timing_info_present_flag); |
| 933 if (current_sps_.timing_info_present_flag) { |
| 934 packed_sps_.AppendBits(32, current_sps_.num_units_in_tick); |
| 935 packed_sps_.AppendBits(32, current_sps_.time_scale); |
| 936 packed_sps_.AppendBool(current_sps_.fixed_frame_rate_flag); |
| 937 } |
| 938 |
| 939 packed_sps_.AppendBool(current_sps_.nal_hrd_parameters_present_flag); |
| 940 if (current_sps_.nal_hrd_parameters_present_flag) { |
| 941 packed_sps_.AppendUE(current_sps_.cpb_cnt_minus1); |
| 942 packed_sps_.AppendBits(4, current_sps_.bit_rate_scale); |
| 943 packed_sps_.AppendBits(4, current_sps_.cpb_size_scale); |
| 944 CHECK_LT(base::checked_cast<size_t>(current_sps_.cpb_cnt_minus1), |
| 945 arraysize(current_sps_.bit_rate_value_minus1)); |
| 946 for (int i = 0; i <= current_sps_.cpb_cnt_minus1; ++i) { |
| 947 packed_sps_.AppendUE(current_sps_.bit_rate_value_minus1[i]); |
| 948 packed_sps_.AppendUE(current_sps_.cpb_size_value_minus1[i]); |
| 949 packed_sps_.AppendBool(current_sps_.cbr_flag[i]); |
| 950 } |
| 951 packed_sps_.AppendBits( |
| 952 5, current_sps_.initial_cpb_removal_delay_length_minus_1); |
| 953 packed_sps_.AppendBits(5, current_sps_.cpb_removal_delay_length_minus1); |
| 954 packed_sps_.AppendBits(5, current_sps_.dpb_output_delay_length_minus1); |
| 955 packed_sps_.AppendBits(5, current_sps_.time_offset_length); |
| 956 } |
| 957 |
| 958 packed_sps_.AppendBool(false); // vcl_hrd_parameters_flag |
| 959 if (current_sps_.nal_hrd_parameters_present_flag) |
| 960 packed_sps_.AppendBool(current_sps_.low_delay_hrd_flag); |
| 961 |
| 962 packed_sps_.AppendBool(false); // pic_struct_present_flag |
| 963 packed_sps_.AppendBool(false); // bitstream_restriction_flag |
| 964 } |
| 965 |
| 966 packed_sps_.FinishNALU(); |
| 967 } |
| 968 |
| 969 void VaapiVideoEncodeAccelerator::UpdatePPS() { |
| 970 memset(¤t_pps_, 0, sizeof(media::H264PPS)); |
| 971 |
| 972 current_pps_.seq_parameter_set_id = current_sps_.seq_parameter_set_id; |
| 973 current_pps_.pic_parameter_set_id = 0; |
| 974 |
| 975 current_pps_.entropy_coding_mode_flag = |
| 976 current_sps_.profile_idc >= media::H264SPS::kProfileIDCMain; |
| 977 |
| 978 CHECK_GT(max_ref_idx_l0_size_, 0u); |
| 979 current_pps_.num_ref_idx_l0_default_active_minus1 = max_ref_idx_l0_size_ - 1; |
| 980 current_pps_.num_ref_idx_l1_default_active_minus1 = 0; |
| 981 DCHECK_LE(qp_, 51u); |
| 982 current_pps_.pic_init_qp_minus26 = qp_ - 26; |
| 983 current_pps_.deblocking_filter_control_present_flag = true; |
| 984 current_pps_.transform_8x8_mode_flag = |
| 985 (current_sps_.profile_idc == media::H264SPS::kProfileIDCHigh); |
| 986 } |
| 987 |
// Serializes |current_pps_| into |packed_pps_| as an Annex-B PPS NALU,
// following the syntax of H.264 spec section 7.3.2.2. The append order
// below is the bitstream syntax order and must not be changed.
void VaapiVideoEncodeAccelerator::GeneratePackedPPS() {
  packed_pps_.Reset();

  packed_pps_.BeginNALU(media::H264NALU::kPPS, 3);

  packed_pps_.AppendUE(current_pps_.pic_parameter_set_id);
  packed_pps_.AppendUE(current_pps_.seq_parameter_set_id);
  packed_pps_.AppendBool(current_pps_.entropy_coding_mode_flag);
  packed_pps_.AppendBool(
      current_pps_.bottom_field_pic_order_in_frame_present_flag);
  // Slice groups would require additional map syntax we never emit.
  CHECK_EQ(current_pps_.num_slice_groups_minus1, 0);
  packed_pps_.AppendUE(current_pps_.num_slice_groups_minus1);

  packed_pps_.AppendUE(current_pps_.num_ref_idx_l0_default_active_minus1);
  packed_pps_.AppendUE(current_pps_.num_ref_idx_l1_default_active_minus1);

  packed_pps_.AppendBool(current_pps_.weighted_pred_flag);
  packed_pps_.AppendBits(2, current_pps_.weighted_bipred_idc);

  packed_pps_.AppendSE(current_pps_.pic_init_qp_minus26);
  packed_pps_.AppendSE(current_pps_.pic_init_qs_minus26);
  packed_pps_.AppendSE(current_pps_.chroma_qp_index_offset);

  packed_pps_.AppendBool(current_pps_.deblocking_filter_control_present_flag);
  packed_pps_.AppendBool(current_pps_.constrained_intra_pred_flag);
  packed_pps_.AppendBool(current_pps_.redundant_pic_cnt_present_flag);

  packed_pps_.AppendBool(current_pps_.transform_8x8_mode_flag);
  // Scaling matrices are unsupported; emitting one would require extra
  // syntax elements after this flag.
  packed_pps_.AppendBool(current_pps_.pic_scaling_matrix_present_flag);
  DCHECK(!current_pps_.pic_scaling_matrix_present_flag);
  packed_pps_.AppendSE(current_pps_.second_chroma_qp_index_offset);

  packed_pps_.FinishNALU();
}
| 1022 |
| 1023 void VaapiVideoEncodeAccelerator::SetState(State state) { |
| 1024 // Only touch state on encoder thread, unless it's not running. |
| 1025 if (encoder_thread_.IsRunning() && |
| 1026 !encoder_thread_proxy_->BelongsToCurrentThread()) { |
| 1027 encoder_thread_proxy_->PostTask( |
| 1028 FROM_HERE, |
| 1029 base::Bind(&VaapiVideoEncodeAccelerator::SetState, |
| 1030 base::Unretained(this), |
| 1031 state)); |
| 1032 return; |
| 1033 } |
| 1034 |
| 1035 DVLOGF(1) << "setting state to: " << state; |
| 1036 state_ = state; |
| 1037 } |
| 1038 |
| 1039 void VaapiVideoEncodeAccelerator::NotifyError(Error error) { |
| 1040 if (!child_message_loop_proxy_->BelongsToCurrentThread()) { |
| 1041 child_message_loop_proxy_->PostTask( |
| 1042 FROM_HERE, |
| 1043 base::Bind( |
| 1044 &VaapiVideoEncodeAccelerator::NotifyError, weak_this_, error)); |
| 1045 return; |
| 1046 } |
| 1047 |
| 1048 if (client_) { |
| 1049 client_->NotifyError(error); |
| 1050 client_ptr_factory_.reset(); |
| 1051 } |
| 1052 } |
| 1053 |
| 1054 VaapiVideoEncodeAccelerator::EncodeJob::EncodeJob() |
| 1055 : coded_buffer(VA_INVALID_ID), keyframe(false) { |
| 1056 } |
| 1057 |
// Intentionally empty; members release their resources via their own
// destructors. NOTE(review): presumably defined out-of-line so the header
// need not see complete member types — confirm against the header.
VaapiVideoEncodeAccelerator::EncodeJob::~EncodeJob() {
}
| 1060 |
| 1061 } // namespace content |
OLD | NEW |