Chromium Code Reviews| Index: content/common/gpu/media/vaapi_video_encode_accelerator.cc |
| diff --git a/content/common/gpu/media/vaapi_video_encode_accelerator.cc b/content/common/gpu/media/vaapi_video_encode_accelerator.cc |
| new file mode 100644 |
| index 0000000000000000000000000000000000000000..ebe32d45e6200873cb1aba55cd0e3e1c9fa6087a |
| --- /dev/null |
| +++ b/content/common/gpu/media/vaapi_video_encode_accelerator.cc |
| @@ -0,0 +1,1007 @@ |
| +// Copyright (c) 2014 The Chromium Authors. All rights reserved. |
| +// Use of this source code is governed by a BSD-style license that can be |
| +// found in the LICENSE file. |
| + |
| +#include "base/bind.h" |
| +#include "base/callback.h" |
| +#include "base/message_loop/message_loop.h" |
|
wuchengli
2014/06/18 03:34:13
message_loop_proxy.h
Pawel Osciak
2014/06/18 07:05:08
Done.
|
| +#include "base/metrics/histogram.h" |
| +#include "base/numerics/safe_conversions.h" |
| +#include "cc/base/util.h" |
| +#include "content/common/gpu/media/h264_dpb.h" |
| +#include "content/common/gpu/media/vaapi_video_encode_accelerator.h" |
|
wuchengli
2014/06/18 03:34:12
This should be the first include. http://google-st
Pawel Osciak
2014/06/18 07:05:07
There was a discussion about this and iirc the con
|
| +#include "media/base/bind_to_current_loop.h" |
| +#include "third_party/libva/va/va_enc_h264.h" |
| + |
| +#define DVLOGF(level) DVLOG(level) << __FUNCTION__ << "(): " |
| + |
| +#define NOTIFY_ERROR(error, msg) \ |
| + do { \ |
| + SetState(kError); \ |
| + DVLOGF(1) << msg; \ |
| + DVLOGF(1) << "Calling NotifyError(" << error << ")"; \ |
| + NotifyError(error); \ |
| + } while (0) |
| + |
| +namespace content { |
| + |
// Records |failure| in the Media.VAVEA.EncoderFailure UMA histogram so
// encoder failures in the field can be tracked.
static void ReportToUMA(
    VaapiVideoEncodeAccelerator::VAVEAEncoderFailure failure) {
  UMA_HISTOGRAM_ENUMERATION(
      "Media.VAVEA.EncoderFailure",
      failure,
      VaapiVideoEncodeAccelerator::VAVEA_ENCODER_FAILURES_MAX);
}
| + |
// Holds an input VideoFrame queued for encode, together with the client's
// request to force a keyframe for it.
struct VaapiVideoEncodeAccelerator::InputFrameRef {
  InputFrameRef(const scoped_refptr<media::VideoFrame>& frame,
                bool force_keyframe)
      : frame(frame), force_keyframe(force_keyframe) {}
  // Source frame to be encoded.
  const scoped_refptr<media::VideoFrame> frame;
  // True if the client requested this frame be encoded as a keyframe.
  const bool force_keyframe;
};
| + |
// Holds a client-provided output bitstream buffer, mapped into our address
// space via shared memory, until it is filled and returned to the client.
struct VaapiVideoEncodeAccelerator::BitstreamBufferRef {
  BitstreamBufferRef(int32 id, scoped_ptr<base::SharedMemory> shm, size_t size)
      : id(id), shm(shm.Pass()), size(size) {}
  // Client-assigned buffer id, echoed back in BitstreamBufferReady().
  const int32 id;
  // Mapped shared memory backing the buffer.
  const scoped_ptr<base::SharedMemory> shm;
  // Usable size of the buffer in bytes.
  const size_t size;
};
| + |
// static
std::vector<media::VideoEncodeAccelerator::SupportedProfile>
VaapiVideoEncodeAccelerator::GetSupportedProfiles() {
  std::vector<SupportedProfile> profiles;
  SupportedProfile profile;

  // All profiles below share the same limits; 1920x1088 is the
  // macroblock-aligned size for 1080p content.
  profile.profile = media::H264PROFILE_MAIN;
  profile.max_resolution.SetSize(1920, 1088);
  profile.max_framerate.numerator = kDefaultFramerate;
  profile.max_framerate.denominator = 1;
  profiles.push_back(profile);

  // This is actually only constrained (see crbug.com/345569).
  profile.profile = media::H264PROFILE_BASELINE;
  profiles.push_back(profile);

  profile.profile = media::H264PROFILE_HIGH;
  profiles.push_back(profile);

  return profiles;
}
| + |
| +static unsigned int Log2OfPowerOf2(unsigned int x) { |
| + CHECK_GT(x, 0); |
| + DCHECK_EQ(x & (x - 1), 0); |
| + |
| + int log = 0; |
| + while (x) { |
| + x >>= 1; |
| + ++log; |
| + } |
| + return log; |
| +} |
| + |
VaapiVideoEncodeAccelerator::VaapiVideoEncodeAccelerator(Display* x_display)
    : profile_(media::VIDEO_CODEC_PROFILE_UNKNOWN),
      mb_width_(0),
      mb_height_(0),
      output_buffer_byte_size_(0),
      x_display_(x_display),
      state_(kUninitialized),
      frame_num_(0),
      last_idr_frame_num_(0),
      bitrate_(0),
      framerate_(0),
      cpb_size_(0),
      encoding_parameters_changed_(false),
      encoder_thread_("VAVEAEncoderThread"),
      child_message_loop_proxy_(base::MessageLoopProxy::current()),
      weak_this_ptr_factory_(this) {
  // GetWeakPtr() cannot be called from the initializer list: the factory
  // must be fully constructed first, so this stays in the body.
  weak_this_ = weak_this_ptr_factory_.GetWeakPtr();

  // Encoding parameters; currently fixed to the defaults, but kept as
  // members so they can become dynamic later.
  max_ref_idx_l0_size_ = kMaxNumReferenceFrames;
  qp_ = kDefaultQP;
  idr_period_ = kIDRPeriod;
  i_period_ = kIPeriod;
  ip_period_ = kIPPeriod;
}
| + |
VaapiVideoEncodeAccelerator::~VaapiVideoEncodeAccelerator() {
  DVLOGF(4);
  // Destruction must happen on the child thread, and only after the
  // encoder thread has been joined (see Destroy()).
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  DCHECK(!encoder_thread_.IsRunning());
}
| + |
| +bool VaapiVideoEncodeAccelerator::Initialize( |
| + media::VideoFrame::Format format, |
| + const gfx::Size& input_visible_size, |
| + media::VideoCodecProfile output_profile, |
| + uint32 initial_bitrate, |
| + Client* client) { |
| + DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
| + DCHECK(!encoder_thread_.IsRunning()); |
| + DCHECK_EQ(state_, kUninitialized); |
| + |
| + DVLOG(3) << "Initializing VAVEA, input_format: " |
| + << media::VideoFrame::FormatToString(format) |
| + << ", input_visible_size: " << input_visible_size.ToString() |
| + << ", output_profile: " << output_profile |
| + << ", initial_bitrate: " << initial_bitrate; |
| + |
| + client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client)); |
| + client_ = client_ptr_factory_->GetWeakPtr(); |
| + |
| + if (output_profile < media::H264PROFILE_BASELINE || |
| + output_profile > media::H264PROFILE_MAIN) { |
| + DVLOGF(1) << "Unsupported output profile"; |
|
wuchengli
2014/06/18 03:34:13
print |output_profile|
Pawel Osciak
2014/06/18 07:05:07
Done.
|
| + return false; |
| + } |
| + |
| + if (format != media::VideoFrame::I420) { |
| + DVLOGF(1) << "Unsupported input format"; |
|
wuchengli
2014/06/18 03:34:13
print |format|
Pawel Osciak
2014/06/18 07:05:07
Done.
|
| + return false; |
| + } |
| + |
| + profile_ = output_profile; |
| + visible_size_ = input_visible_size; |
| + // 4:2:0 format has to be 2-aligned. |
| + DCHECK_EQ(visible_size_.width() % 2, 0); |
| + DCHECK_EQ(visible_size_.height() % 2, 0); |
| + mb_width_ = cc::RoundUp(visible_size_.width(), 16) / 16; |
| + mb_height_ = cc::RoundUp(visible_size_.height(), 16) / 16; |
| + coded_size_ = gfx::Size(mb_width_ * 16, mb_height_ * 16); |
|
wuchengli
2014/06/18 03:34:12
Switch the order of coded_size_ and mb_width_ so w
Pawel Osciak
2014/06/18 07:05:08
This is intentional. That would involve a cast in
|
| + output_buffer_byte_size_ = coded_size_.GetArea(); |
| + |
| + UpdateRates(initial_bitrate, kDefaultFramerate); |
| + |
| + vaapi_wrapper_ = VaapiWrapper::Create(VaapiWrapper::kEncode, |
| + output_profile, |
| + x_display_, |
| + base::Bind(&ReportToUMA, VAAPI_ERROR)); |
| + if (!vaapi_wrapper_.get()) { |
|
wuchengli
2014/06/18 03:34:13
People were removing get() of scoped_ptr boolean c
Pawel Osciak
2014/06/18 07:05:07
Done.
|
| + DVLOG(1) << "Failed initializing VAAPI"; |
| + return false; |
| + } |
| + |
| + if (!encoder_thread_.Start()) { |
| + DVLOGF(1) << "Failed to start encoder thread"; |
| + return false; |
|
wuchengli
2014/06/18 03:34:13
Move encoder_thread creation before vaapi_wrapper_
Pawel Osciak
2014/06/18 07:05:07
This is intentional and defensive so that I don't
wuchengli
2014/06/18 15:57:43
I was thinking about vaapi_wrapper cleanup. I just
|
| + } |
| + encoder_thread_proxy_ = encoder_thread_.message_loop_proxy(); |
| + |
| + // Finish the remaining initialization on the encoder thread. |
| + encoder_thread_proxy_->PostTask( |
| + FROM_HERE, |
| + base::Bind(&VaapiVideoEncodeAccelerator::InitializeTask, |
| + base::Unretained(this))); |
| + |
| + return true; |
| +} |
| + |
// Encoder-thread half of Initialize(): allocates surfaces, builds the
// initial SPS/PPS (and their packed forms), asks the client for output
// buffers, and moves to the kEncoding state.
void VaapiVideoEncodeAccelerator::InitializeTask() {
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());
  DCHECK_EQ(state_, kUninitialized);

  // Surface recycling must happen back on this (encoder) thread, so bounce
  // the release callback through its message loop.
  va_surface_release_cb_ = media::BindToCurrentLoop(
      base::Bind(&VaapiVideoEncodeAccelerator::RecycleVASurfaceID, weak_this_));

  if (!vaapi_wrapper_->CreateSurfaces(
          coded_size_, kNumSurfaces, &available_va_surface_ids_)) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed creating VASurfaces");
    return;
  }

  // Stream headers are generated up front and resubmitted on every IDR.
  if (!UpdateSPS() || !GeneratePackedSPS()) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed creating SPS");
    return;
  }

  if (!UpdatePPS() || !GeneratePackedPPS()) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed creating PPS");
    return;
  }

  // Request output buffers from the client; encode requests may start
  // arriving as soon as this is posted.
  child_message_loop_proxy_->PostTask(
      FROM_HERE,
      base::Bind(&Client::RequireBitstreamBuffers,
                 client_,
                 kNumInputBuffers,
                 coded_size_,
                 output_buffer_byte_size_));

  SetState(kEncoding);
}
| + |
// Returns |va_surface_id| to the free pool once its VASurface is released;
// a newly freed surface may unblock a pending encode, so retry encoding.
void VaapiVideoEncodeAccelerator::RecycleVASurfaceID(
    VASurfaceID va_surface_id) {
  DVLOGF(4) << "va_surface_id: " << va_surface_id;
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());

  available_va_surface_ids_.push_back(va_surface_id);
  EncodeFrameTask();
}
| + |
| +void VaapiVideoEncodeAccelerator::BeginFrame(bool force_keyframe) { |
| + memset(&curr_pic_, 0, sizeof(curr_pic_)); |
| + |
| + curr_pic_.frame_num = frame_num_++; |
| + frame_num_ %= idr_period_; |
| + |
| + if (curr_pic_.frame_num % i_period_ == 0 || force_keyframe) |
|
wuchengli
2014/06/18 07:32:51
nit: add () around curr_pic_.frame_num % i_period_
|
| + curr_pic_.type = media::H264SliceHeader::kISlice; |
| + else |
| + curr_pic_.type = media::H264SliceHeader::kPSlice; |
| + |
| + if (curr_pic_.frame_num % idr_period_ == 0) { |
| + curr_pic_.idr = true; |
| + last_idr_frame_num_ = curr_pic_.frame_num; |
| + ref_pic_list0_.clear(); |
| + } |
| + |
| + if (curr_pic_.type != media::H264SliceHeader::kBSlice) |
| + curr_pic_.ref = true; |
| + |
| + curr_pic_.pic_order_cnt = curr_pic_.frame_num * 2; |
| + curr_pic_.top_field_order_cnt = curr_pic_.pic_order_cnt; |
| + curr_pic_.pic_order_cnt_lsb = curr_pic_.pic_order_cnt; |
| + |
| + curr_encode_job_->keyframe = |
| + (curr_pic_.type == media::H264SliceHeader::kISlice); |
| + |
| + DVLOG(4) << "Starting a new frame, type: " << curr_pic_.type |
| + << (force_keyframe ? " (forced keyframe)" : "") |
| + << " frame_num: " << curr_pic_.frame_num |
| + << " POC: " << curr_pic_.pic_order_cnt; |
| +} |
| + |
// Finishes the current frame: updates the reference picture list with the
// newly reconstructed surface and queues the job for bitstream retrieval.
void VaapiVideoEncodeAccelerator::EndFrame() {
  // Store the picture on the list of reference pictures and keep the list
  // below maximum size, dropping oldest references.
  if (curr_pic_.ref)
    ref_pic_list0_.push_front(curr_encode_job_->recon_surface);
  size_t max_num_ref_frames =
      base::checked_cast<size_t>(curr_sps_.max_num_ref_frames);
  while (ref_pic_list0_.size() > max_num_ref_frames)
    ref_pic_list0_.pop_back();

  // Ownership of the job moves to the submitted queue until its output is
  // downloaded in TryToReturnBitstreamBuffer().
  submitted_encode_jobs_.push(make_linked_ptr(curr_encode_job_.release()));
}
| + |
// Resets |va_pic| to an unused/invalid reference picture slot.
static void InitVAPicture(VAPictureH264* va_pic) {
  memset(va_pic, 0, sizeof(*va_pic));
  va_pic->picture_id = VA_INVALID_ID;
  va_pic->flags = VA_PICTURE_H264_INVALID;
}
| + |
// Fills in and submits all VA-API buffers needed to encode the current
// frame: sequence, picture and slice parameter buffers, plus the rate
// control, framerate and HRD misc parameter buffers. Returns false if any
// submission fails.
bool VaapiVideoEncodeAccelerator::SubmitFrameParameters() {
  VAEncSequenceParameterBufferH264 seq_param;
  memset(&seq_param, 0, sizeof(seq_param));

// Copies the identically-named field from curr_sps_ into |seq_param|.
#define SPS_TO_SP(a) seq_param.a = curr_sps_.a;
  SPS_TO_SP(seq_parameter_set_id);
  SPS_TO_SP(level_idc);

  seq_param.intra_period = i_period_;
  seq_param.intra_idr_period = idr_period_;
  seq_param.ip_period = ip_period_;
  seq_param.bits_per_second = bitrate_;

  SPS_TO_SP(max_num_ref_frames);
  seq_param.picture_width_in_mbs = mb_width_;
  seq_param.picture_height_in_mbs = mb_height_;

// Copies the identically-named field from curr_sps_ into seq_fields bits.
#define SPS_TO_SP_FS(a) seq_param.seq_fields.bits.a = curr_sps_.a;
  SPS_TO_SP_FS(chroma_format_idc);
  SPS_TO_SP_FS(frame_mbs_only_flag);
  SPS_TO_SP_FS(log2_max_frame_num_minus4);
  SPS_TO_SP_FS(pic_order_cnt_type);
  SPS_TO_SP_FS(log2_max_pic_order_cnt_lsb_minus4);
#undef SPS_TO_SP_FS

  SPS_TO_SP(bit_depth_luma_minus8);
  SPS_TO_SP(bit_depth_chroma_minus8);

  SPS_TO_SP(frame_cropping_flag);
  if (curr_sps_.frame_cropping_flag) {
    SPS_TO_SP(frame_crop_left_offset);
    SPS_TO_SP(frame_crop_right_offset);
    SPS_TO_SP(frame_crop_top_offset);
    SPS_TO_SP(frame_crop_bottom_offset);
  }

  SPS_TO_SP(vui_parameters_present_flag);
// Copies the identically-named field from curr_sps_ into vui_fields bits.
#define SPS_TO_SP_VF(a) seq_param.vui_fields.bits.a = curr_sps_.a;
  SPS_TO_SP_VF(timing_info_present_flag);
#undef SPS_TO_SP_VF
  SPS_TO_SP(num_units_in_tick);
  SPS_TO_SP(time_scale);
#undef SPS_TO_SP

  if (!vaapi_wrapper_->SubmitBuffer(VAEncSequenceParameterBufferType,
                                    sizeof(seq_param),
                                    &seq_param))
    return false;

  VAEncPictureParameterBufferH264 pic_param;
  memset(&pic_param, 0, sizeof(pic_param));

  pic_param.CurrPic.picture_id = curr_encode_job_->recon_surface->id();
  pic_param.CurrPic.TopFieldOrderCnt = curr_pic_.top_field_order_cnt;
  pic_param.CurrPic.BottomFieldOrderCnt = curr_pic_.bottom_field_order_cnt;
  pic_param.CurrPic.flags = 0;

  // Invalidate all reference slots first, then fill in the active ones.
  for (size_t i = 0; i < arraysize(pic_param.ReferenceFrames); ++i)
    InitVAPicture(&pic_param.ReferenceFrames[i]);

  DCHECK_LE(ref_pic_list0_.size(), arraysize(pic_param.ReferenceFrames));
  RefPicList::const_iterator iter = ref_pic_list0_.begin();
  for (size_t i = 0;
       i < arraysize(pic_param.ReferenceFrames) && iter != ref_pic_list0_.end();
       ++iter, ++i) {
    pic_param.ReferenceFrames[i].picture_id = (*iter)->id();
    pic_param.ReferenceFrames[i].flags = 0;
  }

  pic_param.coded_buf = curr_encode_job_->coded_buffer;
  pic_param.pic_parameter_set_id = curr_pps_.pic_parameter_set_id;
  pic_param.seq_parameter_set_id = curr_pps_.seq_parameter_set_id;
  pic_param.frame_num = curr_pic_.frame_num;
  pic_param.pic_init_qp = qp_;
  pic_param.num_ref_idx_l0_active_minus1 = max_ref_idx_l0_size_ - 1;
  pic_param.pic_fields.bits.idr_pic_flag = curr_pic_.idr;
  pic_param.pic_fields.bits.reference_pic_flag = curr_pic_.ref;
// Copies the identically-named field from curr_pps_ into pic_fields bits.
#define PPS_TO_PP_PF(a) pic_param.pic_fields.bits.a = curr_pps_.a;
  PPS_TO_PP_PF(entropy_coding_mode_flag);
  PPS_TO_PP_PF(transform_8x8_mode_flag);
  PPS_TO_PP_PF(deblocking_filter_control_present_flag);
#undef PPS_TO_PP_PF

  if (!vaapi_wrapper_->SubmitBuffer(VAEncPictureParameterBufferType,
                                    sizeof(pic_param),
                                    &pic_param))
    return false;

  VAEncSliceParameterBufferH264 slice_param;
  memset(&slice_param, 0, sizeof(slice_param));

  // A single slice covering the whole frame.
  slice_param.num_macroblocks = mb_width_ * mb_height_;
  slice_param.macroblock_info = VA_INVALID_ID;
  slice_param.slice_type = curr_pic_.type;
  slice_param.pic_parameter_set_id = curr_pps_.pic_parameter_set_id;
  slice_param.idr_pic_id = last_idr_frame_num_;
  slice_param.pic_order_cnt_lsb = curr_pic_.pic_order_cnt_lsb;
  slice_param.num_ref_idx_active_override_flag = true;

  for (size_t i = 0; i < arraysize(slice_param.RefPicList0); ++i)
    InitVAPicture(&slice_param.RefPicList0[i]);

  // List 1 stays empty: B slices are not produced.
  for (size_t i = 0; i < arraysize(slice_param.RefPicList1); ++i)
    InitVAPicture(&slice_param.RefPicList1[i]);

  DCHECK_LE(ref_pic_list0_.size(), arraysize(slice_param.RefPicList0));
  iter = ref_pic_list0_.begin();
  for (size_t i = 0;
       i < arraysize(slice_param.RefPicList0) && iter != ref_pic_list0_.end();
       ++iter, ++i) {
    InitVAPicture(&slice_param.RefPicList0[i]);
    slice_param.RefPicList0[i].picture_id = (*iter)->id();
    slice_param.RefPicList0[i].flags = 0;
  }

  if (!vaapi_wrapper_->SubmitBuffer(VAEncSliceParameterBufferType,
                                    sizeof(slice_param),
                                    &slice_param))
    return false;

  VAEncMiscParameterRateControl rate_control_param;
  memset(&rate_control_param, 0, sizeof(rate_control_param));
  rate_control_param.bits_per_second = bitrate_;
  rate_control_param.target_percentage = 90;
  rate_control_param.window_size = kCPBWindowSizeMs;
  rate_control_param.initial_qp = qp_;
  rate_control_param.rc_flags.bits.disable_frame_skip = true;

  if (!vaapi_wrapper_->SubmitVAEncMiscParamBuffer(
          VAEncMiscParameterTypeRateControl,
          sizeof(rate_control_param),
          &rate_control_param))
    return false;

  VAEncMiscParameterFrameRate framerate_param;
  memset(&framerate_param, 0, sizeof(framerate_param));
  framerate_param.framerate = framerate_;
  if (!vaapi_wrapper_->SubmitVAEncMiscParamBuffer(
          VAEncMiscParameterTypeFrameRate,
          sizeof(framerate_param),
          &framerate_param))
    return false;

  VAEncMiscParameterHRD hrd_param;
  memset(&hrd_param, 0, sizeof(hrd_param));
  hrd_param.buffer_size = cpb_size_;
  hrd_param.initial_buffer_fullness = cpb_size_ / 2;
  if (!vaapi_wrapper_->SubmitVAEncMiscParamBuffer(VAEncMiscParameterTypeHRD,
                                                  sizeof(hrd_param),
                                                  &hrd_param))
    return false;

  return true;
}
| + |
// Submits the pre-generated packed SPS and PPS NALUs to be inserted into
// the stream ahead of the current frame. Only needed, and only done, for
// I slices (keyframes).
bool VaapiVideoEncodeAccelerator::SubmitHeadersIfNeeded() {
  if (curr_pic_.type != media::H264SliceHeader::kISlice)
    return true;

  // Submit SPS.
  VAEncPackedHeaderParameterBuffer par_buffer;
  memset(&par_buffer, 0, sizeof(par_buffer));
  par_buffer.type = VAEncPackedHeaderSequence;
  par_buffer.bit_length = packed_sps_.BytesInBuffer() * 8;

  if (!vaapi_wrapper_->SubmitBuffer(VAEncPackedHeaderParameterBufferType,
                                    sizeof(par_buffer),
                                    &par_buffer))
    return false;

  if (!vaapi_wrapper_->SubmitBuffer(VAEncPackedHeaderDataBufferType,
                                    packed_sps_.BytesInBuffer(),
                                    packed_sps_.data()))
    return false;

  // Submit PPS.
  memset(&par_buffer, 0, sizeof(par_buffer));
  par_buffer.type = VAEncPackedHeaderPicture;
  par_buffer.bit_length = packed_pps_.BytesInBuffer() * 8;

  if (!vaapi_wrapper_->SubmitBuffer(VAEncPackedHeaderParameterBufferType,
                                    sizeof(par_buffer),
                                    &par_buffer))
    return false;

  if (!vaapi_wrapper_->SubmitBuffer(VAEncPackedHeaderDataBufferType,
                                    packed_pps_.BytesInBuffer(),
                                    packed_pps_.data()))
    return false;

  return true;
}
| + |
// Kicks off the hardware encode of the current job; all parameter buffers
// for the frame must already have been submitted.
bool VaapiVideoEncodeAccelerator::ExecuteEncode() {
  DVLOGF(3) << "Encoding frame_num: " << curr_pic_.frame_num;
  return vaapi_wrapper_->ExecuteAndDestroyPendingBuffers(
      curr_encode_job_->input_surface->id());
}
| + |
// Uploads |frame| into the current job's input VASurface.
bool VaapiVideoEncodeAccelerator::UploadFrame(
    const scoped_refptr<media::VideoFrame>& frame) {
  return vaapi_wrapper_->UploadVideoFrameToSurface(
      frame, curr_encode_job_->input_surface->id());
}
| + |
// If both a submitted (finished or in-flight) encode job and a free client
// bitstream buffer are available, downloads the coded data into the buffer
// and posts BitstreamBufferReady() back to the client thread.
void VaapiVideoEncodeAccelerator::TryToReturnBitstreamBuffer() {
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());

  // Never hand out buffers in error state.
  if (state_ != kEncoding)
    return;

  if (submitted_encode_jobs_.empty() || available_bitstream_buffers_.empty())
    return;

  linked_ptr<BitstreamBufferRef> buffer = available_bitstream_buffers_.front();
  available_bitstream_buffers_.pop();

  uint8* target_data = reinterpret_cast<uint8*>(buffer->shm->memory());

  linked_ptr<EncodeJob> encode_job = submitted_encode_jobs_.front();
  submitted_encode_jobs_.pop();

  // Copies the coded stream into the client's shared memory; this also
  // destroys the coded buffer once downloaded.
  size_t data_size = 0;
  if (!vaapi_wrapper_->DownloadAndDestroyCodedBuffer(
          encode_job->coded_buffer,
          encode_job->input_surface->id(),
          target_data,
          buffer->size,
          &data_size)) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed downloading coded buffer");
    return;
  }

  DVLOG(3) << "Returning bitstream buffer "
           << (encode_job->keyframe ? "(keyframe)" : "")
           << " id: " << buffer->id << " size: " << data_size;

  child_message_loop_proxy_->PostTask(FROM_HERE,
                                      base::Bind(&Client::BitstreamBufferReady,
                                                 client_,
                                                 buffer->id,
                                                 data_size,
                                                 encode_job->keyframe));
}
| + |
// Client entry point: queues |frame| for encoding. All actual work happens
// on the encoder thread via EncodeTask().
void VaapiVideoEncodeAccelerator::Encode(
    const scoped_refptr<media::VideoFrame>& frame,
    bool force_keyframe) {
  DVLOGF(3) << "Frame timestamp: " << frame->timestamp().InMilliseconds()
            << " force_keyframe: " << force_keyframe;
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());

  encoder_thread_proxy_->PostTask(
      FROM_HERE,
      base::Bind(&VaapiVideoEncodeAccelerator::EncodeTask,
                 base::Unretained(this),
                 frame,
                 force_keyframe));
}
| + |
// Sets up curr_encode_job_ for the next frame: allocates a coded buffer
// and claims input/reconstructed surfaces from the free pool. Returns
// false if not enough free surfaces are available yet; the caller retries
// once surfaces are recycled.
bool VaapiVideoEncodeAccelerator::PrepareNextJob() {
  if (available_va_surface_ids_.size() < kMinSurfacesToEncode)
    return false;

  DCHECK(!curr_encode_job_.get());
  curr_encode_job_.reset(new EncodeJob());

  if (!vaapi_wrapper_->CreateCodedBuffer(output_buffer_byte_size_,
                                         &curr_encode_job_->coded_buffer)) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed creating coded buffer");
    return false;
  }

  // One surface for the source frame, one for the reconstructed picture.
  curr_encode_job_->input_surface =
      new VASurface(available_va_surface_ids_.back(), va_surface_release_cb_);
  available_va_surface_ids_.pop_back();

  curr_encode_job_->recon_surface =
      new VASurface(available_va_surface_ids_.back(), va_surface_release_cb_);
  available_va_surface_ids_.pop_back();

  // Reference surfaces are needed until the job is done, but they get
  // removed from ref_pic_list0_ when it's full at the end of job submission.
  // Keep refs to them along with the job and only release after sync.
  RefPicList::const_iterator iter = ref_pic_list0_.begin();
  for (; iter != ref_pic_list0_.end(); ++iter)
    curr_encode_job_->reference_surfaces.push_back(*iter);

  return true;
}
| + |
// Encoder-thread half of Encode(): queues the frame and attempts to start
// encoding immediately if resources allow.
void VaapiVideoEncodeAccelerator::EncodeTask(
    const scoped_refptr<media::VideoFrame>& frame,
    bool force_keyframe) {
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());
  DCHECK_NE(state_, kUninitialized);

  encoder_input_queue_.push(
      make_linked_ptr(new InputFrameRef(frame, force_keyframe)));
  EncodeFrameTask();
}
| + |
// Encodes the frame at the head of the input queue, if possible. A no-op
// when not in the kEncoding state, when there is no queued input, or when
// surfaces are not yet available; retried whenever inputs arrive or
// surfaces are recycled.
void VaapiVideoEncodeAccelerator::EncodeFrameTask() {
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());

  if (state_ != kEncoding || encoder_input_queue_.empty())
    return;

  if (!PrepareNextJob()) {
    DVLOGF(4) << "Not ready for next frame yet";
    return;
  }

  linked_ptr<InputFrameRef> frame_ref = encoder_input_queue_.front();
  encoder_input_queue_.pop();

  if (!UploadFrame(frame_ref->frame)) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed uploading source frame to HW.");
    return;
  }

  // A pending parameter change forces a keyframe so new headers take effect.
  BeginFrame(frame_ref->force_keyframe || encoding_parameters_changed_);
  encoding_parameters_changed_ = false;

  if (!SubmitFrameParameters()) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed submitting frame parameters.");
    return;
  }

  if (!SubmitHeadersIfNeeded()) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed submitting frame headers.");
    return;
  }

  if (!ExecuteEncode()) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed submitting encode job to HW.");
    return;
  }

  EndFrame();
  TryToReturnBitstreamBuffer();
}
| + |
// Client entry point: validates and maps a client-provided output buffer,
// then hands it to the encoder thread for filling.
void VaapiVideoEncodeAccelerator::UseOutputBitstreamBuffer(
    const media::BitstreamBuffer& buffer) {
  DVLOGF(4) << "id: " << buffer.id();
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());

  if (buffer.size() < output_buffer_byte_size_) {
    NOTIFY_ERROR(kInvalidArgumentError, "Provided bitstream buffer too small");
    return;
  }

  // Map the shared memory here (on the child thread) so the encoder thread
  // can write directly into it later.
  scoped_ptr<base::SharedMemory> shm(
      new base::SharedMemory(buffer.handle(), false));
  if (!shm->Map(buffer.size())) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed mapping shared memory.");
    return;
  }

  scoped_ptr<BitstreamBufferRef> buffer_ref(
      new BitstreamBufferRef(buffer.id(), shm.Pass(), buffer.size()));

  encoder_thread_proxy_->PostTask(
      FROM_HERE,
      base::Bind(&VaapiVideoEncodeAccelerator::UseOutputBitstreamBufferTask,
                 base::Unretained(this),
                 base::Passed(&buffer_ref)));
}
| + |
// Encoder-thread half of UseOutputBitstreamBuffer(): a newly available
// output buffer may allow a pending encode result to be returned.
void VaapiVideoEncodeAccelerator::UseOutputBitstreamBufferTask(
    scoped_ptr<BitstreamBufferRef> buffer_ref) {
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());
  DCHECK_NE(state_, kUninitialized);

  available_bitstream_buffers_.push(make_linked_ptr(buffer_ref.release()));
  TryToReturnBitstreamBuffer();
}
| + |
// Client entry point: forwards a bitrate/framerate change request to the
// encoder thread.
void VaapiVideoEncodeAccelerator::RequestEncodingParametersChange(
    uint32 bitrate,
    uint32 framerate) {
  DVLOGF(2) << "bitrate: " << bitrate << " framerate: " << framerate;
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());

  encoder_thread_proxy_->PostTask(
      FROM_HERE,
      base::Bind(
          &VaapiVideoEncodeAccelerator::RequestEncodingParametersChangeTask,
          base::Unretained(this),
          bitrate,
          framerate));
}
| + |
// Stores the new bitrate/framerate and re-derives the CPB size from them.
// Called once from Initialize() on the child thread (before the encoder
// thread exists), afterwards only on the encoder thread.
void VaapiVideoEncodeAccelerator::UpdateRates(uint32 bitrate,
                                              uint32 framerate) {
  if (encoder_thread_.IsRunning())
    DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());
  DCHECK_NE(bitrate, 0);
  DCHECK_NE(framerate, 0);
  bitrate_ = base::checked_cast<unsigned int>(bitrate);
  framerate_ = base::checked_cast<unsigned int>(framerate);
  // Size the coded picture buffer for kCPBWindowSizeMs worth of stream.
  cpb_size_ = bitrate_ * kCPBWindowSizeMs / 1000;
}
| + |
// Encoder-thread half of RequestEncodingParametersChange(): updates the
// rates, regenerates the SPS, and flags that the new parameters must be
// submitted (with a keyframe) along with the next processed frame.
void VaapiVideoEncodeAccelerator::RequestEncodingParametersChangeTask(
    uint32 bitrate,
    uint32 framerate) {
  DVLOGF(2) << "bitrate: " << bitrate << " framerate: " << framerate;
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());
  DCHECK_NE(state_, kUninitialized);

  UpdateRates(bitrate, framerate);

  if (!UpdateSPS() || !GeneratePackedSPS()) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed creating SPS");
    return;
  }

  // Submit new parameters along with next frame that will be processed.
  encoding_parameters_changed_ = true;
}
| + |
| +void VaapiVideoEncodeAccelerator::Destroy() { |
| + DVLOGF(1); |
| + DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
| + |
| + // Can't call client anymore after Destroy() returns. |
| + client_ptr_factory_.reset(); |
| + |
| + // Early-exit encoder tasks if they are running and join the thread. |
| + if (encoder_thread_.IsRunning()) { |
| + encoder_thread_.message_loop()->PostTask( |
| + FROM_HERE, |
| + base::Bind(&VaapiVideoEncodeAccelerator::DestroyTask, |
| + base::Unretained(this))); |
| + encoder_thread_.Stop(); |
| + } |
|
wuchengli
2014/06/18 03:34:12
delete |this| here according to API.
Pawel Osciak
2014/06/18 07:05:08
Done.
|
| +} |
| + |
void VaapiVideoEncodeAccelerator::DestroyTask() {
  DVLOGF(2);
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());
  // Moving to kError makes any tasks still queued behind this one
  // early-exit instead of touching state that is being torn down.
  SetState(kError);
}
| + |
// Regenerates curr_sps_ from the current encoding parameters (profile,
// sizes, bitrate, framerate). Called during initialization and whenever
// the rates change. Returns false for unsupported profiles.
bool VaapiVideoEncodeAccelerator::UpdateSPS() {
  memset(&curr_sps_, 0, sizeof(media::H264SPS));

  // Spec A.2 and A.3.
  switch (profile_) {
    case media::H264PROFILE_BASELINE:
      // Due to crbug.com/345569, we don't distinguish between constrained
      // and non-constrained baseline profiles. Since many codecs can't do
      // non-constrained, and constrained is usually what we mean (and it's a
      // subset of non-constrained), default to it.
      curr_sps_.profile_idc = media::H264SPS::kProfileIDCBaseline;
      curr_sps_.constraint_set0_flag = true;
      break;
    case media::H264PROFILE_MAIN:
      curr_sps_.profile_idc = media::H264SPS::kProfileIDCMain;
      curr_sps_.constraint_set1_flag = true;
      break;
    case media::H264PROFILE_HIGH:
      curr_sps_.profile_idc = media::H264SPS::kProfileIDCHigh;
      break;
    default:
      NOTIMPLEMENTED();
      return false;
  }

  curr_sps_.level_idc = kDefaultLevelIDC;
  curr_sps_.seq_parameter_set_id = 0;
  curr_sps_.chroma_format_idc = kChromaFormatIDC;

  // frame_num must be able to count a full IDR period (spec 7.4.2.1.1).
  DCHECK_GE(idr_period_, 1 << 4);
  curr_sps_.log2_max_frame_num_minus4 = Log2OfPowerOf2(idr_period_) - 4;
  curr_sps_.pic_order_cnt_type = 0;
  curr_sps_.log2_max_pic_order_cnt_lsb_minus4 =
      Log2OfPowerOf2(idr_period_ * 2) - 4;
  curr_sps_.max_num_ref_frames = max_ref_idx_l0_size_;

  curr_sps_.frame_mbs_only_flag = true;

  DCHECK_GT(mb_width_, 0);
  DCHECK_GT(mb_height_, 0);
  curr_sps_.pic_width_in_mbs_minus1 = mb_width_ - 1;
  DCHECK(curr_sps_.frame_mbs_only_flag);
  curr_sps_.pic_height_in_map_units_minus1 = mb_height_ - 1;

  if (visible_size_ != coded_size_) {
    // Visible size differs from coded size, fill crop information.
    curr_sps_.frame_cropping_flag = true;
    DCHECK(!curr_sps_.separate_colour_plane_flag);
    // Spec table 6-1. Only 4:2:0 for now.
    DCHECK_EQ(curr_sps_.chroma_format_idc, 1);
    // Spec 7.4.2.1.1. Crop is in crop units, which is 2 pixels for 4:2:0.
    const unsigned int crop_unit_x = 2;
    const unsigned int crop_unit_y = 2 * (2 - curr_sps_.frame_mbs_only_flag);
    curr_sps_.frame_crop_left_offset = 0;
    curr_sps_.frame_crop_right_offset =
        (coded_size_.width() - visible_size_.width()) / crop_unit_x;
    curr_sps_.frame_crop_top_offset = 0;
    curr_sps_.frame_crop_bottom_offset =
        (coded_size_.height() - visible_size_.height()) / crop_unit_y;
  }

  curr_sps_.vui_parameters_present_flag = true;
  curr_sps_.timing_info_present_flag = true;
  curr_sps_.num_units_in_tick = 1;
  curr_sps_.time_scale = framerate_ * 2;  // See equation D-2 in spec.
  curr_sps_.fixed_frame_rate_flag = true;

  curr_sps_.nal_hrd_parameters_present_flag = true;
  // H.264 spec ch. E.2.2.
  curr_sps_.cpb_cnt_minus1 = 0;
  curr_sps_.bit_rate_scale = kBitRateScale;
  curr_sps_.cpb_size_scale = kCPBSizeScale;
  curr_sps_.bit_rate_value_minus1[0] =
      (bitrate_ >>
       (kBitRateScale + media::H264SPS::kBitRateScaleConstantTerm)) - 1;
  curr_sps_.cpb_size_value_minus1[0] =
      (cpb_size_ >>
       (kCPBSizeScale + media::H264SPS::kCPBSizeScaleConstantTerm)) - 1;
  curr_sps_.cbr_flag[0] = true;
  curr_sps_.initial_cpb_removal_delay_length_minus_1 =
      media::H264SPS::kDefaultInitialCPBRemovalDelayLength - 1;
  curr_sps_.cpb_removal_delay_length_minus1 =
      media::H264SPS::kDefaultInitialCPBRemovalDelayLength - 1;
  curr_sps_.dpb_output_delay_length_minus1 =
      media::H264SPS::kDefaultDPBOutputDelayLength - 1;
  curr_sps_.time_offset_length = media::H264SPS::kDefaultTimeOffsetLength;
  curr_sps_.low_delay_hrd_flag = false;

  return true;
}
| + |
| +bool VaapiVideoEncodeAccelerator::GeneratePackedSPS() { |
|
wuchengli
2014/06/18 03:34:13
This always return true. Remove bool return value.
Pawel Osciak
2014/06/18 07:05:08
Done.
|
| + packed_sps_.Reset(); |
| + |
| + packed_sps_.BeginNALU(media::H264NALU::kSPS, 3); |
| + |
| + packed_sps_.AppendBits(8, curr_sps_.profile_idc); |
| + packed_sps_.AppendBool(curr_sps_.constraint_set0_flag); |
| + packed_sps_.AppendBool(curr_sps_.constraint_set1_flag); |
| + packed_sps_.AppendBool(curr_sps_.constraint_set2_flag); |
| + packed_sps_.AppendBool(curr_sps_.constraint_set3_flag); |
| + packed_sps_.AppendBool(curr_sps_.constraint_set4_flag); |
| + packed_sps_.AppendBool(curr_sps_.constraint_set5_flag); |
| + packed_sps_.AppendBits(2, 0); // reserved_zero_2bits |
| + packed_sps_.AppendBits(8, curr_sps_.level_idc); |
| + packed_sps_.AppendUE(curr_sps_.seq_parameter_set_id); |
| + |
| + if (curr_sps_.profile_idc == media::H264SPS::kProfileIDCHigh) { |
| + packed_sps_.AppendUE(curr_sps_.chroma_format_idc); |
| + if (curr_sps_.chroma_format_idc == 3) |
| + packed_sps_.AppendBool(curr_sps_.separate_colour_plane_flag); |
| + packed_sps_.AppendUE(curr_sps_.bit_depth_luma_minus8); |
| + packed_sps_.AppendUE(curr_sps_.bit_depth_chroma_minus8); |
| + packed_sps_.AppendBool(curr_sps_.qpprime_y_zero_transform_bypass_flag); |
| + packed_sps_.AppendBool(curr_sps_.seq_scaling_matrix_present_flag); |
| + CHECK(!curr_sps_.seq_scaling_matrix_present_flag); |
| + } |
| + |
| + packed_sps_.AppendUE(curr_sps_.log2_max_frame_num_minus4); |
| + packed_sps_.AppendUE(curr_sps_.pic_order_cnt_type); |
| + if (curr_sps_.pic_order_cnt_type == 0) |
| + packed_sps_.AppendUE(curr_sps_.log2_max_pic_order_cnt_lsb_minus4); |
| + else if (curr_sps_.pic_order_cnt_type == 1) { |
| + CHECK(1); |
| + } |
| + |
| + packed_sps_.AppendUE(curr_sps_.max_num_ref_frames); |
| + packed_sps_.AppendBool(curr_sps_.gaps_in_frame_num_value_allowed_flag); |
| + packed_sps_.AppendUE(curr_sps_.pic_width_in_mbs_minus1); |
| + packed_sps_.AppendUE(curr_sps_.pic_height_in_map_units_minus1); |
| + |
| + packed_sps_.AppendBool(curr_sps_.frame_mbs_only_flag); |
| + if (!curr_sps_.frame_mbs_only_flag) |
| + packed_sps_.AppendBool(curr_sps_.mb_adaptive_frame_field_flag); |
| + |
| + packed_sps_.AppendBool(curr_sps_.direct_8x8_inference_flag); |
| + |
| + packed_sps_.AppendBool(curr_sps_.frame_cropping_flag); |
| + if (curr_sps_.frame_cropping_flag) { |
| + packed_sps_.AppendUE(curr_sps_.frame_crop_left_offset); |
| + packed_sps_.AppendUE(curr_sps_.frame_crop_right_offset); |
| + packed_sps_.AppendUE(curr_sps_.frame_crop_top_offset); |
| + packed_sps_.AppendUE(curr_sps_.frame_crop_bottom_offset); |
| + } |
| + |
| + packed_sps_.AppendBool(curr_sps_.vui_parameters_present_flag); |
| + if (curr_sps_.vui_parameters_present_flag) { |
| + packed_sps_.AppendBool(false); // aspect_ratio_info_present_flag |
| + packed_sps_.AppendBool(false); // overscan_info_present_flag |
| + packed_sps_.AppendBool(false); // video_signal_type_present_flag |
| + packed_sps_.AppendBool(false); // chroma_loc_info_present_flag |
| + |
| + packed_sps_.AppendBool(curr_sps_.timing_info_present_flag); |
| + if (curr_sps_.timing_info_present_flag) { |
| + packed_sps_.AppendBits(32, curr_sps_.num_units_in_tick); |
| + packed_sps_.AppendBits(32, curr_sps_.time_scale); |
| + packed_sps_.AppendBool(curr_sps_.fixed_frame_rate_flag); |
| + } |
| + |
| + packed_sps_.AppendBool(curr_sps_.nal_hrd_parameters_present_flag); |
| + if (curr_sps_.nal_hrd_parameters_present_flag) { |
| + packed_sps_.AppendUE(curr_sps_.cpb_cnt_minus1); |
| + packed_sps_.AppendBits(4, curr_sps_.bit_rate_scale); |
| + packed_sps_.AppendBits(4, curr_sps_.cpb_size_scale); |
| + CHECK_LT(base::checked_cast<size_t>(curr_sps_.cpb_cnt_minus1), |
| + arraysize(curr_sps_.bit_rate_value_minus1)); |
| + for (int i = 0; i <= curr_sps_.cpb_cnt_minus1; ++i) { |
| + packed_sps_.AppendUE(curr_sps_.bit_rate_value_minus1[i]); |
| + packed_sps_.AppendUE(curr_sps_.cpb_size_value_minus1[i]); |
| + packed_sps_.AppendBool(curr_sps_.cbr_flag[i]); |
| + } |
| + packed_sps_.AppendBits( |
| + 5, curr_sps_.initial_cpb_removal_delay_length_minus_1); |
| + packed_sps_.AppendBits(5, curr_sps_.cpb_removal_delay_length_minus1); |
| + packed_sps_.AppendBits(5, curr_sps_.dpb_output_delay_length_minus1); |
| + packed_sps_.AppendBits(5, curr_sps_.time_offset_length); |
| + } |
| + |
| + packed_sps_.AppendBool(false); // vcl_hrd_parameters_flag |
| + if (curr_sps_.nal_hrd_parameters_present_flag) |
| + packed_sps_.AppendBool(curr_sps_.low_delay_hrd_flag); |
| + |
| + packed_sps_.AppendBool(false); // pic_struct_present_flag |
| + packed_sps_.AppendBool(false); // bitstream_restriction_flag |
| + } |
| + |
| + packed_sps_.FinishNALU(); |
| + return true; |
| +} |
| + |
| +bool VaapiVideoEncodeAccelerator::UpdatePPS() { |
| + memset(&curr_pps_, 0, sizeof(media::H264PPS)); |
| + |
| + curr_pps_.seq_parameter_set_id = curr_sps_.seq_parameter_set_id; |
| + curr_pps_.pic_parameter_set_id = 0; |
| + |
| + curr_pps_.entropy_coding_mode_flag = |
| + curr_sps_.profile_idc >= media::H264SPS::kProfileIDCMain; |
| + |
| + CHECK_GT(max_ref_idx_l0_size_, 0); |
| + curr_pps_.num_ref_idx_l0_default_active_minus1 = max_ref_idx_l0_size_ - 1; |
| + curr_pps_.num_ref_idx_l1_default_active_minus1 = 0; |
| + DCHECK_LE(qp_, 51); |
| + curr_pps_.pic_init_qp_minus26 = qp_ - 26; |
| + curr_pps_.deblocking_filter_control_present_flag = true; |
| + curr_pps_.transform_8x8_mode_flag = |
| + (curr_sps_.profile_idc == media::H264SPS::kProfileIDCHigh); |
| + |
| + return true; |
| +} |
| + |
// Serializes |curr_pps_| into |packed_pps_| as a bitstream-ready PPS NALU,
// following the syntax order of H.264 spec section 7.3.2.2. The Append* call
// order below is the bitstream syntax order and must not be changed.
// Always returns true; the bool return mirrors GeneratePackedSPS().
bool VaapiVideoEncodeAccelerator::GeneratePackedPPS() {
  packed_pps_.Reset();

  packed_pps_.BeginNALU(media::H264NALU::kPPS, 3);

  packed_pps_.AppendUE(curr_pps_.pic_parameter_set_id);
  packed_pps_.AppendUE(curr_pps_.seq_parameter_set_id);
  packed_pps_.AppendBool(curr_pps_.entropy_coding_mode_flag);
  packed_pps_.AppendBool(
      curr_pps_.bottom_field_pic_order_in_frame_present_flag);
  // Slice groups (FMO) are not supported; the slice-group syntax that would
  // follow a nonzero count is never emitted.
  CHECK_EQ(curr_pps_.num_slice_groups_minus1, 0);
  packed_pps_.AppendUE(curr_pps_.num_slice_groups_minus1);

  packed_pps_.AppendUE(curr_pps_.num_ref_idx_l0_default_active_minus1);
  packed_pps_.AppendUE(curr_pps_.num_ref_idx_l1_default_active_minus1);

  packed_pps_.AppendBool(curr_pps_.weighted_pred_flag);
  packed_pps_.AppendBits(2, curr_pps_.weighted_bipred_idc);

  packed_pps_.AppendSE(curr_pps_.pic_init_qp_minus26);
  packed_pps_.AppendSE(curr_pps_.pic_init_qs_minus26);
  packed_pps_.AppendSE(curr_pps_.chroma_qp_index_offset);

  packed_pps_.AppendBool(curr_pps_.deblocking_filter_control_present_flag);
  packed_pps_.AppendBool(curr_pps_.constrained_intra_pred_flag);
  packed_pps_.AppendBool(curr_pps_.redundant_pic_cnt_present_flag);

  packed_pps_.AppendBool(curr_pps_.transform_8x8_mode_flag);
  // Emitting picture scaling matrices is not supported.
  packed_pps_.AppendBool(curr_pps_.pic_scaling_matrix_present_flag);
  DCHECK(!curr_pps_.pic_scaling_matrix_present_flag);
  packed_pps_.AppendSE(curr_pps_.second_chroma_qp_index_offset);

  packed_pps_.FinishNALU();

  return true;
}
| + |
| +void VaapiVideoEncodeAccelerator::SetState(State state) { |
| + // Only touch state on encoder thread, unless it's not running. |
|
wuchengli
2014/06/18 03:34:13
This is alloying. Any way to get ride of this? I c
Pawel Osciak
2014/06/18 07:05:07
This is a very widely used pattern in Chrome. I do
|
| + if (encoder_thread_.IsRunning() && |
| + !encoder_thread_proxy_->BelongsToCurrentThread()) { |
| + encoder_thread_proxy_->PostTask( |
| + FROM_HERE, |
| + base::Bind(&VaapiVideoEncodeAccelerator::SetState, |
| + base::Unretained(this), |
| + state)); |
| + return; |
| + } |
| + |
| + DVLOGF(1) << "setting state to: " << state; |
| + state_ = state; |
| +} |
| + |
| +void VaapiVideoEncodeAccelerator::NotifyError(Error error) { |
| + if (!child_message_loop_proxy_->BelongsToCurrentThread()) { |
| + child_message_loop_proxy_->PostTask( |
| + FROM_HERE, |
| + base::Bind( |
| + &VaapiVideoEncodeAccelerator::NotifyError, weak_this_, error)); |
| + return; |
| + } |
| + |
| + if (client_) { |
| + client_->NotifyError(error); |
| + client_ptr_factory_.reset(); |
| + } |
| +} |
| + |
// An EncodeJob starts with no coded buffer attached (VA_INVALID_ID) and is
// treated as a non-keyframe until decided otherwise.
VaapiVideoEncodeAccelerator::EncodeJob::EncodeJob()
    : coded_buffer(VA_INVALID_ID), keyframe(false) {
}
| + |
| +} // namespace content |