OLD | NEW |
1 // Copyright 2014 The Chromium Authors. All rights reserved. | 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "media/gpu/vaapi_video_encode_accelerator.h" | 5 #include "media/gpu/vaapi_video_encode_accelerator.h" |
6 | 6 |
7 #include <string.h> | 7 #include <string.h> |
8 | 8 |
9 #include <memory> | 9 #include <memory> |
10 #include <utility> | 10 #include <utility> |
(...skipping 73 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
84 DCHECK((alignment + (alignment - 1)) == (alignment | (alignment - 1))); | 84 DCHECK((alignment + (alignment - 1)) == (alignment | (alignment - 1))); |
85 return ((value + (alignment - 1)) & ~(alignment - 1)); | 85 return ((value + (alignment - 1)) & ~(alignment - 1)); |
86 } | 86 } |
87 | 87 |
// Records an encoder failure reason to UMA ("Media.VAVEA.EncoderFailure")
// so failure rates can be tracked in the field. |failure| must be a value
// of the VAVEAEncoderFailure enum; MAX + 1 is the exclusive histogram bound.
static void ReportToUMA(VAVEAEncoderFailure failure) {
  UMA_HISTOGRAM_ENUMERATION("Media.VAVEA.EncoderFailure", failure,
                            VAVEA_ENCODER_FAILURES_MAX + 1);
}
92 | 92 |
// One input frame queued for encoding, paired with the client's request to
// force it to be encoded as a keyframe. Members are const: a queued request
// is immutable once created.
struct VaapiVideoEncodeAccelerator::InputFrameRef {
  InputFrameRef(const scoped_refptr<VideoFrame>& frame, bool force_keyframe)
      : frame(frame), force_keyframe(force_keyframe) {}
  // The frame to be encoded.
  const scoped_refptr<VideoFrame> frame;
  // True if the client asked for this frame to be a keyframe.
  const bool force_keyframe;
};
100 | 99 |
// A client-provided output bitstream buffer: the client-assigned |id| (echoed
// back when the buffer is returned) together with the shared memory region
// backing it. Owns the mapping for the lifetime of the ref.
struct VaapiVideoEncodeAccelerator::BitstreamBufferRef {
  BitstreamBufferRef(int32_t id, std::unique_ptr<SharedMemoryRegion> shm)
      : id(id), shm(std::move(shm)) {}
  // Client-assigned buffer id.
  const int32_t id;
  // Shared memory the encoded bitstream is written into.
  const std::unique_ptr<SharedMemoryRegion> shm;
};
107 | 106 |
// Returns the encode profiles (and their limits) supported by the underlying
// VA-API hardware, as reported by VaapiWrapper.
VideoEncodeAccelerator::SupportedProfiles
VaapiVideoEncodeAccelerator::GetSupportedProfiles() {
  return VaapiWrapper::GetSupportedEncodeProfiles();
}
112 | 111 |
113 static unsigned int Log2OfPowerOf2(unsigned int x) { | 112 static unsigned int Log2OfPowerOf2(unsigned int x) { |
114 CHECK_GT(x, 0u); | 113 CHECK_GT(x, 0u); |
115 DCHECK_EQ(x & (x - 1), 0u); | 114 DCHECK_EQ(x & (x - 1), 0u); |
116 | 115 |
117 int log = 0; | 116 int log = 0; |
118 while (x > 1) { | 117 while (x > 1) { |
119 x >>= 1; | 118 x >>= 1; |
120 ++log; | 119 ++log; |
121 } | 120 } |
122 return log; | 121 return log; |
123 } | 122 } |
124 | 123 |
125 VaapiVideoEncodeAccelerator::VaapiVideoEncodeAccelerator() | 124 VaapiVideoEncodeAccelerator::VaapiVideoEncodeAccelerator() |
126 : profile_(media::VIDEO_CODEC_PROFILE_UNKNOWN), | 125 : profile_(VIDEO_CODEC_PROFILE_UNKNOWN), |
127 mb_width_(0), | 126 mb_width_(0), |
128 mb_height_(0), | 127 mb_height_(0), |
129 output_buffer_byte_size_(0), | 128 output_buffer_byte_size_(0), |
130 state_(kUninitialized), | 129 state_(kUninitialized), |
131 frame_num_(0), | 130 frame_num_(0), |
132 idr_pic_id_(0), | 131 idr_pic_id_(0), |
133 bitrate_(0), | 132 bitrate_(0), |
134 framerate_(0), | 133 framerate_(0), |
135 cpb_size_(0), | 134 cpb_size_(0), |
136 encoding_parameters_changed_(false), | 135 encoding_parameters_changed_(false), |
(...skipping 10 matching lines...) Expand all Loading... |
147 ip_period_ = kIPPeriod; | 146 ip_period_ = kIPPeriod; |
148 } | 147 } |
149 | 148 |
// Destruction must happen on the child (client) thread, and only after
// Destroy() has already stopped the encoder thread — both invariants are
// DCHECKed here.
VaapiVideoEncodeAccelerator::~VaapiVideoEncodeAccelerator() {
  DVLOGF(4);
  DCHECK(child_task_runner_->BelongsToCurrentThread());
  DCHECK(!encoder_thread_.IsRunning());
}
155 | 154 |
156 bool VaapiVideoEncodeAccelerator::Initialize( | 155 bool VaapiVideoEncodeAccelerator::Initialize( |
157 media::VideoPixelFormat format, | 156 VideoPixelFormat format, |
158 const gfx::Size& input_visible_size, | 157 const gfx::Size& input_visible_size, |
159 media::VideoCodecProfile output_profile, | 158 VideoCodecProfile output_profile, |
160 uint32_t initial_bitrate, | 159 uint32_t initial_bitrate, |
161 Client* client) { | 160 Client* client) { |
162 DCHECK(child_task_runner_->BelongsToCurrentThread()); | 161 DCHECK(child_task_runner_->BelongsToCurrentThread()); |
163 DCHECK(!encoder_thread_.IsRunning()); | 162 DCHECK(!encoder_thread_.IsRunning()); |
164 DCHECK_EQ(state_, kUninitialized); | 163 DCHECK_EQ(state_, kUninitialized); |
165 | 164 |
166 DVLOGF(1) << "Initializing VAVEA, input_format: " | 165 DVLOGF(1) << "Initializing VAVEA, input_format: " |
167 << media::VideoPixelFormatToString(format) | 166 << VideoPixelFormatToString(format) |
168 << ", input_visible_size: " << input_visible_size.ToString() | 167 << ", input_visible_size: " << input_visible_size.ToString() |
169 << ", output_profile: " << output_profile | 168 << ", output_profile: " << output_profile |
170 << ", initial_bitrate: " << initial_bitrate; | 169 << ", initial_bitrate: " << initial_bitrate; |
171 | 170 |
172 client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client)); | 171 client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client)); |
173 client_ = client_ptr_factory_->GetWeakPtr(); | 172 client_ = client_ptr_factory_->GetWeakPtr(); |
174 | 173 |
175 const SupportedProfiles& profiles = GetSupportedProfiles(); | 174 const SupportedProfiles& profiles = GetSupportedProfiles(); |
176 auto profile = find_if(profiles.begin(), profiles.end(), | 175 auto profile = find_if(profiles.begin(), profiles.end(), |
177 [output_profile](const SupportedProfile& profile) { | 176 [output_profile](const SupportedProfile& profile) { |
178 return profile.profile == output_profile; | 177 return profile.profile == output_profile; |
179 }); | 178 }); |
180 if (profile == profiles.end()) { | 179 if (profile == profiles.end()) { |
181 DVLOGF(1) << "Unsupported output profile " << output_profile; | 180 DVLOGF(1) << "Unsupported output profile " << output_profile; |
182 return false; | 181 return false; |
183 } | 182 } |
184 if (input_visible_size.width() > profile->max_resolution.width() || | 183 if (input_visible_size.width() > profile->max_resolution.width() || |
185 input_visible_size.height() > profile->max_resolution.height()) { | 184 input_visible_size.height() > profile->max_resolution.height()) { |
186 DVLOGF(1) << "Input size too big: " << input_visible_size.ToString() | 185 DVLOGF(1) << "Input size too big: " << input_visible_size.ToString() |
187 << ", max supported size: " << profile->max_resolution.ToString(); | 186 << ", max supported size: " << profile->max_resolution.ToString(); |
188 return false; | 187 return false; |
189 } | 188 } |
190 | 189 |
191 if (format != media::PIXEL_FORMAT_I420) { | 190 if (format != PIXEL_FORMAT_I420) { |
192 DVLOGF(1) << "Unsupported input format: " | 191 DVLOGF(1) << "Unsupported input format: " |
193 << media::VideoPixelFormatToString(format); | 192 << VideoPixelFormatToString(format); |
194 return false; | 193 return false; |
195 } | 194 } |
196 | 195 |
197 profile_ = output_profile; | 196 profile_ = output_profile; |
198 visible_size_ = input_visible_size; | 197 visible_size_ = input_visible_size; |
199 // 4:2:0 format has to be 2-aligned. | 198 // 4:2:0 format has to be 2-aligned. |
200 DCHECK_EQ(visible_size_.width() % 2, 0); | 199 DCHECK_EQ(visible_size_.width() % 2, 0); |
201 DCHECK_EQ(visible_size_.height() % 2, 0); | 200 DCHECK_EQ(visible_size_.height() % 2, 0); |
202 coded_size_ = gfx::Size(RoundUpToPowerOf2(visible_size_.width(), 16), | 201 coded_size_ = gfx::Size(RoundUpToPowerOf2(visible_size_.width(), 16), |
203 RoundUpToPowerOf2(visible_size_.height(), 16)); | 202 RoundUpToPowerOf2(visible_size_.height(), 16)); |
(...skipping 23 matching lines...) Expand all Loading... |
227 base::Unretained(this))); | 226 base::Unretained(this))); |
228 | 227 |
229 return true; | 228 return true; |
230 } | 229 } |
231 | 230 |
232 void VaapiVideoEncodeAccelerator::InitializeTask() { | 231 void VaapiVideoEncodeAccelerator::InitializeTask() { |
233 DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread()); | 232 DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread()); |
234 DCHECK_EQ(state_, kUninitialized); | 233 DCHECK_EQ(state_, kUninitialized); |
235 DVLOGF(4); | 234 DVLOGF(4); |
236 | 235 |
237 va_surface_release_cb_ = media::BindToCurrentLoop( | 236 va_surface_release_cb_ = BindToCurrentLoop( |
238 base::Bind(&VaapiVideoEncodeAccelerator::RecycleVASurfaceID, | 237 base::Bind(&VaapiVideoEncodeAccelerator::RecycleVASurfaceID, |
239 base::Unretained(this))); | 238 base::Unretained(this))); |
240 | 239 |
241 if (!vaapi_wrapper_->CreateSurfaces(VA_RT_FORMAT_YUV420, coded_size_, | 240 if (!vaapi_wrapper_->CreateSurfaces(VA_RT_FORMAT_YUV420, coded_size_, |
242 kNumSurfaces, | 241 kNumSurfaces, |
243 &available_va_surface_ids_)) { | 242 &available_va_surface_ids_)) { |
244 NOTIFY_ERROR(kPlatformFailureError, "Failed creating VASurfaces"); | 243 NOTIFY_ERROR(kPlatformFailureError, "Failed creating VASurfaces"); |
245 return; | 244 return; |
246 } | 245 } |
247 | 246 |
(...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
279 frame_num_ %= idr_period_; | 278 frame_num_ %= idr_period_; |
280 | 279 |
281 if (current_pic_->frame_num == 0) { | 280 if (current_pic_->frame_num == 0) { |
282 current_pic_->idr = true; | 281 current_pic_->idr = true; |
283 // H264 spec mandates idr_pic_id to differ between two consecutive IDRs. | 282 // H264 spec mandates idr_pic_id to differ between two consecutive IDRs. |
284 idr_pic_id_ ^= 1; | 283 idr_pic_id_ ^= 1; |
285 ref_pic_list0_.clear(); | 284 ref_pic_list0_.clear(); |
286 } | 285 } |
287 | 286 |
288 if (current_pic_->frame_num % i_period_ == 0) | 287 if (current_pic_->frame_num % i_period_ == 0) |
289 current_pic_->type = media::H264SliceHeader::kISlice; | 288 current_pic_->type = H264SliceHeader::kISlice; |
290 else | 289 else |
291 current_pic_->type = media::H264SliceHeader::kPSlice; | 290 current_pic_->type = H264SliceHeader::kPSlice; |
292 | 291 |
293 if (current_pic_->type != media::H264SliceHeader::kBSlice) | 292 if (current_pic_->type != H264SliceHeader::kBSlice) |
294 current_pic_->ref = true; | 293 current_pic_->ref = true; |
295 | 294 |
296 current_pic_->pic_order_cnt = current_pic_->frame_num * 2; | 295 current_pic_->pic_order_cnt = current_pic_->frame_num * 2; |
297 current_pic_->top_field_order_cnt = current_pic_->pic_order_cnt; | 296 current_pic_->top_field_order_cnt = current_pic_->pic_order_cnt; |
298 current_pic_->pic_order_cnt_lsb = current_pic_->pic_order_cnt; | 297 current_pic_->pic_order_cnt_lsb = current_pic_->pic_order_cnt; |
299 | 298 |
300 current_encode_job_->keyframe = current_pic_->idr; | 299 current_encode_job_->keyframe = current_pic_->idr; |
301 | 300 |
302 DVLOGF(4) << "Starting a new frame, type: " << current_pic_->type | 301 DVLOGF(4) << "Starting a new frame, type: " << current_pic_->type |
303 << (force_keyframe ? " (forced keyframe)" : "") | 302 << (force_keyframe ? " (forced keyframe)" : "") |
(...skipping 166 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
470 hrd_param.initial_buffer_fullness = cpb_size_ / 2; | 469 hrd_param.initial_buffer_fullness = cpb_size_ / 2; |
471 if (!vaapi_wrapper_->SubmitVAEncMiscParamBuffer( | 470 if (!vaapi_wrapper_->SubmitVAEncMiscParamBuffer( |
472 VAEncMiscParameterTypeHRD, sizeof(hrd_param), &hrd_param)) | 471 VAEncMiscParameterTypeHRD, sizeof(hrd_param), &hrd_param)) |
473 return false; | 472 return false; |
474 | 473 |
475 return true; | 474 return true; |
476 } | 475 } |
477 | 476 |
478 bool VaapiVideoEncodeAccelerator::SubmitHeadersIfNeeded() { | 477 bool VaapiVideoEncodeAccelerator::SubmitHeadersIfNeeded() { |
479 DCHECK(current_pic_); | 478 DCHECK(current_pic_); |
480 if (current_pic_->type != media::H264SliceHeader::kISlice) | 479 if (current_pic_->type != H264SliceHeader::kISlice) |
481 return true; | 480 return true; |
482 | 481 |
483 // Submit PPS. | 482 // Submit PPS. |
484 VAEncPackedHeaderParameterBuffer par_buffer; | 483 VAEncPackedHeaderParameterBuffer par_buffer; |
485 memset(&par_buffer, 0, sizeof(par_buffer)); | 484 memset(&par_buffer, 0, sizeof(par_buffer)); |
486 par_buffer.type = VAEncPackedHeaderSequence; | 485 par_buffer.type = VAEncPackedHeaderSequence; |
487 par_buffer.bit_length = packed_sps_.BytesInBuffer() * 8; | 486 par_buffer.bit_length = packed_sps_.BytesInBuffer() * 8; |
488 | 487 |
489 if (!vaapi_wrapper_->SubmitBuffer(VAEncPackedHeaderParameterBufferType, | 488 if (!vaapi_wrapper_->SubmitBuffer(VAEncPackedHeaderParameterBufferType, |
490 sizeof(par_buffer), &par_buffer)) | 489 sizeof(par_buffer), &par_buffer)) |
(...skipping 22 matching lines...) Expand all Loading... |
513 } | 512 } |
514 | 513 |
// Submits all previously queued VA buffers for the current encode job to the
// hardware, targeting the job's input surface. Returns false on failure.
bool VaapiVideoEncodeAccelerator::ExecuteEncode() {
  DCHECK(current_pic_);
  DVLOGF(3) << "Encoding frame_num: " << current_pic_->frame_num;
  return vaapi_wrapper_->ExecuteAndDestroyPendingBuffers(
      current_encode_job_->input_surface->id());
}
521 | 520 |
// Uploads |frame| pixel data into the VA surface serving as input for the
// current encode job. Returns false if the upload fails.
bool VaapiVideoEncodeAccelerator::UploadFrame(
    const scoped_refptr<VideoFrame>& frame) {
  return vaapi_wrapper_->UploadVideoFrameToSurface(
      frame, current_encode_job_->input_surface->id());
}
527 | 526 |
528 void VaapiVideoEncodeAccelerator::TryToReturnBitstreamBuffer() { | 527 void VaapiVideoEncodeAccelerator::TryToReturnBitstreamBuffer() { |
529 DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread()); | 528 DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread()); |
530 | 529 |
531 if (state_ != kEncoding) | 530 if (state_ != kEncoding) |
532 return; | 531 return; |
533 | 532 |
(...skipping 19 matching lines...) Expand all Loading... |
553 DVLOGF(3) << "Returning bitstream buffer " | 552 DVLOGF(3) << "Returning bitstream buffer " |
554 << (encode_job->keyframe ? "(keyframe)" : "") | 553 << (encode_job->keyframe ? "(keyframe)" : "") |
555 << " id: " << buffer->id << " size: " << data_size; | 554 << " id: " << buffer->id << " size: " << data_size; |
556 | 555 |
557 child_task_runner_->PostTask( | 556 child_task_runner_->PostTask( |
558 FROM_HERE, | 557 FROM_HERE, |
559 base::Bind(&Client::BitstreamBufferReady, client_, buffer->id, data_size, | 558 base::Bind(&Client::BitstreamBufferReady, client_, buffer->id, data_size, |
560 encode_job->keyframe, encode_job->timestamp)); | 559 encode_job->keyframe, encode_job->timestamp)); |
561 } | 560 } |
562 | 561 |
// VideoEncodeAccelerator implementation: accepts |frame| for encoding,
// optionally forcing a keyframe. Called on the child thread; the actual work
// is posted to the encoder thread (EncodeTask). base::Unretained(this) is
// presumably safe because the encoder thread is stopped before |this| is
// destroyed (see the destructor's DCHECK) — verify against Destroy().
void VaapiVideoEncodeAccelerator::Encode(const scoped_refptr<VideoFrame>& frame,
                                         bool force_keyframe) {
  DVLOGF(3) << "Frame timestamp: " << frame->timestamp().InMilliseconds()
            << " force_keyframe: " << force_keyframe;
  DCHECK(child_task_runner_->BelongsToCurrentThread());

  encoder_thread_task_runner_->PostTask(
      FROM_HERE, base::Bind(&VaapiVideoEncodeAccelerator::EncodeTask,
                            base::Unretained(this), frame, force_keyframe));
}
574 | 572 |
575 bool VaapiVideoEncodeAccelerator::PrepareNextJob(base::TimeDelta timestamp) { | 573 bool VaapiVideoEncodeAccelerator::PrepareNextJob(base::TimeDelta timestamp) { |
(...skipping 23 matching lines...) Expand all Loading... |
599 | 597 |
600 // Reference surfaces are needed until the job is done, but they get | 598 // Reference surfaces are needed until the job is done, but they get |
601 // removed from ref_pic_list0_ when it's full at the end of job submission. | 599 // removed from ref_pic_list0_ when it's full at the end of job submission. |
602 // Keep refs to them along with the job and only release after sync. | 600 // Keep refs to them along with the job and only release after sync. |
603 current_encode_job_->reference_surfaces = ref_pic_list0_; | 601 current_encode_job_->reference_surfaces = ref_pic_list0_; |
604 | 602 |
605 return true; | 603 return true; |
606 } | 604 } |
607 | 605 |
// Encoder-thread counterpart of Encode(): enqueues the frame and immediately
// tries to start encoding it (EncodeFrameTask may be a no-op if resources
// are not yet available).
void VaapiVideoEncodeAccelerator::EncodeTask(
    const scoped_refptr<VideoFrame>& frame,
    bool force_keyframe) {
  DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
  DCHECK_NE(state_, kUninitialized);

  encoder_input_queue_.push(
      make_linked_ptr(new InputFrameRef(frame, force_keyframe)));
  EncodeFrameTask();
}
618 | 616 |
619 void VaapiVideoEncodeAccelerator::EncodeFrameTask() { | 617 void VaapiVideoEncodeAccelerator::EncodeFrameTask() { |
(...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
651 if (!ExecuteEncode()) { | 649 if (!ExecuteEncode()) { |
652 NOTIFY_ERROR(kPlatformFailureError, "Failed submitting encode job to HW."); | 650 NOTIFY_ERROR(kPlatformFailureError, "Failed submitting encode job to HW."); |
653 return; | 651 return; |
654 } | 652 } |
655 | 653 |
656 EndFrame(); | 654 EndFrame(); |
657 TryToReturnBitstreamBuffer(); | 655 TryToReturnBitstreamBuffer(); |
658 } | 656 } |
659 | 657 |
660 void VaapiVideoEncodeAccelerator::UseOutputBitstreamBuffer( | 658 void VaapiVideoEncodeAccelerator::UseOutputBitstreamBuffer( |
661 const media::BitstreamBuffer& buffer) { | 659 const BitstreamBuffer& buffer) { |
662 DVLOGF(4) << "id: " << buffer.id(); | 660 DVLOGF(4) << "id: " << buffer.id(); |
663 DCHECK(child_task_runner_->BelongsToCurrentThread()); | 661 DCHECK(child_task_runner_->BelongsToCurrentThread()); |
664 | 662 |
665 if (buffer.size() < output_buffer_byte_size_) { | 663 if (buffer.size() < output_buffer_byte_size_) { |
666 NOTIFY_ERROR(kInvalidArgumentError, "Provided bitstream buffer too small"); | 664 NOTIFY_ERROR(kInvalidArgumentError, "Provided bitstream buffer too small"); |
667 return; | 665 return; |
668 } | 666 } |
669 | 667 |
670 std::unique_ptr<SharedMemoryRegion> shm( | 668 std::unique_ptr<SharedMemoryRegion> shm( |
671 new SharedMemoryRegion(buffer, false)); | 669 new SharedMemoryRegion(buffer, false)); |
(...skipping 91 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
763 delete this; | 761 delete this; |
764 } | 762 } |
765 | 763 |
// Encoder-thread part of Destroy(): moves the encoder into the error state so
// no further encode work is started while teardown proceeds.
void VaapiVideoEncodeAccelerator::DestroyTask() {
  DVLOGF(2);
  DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
  SetState(kError);
}
771 | 769 |
772 void VaapiVideoEncodeAccelerator::UpdateSPS() { | 770 void VaapiVideoEncodeAccelerator::UpdateSPS() { |
773 memset(¤t_sps_, 0, sizeof(media::H264SPS)); | 771 memset(¤t_sps_, 0, sizeof(H264SPS)); |
774 | 772 |
775 // Spec A.2 and A.3. | 773 // Spec A.2 and A.3. |
776 switch (profile_) { | 774 switch (profile_) { |
777 case media::H264PROFILE_BASELINE: | 775 case H264PROFILE_BASELINE: |
778 // Due to crbug.com/345569, we don't distinguish between constrained | 776 // Due to crbug.com/345569, we don't distinguish between constrained |
779 // and non-constrained baseline profiles. Since many codecs can't do | 777 // and non-constrained baseline profiles. Since many codecs can't do |
780 // non-constrained, and constrained is usually what we mean (and it's a | 778 // non-constrained, and constrained is usually what we mean (and it's a |
781 // subset of non-constrained), default to it. | 779 // subset of non-constrained), default to it. |
782 current_sps_.profile_idc = media::H264SPS::kProfileIDCBaseline; | 780 current_sps_.profile_idc = H264SPS::kProfileIDCBaseline; |
783 current_sps_.constraint_set0_flag = true; | 781 current_sps_.constraint_set0_flag = true; |
784 break; | 782 break; |
785 case media::H264PROFILE_MAIN: | 783 case H264PROFILE_MAIN: |
786 current_sps_.profile_idc = media::H264SPS::kProfileIDCMain; | 784 current_sps_.profile_idc = H264SPS::kProfileIDCMain; |
787 current_sps_.constraint_set1_flag = true; | 785 current_sps_.constraint_set1_flag = true; |
788 break; | 786 break; |
789 case media::H264PROFILE_HIGH: | 787 case H264PROFILE_HIGH: |
790 current_sps_.profile_idc = media::H264SPS::kProfileIDCHigh; | 788 current_sps_.profile_idc = H264SPS::kProfileIDCHigh; |
791 break; | 789 break; |
792 default: | 790 default: |
793 NOTIMPLEMENTED(); | 791 NOTIMPLEMENTED(); |
794 return; | 792 return; |
795 } | 793 } |
796 | 794 |
797 current_sps_.level_idc = kDefaultLevelIDC; | 795 current_sps_.level_idc = kDefaultLevelIDC; |
798 current_sps_.seq_parameter_set_id = 0; | 796 current_sps_.seq_parameter_set_id = 0; |
799 current_sps_.chroma_format_idc = kChromaFormatIDC; | 797 current_sps_.chroma_format_idc = kChromaFormatIDC; |
800 | 798 |
(...skipping 34 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
835 current_sps_.num_units_in_tick = 1; | 833 current_sps_.num_units_in_tick = 1; |
836 current_sps_.time_scale = framerate_ * 2; // See equation D-2 in spec. | 834 current_sps_.time_scale = framerate_ * 2; // See equation D-2 in spec. |
837 current_sps_.fixed_frame_rate_flag = true; | 835 current_sps_.fixed_frame_rate_flag = true; |
838 | 836 |
839 current_sps_.nal_hrd_parameters_present_flag = true; | 837 current_sps_.nal_hrd_parameters_present_flag = true; |
840 // H.264 spec ch. E.2.2. | 838 // H.264 spec ch. E.2.2. |
841 current_sps_.cpb_cnt_minus1 = 0; | 839 current_sps_.cpb_cnt_minus1 = 0; |
842 current_sps_.bit_rate_scale = kBitRateScale; | 840 current_sps_.bit_rate_scale = kBitRateScale; |
843 current_sps_.cpb_size_scale = kCPBSizeScale; | 841 current_sps_.cpb_size_scale = kCPBSizeScale; |
844 current_sps_.bit_rate_value_minus1[0] = | 842 current_sps_.bit_rate_value_minus1[0] = |
845 (bitrate_ >> | 843 (bitrate_ >> (kBitRateScale + H264SPS::kBitRateScaleConstantTerm)) - 1; |
846 (kBitRateScale + media::H264SPS::kBitRateScaleConstantTerm)) - | |
847 1; | |
848 current_sps_.cpb_size_value_minus1[0] = | 844 current_sps_.cpb_size_value_minus1[0] = |
849 (cpb_size_ >> | 845 (cpb_size_ >> (kCPBSizeScale + H264SPS::kCPBSizeScaleConstantTerm)) - 1; |
850 (kCPBSizeScale + media::H264SPS::kCPBSizeScaleConstantTerm)) - | |
851 1; | |
852 current_sps_.cbr_flag[0] = true; | 846 current_sps_.cbr_flag[0] = true; |
853 current_sps_.initial_cpb_removal_delay_length_minus_1 = | 847 current_sps_.initial_cpb_removal_delay_length_minus_1 = |
854 media::H264SPS::kDefaultInitialCPBRemovalDelayLength - 1; | 848 H264SPS::kDefaultInitialCPBRemovalDelayLength - 1; |
855 current_sps_.cpb_removal_delay_length_minus1 = | 849 current_sps_.cpb_removal_delay_length_minus1 = |
856 media::H264SPS::kDefaultInitialCPBRemovalDelayLength - 1; | 850 H264SPS::kDefaultInitialCPBRemovalDelayLength - 1; |
857 current_sps_.dpb_output_delay_length_minus1 = | 851 current_sps_.dpb_output_delay_length_minus1 = |
858 media::H264SPS::kDefaultDPBOutputDelayLength - 1; | 852 H264SPS::kDefaultDPBOutputDelayLength - 1; |
859 current_sps_.time_offset_length = media::H264SPS::kDefaultTimeOffsetLength; | 853 current_sps_.time_offset_length = H264SPS::kDefaultTimeOffsetLength; |
860 current_sps_.low_delay_hrd_flag = false; | 854 current_sps_.low_delay_hrd_flag = false; |
861 } | 855 } |
862 | 856 |
863 void VaapiVideoEncodeAccelerator::GeneratePackedSPS() { | 857 void VaapiVideoEncodeAccelerator::GeneratePackedSPS() { |
864 packed_sps_.Reset(); | 858 packed_sps_.Reset(); |
865 | 859 |
866 packed_sps_.BeginNALU(media::H264NALU::kSPS, 3); | 860 packed_sps_.BeginNALU(H264NALU::kSPS, 3); |
867 | 861 |
868 packed_sps_.AppendBits(8, current_sps_.profile_idc); | 862 packed_sps_.AppendBits(8, current_sps_.profile_idc); |
869 packed_sps_.AppendBool(current_sps_.constraint_set0_flag); | 863 packed_sps_.AppendBool(current_sps_.constraint_set0_flag); |
870 packed_sps_.AppendBool(current_sps_.constraint_set1_flag); | 864 packed_sps_.AppendBool(current_sps_.constraint_set1_flag); |
871 packed_sps_.AppendBool(current_sps_.constraint_set2_flag); | 865 packed_sps_.AppendBool(current_sps_.constraint_set2_flag); |
872 packed_sps_.AppendBool(current_sps_.constraint_set3_flag); | 866 packed_sps_.AppendBool(current_sps_.constraint_set3_flag); |
873 packed_sps_.AppendBool(current_sps_.constraint_set4_flag); | 867 packed_sps_.AppendBool(current_sps_.constraint_set4_flag); |
874 packed_sps_.AppendBool(current_sps_.constraint_set5_flag); | 868 packed_sps_.AppendBool(current_sps_.constraint_set5_flag); |
875 packed_sps_.AppendBits(2, 0); // reserved_zero_2bits | 869 packed_sps_.AppendBits(2, 0); // reserved_zero_2bits |
876 packed_sps_.AppendBits(8, current_sps_.level_idc); | 870 packed_sps_.AppendBits(8, current_sps_.level_idc); |
877 packed_sps_.AppendUE(current_sps_.seq_parameter_set_id); | 871 packed_sps_.AppendUE(current_sps_.seq_parameter_set_id); |
878 | 872 |
879 if (current_sps_.profile_idc == media::H264SPS::kProfileIDCHigh) { | 873 if (current_sps_.profile_idc == H264SPS::kProfileIDCHigh) { |
880 packed_sps_.AppendUE(current_sps_.chroma_format_idc); | 874 packed_sps_.AppendUE(current_sps_.chroma_format_idc); |
881 if (current_sps_.chroma_format_idc == 3) | 875 if (current_sps_.chroma_format_idc == 3) |
882 packed_sps_.AppendBool(current_sps_.separate_colour_plane_flag); | 876 packed_sps_.AppendBool(current_sps_.separate_colour_plane_flag); |
883 packed_sps_.AppendUE(current_sps_.bit_depth_luma_minus8); | 877 packed_sps_.AppendUE(current_sps_.bit_depth_luma_minus8); |
884 packed_sps_.AppendUE(current_sps_.bit_depth_chroma_minus8); | 878 packed_sps_.AppendUE(current_sps_.bit_depth_chroma_minus8); |
885 packed_sps_.AppendBool(current_sps_.qpprime_y_zero_transform_bypass_flag); | 879 packed_sps_.AppendBool(current_sps_.qpprime_y_zero_transform_bypass_flag); |
886 packed_sps_.AppendBool(current_sps_.seq_scaling_matrix_present_flag); | 880 packed_sps_.AppendBool(current_sps_.seq_scaling_matrix_present_flag); |
887 CHECK(!current_sps_.seq_scaling_matrix_present_flag); | 881 CHECK(!current_sps_.seq_scaling_matrix_present_flag); |
888 } | 882 } |
889 | 883 |
(...skipping 78 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
968 // max_num_ref_frames. | 962 // max_num_ref_frames. |
969 const unsigned int max_dec_frame_buffering = | 963 const unsigned int max_dec_frame_buffering = |
970 current_sps_.max_num_ref_frames; | 964 current_sps_.max_num_ref_frames; |
971 packed_sps_.AppendUE(max_dec_frame_buffering); | 965 packed_sps_.AppendUE(max_dec_frame_buffering); |
972 } | 966 } |
973 | 967 |
974 packed_sps_.FinishNALU(); | 968 packed_sps_.FinishNALU(); |
975 } | 969 } |
976 | 970 |
// Fills |current_pps_| from the current SPS and encoding parameters
// (H.264 spec section 7.4.2.2).
void VaapiVideoEncodeAccelerator::UpdatePPS() {
  memset(&current_pps_, 0, sizeof(H264PPS));

  current_pps_.seq_parameter_set_id = current_sps_.seq_parameter_set_id;
  current_pps_.pic_parameter_set_id = 0;

  // CABAC entropy coding is only available in Main profile and above;
  // Baseline uses CAVLC.
  current_pps_.entropy_coding_mode_flag =
      current_sps_.profile_idc >= H264SPS::kProfileIDCMain;

  CHECK_GT(max_ref_idx_l0_size_, 0u);
  current_pps_.num_ref_idx_l0_default_active_minus1 = max_ref_idx_l0_size_ - 1;
  current_pps_.num_ref_idx_l1_default_active_minus1 = 0;
  // NOTE(review): only the upper bound of |qp_| is checked here; if qp_ were
  // below 26 the unsigned subtraction would wrap before conversion —
  // presumably qp_ >= 26 is guaranteed where it is set; verify.
  DCHECK_LE(qp_, 51u);
  current_pps_.pic_init_qp_minus26 = qp_ - 26;
  current_pps_.deblocking_filter_control_present_flag = true;
  // 8x8 transforms are only permitted in High profile.
  current_pps_.transform_8x8_mode_flag =
      (current_sps_.profile_idc == H264SPS::kProfileIDCHigh);
}
995 | 989 |
996 void VaapiVideoEncodeAccelerator::GeneratePackedPPS() { | 990 void VaapiVideoEncodeAccelerator::GeneratePackedPPS() { |
997 packed_pps_.Reset(); | 991 packed_pps_.Reset(); |
998 | 992 |
999 packed_pps_.BeginNALU(media::H264NALU::kPPS, 3); | 993 packed_pps_.BeginNALU(H264NALU::kPPS, 3); |
1000 | 994 |
1001 packed_pps_.AppendUE(current_pps_.pic_parameter_set_id); | 995 packed_pps_.AppendUE(current_pps_.pic_parameter_set_id); |
1002 packed_pps_.AppendUE(current_pps_.seq_parameter_set_id); | 996 packed_pps_.AppendUE(current_pps_.seq_parameter_set_id); |
1003 packed_pps_.AppendBool(current_pps_.entropy_coding_mode_flag); | 997 packed_pps_.AppendBool(current_pps_.entropy_coding_mode_flag); |
1004 packed_pps_.AppendBool( | 998 packed_pps_.AppendBool( |
1005 current_pps_.bottom_field_pic_order_in_frame_present_flag); | 999 current_pps_.bottom_field_pic_order_in_frame_present_flag); |
1006 CHECK_EQ(current_pps_.num_slice_groups_minus1, 0); | 1000 CHECK_EQ(current_pps_.num_slice_groups_minus1, 0); |
1007 packed_pps_.AppendUE(current_pps_.num_slice_groups_minus1); | 1001 packed_pps_.AppendUE(current_pps_.num_slice_groups_minus1); |
1008 | 1002 |
1009 packed_pps_.AppendUE(current_pps_.num_ref_idx_l0_default_active_minus1); | 1003 packed_pps_.AppendUE(current_pps_.num_ref_idx_l0_default_active_minus1); |
(...skipping 45 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1055 client_ptr_factory_.reset(); | 1049 client_ptr_factory_.reset(); |
1056 } | 1050 } |
1057 } | 1051 } |
1058 | 1052 |
// A fresh EncodeJob has no coded buffer allocated yet (VA_INVALID_ID) and is
// not a keyframe until the frame-setup code decides otherwise.
VaapiVideoEncodeAccelerator::EncodeJob::EncodeJob()
    : coded_buffer(VA_INVALID_ID), keyframe(false) {}
1061 | 1055 |
// Trivial destructor, defined out of line.
VaapiVideoEncodeAccelerator::EncodeJob::~EncodeJob() {}
1063 | 1057 |
1064 } // namespace media | 1058 } // namespace media |
OLD | NEW |