OLD | NEW |
1 // Copyright 2014 The Chromium Authors. All rights reserved. | 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "media/gpu/v4l2_video_encode_accelerator.h" | 5 #include "media/gpu/v4l2_video_encode_accelerator.h" |
6 | 6 |
7 #include <fcntl.h> | 7 #include <fcntl.h> |
8 #include <linux/videodev2.h> | 8 #include <linux/videodev2.h> |
9 #include <poll.h> | 9 #include <poll.h> |
10 #include <string.h> | 10 #include <string.h> |
(...skipping 62 matching lines...)
73 ImageProcessorInputRecord() | 73 ImageProcessorInputRecord() |
74 : force_keyframe(false) {} | 74 : force_keyframe(false) {} |
75 | 75 |
76 V4L2VideoEncodeAccelerator::ImageProcessorInputRecord:: | 76 V4L2VideoEncodeAccelerator::ImageProcessorInputRecord:: |
77 ~ImageProcessorInputRecord() {} | 77 ~ImageProcessorInputRecord() {} |
78 | 78 |
79 V4L2VideoEncodeAccelerator::V4L2VideoEncodeAccelerator( | 79 V4L2VideoEncodeAccelerator::V4L2VideoEncodeAccelerator( |
80 const scoped_refptr<V4L2Device>& device) | 80 const scoped_refptr<V4L2Device>& device) |
81 : child_task_runner_(base::ThreadTaskRunnerHandle::Get()), | 81 : child_task_runner_(base::ThreadTaskRunnerHandle::Get()), |
82 output_buffer_byte_size_(0), | 82 output_buffer_byte_size_(0), |
83 device_input_format_(media::PIXEL_FORMAT_UNKNOWN), | 83 device_input_format_(PIXEL_FORMAT_UNKNOWN), |
84 input_planes_count_(0), | 84 input_planes_count_(0), |
85 output_format_fourcc_(0), | 85 output_format_fourcc_(0), |
86 encoder_state_(kUninitialized), | 86 encoder_state_(kUninitialized), |
87 stream_header_size_(0), | 87 stream_header_size_(0), |
88 device_(device), | 88 device_(device), |
89 input_streamon_(false), | 89 input_streamon_(false), |
90 input_buffer_queued_count_(0), | 90 input_buffer_queued_count_(0), |
91 input_memory_type_(V4L2_MEMORY_USERPTR), | 91 input_memory_type_(V4L2_MEMORY_USERPTR), |
92 output_streamon_(false), | 92 output_streamon_(false), |
93 output_buffer_queued_count_(0), | 93 output_buffer_queued_count_(0), |
94 encoder_thread_("V4L2EncoderThread"), | 94 encoder_thread_("V4L2EncoderThread"), |
95 device_poll_thread_("V4L2EncoderDevicePollThread"), | 95 device_poll_thread_("V4L2EncoderDevicePollThread"), |
96 weak_this_ptr_factory_(this) { | 96 weak_this_ptr_factory_(this) { |
97 weak_this_ = weak_this_ptr_factory_.GetWeakPtr(); | 97 weak_this_ = weak_this_ptr_factory_.GetWeakPtr(); |
98 } | 98 } |
99 | 99 |
100 V4L2VideoEncodeAccelerator::~V4L2VideoEncodeAccelerator() { | 100 V4L2VideoEncodeAccelerator::~V4L2VideoEncodeAccelerator() { |
101 DCHECK(!encoder_thread_.IsRunning()); | 101 DCHECK(!encoder_thread_.IsRunning()); |
102 DCHECK(!device_poll_thread_.IsRunning()); | 102 DCHECK(!device_poll_thread_.IsRunning()); |
103 DVLOG(4) << __func__; | 103 DVLOG(4) << __func__; |
104 | 104 |
105 DestroyInputBuffers(); | 105 DestroyInputBuffers(); |
106 DestroyOutputBuffers(); | 106 DestroyOutputBuffers(); |
107 } | 107 } |
108 | 108 |
109 bool V4L2VideoEncodeAccelerator::Initialize( | 109 bool V4L2VideoEncodeAccelerator::Initialize(VideoPixelFormat input_format, |
110 media::VideoPixelFormat input_format, | 110 const gfx::Size& input_visible_size, |
111 const gfx::Size& input_visible_size, | 111 VideoCodecProfile output_profile, |
112 media::VideoCodecProfile output_profile, | 112 uint32_t initial_bitrate, |
113 uint32_t initial_bitrate, | 113 Client* client) { |
114 Client* client) { | |
115 DVLOG(3) << __func__ | 114 DVLOG(3) << __func__ |
116 << ": input_format=" << media::VideoPixelFormatToString(input_format) | 115 << ": input_format=" << VideoPixelFormatToString(input_format) |
117 << ", input_visible_size=" << input_visible_size.ToString() | 116 << ", input_visible_size=" << input_visible_size.ToString() |
118 << ", output_profile=" << output_profile | 117 << ", output_profile=" << output_profile |
119 << ", initial_bitrate=" << initial_bitrate; | 118 << ", initial_bitrate=" << initial_bitrate; |
120 | 119 |
121 visible_size_ = input_visible_size; | 120 visible_size_ = input_visible_size; |
122 | 121 |
123 client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client)); | 122 client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client)); |
124 client_ = client_ptr_factory_->GetWeakPtr(); | 123 client_ = client_ptr_factory_->GetWeakPtr(); |
125 | 124 |
126 DCHECK(child_task_runner_->BelongsToCurrentThread()); | 125 DCHECK(child_task_runner_->BelongsToCurrentThread()); |
127 DCHECK_EQ(encoder_state_, kUninitialized); | 126 DCHECK_EQ(encoder_state_, kUninitialized); |
128 | 127 |
129 struct v4l2_capability caps; | 128 struct v4l2_capability caps; |
130 memset(&caps, 0, sizeof(caps)); | 129 memset(&caps, 0, sizeof(caps)); |
131 const __u32 kCapsRequired = V4L2_CAP_VIDEO_M2M_MPLANE | V4L2_CAP_STREAMING; | 130 const __u32 kCapsRequired = V4L2_CAP_VIDEO_M2M_MPLANE | V4L2_CAP_STREAMING; |
132 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYCAP, &caps); | 131 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYCAP, &caps); |
133 if ((caps.capabilities & kCapsRequired) != kCapsRequired) { | 132 if ((caps.capabilities & kCapsRequired) != kCapsRequired) { |
134 LOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP: " | 133 LOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP: " |
135 << "caps check failed: 0x" << std::hex << caps.capabilities; | 134 << "caps check failed: 0x" << std::hex << caps.capabilities; |
136 return false; | 135 return false; |
137 } | 136 } |
138 | 137 |
139 if (!SetFormats(input_format, output_profile)) { | 138 if (!SetFormats(input_format, output_profile)) { |
140 DLOG(ERROR) << "Failed setting up formats"; | 139 DLOG(ERROR) << "Failed setting up formats"; |
141 return false; | 140 return false; |
142 } | 141 } |
143 | 142 |
144 if (input_format != device_input_format_) { | 143 if (input_format != device_input_format_) { |
145 DVLOG(1) << "Input format not supported by the HW, will convert to " | 144 DVLOG(1) << "Input format not supported by the HW, will convert to " |
146 << media::VideoPixelFormatToString(device_input_format_); | 145 << VideoPixelFormatToString(device_input_format_); |
147 | 146 |
148 scoped_refptr<V4L2Device> device = | 147 scoped_refptr<V4L2Device> device = |
149 V4L2Device::Create(V4L2Device::kImageProcessor); | 148 V4L2Device::Create(V4L2Device::kImageProcessor); |
150 image_processor_.reset(new V4L2ImageProcessor(device)); | 149 image_processor_.reset(new V4L2ImageProcessor(device)); |
151 | 150 |
152 // Convert from input_format to device_input_format_, keeping the size | 151 // Convert from input_format to device_input_format_, keeping the size |
153 // at visible_size_ and requiring the output buffers to be of at least | 152 // at visible_size_ and requiring the output buffers to be of at least |
154 // input_allocated_size_. Unretained is safe because |this| owns the image | 153 // input_allocated_size_. Unretained is safe because |this| owns the image |
155 // processor and there will be no callbacks after it is destroyed. | 154 // processor and there will be no callbacks after it is destroyed. |
156 if (!image_processor_->Initialize( | 155 if (!image_processor_->Initialize( |
(...skipping 56 matching lines...)
213 : input_allocated_size_, | 212 : input_allocated_size_, |
214 output_buffer_byte_size_)); | 213 output_buffer_byte_size_)); |
215 return true; | 214 return true; |
216 } | 215 } |
217 | 216 |
218 void V4L2VideoEncodeAccelerator::ImageProcessorError() { | 217 void V4L2VideoEncodeAccelerator::ImageProcessorError() { |
219 LOG(ERROR) << "Image processor error"; | 218 LOG(ERROR) << "Image processor error"; |
220 NOTIFY_ERROR(kPlatformFailureError); | 219 NOTIFY_ERROR(kPlatformFailureError); |
221 } | 220 } |
222 | 221 |
223 void V4L2VideoEncodeAccelerator::Encode( | 222 void V4L2VideoEncodeAccelerator::Encode(const scoped_refptr<VideoFrame>& frame, |
224 const scoped_refptr<media::VideoFrame>& frame, | 223 bool force_keyframe) { |
225 bool force_keyframe) { | |
226 DVLOG(3) << "Encode(): force_keyframe=" << force_keyframe; | 224 DVLOG(3) << "Encode(): force_keyframe=" << force_keyframe; |
227 DCHECK(child_task_runner_->BelongsToCurrentThread()); | 225 DCHECK(child_task_runner_->BelongsToCurrentThread()); |
228 | 226 |
229 if (image_processor_) { | 227 if (image_processor_) { |
230 if (free_image_processor_output_buffers_.size() > 0) { | 228 if (free_image_processor_output_buffers_.size() > 0) { |
231 int output_buffer_index = free_image_processor_output_buffers_.back(); | 229 int output_buffer_index = free_image_processor_output_buffers_.back(); |
232 free_image_processor_output_buffers_.pop_back(); | 230 free_image_processor_output_buffers_.pop_back(); |
233 // Unretained is safe because |this| owns the image processor and there | 231 // Unretained is safe because |this| owns the image processor and there |
234 // will be no callbacks after it is destroyed. | 232 // will be no callbacks after it is destroyed. |
235 image_processor_->Process( | 233 image_processor_->Process( |
236 frame, output_buffer_index, | 234 frame, output_buffer_index, |
237 base::Bind(&V4L2VideoEncodeAccelerator::FrameProcessed, | 235 base::Bind(&V4L2VideoEncodeAccelerator::FrameProcessed, |
238 base::Unretained(this), force_keyframe, | 236 base::Unretained(this), force_keyframe, |
239 frame->timestamp())); | 237 frame->timestamp())); |
240 } else { | 238 } else { |
241 ImageProcessorInputRecord record; | 239 ImageProcessorInputRecord record; |
242 record.frame = frame; | 240 record.frame = frame; |
243 record.force_keyframe = force_keyframe; | 241 record.force_keyframe = force_keyframe; |
244 image_processor_input_queue_.push(record); | 242 image_processor_input_queue_.push(record); |
245 } | 243 } |
246 } else { | 244 } else { |
247 encoder_thread_.message_loop()->PostTask( | 245 encoder_thread_.message_loop()->PostTask( |
248 FROM_HERE, base::Bind(&V4L2VideoEncodeAccelerator::EncodeTask, | 246 FROM_HERE, base::Bind(&V4L2VideoEncodeAccelerator::EncodeTask, |
249 base::Unretained(this), frame, force_keyframe)); | 247 base::Unretained(this), frame, force_keyframe)); |
250 } | 248 } |
251 } | 249 } |
252 | 250 |
253 void V4L2VideoEncodeAccelerator::UseOutputBitstreamBuffer( | 251 void V4L2VideoEncodeAccelerator::UseOutputBitstreamBuffer( |
254 const media::BitstreamBuffer& buffer) { | 252 const BitstreamBuffer& buffer) { |
255 DVLOG(3) << "UseOutputBitstreamBuffer(): id=" << buffer.id(); | 253 DVLOG(3) << "UseOutputBitstreamBuffer(): id=" << buffer.id(); |
256 DCHECK(child_task_runner_->BelongsToCurrentThread()); | 254 DCHECK(child_task_runner_->BelongsToCurrentThread()); |
257 | 255 |
258 if (buffer.size() < output_buffer_byte_size_) { | 256 if (buffer.size() < output_buffer_byte_size_) { |
259 NOTIFY_ERROR(kInvalidArgumentError); | 257 NOTIFY_ERROR(kInvalidArgumentError); |
260 return; | 258 return; |
261 } | 259 } |
262 | 260 |
263 std::unique_ptr<SharedMemoryRegion> shm( | 261 std::unique_ptr<SharedMemoryRegion> shm( |
264 new SharedMemoryRegion(buffer, false)); | 262 new SharedMemoryRegion(buffer, false)); |
(...skipping 47 matching lines...)
312 // Otherwise, call the destroy task directly. | 310 // Otherwise, call the destroy task directly. |
313 DestroyTask(); | 311 DestroyTask(); |
314 } | 312 } |
315 | 313 |
316 // Set to kError state just in case. | 314 // Set to kError state just in case. |
317 encoder_state_ = kError; | 315 encoder_state_ = kError; |
318 | 316 |
319 delete this; | 317 delete this; |
320 } | 318 } |
321 | 319 |
322 media::VideoEncodeAccelerator::SupportedProfiles | 320 VideoEncodeAccelerator::SupportedProfiles |
323 V4L2VideoEncodeAccelerator::GetSupportedProfiles() { | 321 V4L2VideoEncodeAccelerator::GetSupportedProfiles() { |
324 SupportedProfiles profiles; | 322 SupportedProfiles profiles; |
325 SupportedProfile profile; | 323 SupportedProfile profile; |
326 profile.max_framerate_numerator = 30; | 324 profile.max_framerate_numerator = 30; |
327 profile.max_framerate_denominator = 1; | 325 profile.max_framerate_denominator = 1; |
328 | 326 |
329 gfx::Size min_resolution; | 327 gfx::Size min_resolution; |
330 v4l2_fmtdesc fmtdesc; | 328 v4l2_fmtdesc fmtdesc; |
331 memset(&fmtdesc, 0, sizeof(fmtdesc)); | 329 memset(&fmtdesc, 0, sizeof(fmtdesc)); |
332 fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | 330 fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
333 for (; device_->Ioctl(VIDIOC_ENUM_FMT, &fmtdesc) == 0; ++fmtdesc.index) { | 331 for (; device_->Ioctl(VIDIOC_ENUM_FMT, &fmtdesc) == 0; ++fmtdesc.index) { |
334 device_->GetSupportedResolution(fmtdesc.pixelformat, &min_resolution, | 332 device_->GetSupportedResolution(fmtdesc.pixelformat, &min_resolution, |
335 &profile.max_resolution); | 333 &profile.max_resolution); |
336 switch (fmtdesc.pixelformat) { | 334 switch (fmtdesc.pixelformat) { |
337 case V4L2_PIX_FMT_H264: | 335 case V4L2_PIX_FMT_H264: |
338 profile.profile = media::H264PROFILE_MAIN; | 336 profile.profile = H264PROFILE_MAIN; |
339 profiles.push_back(profile); | 337 profiles.push_back(profile); |
340 break; | 338 break; |
341 case V4L2_PIX_FMT_VP8: | 339 case V4L2_PIX_FMT_VP8: |
342 profile.profile = media::VP8PROFILE_ANY; | 340 profile.profile = VP8PROFILE_ANY; |
343 profiles.push_back(profile); | 341 profiles.push_back(profile); |
344 break; | 342 break; |
345 case V4L2_PIX_FMT_VP9: | 343 case V4L2_PIX_FMT_VP9: |
346 profile.profile = media::VP9PROFILE_PROFILE0; | 344 profile.profile = VP9PROFILE_PROFILE0; |
347 profiles.push_back(profile); | 345 profiles.push_back(profile); |
348 profile.profile = media::VP9PROFILE_PROFILE1; | 346 profile.profile = VP9PROFILE_PROFILE1; |
349 profiles.push_back(profile); | 347 profiles.push_back(profile); |
350 profile.profile = media::VP9PROFILE_PROFILE2; | 348 profile.profile = VP9PROFILE_PROFILE2; |
351 profiles.push_back(profile); | 349 profiles.push_back(profile); |
352 profile.profile = media::VP9PROFILE_PROFILE3; | 350 profile.profile = VP9PROFILE_PROFILE3; |
353 profiles.push_back(profile); | 351 profiles.push_back(profile); |
354 break; | 352 break; |
355 } | 353 } |
356 } | 354 } |
357 | 355 |
358 return profiles; | 356 return profiles; |
359 } | 357 } |
360 | 358 |
361 void V4L2VideoEncodeAccelerator::FrameProcessed(bool force_keyframe, | 359 void V4L2VideoEncodeAccelerator::FrameProcessed(bool force_keyframe, |
362 base::TimeDelta timestamp, | 360 base::TimeDelta timestamp, |
363 int output_buffer_index) { | 361 int output_buffer_index) { |
364 DCHECK(child_task_runner_->BelongsToCurrentThread()); | 362 DCHECK(child_task_runner_->BelongsToCurrentThread()); |
365 DVLOG(3) << "FrameProcessed(): force_keyframe=" << force_keyframe | 363 DVLOG(3) << "FrameProcessed(): force_keyframe=" << force_keyframe |
366 << ", output_buffer_index=" << output_buffer_index; | 364 << ", output_buffer_index=" << output_buffer_index; |
367 DCHECK_GE(output_buffer_index, 0); | 365 DCHECK_GE(output_buffer_index, 0); |
368 DCHECK_LT(static_cast<size_t>(output_buffer_index), | 366 DCHECK_LT(static_cast<size_t>(output_buffer_index), |
369 image_processor_output_buffer_map_.size()); | 367 image_processor_output_buffer_map_.size()); |
370 | 368 |
371 std::vector<base::ScopedFD>& scoped_fds = | 369 std::vector<base::ScopedFD>& scoped_fds = |
372 image_processor_output_buffer_map_[output_buffer_index]; | 370 image_processor_output_buffer_map_[output_buffer_index]; |
373 std::vector<int> fds; | 371 std::vector<int> fds; |
374 for (auto& fd : scoped_fds) { | 372 for (auto& fd : scoped_fds) { |
375 fds.push_back(fd.get()); | 373 fds.push_back(fd.get()); |
376 } | 374 } |
377 scoped_refptr<media::VideoFrame> output_frame = | 375 scoped_refptr<VideoFrame> output_frame = VideoFrame::WrapExternalDmabufs( |
378 media::VideoFrame::WrapExternalDmabufs( | 376 device_input_format_, image_processor_->output_allocated_size(), |
379 device_input_format_, image_processor_->output_allocated_size(), | 377 gfx::Rect(visible_size_), visible_size_, fds, timestamp); |
380 gfx::Rect(visible_size_), visible_size_, fds, timestamp); | |
381 if (!output_frame) { | 378 if (!output_frame) { |
382 NOTIFY_ERROR(kPlatformFailureError); | 379 NOTIFY_ERROR(kPlatformFailureError); |
383 return; | 380 return; |
384 } | 381 } |
385 output_frame->AddDestructionObserver(media::BindToCurrentLoop( | 382 output_frame->AddDestructionObserver(BindToCurrentLoop( |
386 base::Bind(&V4L2VideoEncodeAccelerator::ReuseImageProcessorOutputBuffer, | 383 base::Bind(&V4L2VideoEncodeAccelerator::ReuseImageProcessorOutputBuffer, |
387 weak_this_, output_buffer_index))); | 384 weak_this_, output_buffer_index))); |
388 | 385 |
389 encoder_thread_.message_loop()->PostTask( | 386 encoder_thread_.message_loop()->PostTask( |
390 FROM_HERE, | 387 FROM_HERE, |
391 base::Bind(&V4L2VideoEncodeAccelerator::EncodeTask, | 388 base::Bind(&V4L2VideoEncodeAccelerator::EncodeTask, |
392 base::Unretained(this), output_frame, force_keyframe)); | 389 base::Unretained(this), output_frame, force_keyframe)); |
393 } | 390 } |
394 | 391 |
395 void V4L2VideoEncodeAccelerator::ReuseImageProcessorOutputBuffer( | 392 void V4L2VideoEncodeAccelerator::ReuseImageProcessorOutputBuffer( |
396 int output_buffer_index) { | 393 int output_buffer_index) { |
397 DCHECK(child_task_runner_->BelongsToCurrentThread()); | 394 DCHECK(child_task_runner_->BelongsToCurrentThread()); |
398 DVLOG(3) << __func__ << ": output_buffer_index=" << output_buffer_index; | 395 DVLOG(3) << __func__ << ": output_buffer_index=" << output_buffer_index; |
399 free_image_processor_output_buffers_.push_back(output_buffer_index); | 396 free_image_processor_output_buffers_.push_back(output_buffer_index); |
400 if (!image_processor_input_queue_.empty()) { | 397 if (!image_processor_input_queue_.empty()) { |
401 ImageProcessorInputRecord record = image_processor_input_queue_.front(); | 398 ImageProcessorInputRecord record = image_processor_input_queue_.front(); |
402 image_processor_input_queue_.pop(); | 399 image_processor_input_queue_.pop(); |
403 Encode(record.frame, record.force_keyframe); | 400 Encode(record.frame, record.force_keyframe); |
404 } | 401 } |
405 } | 402 } |
406 | 403 |
407 void V4L2VideoEncodeAccelerator::EncodeTask( | 404 void V4L2VideoEncodeAccelerator::EncodeTask( |
408 const scoped_refptr<media::VideoFrame>& frame, | 405 const scoped_refptr<VideoFrame>& frame, |
409 bool force_keyframe) { | 406 bool force_keyframe) { |
410 DVLOG(3) << "EncodeTask(): force_keyframe=" << force_keyframe; | 407 DVLOG(3) << "EncodeTask(): force_keyframe=" << force_keyframe; |
411 DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current()); | 408 DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current()); |
412 DCHECK_NE(encoder_state_, kUninitialized); | 409 DCHECK_NE(encoder_state_, kUninitialized); |
413 | 410 |
414 if (encoder_state_ == kError) { | 411 if (encoder_state_ == kError) { |
415 DVLOG(2) << "EncodeTask(): early out: kError state"; | 412 DVLOG(2) << "EncodeTask(): early out: kError state"; |
416 return; | 413 return; |
417 } | 414 } |
418 | 415 |
(...skipping 258 matching lines...)
677 output_buffer_queued_count_--; | 674 output_buffer_queued_count_--; |
678 } | 675 } |
679 } | 676 } |
680 | 677 |
681 bool V4L2VideoEncodeAccelerator::EnqueueInputRecord() { | 678 bool V4L2VideoEncodeAccelerator::EnqueueInputRecord() { |
682 DVLOG(3) << "EnqueueInputRecord()"; | 679 DVLOG(3) << "EnqueueInputRecord()"; |
683 DCHECK(!free_input_buffers_.empty()); | 680 DCHECK(!free_input_buffers_.empty()); |
684 DCHECK(!encoder_input_queue_.empty()); | 681 DCHECK(!encoder_input_queue_.empty()); |
685 | 682 |
686 // Enqueue an input (VIDEO_OUTPUT) buffer. | 683 // Enqueue an input (VIDEO_OUTPUT) buffer. |
687 scoped_refptr<media::VideoFrame> frame = encoder_input_queue_.front(); | 684 scoped_refptr<VideoFrame> frame = encoder_input_queue_.front(); |
688 const int index = free_input_buffers_.back(); | 685 const int index = free_input_buffers_.back(); |
689 InputRecord& input_record = input_buffer_map_[index]; | 686 InputRecord& input_record = input_buffer_map_[index]; |
690 DCHECK(!input_record.at_device); | 687 DCHECK(!input_record.at_device); |
691 struct v4l2_buffer qbuf; | 688 struct v4l2_buffer qbuf; |
692 struct v4l2_plane qbuf_planes[VIDEO_MAX_PLANES]; | 689 struct v4l2_plane qbuf_planes[VIDEO_MAX_PLANES]; |
693 memset(&qbuf, 0, sizeof(qbuf)); | 690 memset(&qbuf, 0, sizeof(qbuf)); |
694 memset(qbuf_planes, 0, sizeof(qbuf_planes)); | 691 memset(qbuf_planes, 0, sizeof(qbuf_planes)); |
695 qbuf.index = index; | 692 qbuf.index = index; |
696 qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; | 693 qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
697 qbuf.m.planes = qbuf_planes; | 694 qbuf.m.planes = qbuf_planes; |
698 qbuf.timestamp.tv_sec = static_cast<time_t>(frame->timestamp().InSeconds()); | 695 qbuf.timestamp.tv_sec = static_cast<time_t>(frame->timestamp().InSeconds()); |
699 qbuf.timestamp.tv_usec = | 696 qbuf.timestamp.tv_usec = |
700 frame->timestamp().InMicroseconds() - | 697 frame->timestamp().InMicroseconds() - |
701 frame->timestamp().InSeconds() * base::Time::kMicrosecondsPerSecond; | 698 frame->timestamp().InSeconds() * base::Time::kMicrosecondsPerSecond; |
702 | 699 |
703 DCHECK_EQ(device_input_format_, frame->format()); | 700 DCHECK_EQ(device_input_format_, frame->format()); |
704 for (size_t i = 0; i < input_planes_count_; ++i) { | 701 for (size_t i = 0; i < input_planes_count_; ++i) { |
705 qbuf.m.planes[i].bytesused = base::checked_cast<__u32>( | 702 qbuf.m.planes[i].bytesused = base::checked_cast<__u32>( |
706 media::VideoFrame::PlaneSize(frame->format(), i, input_allocated_size_) | 703 VideoFrame::PlaneSize(frame->format(), i, input_allocated_size_) |
707 .GetArea()); | 704 .GetArea()); |
708 | 705 |
709 switch (input_memory_type_) { | 706 switch (input_memory_type_) { |
710 case V4L2_MEMORY_USERPTR: | 707 case V4L2_MEMORY_USERPTR: |
711 qbuf.m.planes[i].length = qbuf.m.planes[i].bytesused; | 708 qbuf.m.planes[i].length = qbuf.m.planes[i].bytesused; |
712 qbuf.m.planes[i].m.userptr = | 709 qbuf.m.planes[i].m.userptr = |
713 reinterpret_cast<unsigned long>(frame->data(i)); | 710 reinterpret_cast<unsigned long>(frame->data(i)); |
714 DCHECK(qbuf.m.planes[i].m.userptr); | 711 DCHECK(qbuf.m.planes[i].m.userptr); |
715 break; | 712 break; |
716 | 713 |
(...skipping 199 matching lines...)
916 memset(&parms, 0, sizeof(parms)); | 913 memset(&parms, 0, sizeof(parms)); |
917 parms.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; | 914 parms.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
918 // Note that we are provided "frames per second" but V4L2 expects "time per | 915 // Note that we are provided "frames per second" but V4L2 expects "time per |
919 // frame"; hence we provide the reciprocal of the framerate here. | 916 // frame"; hence we provide the reciprocal of the framerate here. |
920 parms.parm.output.timeperframe.numerator = 1; | 917 parms.parm.output.timeperframe.numerator = 1; |
921 parms.parm.output.timeperframe.denominator = framerate; | 918 parms.parm.output.timeperframe.denominator = framerate; |
922 IOCTL_OR_ERROR_RETURN(VIDIOC_S_PARM, &parms); | 919 IOCTL_OR_ERROR_RETURN(VIDIOC_S_PARM, &parms); |
923 } | 920 } |
924 | 921 |
925 bool V4L2VideoEncodeAccelerator::SetOutputFormat( | 922 bool V4L2VideoEncodeAccelerator::SetOutputFormat( |
926 media::VideoCodecProfile output_profile) { | 923 VideoCodecProfile output_profile) { |
927 DCHECK(child_task_runner_->BelongsToCurrentThread()); | 924 DCHECK(child_task_runner_->BelongsToCurrentThread()); |
928 DCHECK(!input_streamon_); | 925 DCHECK(!input_streamon_); |
929 DCHECK(!output_streamon_); | 926 DCHECK(!output_streamon_); |
930 | 927 |
931 output_format_fourcc_ = | 928 output_format_fourcc_ = |
932 V4L2Device::VideoCodecProfileToV4L2PixFmt(output_profile, false); | 929 V4L2Device::VideoCodecProfileToV4L2PixFmt(output_profile, false); |
933 if (!output_format_fourcc_) { | 930 if (!output_format_fourcc_) { |
934 LOG(ERROR) << "Initialize(): invalid output_profile=" << output_profile; | 931 LOG(ERROR) << "Initialize(): invalid output_profile=" << output_profile; |
935 return false; | 932 return false; |
936 } | 933 } |
(...skipping 13 matching lines...)
950 | 947 |
951 // Device might have adjusted the required output size. | 948 // Device might have adjusted the required output size. |
952 size_t adjusted_output_buffer_size = | 949 size_t adjusted_output_buffer_size = |
953 base::checked_cast<size_t>(format.fmt.pix_mp.plane_fmt[0].sizeimage); | 950 base::checked_cast<size_t>(format.fmt.pix_mp.plane_fmt[0].sizeimage); |
954 output_buffer_byte_size_ = adjusted_output_buffer_size; | 951 output_buffer_byte_size_ = adjusted_output_buffer_size; |
955 | 952 |
956 return true; | 953 return true; |
957 } | 954 } |
958 | 955 |
959 bool V4L2VideoEncodeAccelerator::NegotiateInputFormat( | 956 bool V4L2VideoEncodeAccelerator::NegotiateInputFormat( |
960 media::VideoPixelFormat input_format) { | 957 VideoPixelFormat input_format) { |
961 DVLOG(3) << "NegotiateInputFormat()"; | 958 DVLOG(3) << "NegotiateInputFormat()"; |
962 DCHECK(child_task_runner_->BelongsToCurrentThread()); | 959 DCHECK(child_task_runner_->BelongsToCurrentThread()); |
963 DCHECK(!input_streamon_); | 960 DCHECK(!input_streamon_); |
964 DCHECK(!output_streamon_); | 961 DCHECK(!output_streamon_); |
965 | 962 |
966 device_input_format_ = media::PIXEL_FORMAT_UNKNOWN; | 963 device_input_format_ = PIXEL_FORMAT_UNKNOWN; |
967 input_planes_count_ = 0; | 964 input_planes_count_ = 0; |
968 | 965 |
969 uint32_t input_format_fourcc = | 966 uint32_t input_format_fourcc = |
970 V4L2Device::VideoPixelFormatToV4L2PixFmt(input_format); | 967 V4L2Device::VideoPixelFormatToV4L2PixFmt(input_format); |
971 if (!input_format_fourcc) { | 968 if (!input_format_fourcc) { |
972 LOG(ERROR) << "Unsupported input format" << input_format_fourcc; | 969 LOG(ERROR) << "Unsupported input format" << input_format_fourcc; |
973 return false; | 970 return false; |
974 } | 971 } |
975 | 972 |
976 size_t input_planes_count = media::VideoFrame::NumPlanes(input_format); | 973 size_t input_planes_count = VideoFrame::NumPlanes(input_format); |
977 DCHECK_LE(input_planes_count, static_cast<size_t>(VIDEO_MAX_PLANES)); | 974 DCHECK_LE(input_planes_count, static_cast<size_t>(VIDEO_MAX_PLANES)); |
978 | 975 |
979 // First see if the device can use the provided input_format directly. | 976 // First see if the device can use the provided input_format directly. |
980 struct v4l2_format format; | 977 struct v4l2_format format; |
981 memset(&format, 0, sizeof(format)); | 978 memset(&format, 0, sizeof(format)); |
982 format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; | 979 format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
983 format.fmt.pix_mp.width = visible_size_.width(); | 980 format.fmt.pix_mp.width = visible_size_.width(); |
984 format.fmt.pix_mp.height = visible_size_.height(); | 981 format.fmt.pix_mp.height = visible_size_.height(); |
985 format.fmt.pix_mp.pixelformat = input_format_fourcc; | 982 format.fmt.pix_mp.pixelformat = input_format_fourcc; |
986 format.fmt.pix_mp.num_planes = input_planes_count; | 983 format.fmt.pix_mp.num_planes = input_planes_count; |
987 if (device_->Ioctl(VIDIOC_S_FMT, &format) != 0) { | 984 if (device_->Ioctl(VIDIOC_S_FMT, &format) != 0) { |
988 // Error or format unsupported by device, try to negotiate a fallback. | 985 // Error or format unsupported by device, try to negotiate a fallback. |
989 input_format_fourcc = device_->PreferredInputFormat(); | 986 input_format_fourcc = device_->PreferredInputFormat(); |
990 input_format = | 987 input_format = |
991 V4L2Device::V4L2PixFmtToVideoPixelFormat(input_format_fourcc); | 988 V4L2Device::V4L2PixFmtToVideoPixelFormat(input_format_fourcc); |
992 if (input_format == media::PIXEL_FORMAT_UNKNOWN) { | 989 if (input_format == PIXEL_FORMAT_UNKNOWN) { |
993 LOG(ERROR) << "Unsupported input format" << input_format_fourcc; | 990 LOG(ERROR) << "Unsupported input format" << input_format_fourcc; |
994 return false; | 991 return false; |
995 } | 992 } |
996 | 993 |
997 input_planes_count = media::VideoFrame::NumPlanes(input_format); | 994 input_planes_count = VideoFrame::NumPlanes(input_format); |
998 DCHECK_LE(input_planes_count, static_cast<size_t>(VIDEO_MAX_PLANES)); | 995 DCHECK_LE(input_planes_count, static_cast<size_t>(VIDEO_MAX_PLANES)); |
999 | 996 |
1000 // Device might have adjusted parameters, reset them along with the format. | 997 // Device might have adjusted parameters, reset them along with the format. |
1001 memset(&format, 0, sizeof(format)); | 998 memset(&format, 0, sizeof(format)); |
1002 format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; | 999 format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
1003 format.fmt.pix_mp.width = visible_size_.width(); | 1000 format.fmt.pix_mp.width = visible_size_.width(); |
1004 format.fmt.pix_mp.height = visible_size_.height(); | 1001 format.fmt.pix_mp.height = visible_size_.height(); |
1005 format.fmt.pix_mp.pixelformat = input_format_fourcc; | 1002 format.fmt.pix_mp.pixelformat = input_format_fourcc; |
1006 format.fmt.pix_mp.num_planes = input_planes_count; | 1003 format.fmt.pix_mp.num_planes = input_planes_count; |
1007 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format); | 1004 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format); |
1008 DCHECK_EQ(format.fmt.pix_mp.num_planes, input_planes_count); | 1005 DCHECK_EQ(format.fmt.pix_mp.num_planes, input_planes_count); |
1009 } | 1006 } |
1010 | 1007 |
1011 // Take device-adjusted sizes for allocated size. If the size is adjusted | 1008 // Take device-adjusted sizes for allocated size. If the size is adjusted |
1012 // down, it means the input is too big and the hardware does not support it. | 1009 // down, it means the input is too big and the hardware does not support it. |
1013 input_allocated_size_ = V4L2Device::CodedSizeFromV4L2Format(format); | 1010 input_allocated_size_ = V4L2Device::CodedSizeFromV4L2Format(format); |
1014 if (!gfx::Rect(input_allocated_size_).Contains(gfx::Rect(visible_size_))) { | 1011 if (!gfx::Rect(input_allocated_size_).Contains(gfx::Rect(visible_size_))) { |
1015 DVLOG(1) << "Input size too big " << visible_size_.ToString() | 1012 DVLOG(1) << "Input size too big " << visible_size_.ToString() |
1016 << ", adjusted to " << input_allocated_size_.ToString(); | 1013 << ", adjusted to " << input_allocated_size_.ToString(); |
1017 return false; | 1014 return false; |
1018 } | 1015 } |
1019 | 1016 |
1020 device_input_format_ = input_format; | 1017 device_input_format_ = input_format; |
1021 input_planes_count_ = input_planes_count; | 1018 input_planes_count_ = input_planes_count; |
1022 return true; | 1019 return true; |
1023 } | 1020 } |
1024 | 1021 |
1025 bool V4L2VideoEncodeAccelerator::SetFormats( | 1022 bool V4L2VideoEncodeAccelerator::SetFormats(VideoPixelFormat input_format, |
1026 media::VideoPixelFormat input_format, | 1023 VideoCodecProfile output_profile) { |
1027 media::VideoCodecProfile output_profile) { | |
1028 DVLOG(3) << "SetFormats()"; | 1024 DVLOG(3) << "SetFormats()"; |
1029 DCHECK(child_task_runner_->BelongsToCurrentThread()); | 1025 DCHECK(child_task_runner_->BelongsToCurrentThread()); |
1030 DCHECK(!input_streamon_); | 1026 DCHECK(!input_streamon_); |
1031 DCHECK(!output_streamon_); | 1027 DCHECK(!output_streamon_); |
1032 | 1028 |
1033 if (!SetOutputFormat(output_profile)) | 1029 if (!SetOutputFormat(output_profile)) |
1034 return false; | 1030 return false; |
1035 | 1031 |
1036 if (!NegotiateInputFormat(input_format)) | 1032 if (!NegotiateInputFormat(input_format)) |
1037 return false; | 1033 return false; |
(...skipping 218 matching lines...)
1256 reqbufs.count = 0; | 1252 reqbufs.count = 0; |
1257 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | 1253 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
1258 reqbufs.memory = V4L2_MEMORY_MMAP; | 1254 reqbufs.memory = V4L2_MEMORY_MMAP; |
1259 IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs); | 1255 IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs); |
1260 | 1256 |
1261 output_buffer_map_.clear(); | 1257 output_buffer_map_.clear(); |
1262 free_output_buffers_.clear(); | 1258 free_output_buffers_.clear(); |
1263 } | 1259 } |
1264 | 1260 |
1265 } // namespace media | 1261 } // namespace media |
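Most of the textual churn in this diff is the removal of now-redundant media:: qualifications. A minimal, hypothetical sketch (not taken from this CL; the type and enum below are simplified stand-ins) of why the qualifier can be dropped once the code sits inside namespace media:

// Hypothetical illustration: names declared in a namespace need no
// qualification when referenced from code enclosed in that same namespace.
namespace media {

enum VideoPixelFormat { PIXEL_FORMAT_UNKNOWN, PIXEL_FORMAT_I420 };

class Encoder {
 public:
  Encoder() : format_(PIXEL_FORMAT_UNKNOWN) {}  // was: media::PIXEL_FORMAT_UNKNOWN

 private:
  VideoPixelFormat format_;  // was: media::VideoPixelFormat
};

}  // namespace media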