1 // Copyright (c) 2013 The Chromium Authors. All rights reserved. | |
2 // Use of this source code is governed by a BSD-style license that can be | |
3 // found in the LICENSE file. | |
4 | |
5 #include "content/common/gpu/media/exynos_video_encode_accelerator.h" | |
6 | |
7 #include <fcntl.h> | |
8 #include <linux/videodev2.h> | |
9 #include <poll.h> | |
10 #include <sys/eventfd.h> | |
11 #include <sys/ioctl.h> | |
12 | |
13 #include "base/callback.h" | |
14 #include "base/debug/trace_event.h" | |
15 #include "base/message_loop/message_loop_proxy.h" | |
16 #include "base/posix/eintr_wrapper.h" | |
17 #include "media/base/bitstream_buffer.h" | |
18 | |
19 #define NOTIFY_ERROR(x) \ | |
20 do { \ | |
21 SetEncoderState(kError); \ | |
22 DLOG(ERROR) << "calling NotifyError(): " << x; \ | |
23 NotifyError(x); \ | |
24 } while (0) | |
25 | |
26 #define IOCTL_OR_ERROR_RETURN(fd, type, arg) \ | |
27 do { \ | |
28 if (HANDLE_EINTR(ioctl(fd, type, arg)) != 0) { \ | |
29 DPLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \ | |
30 NOTIFY_ERROR(kPlatformFailureError); \ | |
31 return; \ | |
32 } \ | |
33 } while (0) | |
34 | |
35 #define IOCTL_OR_ERROR_RETURN_FALSE(fd, type, arg) \ | |
36 do { \ | |
37 if (HANDLE_EINTR(ioctl(fd, type, arg)) != 0) { \ | |
38 DPLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \ | |
39 NOTIFY_ERROR(kPlatformFailureError); \ | |
40 return false; \ | |
41 } \ | |
42 } while (0) | |
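// Usage note: these macros wrap an ioctl() in EINTR-retrying HANDLE_EINTR(),
// log the failing request on error, flag kPlatformFailureError via
// NOTIFY_ERROR(), and return (or return false) from the calling function,
// e.g.:
//   IOCTL_OR_ERROR_RETURN(gsc_fd_, VIDIOC_QUERYCAP, &caps);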
43 | |
44 namespace content { | |
45 | |
46 namespace { | |
47 | |
48 const char kExynosGscDevice[] = "/dev/gsc1"; | |
49 const char kExynosMfcDevice[] = "/dev/mfc-enc"; | |
50 | |
51 } // anonymous namespace | |
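// Encoding is a two-stage pipeline: input frames are queued to the GSC
// (G-Scaler) device, which converts the RGB32/I420 input into the NV12M
// layout the encoder expects, and the converted DMABUFs are then queued to
// the MFC (Multi Format Codec) device, which emits the H.264 bitstream.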
52 | |
53 struct ExynosVideoEncodeAccelerator::BitstreamBufferRef { | |
54 BitstreamBufferRef(int32 id, scoped_ptr<base::SharedMemory> shm, size_t size) | |
55 : id(id), shm(shm.Pass()), size(size) {} | |
56 const int32 id; | |
57 const scoped_ptr<base::SharedMemory> shm; | |
58 const size_t size; | |
59 }; | |
60 | |
61 | |
62 ExynosVideoEncodeAccelerator::GscInputRecord::GscInputRecord() | |
63 : at_device(false) {} | |
64 | |
65 ExynosVideoEncodeAccelerator::GscOutputRecord::GscOutputRecord() | |
66 : at_device(false), mfc_input(-1) {} | |
67 | |
68 ExynosVideoEncodeAccelerator::MfcInputRecord::MfcInputRecord() | |
69 : at_device(false) { | |
70 fd[0] = fd[1] = -1; | |
71 } | |
72 | |
73 ExynosVideoEncodeAccelerator::MfcOutputRecord::MfcOutputRecord() | |
74 : at_device(false) {} | |
75 | |
76 ExynosVideoEncodeAccelerator::ExynosVideoEncodeAccelerator( | |
77 media::VideoEncodeAccelerator::Client* client) | |
78 : child_message_loop_proxy_(base::MessageLoopProxy::current()), | |
79 weak_this_ptr_factory_(this), | |
80 weak_this_(weak_this_ptr_factory_.GetWeakPtr()), | |
81 client_ptr_factory_(client), | |
82 client_(client_ptr_factory_.GetWeakPtr()), | |
83 encoder_thread_("ExynosEncoderThread"), | |
84 encoder_state_(kUninitialized), | |
85 output_buffer_byte_size_(0), | |
86 stream_header_size_(0), | |
87 input_format_fourcc_(0), | |
88 output_format_fourcc_(0), | |
89 gsc_fd_(-1), | |
90 gsc_input_streamon_(false), | |
91 gsc_input_buffer_queued_count_(0), | |
92 gsc_output_streamon_(false), | |
93 gsc_output_buffer_queued_count_(0), | |
94 mfc_fd_(-1), | |
95 mfc_input_streamon_(false), | |
96 mfc_input_buffer_queued_count_(0), | |
97 mfc_output_streamon_(false), | |
98 mfc_output_buffer_queued_count_(0), | |
99 device_poll_thread_("ExynosDevicePollThread"), | |
100 device_poll_interrupt_fd_(-1) { | |
101 DCHECK(client_); | |
102 } | |
103 | |
104 ExynosVideoEncodeAccelerator::~ExynosVideoEncodeAccelerator() { | |
105 DCHECK(!encoder_thread_.IsRunning()); | |
106 DCHECK(!device_poll_thread_.IsRunning()); | |
107 | |
108 if (device_poll_interrupt_fd_ != -1) { | |
109 HANDLE_EINTR(close(device_poll_interrupt_fd_)); | |
110 device_poll_interrupt_fd_ = -1; | |
111 } | |
112 if (mfc_fd_ != -1) { | |
113 DestroyMfcInputBuffers(); | |
114 DestroyMfcOutputBuffers(); | |
115 HANDLE_EINTR(close(mfc_fd_)); | |
116 mfc_fd_ = -1; | |
117 } | |
118 if (gsc_fd_ != -1) { | |
119 DestroyGscInputBuffers(); | |
120 DestroyGscOutputBuffers(); | |
121 HANDLE_EINTR(close(gsc_fd_)); | |
122 gsc_fd_ = -1; | |
123 } | |
124 } | |
125 | |
126 void ExynosVideoEncodeAccelerator::Initialize( | |
127 media::VideoFrame::Format input_format, | |
128 const gfx::Size& input_visible_size, | |
129 media::VideoCodecProfile output_profile, | |
130 int32 initial_bitrate) { | |
131 DVLOG(3) << "Initialize(): input_format=" << input_format | |
132 << ", input_visible_size=" << input_visible_size.ToString() | |
133 << ", output_profile=" << output_profile | |
134 << ", initial_bitrate=" << initial_bitrate; | |
135 | |
136 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); | |
137 DCHECK_EQ(encoder_state_, kUninitialized); | |
138 | |
139 input_visible_size_ = input_visible_size; | |
140 input_allocated_size_.SetSize((input_visible_size_.width() + 0xF) & ~0xF, | |
141 (input_visible_size_.height() + 0xF) & ~0xF); | |
142 converted_visible_size_.SetSize((input_visible_size_.width() + 0x1) & ~0x1, | |
143 (input_visible_size_.height() + 0x1) & ~0x1); | |
144 converted_allocated_size_.SetSize( | |
145 (converted_visible_size_.width() + 0xF) & ~0xF, | |
146 (converted_visible_size_.height() + 0xF) & ~0xF); | |
147 output_visible_size_ = converted_visible_size_; | |
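  // Worked example of the alignment math above: a 1366x768 visible size gives
  // input_allocated_size_ of 1376x768 ((1366 + 0xF) & ~0xF == 1376),
  // converted_visible_size_ of 1366x768 (both dimensions already even), and
  // converted_allocated_size_ of 1376x768.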
148 | |
149 switch (input_format) { | |
150 case media::VideoFrame::RGB32: | |
151 input_format_fourcc_ = V4L2_PIX_FMT_RGB32; | |
152 break; | |
153 case media::VideoFrame::I420: | |
154 input_format_fourcc_ = V4L2_PIX_FMT_YUV420M; | |
155 break; | |
156 default: | |
157 NOTIFY_ERROR(kInvalidArgumentError); | |
158 return; | |
159 } | |
160 | |
161 if (output_profile >= media::H264PROFILE_MIN && | |
162 output_profile <= media::H264PROFILE_MAX) { | |
163 output_format_fourcc_ = V4L2_PIX_FMT_H264; | |
164 } else { | |
165 NOTIFY_ERROR(kInvalidArgumentError); | |
166 return; | |
167 } | |
168 | |
169 // Open the video converter device. | |
170 DVLOG(2) << "Initialize(): opening GSC device: " << kExynosGscDevice; | |
171 gsc_fd_ = | |
172 HANDLE_EINTR(open(kExynosGscDevice, O_RDWR | O_NONBLOCK | O_CLOEXEC)); | |
173 if (gsc_fd_ == -1) { | |
174 DPLOG(ERROR) << "Initialize(): could not open GSC device: " | |
175 << kExynosGscDevice; | |
176 NOTIFY_ERROR(kPlatformFailureError); | |
177 return; | |
178 } | |
179 | |
180 // Capabilities check. | |
181 struct v4l2_capability caps; | |
182 const __u32 kCapsRequired = V4L2_CAP_VIDEO_CAPTURE_MPLANE | | |
183 V4L2_CAP_VIDEO_OUTPUT_MPLANE | V4L2_CAP_STREAMING; | |
184 IOCTL_OR_ERROR_RETURN(gsc_fd_, VIDIOC_QUERYCAP, &caps); | |
185 if ((caps.capabilities & kCapsRequired) != kCapsRequired) { | |
186 DLOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP: " | |
187 "caps check failed: 0x" << std::hex << caps.capabilities; | |
188 NOTIFY_ERROR(kPlatformFailureError); | |
189 return; | |
190 } | |
191 | |
192 // Open the video encoder device. | |
193 mfc_fd_ = | |
194 HANDLE_EINTR(open(kExynosMfcDevice, O_RDWR | O_NONBLOCK | O_CLOEXEC)); | |
195 if (mfc_fd_ == -1) { | |
196 DPLOG(ERROR) << "Initialize(): could not open MFC device: " | |
197 << kExynosMfcDevice; | |
198 NOTIFY_ERROR(kPlatformFailureError); | |
199 return; | |
200 } | |
201 | |
202 IOCTL_OR_ERROR_RETURN(mfc_fd_, VIDIOC_QUERYCAP, &caps); | |
203 if ((caps.capabilities & kCapsRequired) != kCapsRequired) { | |
204 DLOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP: " | |
205 "caps check failed: 0x" << std::hex << caps.capabilities; | |
206 NOTIFY_ERROR(kPlatformFailureError); | |
207 return; | |
208 } | |
209 | |
210 // Create the interrupt fd. | |
211 DCHECK_EQ(device_poll_interrupt_fd_, -1); | |
212 device_poll_interrupt_fd_ = eventfd(0, EFD_NONBLOCK | EFD_CLOEXEC); | |
213 if (device_poll_interrupt_fd_ == -1) { | |
214 DPLOG(ERROR) << "Initialize(): eventfd() failed"; | |
215 NOTIFY_ERROR(kPlatformFailureError); | |
216 return; | |
217 } | |
218 | |
219 DVLOG(3) | |
220 << "Initialize(): input_visible_size_=" << input_visible_size_.ToString() | |
221 << ", input_allocated_size_=" << input_allocated_size_.ToString() | |
222 << ", converted_visible_size_=" << converted_visible_size_.ToString() | |
223 << ", converted_allocated_size_=" << converted_allocated_size_.ToString() | |
224 << ", output_visible_size_=" << output_visible_size_.ToString(); | |
225 | |
226 if (!CreateGscInputBuffers() || !CreateGscOutputBuffers()) | |
227 return; | |
228 | |
229 // MFC setup for encoding is rather particular about ordering: | |
230 // | |
231 // 1. Format (VIDIOC_S_FMT) set first on OUTPUT and CAPTURE queues. | |
232 // 2. VIDIOC_REQBUFS, VIDIOC_QBUF, and VIDIOC_STREAMON on CAPTURE queue. | |
233 // 3. VIDIOC_REQBUFS (and later VIDIOC_QBUF and VIDIOC_STREAMON) on OUTPUT | |
234 // queue. | |
235 // | |
236 // Unfortunately, we cannot do (3) in Initialize() here since we have no | |
237 // buffers to QBUF in step (2) until the client has provided output buffers | |
238 // through UseOutputBitstreamBuffer(). So, we just do (1), and the | |
239 // VIDIOC_REQBUFS part of (2) here. The rest is done the first time we get | |
240 // a UseOutputBitstreamBuffer() callback. | |
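  // In terms of this class, that works out to roughly:
  //   (1) SetMfcFormats(), below.
  //   (2) CreateMfcOutputBuffers(), below; the CAPTURE-queue VIDIOC_QBUF and
  //       VIDIOC_STREAMON happen in EnqueueMfc() once output buffers arrive.
  //   (3) CreateMfcInputBuffers() and StartDevicePoll() on the first
  //       UseOutputBitstreamBuffer(); the OUTPUT-queue VIDIOC_QBUF and
  //       VIDIOC_STREAMON happen in EnqueueMfc() as converted frames become
  //       ready.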
241 | |
242 if (!SetMfcFormats()) | |
243 return; | |
244 | |
245 RequestEncodingParametersChangeTask(initial_bitrate, 30, 1); | |
246 | |
247 // VIDIOC_REQBUFS on CAPTURE queue. | |
248 if (!CreateMfcOutputBuffers()) | |
Pawel Osciak
2013/08/09 13:40:02
Do we need to do this here? I'd do it together wit
sheu
2013/08/12 20:23:59
I'm trying to frontload as much init as I can in I
| |
249 return; | |
250 | |
251 | |
252 if (!encoder_thread_.Start()) { | |
253 DLOG(ERROR) << "Initialize(): encoder thread failed to start"; | |
254 NOTIFY_ERROR(kPlatformFailureError); | |
255 return; | |
256 } | |
257 | |
258 SetEncoderState(kInitialized); | |
259 | |
260 child_message_loop_proxy_->PostTask( | |
261 FROM_HERE, base::Bind(&Client::NotifyInitializeDone, client_)); | |
262 | |
263 child_message_loop_proxy_->PostTask( | |
264 FROM_HERE, | |
265 base::Bind(&Client::RequireBitstreamBuffers, | |
266 client_, | |
267 gsc_input_buffer_map_.size(), | |
268 input_allocated_size_, | |
269 output_buffer_byte_size_)); | |
270 } | |
271 | |
272 void ExynosVideoEncodeAccelerator::Encode( | |
273 const scoped_refptr<media::VideoFrame>& frame, | |
274 bool force_keyframe) { | |
275 DVLOG(3) << "Encode(): force_keyframe=" << force_keyframe; | |
276 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); | |
277 | |
278 encoder_thread_.message_loop()->PostTask( | |
279 FROM_HERE, | |
280 base::Bind(&ExynosVideoEncodeAccelerator::EncodeTask, | |
281 base::Unretained(this), | |
282 frame, | |
283 force_keyframe)); | |
284 } | |
285 | |
286 void ExynosVideoEncodeAccelerator::UseOutputBitstreamBuffer( | |
287 const media::BitstreamBuffer& buffer) { | |
288 DVLOG(3) << "UseOutputBitstreamBuffer(): id=" << buffer.id(); | |
289 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); | |
290 | |
291 if (buffer.size() < output_buffer_byte_size_) { | |
292 NOTIFY_ERROR(kInvalidArgumentError); | |
293 return; | |
294 } | |
295 | |
296 scoped_ptr<base::SharedMemory> shm( | |
297 new base::SharedMemory(buffer.handle(), false)); | |
298 if (!shm->Map(buffer.size())) { | |
299 NOTIFY_ERROR(kPlatformFailureError); | |
300 return; | |
301 } | |
302 | |
303 scoped_ptr<BitstreamBufferRef> buffer_ref( | |
304 new BitstreamBufferRef(buffer.id(), shm.Pass(), buffer.size())); | |
305 encoder_thread_.message_loop()->PostTask( | |
306 FROM_HERE, | |
307 base::Bind(&ExynosVideoEncodeAccelerator::UseOutputBitstreamBufferTask, | |
308 base::Unretained(this), | |
309 base::Passed(&buffer_ref))); | |
310 } | |
311 | |
312 void ExynosVideoEncodeAccelerator::RequestEncodingParametersChange( | |
313 int32 bitrate, uint32 framerate_num, uint32 framerate_denom) { | |
Pawel Osciak
2013/08/09 13:40:02
DCHECK(child_message_loop_proxy_->BelongsToCurrent
sheu
2013/08/12 20:23:59
Done.
| |
314 DVLOG(3) << "RequestEncodingParametersChange(): bitrate=" << bitrate | |
315 << ", framerate=" << framerate_num << "/" << framerate_denom; | |
316 encoder_thread_.message_loop()->PostTask( | |
317 FROM_HERE, | |
318 base::Bind( | |
319 &ExynosVideoEncodeAccelerator::RequestEncodingParametersChangeTask, | |
320 base::Unretained(this), | |
321 bitrate, | |
322 framerate_num, | |
323 framerate_denom)); | |
324 } | |
325 | |
326 void ExynosVideoEncodeAccelerator::Destroy() { | |
327 DVLOG(3) << "Destroy()"; | |
328 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); | |
329 | |
330 // We're destroying; cancel all callbacks. | |
331 client_ptr_factory_.InvalidateWeakPtrs(); | |
332 | |
333 // If the encoder thread is running, destroy using posted task. | |
334 if (encoder_thread_.IsRunning()) { | |
335 encoder_thread_.message_loop()->PostTask( | |
336 FROM_HERE, | |
337 base::Bind(&ExynosVideoEncodeAccelerator::DestroyTask, | |
338 base::Unretained(this))); | |
339 // DestroyTask() will cause the encoder_thread_ to flush all tasks. | |
340 encoder_thread_.Stop(); | |
341 } else { | |
342 // Otherwise, call the destroy task directly. | |
343 DestroyTask(); | |
344 } | |
345 | |
346 // Set to kError state just in case. | |
347 SetEncoderState(kError); | |
348 | |
349 delete this; | |
350 } | |
351 | |
352 // static | |
353 std::vector<media::VideoEncodeAccelerator::SupportedProfile> | |
354 ExynosVideoEncodeAccelerator::GetSupportedProfiles() { | |
355 std::vector<SupportedProfile> profiles(1); | |
356 SupportedProfile& profile = profiles[0]; | |
357 profile.profile = media::H264PROFILE_MAIN; | |
358 profile.max_resolution.SetSize(1920, 1088); | |
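  // 1088 rather than 1080: presumably because the MFC codes in 16-pixel
  // macroblock rows, so 1080 visible rows occupy 1088 (68 * 16) coded rows.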
359 profile.max_framerate.numerator = 30; | |
360 profile.max_framerate.denominator = 1; | |
361 return profiles; | |
362 } | |
363 | |
364 void ExynosVideoEncodeAccelerator::EncodeTask( | |
365 const scoped_refptr<media::VideoFrame>& frame, bool force_keyframe) { | |
366 DVLOG(3) << "EncodeTask(): force_keyframe=" << force_keyframe; | |
367 DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current()); | |
368 DCHECK_NE(encoder_state_, kUninitialized); | |
369 | |
370 if (encoder_state_ == kError) { | |
371 DVLOG(2) << "EncodeTask(): early out: kError state"; | |
372 return; | |
373 } | |
374 | |
375 encoder_input_queue_.push_back(frame); | |
376 EnqueueGsc(); | |
377 | |
378 if (force_keyframe) { | |
Pawel Osciak
2013/08/09 13:40:02
Perhaps comment that we can't really control the t
sheu
2013/08/12 20:23:59
We kind of can now, since I put in my dynamic fram
| |
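    // Ask the MFC to code the next frame it consumes as an I-frame via the
    // V4L2 extended-control interface. Note this applies to the next frame
    // the MFC encodes, which may not be exactly this frame if inputs are
    // already queued ahead of it.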
379 struct v4l2_ext_control ctrls[1]; | |
380 struct v4l2_ext_controls control; | |
381 memset(&ctrls, 0, sizeof(ctrls)); | |
382 memset(&control, 0, sizeof(control)); | |
383 ctrls[0].id = V4L2_CID_MPEG_MFC51_VIDEO_FORCE_FRAME_TYPE; | |
384 ctrls[0].value = V4L2_MPEG_MFC51_VIDEO_FORCE_FRAME_TYPE_I_FRAME; | |
385 control.ctrl_class = V4L2_CTRL_CLASS_MPEG; | |
386 control.count = 1; | |
387 control.controls = ctrls; | |
388 IOCTL_OR_ERROR_RETURN(mfc_fd_, VIDIOC_S_EXT_CTRLS, &control); | |
389 } | |
390 } | |
391 | |
392 void ExynosVideoEncodeAccelerator::UseOutputBitstreamBufferTask( | |
393 scoped_ptr<BitstreamBufferRef> buffer_ref) { | |
394 DVLOG(3) << "UseOutputBitstreamBufferTask(): id=" << buffer_ref->id; | |
395 DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current()); | |
396 | |
397 encoder_output_queue_.push_back( | |
398 linked_ptr<BitstreamBufferRef>(buffer_ref.release())); | |
399 EnqueueMfc(); | |
400 | |
401 if (encoder_state_ == kInitialized) { | |
402 // Finish setting up our MFC OUTPUT queue. See: Initialize(). | |
403 // VIDIOC_REQBUFS on OUTPUT queue. | |
404 if (!CreateMfcInputBuffers()) | |
405 return; | |
406 if (!StartDevicePoll()) | |
407 return; | |
408 encoder_state_ = kEncoding; | |
409 } | |
410 } | |
411 | |
412 void ExynosVideoEncodeAccelerator::DestroyTask() { | |
413 DVLOG(3) << "DestroyTask()"; | |
414 | |
415 // DestroyTask() should run regardless of encoder_state_. | |
416 | |
417 // Stop streaming and the device_poll_thread_. | |
418 StopDevicePoll(); | |
419 | |
420 // Set our state to kError. Just in case. | |
421 encoder_state_ = kError; | |
422 } | |
423 | |
424 void ExynosVideoEncodeAccelerator::ServiceDeviceTask() { | |
425 DVLOG(3) << "ServiceDeviceTask()"; | |
426 DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current()); | |
427 DCHECK_NE(encoder_state_, kUninitialized); | |
428 DCHECK_NE(encoder_state_, kInitialized); | |
429 | |
430 if (encoder_state_ == kError) { | |
431 DVLOG(2) << "ServiceDeviceTask(): early out: kError state"; | |
432 return; | |
433 } | |
434 | |
435 DequeueGsc(); | |
436 DequeueMfc(); | |
437 EnqueueGsc(); | |
438 EnqueueMfc(); | |
439 | |
440 // Clear the interrupt fd. | |
441 if (!ClearDevicePollInterrupt()) | |
442 return; | |
443 | |
444 unsigned int poll_fds = 0; | |
445 // Add GSC fd, if we should poll on it. | |
446 // GSC has to wait until both input and output buffers are queued. | |
447 if (gsc_input_buffer_queued_count_ > 0 && gsc_output_buffer_queued_count_ > 0) | |
448 poll_fds |= kPollGsc; | |
449 // Add MFC fd, if we should poll on it. | |
450 // MFC can be polled as soon as either input or output buffers are queued. | |
451 if (mfc_input_buffer_queued_count_ + mfc_output_buffer_queued_count_ > 0) | |
452 poll_fds |= kPollMfc; | |
453 | |
454 // ServiceDeviceTask() should only ever be scheduled from DevicePollTask(), | |
455 // so either: | |
456 // * device_poll_thread_ is running normally, or | |
457 // * device_poll_thread_ scheduled us, but then a DestroyTask() shut it | |
458 // down, in which case we're in kError state and should have early-outed | |
459 // already. | |
460 DCHECK(device_poll_thread_.message_loop()); | |
461 // Queue the DevicePollTask() now. | |
462 device_poll_thread_.message_loop()->PostTask( | |
463 FROM_HERE, | |
464 base::Bind(&ExynosVideoEncodeAccelerator::DevicePollTask, | |
465 base::Unretained(this), | |
466 poll_fds)); | |
467 | |
468 DVLOG(2) << "ServiceDeviceTask(): buffer counts: ENC[" | |
469 << encoder_input_queue_.size() << "] => GSC[" | |
470 << gsc_free_input_buffers_.size() << "+" | |
471 << gsc_input_buffer_queued_count_ << "/" | |
472 << gsc_input_buffer_map_.size() << "->" | |
473 << gsc_free_output_buffers_.size() << "+" | |
474 << gsc_output_buffer_queued_count_ << "/" | |
475 << gsc_output_buffer_map_.size() << "] => " | |
476 << mfc_ready_input_buffers_.size() << " => MFC[" | |
477 << mfc_free_input_buffers_.size() << "+" | |
478 << mfc_input_buffer_queued_count_ << "/" | |
479 << mfc_input_buffer_map_.size() << "->" | |
480 << mfc_free_output_buffers_.size() << "+" | |
481 << mfc_output_buffer_queued_count_ << "/" | |
482 << mfc_output_buffer_map_.size() << "] => OUT[" | |
483 << encoder_output_queue_.size() << "]"; | |
484 } | |
485 | |
486 void ExynosVideoEncodeAccelerator::EnqueueGsc() { | |
487 DVLOG(3) << "EnqueueGsc()"; | |
488 DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current()); | |
489 | |
490 const int old_gsc_inputs_queued = gsc_input_buffer_queued_count_; | |
491 while (!encoder_input_queue_.empty() && !gsc_free_input_buffers_.empty()) { | |
492 if (!EnqueueGscInputRecord()) | |
493 return; | |
494 } | |
495 if (old_gsc_inputs_queued == 0 && gsc_input_buffer_queued_count_ != 0) { | |
496 // We started up a previously empty queue. | |
497 // Queue state changed; signal interrupt. | |
498 if (!SetDevicePollInterrupt()) | |
499 return; | |
500 // Start VIDIOC_STREAMON if we haven't yet. | |
501 if (!gsc_input_streamon_) { | |
502 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; | |
503 IOCTL_OR_ERROR_RETURN(gsc_fd_, VIDIOC_STREAMON, &type); | |
504 gsc_input_streamon_ = true; | |
505 } | |
506 } | |
507 | |
508 // Enqueue a GSC output, only if we need one. GSC output buffers write | |
509 // directly to MFC input buffers, so we'll have to check for free MFC input | |
510 // buffers as well. | |
511 if (gsc_input_buffer_queued_count_ != 0 && | |
512 gsc_output_buffer_queued_count_ == 0 && | |
513 !gsc_free_output_buffers_.empty() && !mfc_free_input_buffers_.empty()) { | |
514 const int old_gsc_outputs_queued = gsc_output_buffer_queued_count_; | |
515 if (!EnqueueGscOutputRecord()) | |
516 return; | |
517 if (old_gsc_outputs_queued == 0 && gsc_output_buffer_queued_count_ != 0) { | |
518 // We just started up a previously empty queue. | |
519 // Queue state changed; signal interrupt. | |
520 if (!SetDevicePollInterrupt()) | |
521 return; | |
522 // Start VIDIOC_STREAMON if we haven't yet. | |
523 if (!gsc_output_streamon_) { | |
524 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | |
525 IOCTL_OR_ERROR_RETURN(gsc_fd_, VIDIOC_STREAMON, &type); | |
526 gsc_output_streamon_ = true; | |
527 } | |
528 } | |
529 } | |
530 // Bug check: GSC is liable to race conditions if more than one buffer is | |
531 // simultaneously queued. | |
532 DCHECK_GE(1, gsc_output_buffer_queued_count_); | |
533 } | |
534 | |
535 void ExynosVideoEncodeAccelerator::DequeueGsc() { | |
536 DVLOG(3) << "DequeueGsc()"; | |
537 DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current()); | |
538 | |
539 // Dequeue completed GSC input (VIDEO_OUTPUT) buffers, and recycle to the free | |
540 // list. | |
541 struct v4l2_buffer dqbuf; | |
542 struct v4l2_plane planes[3]; | |
543 while (gsc_input_buffer_queued_count_ > 0) { | |
544 DCHECK(gsc_input_streamon_); | |
545 memset(&dqbuf, 0, sizeof(dqbuf)); | |
546 memset(&planes, 0, sizeof(planes)); | |
547 dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; | |
548 dqbuf.memory = V4L2_MEMORY_USERPTR; | |
549 dqbuf.m.planes = planes; | |
550 dqbuf.length = arraysize(planes); | |
551 if (ioctl(gsc_fd_, VIDIOC_DQBUF, &dqbuf) != 0) { | |
552 if (errno == EAGAIN) { | |
553 // EAGAIN if we're just out of buffers to dequeue. | |
554 break; | |
555 } | |
556 DPLOG(ERROR) << "DequeueGsc(): ioctl() failed: VIDIOC_DQBUF"; | |
557 NOTIFY_ERROR(kPlatformFailureError); | |
558 return; | |
559 } | |
560 GscInputRecord& input_record = gsc_input_buffer_map_[dqbuf.index]; | |
561 DCHECK(input_record.at_device); | |
562 DCHECK(input_record.frame.get()); | |
563 input_record.at_device = false; | |
564 input_record.frame = NULL; | |
565 gsc_free_input_buffers_.push_back(dqbuf.index); | |
566 gsc_input_buffer_queued_count_--; | |
567 } | |
568 | |
569 // Dequeue completed GSC output (VIDEO_CAPTURE) buffers, and recycle to the | |
570 // free list. Queue the corresponding MFC buffer to the GSC->MFC holding | |
571 // queue. | |
572 while (gsc_output_buffer_queued_count_ > 0) { | |
573 DCHECK(gsc_output_streamon_); | |
574 memset(&dqbuf, 0, sizeof(dqbuf)); | |
575 memset(&planes, 0, sizeof(planes)); | |
576 dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | |
577 dqbuf.memory = V4L2_MEMORY_DMABUF; | |
578 dqbuf.m.planes = planes; | |
579 dqbuf.length = 2; | |
580 if (ioctl(gsc_fd_, VIDIOC_DQBUF, &dqbuf) != 0) { | |
581 if (errno == EAGAIN) { | |
582 // EAGAIN if we're just out of buffers to dequeue. | |
583 break; | |
584 } | |
585 DPLOG(ERROR) << "DequeueGsc(): ioctl() failed: VIDIOC_DQBUF"; | |
586 NOTIFY_ERROR(kPlatformFailureError); | |
587 return; | |
588 } | |
589 GscOutputRecord& output_record = gsc_output_buffer_map_[dqbuf.index]; | |
590 DCHECK(output_record.at_device); | |
591 DCHECK(output_record.mfc_input != -1); | |
592 mfc_ready_input_buffers_.push_back(output_record.mfc_input); | |
593 output_record.at_device = false; | |
594 output_record.mfc_input = -1; | |
595 gsc_free_output_buffers_.push_back(dqbuf.index); | |
596 gsc_output_buffer_queued_count_--; | |
597 } | |
598 } | |
599 void ExynosVideoEncodeAccelerator::EnqueueMfc() { | |
600 DVLOG(3) << "EnqueueMfc()"; | |
Pawel Osciak
2013/08/09 13:40:02
DCHECK_EQ(encoder_thread_.message_loop(), base::Me
sheu
2013/08/12 20:23:59
Done.
| |
601 | |
602 // Enqueue all the MFC inputs we can. | |
603 const int old_mfc_inputs_queued = mfc_input_buffer_queued_count_; | |
604 while (!mfc_ready_input_buffers_.empty()) { | |
605 if (!EnqueueMfcInputRecord()) | |
606 return; | |
607 } | |
608 if (old_mfc_inputs_queued == 0 && mfc_input_buffer_queued_count_ != 0) { | |
609 // We just started up a previously empty queue. | |
610 // Queue state changed; signal interrupt. | |
611 if (!SetDevicePollInterrupt()) | |
612 return; | |
613 // Start VIDIOC_STREAMON if we haven't yet. | |
614 if (!mfc_input_streamon_) { | |
615 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; | |
616 IOCTL_OR_ERROR_RETURN(mfc_fd_, VIDIOC_STREAMON, &type); | |
617 mfc_input_streamon_ = true; | |
618 } | |
619 } | |
620 | |
621 // Enqueue all the MFC outputs we can. | |
622 const int old_mfc_outputs_queued = mfc_output_buffer_queued_count_; | |
623 while (!mfc_free_output_buffers_.empty() && !encoder_output_queue_.empty()) { | |
624 if (!EnqueueMfcOutputRecord()) | |
625 return; | |
626 } | |
627 if (old_mfc_outputs_queued == 0 && mfc_output_buffer_queued_count_ != 0) { | |
628 // We just started up a previously empty queue. | |
629 // Queue state changed; signal interrupt. | |
630 if (!SetDevicePollInterrupt()) | |
631 return; | |
632 // Start VIDIOC_STREAMON if we haven't yet. | |
633 if (!mfc_output_streamon_) { | |
634 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | |
635 IOCTL_OR_ERROR_RETURN(mfc_fd_, VIDIOC_STREAMON, &type); | |
636 mfc_output_streamon_ = true; | |
637 } | |
638 } | |
639 } | |
640 | |
641 void ExynosVideoEncodeAccelerator::DequeueMfc() { | |
642 DVLOG(3) << "DequeueMfc()"; | |
Pawel Osciak
2013/08/09 13:40:02
DCHECK_EQ(encoder_thread_.message_loop(), base::Me
sheu
2013/08/12 20:23:59
Done.
| |
643 | |
644 // Dequeue completed MFC input (VIDEO_OUTPUT) buffers, and recycle to the free | |
645 // list. | |
646 struct v4l2_buffer dqbuf; | |
647 struct v4l2_plane planes[2]; | |
648 while (mfc_input_buffer_queued_count_ > 0) { | |
649 DCHECK(mfc_input_streamon_); | |
650 memset(&dqbuf, 0, sizeof(dqbuf)); | |
651 memset(&planes, 0, sizeof(planes)); | |
652 dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; | |
653 dqbuf.memory = V4L2_MEMORY_MMAP; | |
654 dqbuf.m.planes = planes; | |
655 dqbuf.length = 2; | |
656 if (ioctl(mfc_fd_, VIDIOC_DQBUF, &dqbuf) != 0) { | |
657 if (errno == EAGAIN) { | |
658 // EAGAIN if we're just out of buffers to dequeue. | |
659 break; | |
660 } | |
661 DPLOG(ERROR) << "DequeueMfc(): ioctl() failed: VIDIOC_DQBUF"; | |
662 NOTIFY_ERROR(kPlatformFailureError); | |
663 return; | |
664 } | |
665 MfcInputRecord& input_record = mfc_input_buffer_map_[dqbuf.index]; | |
666 DCHECK(input_record.at_device); | |
667 input_record.at_device = false; | |
668 mfc_free_input_buffers_.push_back(dqbuf.index); | |
669 mfc_input_buffer_queued_count_--; | |
670 } | |
671 | |
672 // Dequeue completed MFC output (VIDEO_CAPTURE) buffers, and recycle to the | |
673 // free list. Notify the client that an output buffer is complete. | |
674 while (mfc_output_buffer_queued_count_ > 0) { | |
675 DCHECK(mfc_output_streamon_); | |
676 memset(&dqbuf, 0, sizeof(dqbuf)); | |
677 memset(planes, 0, sizeof(planes)); | |
678 dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | |
679 dqbuf.memory = V4L2_MEMORY_USERPTR; | |
680 dqbuf.m.planes = planes; | |
681 dqbuf.length = 1; | |
682 if (ioctl(mfc_fd_, VIDIOC_DQBUF, &dqbuf) != 0) { | |
683 if (errno == EAGAIN) { | |
684 // EAGAIN if we're just out of buffers to dequeue. | |
685 break; | |
686 } | |
687 DPLOG(ERROR) << "DequeueMfc(): ioctl() failed: VIDIOC_DQBUF"; | |
688 NOTIFY_ERROR(kPlatformFailureError); | |
689 return; | |
690 } | |
691 const bool key_frame = ((dqbuf.flags & V4L2_BUF_FLAG_KEYFRAME) != 0); | |
692 const size_t output_size = dqbuf.m.planes[0].bytesused; | |
693 MfcOutputRecord& output_record = mfc_output_buffer_map_[dqbuf.index]; | |
694 DCHECK(output_record.at_device); | |
695 DCHECK(output_record.buffer_ref.get()); | |
696 uint8* data = reinterpret_cast<uint8*>( | |
697 output_record.buffer_ref->shm->memory()); | |
698 if (stream_header_size_ == 0) { | |
699 // Assume that the first buffer dequeued is the stream header. | |
700 stream_header_size_ = output_size; | |
701 stream_header_.reset(new uint8[stream_header_size_]); | |
702 memcpy(stream_header_.get(), data, stream_header_size_); | |
703 } | |
704 if (key_frame && | |
705 output_buffer_byte_size_ - stream_header_size_ >= output_size) { | |
Pawel Osciak
2013/08/09 13:40:02
Ok, this is a huge nit, but this is the second pla
sheu
2013/08/12 20:23:59
To avoid overflow.
The DCHECK_LE business above i
| |
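      // (The size check above is written as a subtraction to avoid unsigned
      // overflow in "stream_header_size_ + output_size"; see the review
      // discussion.)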
706 // Insert stream header before every keyframe. | |
707 memmove(data + stream_header_size_, data, output_size); | |
708 memcpy(data, stream_header_.get(), stream_header_size_); | |
709 } | |
710 DVLOG(3) << "DequeueMfc(): returning " | |
711 "bitstream_buffer_id=" << output_record.buffer_ref->id | |
712 << ", key_frame=" << key_frame; | |
713 child_message_loop_proxy_->PostTask( | |
714 FROM_HERE, | |
715 base::Bind(&Client::BitstreamBufferReady, | |
716 client_, | |
717 output_record.buffer_ref->id, | |
718 dqbuf.m.planes[0].bytesused, | |
719 key_frame)); | |
720 output_record.at_device = false; | |
721 output_record.buffer_ref.reset(); | |
722 mfc_free_output_buffers_.push_back(dqbuf.index); | |
723 mfc_output_buffer_queued_count_--; | |
724 } | |
725 } | |
726 | |
727 bool ExynosVideoEncodeAccelerator::EnqueueGscInputRecord() { | |
728 DVLOG(3) << "EnqueueGscInputRecord()"; | |
729 DCHECK(!encoder_input_queue_.empty()); | |
730 DCHECK(!gsc_free_input_buffers_.empty()); | |
731 | |
732 // Enqueue a GSC input (VIDEO_OUTPUT) buffer for an input video frame | |
733 scoped_refptr<media::VideoFrame> frame = encoder_input_queue_.front(); | |
734 const int gsc_buffer = gsc_free_input_buffers_.back(); | |
735 GscInputRecord& input_record = gsc_input_buffer_map_[gsc_buffer]; | |
736 DCHECK(!input_record.at_device); | |
737 DCHECK(!input_record.frame.get()); | |
738 struct v4l2_buffer qbuf; | |
739 struct v4l2_plane qbuf_planes[3]; | |
740 memset(&qbuf, 0, sizeof(qbuf)); | |
741 memset(qbuf_planes, 0, sizeof(qbuf_planes)); | |
742 qbuf.index = gsc_buffer; | |
743 qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; | |
744 qbuf.memory = V4L2_MEMORY_USERPTR; | |
745 qbuf.m.planes = qbuf_planes; | |
746 switch (input_format_fourcc_) { | |
747 case V4L2_PIX_FMT_RGB32: { | |
748 qbuf.m.planes[0].bytesused = input_allocated_size_.GetArea() * 4; | |
749 qbuf.m.planes[0].length = input_allocated_size_.GetArea() * 4; | |
750 qbuf.m.planes[0].m.userptr = reinterpret_cast<unsigned long>( | |
751 frame->data(media::VideoFrame::kRGBPlane)); | |
752 qbuf.length = 1; | |
Pawel Osciak
2013/08/09 13:40:02
arraysize(qbuf_planes)
and everywhere else.
sheu
2013/08/12 20:23:59
RGB32 has 1 plane, YUV420M has 3.
| |
753 break; | |
754 } | |
755 case V4L2_PIX_FMT_YUV420M: { | |
756 qbuf.m.planes[0].bytesused = input_allocated_size_.GetArea(); | |
757 qbuf.m.planes[0].length = input_allocated_size_.GetArea(); | |
758 qbuf.m.planes[0].m.userptr = reinterpret_cast<unsigned long>( | |
759 frame->data(media::VideoFrame::kYPlane)); | |
760 qbuf.m.planes[1].bytesused = input_allocated_size_.GetArea() / 4; | |
761 qbuf.m.planes[1].length = input_allocated_size_.GetArea() / 4; | |
762 qbuf.m.planes[1].m.userptr = reinterpret_cast<unsigned long>( | |
763 frame->data(media::VideoFrame::kUPlane)); | |
764 qbuf.m.planes[2].bytesused = input_allocated_size_.GetArea() / 4; | |
765 qbuf.m.planes[2].length = input_allocated_size_.GetArea() / 4; | |
766 qbuf.m.planes[2].m.userptr = reinterpret_cast<unsigned long>( | |
767 frame->data(media::VideoFrame::kVPlane)); | |
768 qbuf.length = 3; | |
769 break; | |
770 } | |
771 default: | |
772 NOTREACHED(); | |
773 NOTIFY_ERROR(kIllegalStateError); | |
774 return false; | |
775 } | |
776 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_QBUF, &qbuf); | |
777 input_record.at_device = true; | |
778 input_record.frame = frame; | |
779 encoder_input_queue_.pop_front(); | |
780 gsc_free_input_buffers_.pop_back(); | |
781 gsc_input_buffer_queued_count_++; | |
782 return true; | |
783 } | |
784 | |
785 bool ExynosVideoEncodeAccelerator::EnqueueGscOutputRecord() { | |
786 DVLOG(3) << "EnqueueGscOutputRecord()"; | |
787 DCHECK(!gsc_free_output_buffers_.empty()); | |
788 DCHECK(!mfc_free_input_buffers_.empty()); | |
789 | |
790 // Enqueue a GSC output (VIDEO_CAPTURE) buffer. | |
791 const int gsc_buffer = gsc_free_output_buffers_.back(); | |
792 const int mfc_buffer = mfc_free_input_buffers_.back(); | |
793 GscOutputRecord& output_record = gsc_output_buffer_map_[gsc_buffer]; | |
794 MfcInputRecord& input_record = mfc_input_buffer_map_[mfc_buffer]; | |
795 DCHECK(!output_record.at_device); | |
796 DCHECK_EQ(output_record.mfc_input, -1); | |
797 DCHECK(!input_record.at_device); | |
798 struct v4l2_buffer qbuf; | |
799 struct v4l2_plane qbuf_planes[2]; | |
800 memset(&qbuf, 0, sizeof(qbuf)); | |
801 memset(qbuf_planes, 0, sizeof(qbuf_planes)); | |
802 qbuf.index = gsc_buffer; | |
803 qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | |
804 qbuf.memory = V4L2_MEMORY_DMABUF; | |
805 qbuf.m.planes = qbuf_planes; | |
806 qbuf.m.planes[0].m.fd = input_record.fd[0]; | |
807 qbuf.m.planes[1].m.fd = input_record.fd[1]; | |
808 qbuf.length = 2; | |
809 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_QBUF, &qbuf); | |
810 output_record.at_device = true; | |
811 output_record.mfc_input = mfc_buffer; | |
812 mfc_free_input_buffers_.pop_back(); | |
813 gsc_free_output_buffers_.pop_back(); | |
814 gsc_output_buffer_queued_count_++; | |
815 return true; | |
816 } | |
817 | |
818 bool ExynosVideoEncodeAccelerator::EnqueueMfcInputRecord() { | |
819 DVLOG(3) << "EnqueueMfcInputRecord()"; | |
820 DCHECK(!mfc_ready_input_buffers_.empty()); | |
821 | |
822 // Enqueue a MFC input (VIDEO_OUTPUT) buffer. | |
823 const int mfc_buffer = mfc_ready_input_buffers_.front(); | |
824 MfcInputRecord& input_record = mfc_input_buffer_map_[mfc_buffer]; | |
825 DCHECK(!input_record.at_device); | |
826 struct v4l2_buffer qbuf; | |
827 struct v4l2_plane qbuf_planes[2]; | |
828 memset(&qbuf, 0, sizeof(qbuf)); | |
829 memset(qbuf_planes, 0, sizeof(qbuf_planes)); | |
830 qbuf.index = mfc_buffer; | |
831 qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; | |
832 qbuf.memory = V4L2_MEMORY_MMAP; | |
833 qbuf.m.planes = qbuf_planes; | |
834 qbuf.length = 2; | |
835 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_QBUF, &qbuf); | |
836 input_record.at_device = true; | |
837 mfc_ready_input_buffers_.pop_front(); | |
838 mfc_input_buffer_queued_count_++; | |
839 return true; | |
840 } | |
841 | |
842 bool ExynosVideoEncodeAccelerator::EnqueueMfcOutputRecord() { | |
843 DVLOG(3) << "EnqueueMfcOutputRecord()"; | |
844 DCHECK(!mfc_free_output_buffers_.empty()); | |
845 DCHECK(!encoder_output_queue_.empty()); | |
846 | |
847 // Enqueue a MFC output (VIDEO_CAPTURE) buffer. | |
848 linked_ptr<BitstreamBufferRef> output_buffer = encoder_output_queue_.back(); | |
849 const int mfc_buffer = mfc_free_output_buffers_.back(); | |
850 MfcOutputRecord& output_record = mfc_output_buffer_map_[mfc_buffer]; | |
851 DCHECK(!output_record.at_device); | |
852 DCHECK(!output_record.buffer_ref.get()); | |
853 struct v4l2_buffer qbuf; | |
854 struct v4l2_plane qbuf_planes[1]; | |
855 memset(&qbuf, 0, sizeof(qbuf)); | |
856 memset(qbuf_planes, 0, sizeof(qbuf_planes)); | |
857 qbuf.index = mfc_buffer; | |
858 qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | |
859 qbuf.memory = V4L2_MEMORY_USERPTR; | |
860 qbuf.m.planes = qbuf_planes; | |
861 qbuf.m.planes[0].bytesused = output_buffer->size; | |
862 qbuf.m.planes[0].length = output_buffer->size; | |
863 qbuf.m.planes[0].m.userptr = | |
864 reinterpret_cast<unsigned long>(output_buffer->shm->memory()); | |
865 qbuf.length = 1; | |
866 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_QBUF, &qbuf); | |
867 output_record.at_device = true; | |
868 output_record.buffer_ref = output_buffer; | |
869 encoder_output_queue_.pop_back(); | |
870 mfc_free_output_buffers_.pop_back(); | |
871 mfc_output_buffer_queued_count_++; | |
872 return true; | |
873 } | |
874 | |
875 bool ExynosVideoEncodeAccelerator::StartDevicePoll() { | |
876 DVLOG(3) << "StartDevicePoll()"; | |
877 DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current()); | |
878 DCHECK(!device_poll_thread_.IsRunning()); | |
879 | |
880 // Start up the device poll thread and schedule its first DevicePollTask(). | |
881 if (!device_poll_thread_.Start()) { | |
882 DLOG(ERROR) << "StartDevicePoll(): Device thread failed to start"; | |
883 NOTIFY_ERROR(kPlatformFailureError); | |
884 return false; | |
885 } | |
886 device_poll_thread_.message_loop()->PostTask( | |
887 FROM_HERE, | |
888 base::Bind(&ExynosVideoEncodeAccelerator::DevicePollTask, | |
889 base::Unretained(this), | |
890 0)); | |
891 | |
892 return true; | |
893 } | |
894 | |
895 bool ExynosVideoEncodeAccelerator::StopDevicePoll() { | |
896 DVLOG(3) << "StopDevicePoll()"; | |
897 | |
898 // Signal the DevicePollTask() to stop, and stop the device poll thread. | |
899 if (!SetDevicePollInterrupt()) | |
900 return false; | |
901 device_poll_thread_.Stop(); | |
902 // Clear the interrupt now, to be sure. | |
903 if (!ClearDevicePollInterrupt()) | |
904 return false; | |
905 | |
906 // Stop streaming. | |
907 if (gsc_input_streamon_) { | |
908 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; | |
909 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_STREAMOFF, &type); | |
910 } | |
911 gsc_input_streamon_ = false; | |
912 if (gsc_output_streamon_) { | |
913 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | |
914 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_STREAMOFF, &type); | |
915 } | |
916 gsc_output_streamon_ = false; | |
917 if (mfc_input_streamon_) { | |
918 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; | |
919 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_STREAMOFF, &type); | |
920 } | |
921 mfc_input_streamon_ = false; | |
922 if (mfc_output_streamon_) { | |
923 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | |
924 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_STREAMOFF, &type); | |
925 } | |
926 mfc_output_streamon_ = false; | |
927 | |
928 // Reset all our accounting info. | |
929 encoder_input_queue_.clear(); | |
930 gsc_free_input_buffers_.clear(); | |
931 for (size_t i = 0; i < gsc_input_buffer_map_.size(); ++i) { | |
932 GscInputRecord& input_record = gsc_input_buffer_map_[i]; | |
933 input_record.at_device = false; | |
934 input_record.frame = NULL; | |
935 gsc_free_input_buffers_.push_back(i); | |
936 } | |
937 gsc_input_buffer_queued_count_ = 0; | |
938 gsc_free_output_buffers_.clear(); | |
939 for (size_t i = 0; i < gsc_output_buffer_map_.size(); ++i) { | |
940 GscOutputRecord& output_record = gsc_output_buffer_map_[i]; | |
941 output_record.at_device = false; | |
942 output_record.mfc_input = -1; | |
943 gsc_free_output_buffers_.push_back(i); | |
944 } | |
945 gsc_output_buffer_queued_count_ = 0; | |
946 mfc_ready_input_buffers_.clear(); | |
947 mfc_free_input_buffers_.clear(); | |
948 for (size_t i = 0; i < mfc_input_buffer_map_.size(); ++i) { | |
949 MfcInputRecord& input_record = mfc_input_buffer_map_[i]; | |
950 input_record.at_device = false; | |
951 mfc_free_input_buffers_.push_back(i); | |
952 } | |
953 mfc_input_buffer_queued_count_ = 0; | |
954 mfc_free_output_buffers_.clear(); | |
955 for (size_t i = 0; i < mfc_output_buffer_map_.size(); ++i) { | |
956 MfcOutputRecord& output_record = mfc_output_buffer_map_[i]; | |
957 output_record.at_device = false; | |
958 output_record.buffer_ref.reset(); | |
959 mfc_free_output_buffers_.push_back(i); | |
960 } | |
961 mfc_output_buffer_queued_count_ = 0; | |
962 encoder_output_queue_.clear(); | |
963 | |
964 DVLOG(3) << "StopDevicePoll(): device poll stopped"; | |
965 return true; | |
966 } | |
967 | |
968 bool ExynosVideoEncodeAccelerator::SetDevicePollInterrupt() { | |
969 DVLOG(3) << "SetDevicePollInterrupt()"; | |
970 | |
971 // We might get called here if we fail during initialization, in which case we | |
972 // don't have a file descriptor. | |
973 if (device_poll_interrupt_fd_ == -1) | |
974 return true; | |
975 | |
976 const uint64 buf = 1; | |
977 if (HANDLE_EINTR(write(device_poll_interrupt_fd_, &buf, sizeof(buf))) == -1) { | |
978 DPLOG(ERROR) << "SetDevicePollInterrupt(): write() failed"; | |
979 NOTIFY_ERROR(kPlatformFailureError); | |
980 return false; | |
981 } | |
982 return true; | |
983 } | |
984 | |
985 bool ExynosVideoEncodeAccelerator::ClearDevicePollInterrupt() { | |
986 DVLOG(3) << "ClearDevicePollInterrupt()"; | |
987 | |
988 // We might get called here if we fail during initialization, in which case we | |
989 // don't have a file descriptor. | |
990 if (device_poll_interrupt_fd_ == -1) | |
991 return true; | |
992 | |
993 uint64 buf; | |
994 if (HANDLE_EINTR(read(device_poll_interrupt_fd_, &buf, sizeof(buf))) == -1) { | |
995 if (errno == EAGAIN) { | |
996 // No interrupt flag set, and we're reading nonblocking. Not an error. | |
997 return true; | |
998 } else { | |
999 DPLOG(ERROR) << "ClearDevicePollInterrupt(): read() failed"; | |
1000 NOTIFY_ERROR(kPlatformFailureError); | |
1001 return false; | |
1002 } | |
1003 } | |
1004 return true; | |
1005 } | |
1006 | |
1007 void ExynosVideoEncodeAccelerator::DevicePollTask(unsigned int poll_fds) { | |
1008 DVLOG(3) << "DevicePollTask()"; | |
1009 DCHECK_EQ(device_poll_thread_.message_loop(), base::MessageLoop::current()); | |
1010 DCHECK_NE(device_poll_interrupt_fd_, -1); | |
1011 | |
1012 // This routine just polls the set of device fds, and schedules a | |
1013 // ServiceDeviceTask() on encoder_thread_ when processing needs to occur. | |
1014 // Other threads may notify this task to return early by writing to | |
1015 // device_poll_interrupt_fd_. | |
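  // (SetDevicePollInterrupt() writes an 8-byte counter value to the eventfd,
  // which makes pollfds[0] readable and wakes the poll() below;
  // ClearDevicePollInterrupt() reads the value back to reset it.)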
1016 struct pollfd pollfds[3]; | |
1017 nfds_t nfds; | |
1018 | |
1019 // Add device_poll_interrupt_fd_. | |
1020 pollfds[0].fd = device_poll_interrupt_fd_; | |
1021 pollfds[0].events = POLLIN | POLLERR; | |
1022 nfds = 1; | |
1023 | |
1024 // Add GSC fd, if we should poll on it. | |
1025 // GSC has to wait until both input and output buffers are queued. | |
1026 if (poll_fds & kPollGsc) { | |
1027 VLOG(3) << "DevicePollTask(): adding GSC to poll() set"; | |
Pawel Osciak
2013/08/09 13:40:02
s/VLOG/DVLOG/
sheu
2013/08/12 20:23:59
Done.
| |
1028 pollfds[nfds].fd = gsc_fd_; | |
1029 pollfds[nfds].events = POLLIN | POLLOUT | POLLERR; | |
1030 nfds++; | |
1031 } | |
1032 if (poll_fds & kPollMfc) { | |
1033 DVLOG(3) << "DevicePollTask(): adding MFC to poll() set"; | |
1034 pollfds[nfds].fd = mfc_fd_; | |
1035 pollfds[nfds].events = POLLIN | POLLOUT | POLLERR; | |
1036 nfds++; | |
1037 } | |
1038 | |
1039 // Poll it! | |
1040 if (HANDLE_EINTR(poll(pollfds, nfds, -1)) == -1) { | |
1041 DPLOG(ERROR) << "DevicePollTask(): poll() failed"; | |
1042 NOTIFY_ERROR(kPlatformFailureError); | |
1043 return; | |
1044 } | |
1045 | |
1046 // All processing should happen on ServiceDeviceTask(), since we shouldn't | |
1047 // touch encoder state from this thread. | |
1048 encoder_thread_.message_loop()->PostTask( | |
1049 FROM_HERE, | |
1050 base::Bind(&ExynosVideoEncodeAccelerator::ServiceDeviceTask, | |
1051 base::Unretained(this))); | |
1052 } | |
1053 | |
1054 void ExynosVideoEncodeAccelerator::NotifyError(Error error) { | |
1055 DVLOG(1) << "NotifyError(): error=" << error; | |
1056 | |
1057 if (!child_message_loop_proxy_->BelongsToCurrentThread()) { | |
1058 child_message_loop_proxy_->PostTask( | |
1059 FROM_HERE, | |
1060 base::Bind( | |
1061 &ExynosVideoEncodeAccelerator::NotifyError, weak_this_, error)); | |
1062 return; | |
1063 } | |
1064 | |
1065 if (client_) { | |
1066 client_->NotifyError(error); | |
1067 client_ptr_factory_.InvalidateWeakPtrs(); | |
1068 } | |
1069 } | |
1070 | |
1071 void ExynosVideoEncodeAccelerator::SetEncoderState(State state) { | |
1072 DVLOG(3) << "SetEncoderState(): state=" << state; | |
1073 | |
1074 // We can touch encoder_state_ only if this is the encoder thread or the | |
1075 // encoder thread isn't running. | |
1076 if (encoder_thread_.message_loop() != NULL && | |
1077 encoder_thread_.message_loop() != base::MessageLoop::current()) { | |
1078 encoder_thread_.message_loop()->PostTask( | |
1079 FROM_HERE, | |
1080 base::Bind(&ExynosVideoEncodeAccelerator::SetEncoderState, | |
1081 base::Unretained(this), | |
1082 state)); | |
1083 } else { | |
1084 encoder_state_ = state; | |
1085 } | |
1086 } | |
1087 | |
1088 void ExynosVideoEncodeAccelerator::RequestEncodingParametersChangeTask( | |
1089 int32 bitrate, | |
1090 uint32 framerate_num, | |
1091 uint32 framerate_denom) { | |
1092 DVLOG(3) << "RequestEncodingParametersChangeTask(): bitrate=" << bitrate | |
1093 << ", framerate=" << framerate_num << "/" << framerate_denom; | |
1094 | |
1095 struct v4l2_ext_control ctrls[1]; | |
1096 struct v4l2_ext_controls control; | |
1097 memset(&ctrls, 0, sizeof(ctrls)); | |
1098 memset(&control, 0, sizeof(control)); | |
1099 ctrls[0].id = V4L2_CID_MPEG_VIDEO_BITRATE; | |
1100 ctrls[0].value = bitrate; | |
1101 control.ctrl_class = V4L2_CTRL_CLASS_MPEG; | |
1102 control.count = 1; | |
Pawel Osciak
2013/08/09 13:40:02
arraysize etc.
sheu
2013/08/12 20:23:59
Done.
| |
1103 control.controls = ctrls; | |
1104 IOCTL_OR_ERROR_RETURN(mfc_fd_, VIDIOC_S_EXT_CTRLS, &control); | |
1105 | |
1106 struct v4l2_streamparm parms; | |
1107 memset(&parms, 0, sizeof(parms)); | |
1108 parms.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; | |
1109 // Note that we are provided "frames per second" but V4L2 expects "time per | |
1110 // frame"; hence we provide the reciprocal of the framerate here. | |
1111 parms.parm.output.timeperframe.numerator = framerate_denom; | |
1112 parms.parm.output.timeperframe.denominator = framerate_num; | |
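  // For example, framerate_num=30 and framerate_denom=1 (30 fps) give a
  // time-per-frame of 1/30 s.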
1113 IOCTL_OR_ERROR_RETURN(mfc_fd_, VIDIOC_S_PARM, &parms); | |
1114 } | |
1115 | |
1116 bool ExynosVideoEncodeAccelerator::CreateGscInputBuffers() { | |
1117 DVLOG(3) << "CreateGscInputBuffers()"; | |
1118 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); | |
1119 DCHECK_EQ(encoder_state_, kUninitialized); | |
1120 DCHECK(!gsc_input_streamon_); | |
1121 | |
1122 struct v4l2_control control; | |
Pawel Osciak
2013/08/09 13:40:02
You could in fact use S_EXT_CTRLS to save a few io
| |
1123 memset(&control, 0, sizeof(control)); | |
1124 control.id = V4L2_CID_ROTATE; | |
1125 control.value = 0; | |
1126 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CTRL, &control); | |
1127 | |
1128 // HFLIP actually seems to control vertical mirroring for GSC, and vice-versa. | |
1129 memset(&control, 0, sizeof(control)); | |
1130 control.id = V4L2_CID_HFLIP; | |
1131 control.value = 0; | |
1132 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CTRL, &control); | |
1133 | |
1134 memset(&control, 0, sizeof(control)); | |
1135 control.id = V4L2_CID_VFLIP; | |
1136 control.value = 0; | |
1137 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CTRL, &control); | |
1138 | |
1139 memset(&control, 0, sizeof(control)); | |
1140 control.id = V4L2_CID_GLOBAL_ALPHA; | |
1141 control.value = 255; | |
1142 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CTRL, &control); | |
1143 | |
1144 struct v4l2_format format; | |
1145 memset(&format, 0, sizeof(format)); | |
1146 format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; | |
1147 format.fmt.pix_mp.width = input_allocated_size_.width(); | |
1148 format.fmt.pix_mp.height = input_allocated_size_.height(); | |
1149 format.fmt.pix_mp.pixelformat = input_format_fourcc_; | |
1150 switch (input_format_fourcc_) { | |
1151 case V4L2_PIX_FMT_RGB32: | |
1152 format.fmt.pix_mp.plane_fmt[0].sizeimage = | |
1153 input_allocated_size_.GetArea() * 4; | |
1154 format.fmt.pix_mp.plane_fmt[0].bytesperline = | |
1155 input_allocated_size_.width() * 4; | |
1156 format.fmt.pix_mp.num_planes = 1; | |
1157 break; | |
1158 case V4L2_PIX_FMT_YUV420M: | |
1159 format.fmt.pix_mp.plane_fmt[0].sizeimage = | |
1160 input_allocated_size_.GetArea(); | |
1161 format.fmt.pix_mp.plane_fmt[0].bytesperline = | |
1162 input_allocated_size_.width(); | |
1163 format.fmt.pix_mp.plane_fmt[1].sizeimage = | |
1164 input_allocated_size_.GetArea() / 4; | |
1165 format.fmt.pix_mp.plane_fmt[1].bytesperline = | |
1166 input_allocated_size_.width() / 2; | |
1167 format.fmt.pix_mp.plane_fmt[2].sizeimage = | |
1168 input_allocated_size_.GetArea() / 4; | |
1169 format.fmt.pix_mp.plane_fmt[2].bytesperline = | |
1170 input_allocated_size_.width() / 2; | |
1171 format.fmt.pix_mp.num_planes = 3; | |
1172 break; | |
1173 default: | |
1174 NOTREACHED(); | |
1175 NOTIFY_ERROR(kIllegalStateError); | |
1176 return false; | |
1177 } | |
1178 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_FMT, &format); | |
1179 | |
1180 struct v4l2_crop crop; | |
1181 memset(&crop, 0, sizeof(crop)); | |
1182 crop.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; | |
1183 crop.c.left = 0; | |
1184 crop.c.top = 0; | |
1185 crop.c.width = input_visible_size_.width(); | |
1186 crop.c.height = input_visible_size_.height(); | |
1187 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CROP, &crop); | |
1188 | |
1189 struct v4l2_requestbuffers reqbufs; | |
1190 memset(&reqbufs, 0, sizeof(reqbufs)); | |
1191 reqbufs.count = kGscInputBufferCount; | |
1192 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; | |
1193 reqbufs.memory = V4L2_MEMORY_USERPTR; | |
1194 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_REQBUFS, &reqbufs); | |
1195 | |
1196 DCHECK(gsc_input_buffer_map_.empty()); | |
1197 gsc_input_buffer_map_.resize(reqbufs.count); | |
1198 for (size_t i = 0; i < gsc_input_buffer_map_.size(); ++i) | |
1199 gsc_free_input_buffers_.push_back(i); | |
1200 | |
1201 return true; | |
1202 } | |
1203 | |
1204 bool ExynosVideoEncodeAccelerator::CreateGscOutputBuffers() { | |
1205 DVLOG(3) << "CreateGscOutputBuffers()"; | |
1206 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); | |
1207 DCHECK_EQ(encoder_state_, kUninitialized); | |
1208 DCHECK(!gsc_output_streamon_); | |
1209 | |
1210 struct v4l2_format format; | |
1211 memset(&format, 0, sizeof(format)); | |
1212 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | |
1213 format.fmt.pix_mp.width = converted_allocated_size_.width(); | |
1214 format.fmt.pix_mp.height = converted_allocated_size_.height(); | |
1215 format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_NV12M; | |
1216 format.fmt.pix_mp.plane_fmt[0].sizeimage = | |
1217 converted_allocated_size_.GetArea(); | |
1218 format.fmt.pix_mp.plane_fmt[1].sizeimage = | |
1219 converted_allocated_size_.GetArea() / 2; | |
1220 format.fmt.pix_mp.plane_fmt[0].bytesperline = | |
1221 converted_allocated_size_.width(); | |
1222 format.fmt.pix_mp.plane_fmt[1].bytesperline = | |
1223 converted_allocated_size_.width(); | |
1224 format.fmt.pix_mp.num_planes = 2; | |
1225 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_FMT, &format); | |
1226 | |
1227 struct v4l2_crop crop; | |
1228 memset(&crop, 0, sizeof(crop)); | |
1229 crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | |
1230 crop.c.left = 0; | |
1231 crop.c.top = 0; | |
1232 crop.c.width = converted_visible_size_.width(); | |
1233 crop.c.height = converted_visible_size_.height(); | |
1234 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CROP, &crop); | |
1235 | |
1236 struct v4l2_requestbuffers reqbufs; | |
1237 memset(&reqbufs, 0, sizeof(reqbufs)); | |
1238 reqbufs.count = kGscOutputBufferCount; | |
1239 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | |
1240 reqbufs.memory = V4L2_MEMORY_DMABUF; | |
1241 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_REQBUFS, &reqbufs); | |
1242 | |
1243 DCHECK(gsc_output_buffer_map_.empty()); | |
1244 gsc_output_buffer_map_.resize(reqbufs.count); | |
1245 for (size_t i = 0; i < gsc_output_buffer_map_.size(); ++i) | |
1246 gsc_free_output_buffers_.push_back(i); | |
1247 return true; | |
1248 } | |
1249 | |
1250 bool ExynosVideoEncodeAccelerator::SetMfcFormats() { | |
1251 DVLOG(3) << "SetMfcFormats()"; | |
1252 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); | |
1253 DCHECK(!mfc_input_streamon_); | |
1254 DCHECK(!mfc_output_streamon_); | |
1255 | |
1256 // VIDIOC_S_FMT on OUTPUT queue. | |
1257 struct v4l2_format format; | |
1258 memset(&format, 0, sizeof(format)); | |
1259 format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; | |
1260 format.fmt.pix_mp.width = input_allocated_size_.width(); | |
1261 format.fmt.pix_mp.height = input_allocated_size_.height(); | |
1262 format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_NV12M; | |
1263 format.fmt.pix_mp.num_planes = 2; | |
1264 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_S_FMT, &format); | |
1265 // We read directly from GSC, so we rely on the HW not changing our set | |
1266 // size/stride. | |
1267 DCHECK_EQ(format.fmt.pix_mp.plane_fmt[0].sizeimage, | |
1268 static_cast<__u32>(input_allocated_size_.GetArea())); | |
1269 DCHECK_EQ(format.fmt.pix_mp.plane_fmt[0].bytesperline, | |
1270 static_cast<__u32>(input_allocated_size_.width())); | |
1271 DCHECK_EQ(format.fmt.pix_mp.plane_fmt[1].sizeimage, | |
1272 static_cast<__u32>(input_allocated_size_.GetArea() / 2)); | |
1273 DCHECK_EQ(format.fmt.pix_mp.plane_fmt[1].bytesperline, | |
1274 static_cast<__u32>(input_allocated_size_.width())); | |
1275 | |
1276 struct v4l2_crop crop; | |
1277 memset(&crop, 0, sizeof(crop)); | |
1278 crop.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; | |
1279 crop.c.left = 0; | |
1280 crop.c.top = 0; | |
1281 crop.c.width = input_visible_size_.width(); | |
1282 crop.c.height = input_visible_size_.height(); | |
1283 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_S_CROP, &crop); | |
1284 | |
1285 // VIDIOC_S_FMT on CAPTURE queue. | |
1286 output_buffer_byte_size_ = kMfcOutputBufferSize; | |
1287 memset(&format, 0, sizeof(format)); | |
1288 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | |
1289 format.fmt.pix_mp.width = output_visible_size_.width(); | |
1290 format.fmt.pix_mp.height = output_visible_size_.height(); | |
1291 format.fmt.pix_mp.pixelformat = output_format_fourcc_; | |
1292 format.fmt.pix_mp.plane_fmt[0].sizeimage = output_buffer_byte_size_; | |
1293 format.fmt.pix_mp.num_planes = 1; | |
1294 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_S_FMT, &format); | |
1295 | |
1296 struct v4l2_ext_control ctrls[6]; | |
1297 struct v4l2_ext_controls control; | |
1298 memset(&ctrls, 0, sizeof(ctrls)); | |
1299 memset(&control, 0, sizeof(control)); | |
1300 ctrls[0].id = V4L2_CID_MPEG_VIDEO_B_FRAMES; | |
Pawel Osciak
2013/08/09 13:40:02
Would
ctrls[] = { { .id = foo, .value = bar }, {
sheu
2013/08/12 20:23:59
That's a GCC extension, so I'd avoid it.
| |
1301 ctrls[0].value = 0; | |
1302 ctrls[1].id = V4L2_CID_MPEG_VIDEO_FRAME_RC_ENABLE; | |
1303 ctrls[1].value = 1; | |
1304 ctrls[2].id = V4L2_CID_MPEG_MFC51_VIDEO_RC_REACTION_COEFF; | |
1305 ctrls[2].value = 10; | |
1306 ctrls[3].id = V4L2_CID_MPEG_VIDEO_BITRATE; | |
1307 ctrls[3].value = 20480000; | |
1308 ctrls[4].id = V4L2_CID_MPEG_VIDEO_HEADER_MODE; | |
1309 ctrls[4].value = V4L2_MPEG_VIDEO_HEADER_MODE_SEPARATE; | |
1310 ctrls[5].id = V4L2_CID_MPEG_VIDEO_H264_MAX_QP; | |
1311 ctrls[5].value = 51; | |
Pawel Osciak
2013/08/09 13:40:02
constants?
sheu
2013/08/12 20:23:59
Normally yes, but I think it would in this case ma
| |
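  // These values disable B-frames, enable frame-level rate control, request
  // separate SPS/PPS headers (cached in DequeueMfc() and prepended to each
  // keyframe), and cap the H.264 QP at 51, the maximum the spec allows. The
  // bitrate set here is effectively a placeholder:
  // RequestEncodingParametersChangeTask() overrides it with the client's
  // initial_bitrate during Initialize().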
1312 control.ctrl_class = V4L2_CTRL_CLASS_MPEG; | |
1313 control.count = arraysize(ctrls); | |
1314 control.controls = ctrls; | |
1315 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_S_EXT_CTRLS, &control); | |
1316 | |
1317 return true; | |
1318 } | |
1319 | |
1320 bool ExynosVideoEncodeAccelerator::CreateMfcInputBuffers() { | |
1321 DVLOG(3) << "CreateMfcInputBuffers()"; | |
1322 // This function runs on encoder_thread_ after output buffers have been | |
1323 // provided by the client. | |
1324 DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current()); | |
1325 DCHECK(!mfc_input_streamon_); | |
1326 | |
1327 struct v4l2_requestbuffers reqbufs; | |
1328 memset(&reqbufs, 0, sizeof(reqbufs)); | |
1329 reqbufs.count = 1; // Driver will allocate the appropriate number of buffers. | |
1330 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; | |
1331 reqbufs.memory = V4L2_MEMORY_MMAP; | |
1332 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_REQBUFS, &reqbufs); | |
1333 | |
1334 DCHECK(mfc_input_buffer_map_.empty()); | |
1335 mfc_input_buffer_map_.resize(reqbufs.count); | |
1336 for (size_t i = 0; i < mfc_input_buffer_map_.size(); ++i) { | |
1337 MfcInputRecord& input_record = mfc_input_buffer_map_[i]; | |
1338 for (int j = 0; j < 2; ++j) { | |
1339 // Export the DMABUF fd so GSC can write to it. | |
1340 struct v4l2_exportbuffer expbuf; | |
1341 memset(&expbuf, 0, sizeof(expbuf)); | |
1342 expbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; | |
1343 expbuf.index = i; | |
1344 expbuf.plane = j; | |
1345 expbuf.flags = O_CLOEXEC; | |
1346 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_EXPBUF, &expbuf); | |
1347 input_record.fd[j] = expbuf.fd; | |
1348 } | |
1349 mfc_free_input_buffers_.push_back(i); | |
1350 } | |
1351 | |
1352 return true; | |
1353 } | |
1354 | |
1355 bool ExynosVideoEncodeAccelerator::CreateMfcOutputBuffers() { | |
1356 DVLOG(3) << "CreateMfcOutputBuffers()"; | |
1357 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); | |
1358 DCHECK(!mfc_output_streamon_); | |
1359 | |
1360 struct v4l2_requestbuffers reqbufs; | |
1361 memset(&reqbufs, 0, sizeof(reqbufs)); | |
1362 reqbufs.count = kMfcOutputBufferCount; | |
1363 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | |
1364 reqbufs.memory = V4L2_MEMORY_USERPTR; | |
1365 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_REQBUFS, &reqbufs); | |
1366 | |
1367 DCHECK(mfc_output_buffer_map_.empty()); | |
1368 mfc_output_buffer_map_.resize(reqbufs.count); | |
1369 for (size_t i = 0; i < mfc_output_buffer_map_.size(); ++i) | |
1370 mfc_free_output_buffers_.push_back(i); | |
1371 | |
1372 return true; | |
1373 } | |
1374 | |
1375 void ExynosVideoEncodeAccelerator::DestroyGscInputBuffers() { | |
1376 DVLOG(3) << "DestroyGscInputBuffers()"; | |
1377 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); | |
1378 DCHECK(!gsc_input_streamon_); | |
1379 | |
1380 struct v4l2_requestbuffers reqbufs; | |
1381 memset(&reqbufs, 0, sizeof(reqbufs)); | |
1382 reqbufs.count = 0; | |
1383 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; | |
1384 reqbufs.memory = V4L2_MEMORY_USERPTR; | |
1385 if (ioctl(gsc_fd_, VIDIOC_REQBUFS, &reqbufs) != 0) | |
1386 DPLOG(ERROR) << "DestroyGscInputBuffers(): ioctl() failed: VIDIOC_REQBUFS"; | |
1387 | |
1388 gsc_input_buffer_map_.clear(); | |
1389 gsc_free_input_buffers_.clear(); | |
1390 } | |
1391 | |
1392 void ExynosVideoEncodeAccelerator::DestroyGscOutputBuffers() { | |
1393 DVLOG(3) << "DestroyGscOutputBuffers()"; | |
1394 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); | |
1395 DCHECK(!gsc_output_streamon_); | |
1396 | |
1397 struct v4l2_requestbuffers reqbufs; | |
1398 memset(&reqbufs, 0, sizeof(reqbufs)); | |
1399 reqbufs.count = 0; | |
1400 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | |
1401 reqbufs.memory = V4L2_MEMORY_DMABUF; | |
1402 if (ioctl(gsc_fd_, VIDIOC_REQBUFS, &reqbufs) != 0) | |
1403 DPLOG(ERROR) << "DestroyGscOutputBuffers(): ioctl() failed: VIDIOC_REQBUFS"; | |
1404 | |
1405 gsc_output_buffer_map_.clear(); | |
1406 gsc_free_output_buffers_.clear(); | |
1407 } | |
1408 | |
1409 void ExynosVideoEncodeAccelerator::DestroyMfcInputBuffers() { | |
1410 DVLOG(3) << "DestroyMfcInputBuffers()"; | |
1411 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); | |
1412 DCHECK(!mfc_input_streamon_); | |
1413 | |
1414 struct v4l2_requestbuffers reqbufs; | |
1415 memset(&reqbufs, 0, sizeof(reqbufs)); | |
1416 reqbufs.count = 0; | |
1417 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; | |
1418 reqbufs.memory = V4L2_MEMORY_MMAP; | |
1419 if (ioctl(mfc_fd_, VIDIOC_REQBUFS, &reqbufs) != 0) | |
1420 DPLOG(ERROR) << "DestroyMfcInputBuffers(): ioctl() failed: VIDIOC_REQBUFS"; | |
1421 | |
1422 mfc_input_buffer_map_.clear(); | |
1423 mfc_free_input_buffers_.clear(); | |
1424 } | |
1425 | |
1426 void ExynosVideoEncodeAccelerator::DestroyMfcOutputBuffers() { | |
1427 DVLOG(3) << "DestroyMfcOutputBuffers()"; | |
1428 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); | |
1429 DCHECK(!mfc_output_streamon_); | |
1430 | |
1431 struct v4l2_requestbuffers reqbufs; | |
1432 memset(&reqbufs, 0, sizeof(reqbufs)); | |
1433 reqbufs.count = 0; | |
1434 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | |
1435 reqbufs.memory = V4L2_MEMORY_USERPTR; | |
1436 if (ioctl(mfc_fd_, VIDIOC_REQBUFS, &reqbufs) != 0) | |
1437 DPLOG(ERROR) << "DestroyMfcOutputBuffers(): ioctl() failed: VIDIOC_REQBUFS"; | |
1438 | |
1439 mfc_output_buffer_map_.clear(); | |
1440 mfc_free_output_buffers_.clear(); | |
1441 } | |
1442 | |
1443 } // namespace content | |