| 1 // Copyright (c) 2013 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. |
| 4 |
| 5 #include "content/common/gpu/media/exynos_video_encode_accelerator.h" |
| 6 |
| 7 #include <fcntl.h> |
| 8 #include <linux/videodev2.h> |
| 9 #include <poll.h> |
| 10 #include <sys/eventfd.h> |
| 11 #include <sys/ioctl.h> |
| 12 |
| 13 #include "base/callback.h" |
| 14 #include "base/debug/trace_event.h" |
| 15 #include "base/message_loop/message_loop_proxy.h" |
| 16 #include "base/posix/eintr_wrapper.h" |
| 17 #include "media/base/bitstream_buffer.h" |
| 18 |
| 19 #define NOTIFY_ERROR(x) \ |
| 20 do { \ |
| 21 SetEncoderState(kError); \ |
| 22 DLOG(ERROR) << "calling NotifyError(): " << x; \ |
| 23 NotifyError(x); \ |
| 24 } while (0) |
| 25 |
| 26 #define IOCTL_OR_ERROR_RETURN(fd, type, arg) \ |
| 27 do { \ |
| 28     if (HANDLE_EINTR(ioctl(fd, type, arg)) != 0) { \
| 29 DPLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \ |
| 30 NOTIFY_ERROR(kPlatformFailureError); \ |
| 31 return; \ |
| 32 } \ |
| 33 } while (0) |
| 34 |
| 35 #define IOCTL_OR_ERROR_RETURN_FALSE(fd, type, arg) \ |
| 36 do { \ |
| 37     if (HANDLE_EINTR(ioctl(fd, type, arg)) != 0) { \
| 38 DPLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \ |
| 39 NOTIFY_ERROR(kPlatformFailureError); \ |
| 40 return false; \ |
| 41 } \ |
| 42 } while (0) |
| 43 |
| 44 namespace content { |
| 45 |
| 46 namespace { |
| 47 |
| 48 const char kExynosGscDevice[] = "/dev/gsc1"; |
| 49 const char kExynosMfcDevice[] = "/dev/mfc-enc"; |
| 50 |
| 51 // File descriptors we need to poll, one-bit flag for each. |
| 52 enum PollFds { |
| 53 kPollGsc = (1 << 0), |
| 54 kPollMfc = (1 << 1), |
| 55 }; |
| 56 |
| 57 } // anonymous namespace |
| 58 |
| 59 struct ExynosVideoEncodeAccelerator::BitstreamBufferRef { |
| 60 BitstreamBufferRef(int32 id, scoped_ptr<base::SharedMemory> shm, size_t size) |
| 61 : id(id), shm(shm.Pass()), size(size) {} |
| 62 const int32 id; |
| 63 const scoped_ptr<base::SharedMemory> shm; |
| 64 const size_t size; |
| 65 }; |
| 66 |
| 67 |
| 68 ExynosVideoEncodeAccelerator::GscInputRecord::GscInputRecord() |
| 69 : at_device(false) {} |
| 70 |
| 71 ExynosVideoEncodeAccelerator::GscOutputRecord::GscOutputRecord() |
| 72 : at_device(false), mfc_input(-1) {} |
| 73 |
| 74 ExynosVideoEncodeAccelerator::MfcInputRecord::MfcInputRecord() |
| 75 : at_device(false) { |
| 76 fd[0] = fd[1] = -1; |
| 77 } |
| 78 |
| 79 ExynosVideoEncodeAccelerator::MfcOutputRecord::MfcOutputRecord() |
| 80 : at_device(false) {} |
| 81 |
| 82 ExynosVideoEncodeAccelerator::ExynosVideoEncodeAccelerator( |
| 83 media::VideoEncodeAccelerator::Client* client) |
| 84 : child_message_loop_proxy_(base::MessageLoopProxy::current()), |
| 85 weak_this_ptr_factory_(this), |
| 86 weak_this_(weak_this_ptr_factory_.GetWeakPtr()), |
| 87 client_ptr_factory_(client), |
| 88 client_(client_ptr_factory_.GetWeakPtr()), |
| 89 encoder_thread_("ExynosEncoderThread"), |
| 90 encoder_state_(kUninitialized), |
| 91 output_buffer_byte_size_(0), |
| 92 stream_header_size_(0), |
| 93 input_format_fourcc_(0), |
| 94 output_format_fourcc_(0), |
| 95 gsc_fd_(-1), |
| 96 gsc_input_streamon_(false), |
| 97 gsc_input_buffer_queued_count_(0), |
| 98 gsc_output_streamon_(false), |
| 99 gsc_output_buffer_queued_count_(0), |
| 100 mfc_fd_(-1), |
| 101 mfc_input_streamon_(false), |
| 102 mfc_input_buffer_queued_count_(0), |
| 103 mfc_output_streamon_(false), |
| 104 mfc_output_buffer_queued_count_(0), |
| 105 device_poll_thread_("ExynosEncoderDevicePollThread"), |
| 106 device_poll_interrupt_fd_(-1) { |
| 107 DCHECK(client_); |
| 108 } |
| 109 |
| 110 ExynosVideoEncodeAccelerator::~ExynosVideoEncodeAccelerator() { |
| 111 DCHECK(!encoder_thread_.IsRunning()); |
| 112 DCHECK(!device_poll_thread_.IsRunning()); |
| 113 |
| 114 if (device_poll_interrupt_fd_ != -1) { |
| 115 HANDLE_EINTR(close(device_poll_interrupt_fd_)); |
| 116 device_poll_interrupt_fd_ = -1; |
| 117 } |
| 118 if (mfc_fd_ != -1) { |
| 119 DestroyMfcInputBuffers(); |
| 120 DestroyMfcOutputBuffers(); |
| 121 HANDLE_EINTR(close(mfc_fd_)); |
| 122 mfc_fd_ = -1; |
| 123 } |
| 124 if (gsc_fd_ != -1) { |
| 125 DestroyGscInputBuffers(); |
| 126 DestroyGscOutputBuffers(); |
| 127 HANDLE_EINTR(close(gsc_fd_)); |
| 128 gsc_fd_ = -1; |
| 129 } |
| 130 } |
| 131 |
| 132 void ExynosVideoEncodeAccelerator::Initialize( |
| 133 media::VideoFrame::Format input_format, |
| 134 const gfx::Size& input_visible_size, |
| 135 media::VideoCodecProfile output_profile, |
| 136 uint32 initial_bitrate) { |
| 137 DVLOG(3) << "Initialize(): input_format=" << input_format |
| 138 << ", input_visible_size=" << input_visible_size.ToString() |
| 139 << ", output_profile=" << output_profile |
| 140 << ", initial_bitrate=" << initial_bitrate; |
| 141 |
| 142 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
| 143 DCHECK_EQ(encoder_state_, kUninitialized); |
| 144 |
| 145 input_visible_size_ = input_visible_size; |
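|        // Round the allocated sizes up to multiples of 16 and the converted
|        // visible size up to even dimensions; e.g. a 641x361 input is
|        // allocated as 656x368 and converted at a visible 642x362.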
| 146 input_allocated_size_.SetSize((input_visible_size_.width() + 0xF) & ~0xF, |
| 147 (input_visible_size_.height() + 0xF) & ~0xF); |
| 148 converted_visible_size_.SetSize((input_visible_size_.width() + 0x1) & ~0x1, |
| 149 (input_visible_size_.height() + 0x1) & ~0x1); |
| 150 converted_allocated_size_.SetSize( |
| 151 (converted_visible_size_.width() + 0xF) & ~0xF, |
| 152 (converted_visible_size_.height() + 0xF) & ~0xF); |
| 153 output_visible_size_ = converted_visible_size_; |
| 154 |
| 155 switch (input_format) { |
| 156 case media::VideoFrame::RGB32: |
| 157 input_format_fourcc_ = V4L2_PIX_FMT_RGB32; |
| 158 break; |
| 159 case media::VideoFrame::I420: |
| 160 input_format_fourcc_ = V4L2_PIX_FMT_YUV420M; |
| 161 break; |
| 162 default: |
| 163 NOTIFY_ERROR(kInvalidArgumentError); |
| 164 return; |
| 165 } |
| 166 |
| 167 if (output_profile >= media::H264PROFILE_MIN && |
| 168 output_profile <= media::H264PROFILE_MAX) { |
| 169 output_format_fourcc_ = V4L2_PIX_FMT_H264; |
| 170 } else { |
| 171 NOTIFY_ERROR(kInvalidArgumentError); |
| 172 return; |
| 173 } |
| 174 |
| 175 // Open the color conversion device. |
| 176 DVLOG(2) << "Initialize(): opening GSC device: " << kExynosGscDevice; |
| 177 gsc_fd_ = |
| 178 HANDLE_EINTR(open(kExynosGscDevice, O_RDWR | O_NONBLOCK | O_CLOEXEC)); |
| 179 if (gsc_fd_ == -1) { |
| 180 DPLOG(ERROR) << "Initialize(): could not open GSC device: " |
| 181 << kExynosGscDevice; |
| 182 NOTIFY_ERROR(kPlatformFailureError); |
| 183 return; |
| 184 } |
| 185 |
| 186 // Capabilities check. |
| 187 struct v4l2_capability caps; |
| 188 memset(&caps, 0, sizeof(caps)); |
| 189 const __u32 kCapsRequired = V4L2_CAP_VIDEO_CAPTURE_MPLANE | |
| 190 V4L2_CAP_VIDEO_OUTPUT_MPLANE | V4L2_CAP_STREAMING; |
| 191 IOCTL_OR_ERROR_RETURN(gsc_fd_, VIDIOC_QUERYCAP, &caps); |
| 192 if ((caps.capabilities & kCapsRequired) != kCapsRequired) { |
| 193 DLOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP: " |
| 194 "caps check failed: 0x" << std::hex << caps.capabilities; |
| 195 NOTIFY_ERROR(kPlatformFailureError); |
| 196 return; |
| 197 } |
| 198 |
| 199 // Open the video encoder device. |
| 200 DVLOG(2) << "Initialize(): opening MFC device: " << kExynosMfcDevice; |
| 201 mfc_fd_ = |
| 202 HANDLE_EINTR(open(kExynosMfcDevice, O_RDWR | O_NONBLOCK | O_CLOEXEC)); |
| 203 if (mfc_fd_ == -1) { |
| 204 DPLOG(ERROR) << "Initialize(): could not open MFC device: " |
| 205 << kExynosMfcDevice; |
| 206 NOTIFY_ERROR(kPlatformFailureError); |
| 207 return; |
| 208 } |
| 209 |
| 210 memset(&caps, 0, sizeof(caps)); |
| 211 IOCTL_OR_ERROR_RETURN(mfc_fd_, VIDIOC_QUERYCAP, &caps); |
| 212 if ((caps.capabilities & kCapsRequired) != kCapsRequired) { |
| 213 DLOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP: " |
| 214 "caps check failed: 0x" << std::hex << caps.capabilities; |
| 215 NOTIFY_ERROR(kPlatformFailureError); |
| 216 return; |
| 217 } |
| 218 |
| 219 // Create the interrupt fd. |
| 220 DCHECK_EQ(device_poll_interrupt_fd_, -1); |
| 221 device_poll_interrupt_fd_ = eventfd(0, EFD_NONBLOCK | EFD_CLOEXEC); |
| 222 if (device_poll_interrupt_fd_ == -1) { |
| 223 DPLOG(ERROR) << "Initialize(): eventfd() failed"; |
| 224 NOTIFY_ERROR(kPlatformFailureError); |
| 225 return; |
| 226 } |
| 227 |
| 228 DVLOG(3) |
| 229 << "Initialize(): input_visible_size_=" << input_visible_size_.ToString() |
| 230 << ", input_allocated_size_=" << input_allocated_size_.ToString() |
| 231 << ", converted_visible_size_=" << converted_visible_size_.ToString() |
| 232 << ", converted_allocated_size_=" << converted_allocated_size_.ToString() |
| 233 << ", output_visible_size_=" << output_visible_size_.ToString(); |
| 234 |
| 235 if (!CreateGscInputBuffers() || !CreateGscOutputBuffers()) |
| 236 return; |
| 237 |
| 238 // MFC setup for encoding is rather particular in ordering: |
| 239 // |
| 240 // 1. Format (VIDIOC_S_FMT) set first on OUTPUT and CAPTURE queues. |
| 241 // 2. VIDIOC_REQBUFS, VIDIOC_QBUF, and VIDIOC_STREAMON on CAPTURE queue. |
| 242 // 3. VIDIOC_REQBUFS (and later VIDIOC_QBUF and VIDIOC_STREAMON) on OUTPUT |
| 243 // queue. |
| 244 // |
| 245 // Unfortunately, we cannot do (3) in Initialize() here since we have no |
| 246 // buffers to QBUF in step (2) until the client has provided output buffers |
| 247 // through UseOutputBitstreamBuffer(). So, we just do (1), and the |
| 248 // VIDIOC_REQBUFS part of (2) here. The rest is done the first time we get |
| 249 // a UseOutputBitstreamBuffer() callback. |
| 250 |
| 251 if (!SetMfcFormats()) |
| 252 return; |
| 253 |
| 254 RequestEncodingParametersChangeTask(initial_bitrate, kInitialFramerate); |
| 255 |
| 256 // VIDIOC_REQBUFS on CAPTURE queue. |
| 257 if (!CreateMfcOutputBuffers()) |
| 258 return; |
| 259 |
| 260 |
| 261 if (!encoder_thread_.Start()) { |
| 262 DLOG(ERROR) << "Initialize(): encoder thread failed to start"; |
| 263 NOTIFY_ERROR(kPlatformFailureError); |
| 264 return; |
| 265 } |
| 266 |
| 267 SetEncoderState(kInitialized); |
| 268 |
| 269 child_message_loop_proxy_->PostTask( |
| 270 FROM_HERE, base::Bind(&Client::NotifyInitializeDone, client_)); |
| 271 |
| 272 child_message_loop_proxy_->PostTask( |
| 273 FROM_HERE, |
| 274 base::Bind(&Client::RequireBitstreamBuffers, |
| 275 client_, |
| 276 gsc_input_buffer_map_.size(), |
| 277 input_allocated_size_, |
| 278 output_buffer_byte_size_)); |
| 279 } |
| 280 |
| 281 void ExynosVideoEncodeAccelerator::Encode( |
| 282 const scoped_refptr<media::VideoFrame>& frame, |
| 283 bool force_keyframe) { |
| 284 DVLOG(3) << "Encode(): force_keyframe=" << force_keyframe; |
| 285 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
| 286 |
| 287 encoder_thread_.message_loop()->PostTask( |
| 288 FROM_HERE, |
| 289 base::Bind(&ExynosVideoEncodeAccelerator::EncodeTask, |
| 290 base::Unretained(this), |
| 291 frame, |
| 292 force_keyframe)); |
| 293 } |
| 294 |
| 295 void ExynosVideoEncodeAccelerator::UseOutputBitstreamBuffer( |
| 296 const media::BitstreamBuffer& buffer) { |
| 297 DVLOG(3) << "UseOutputBitstreamBuffer(): id=" << buffer.id(); |
| 298 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
| 299 |
| 300 if (buffer.size() < output_buffer_byte_size_) { |
| 301 NOTIFY_ERROR(kInvalidArgumentError); |
| 302 return; |
| 303 } |
| 304 |
| 305 scoped_ptr<base::SharedMemory> shm( |
| 306 new base::SharedMemory(buffer.handle(), false)); |
| 307 if (!shm->Map(buffer.size())) { |
| 308 NOTIFY_ERROR(kPlatformFailureError); |
| 309 return; |
| 310 } |
| 311 |
| 312 scoped_ptr<BitstreamBufferRef> buffer_ref( |
| 313 new BitstreamBufferRef(buffer.id(), shm.Pass(), buffer.size())); |
| 314 encoder_thread_.message_loop()->PostTask( |
| 315 FROM_HERE, |
| 316 base::Bind(&ExynosVideoEncodeAccelerator::UseOutputBitstreamBufferTask, |
| 317 base::Unretained(this), |
| 318 base::Passed(&buffer_ref))); |
| 319 } |
| 320 |
| 321 void ExynosVideoEncodeAccelerator::RequestEncodingParametersChange( |
| 322 uint32 bitrate, |
| 323 uint32 framerate) { |
| 324 DVLOG(3) << "RequestEncodingParametersChange(): bitrate=" << bitrate |
| 325 << ", framerate=" << framerate; |
| 326 if (bitrate < 1 || framerate < 1) { |
| 327 DLOG(ERROR) << "RequestEncodingParametersChange(): " |
| 328 "invalid bitrate=" << bitrate |
| 329 << " or framerate=" << framerate; |
| 330 NOTIFY_ERROR(kInvalidArgumentError); |
| 331 return; |
| 332 } |
| 333 encoder_thread_.message_loop()->PostTask( |
| 334 FROM_HERE, |
| 335 base::Bind( |
| 336 &ExynosVideoEncodeAccelerator::RequestEncodingParametersChangeTask, |
| 337 base::Unretained(this), |
| 338 bitrate, |
| 339 framerate)); |
| 340 } |
| 341 |
| 342 void ExynosVideoEncodeAccelerator::Destroy() { |
| 343 DVLOG(3) << "Destroy()"; |
| 344 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
| 345 |
| 346 // We're destroying; cancel all callbacks. |
| 347 client_ptr_factory_.InvalidateWeakPtrs(); |
| 348 |
| 349 // If the encoder thread is running, destroy using posted task. |
| 350 if (encoder_thread_.IsRunning()) { |
| 351 encoder_thread_.message_loop()->PostTask( |
| 352 FROM_HERE, |
| 353 base::Bind(&ExynosVideoEncodeAccelerator::DestroyTask, |
| 354 base::Unretained(this))); |
| 355 // DestroyTask() will put the encoder into kError state and cause all tasks |
| 356 // to no-op. |
| 357 encoder_thread_.Stop(); |
| 358 } else { |
| 359 // Otherwise, call the destroy task directly. |
| 360 DestroyTask(); |
| 361 } |
| 362 |
| 363 // Set to kError state just in case. |
| 364 SetEncoderState(kError); |
| 365 |
| 366 delete this; |
| 367 } |
| 368 |
| 369 // static |
| 370 std::vector<media::VideoEncodeAccelerator::SupportedProfile> |
| 371 ExynosVideoEncodeAccelerator::GetSupportedProfiles() { |
| 372 std::vector<SupportedProfile> profiles(1); |
| 373 SupportedProfile& profile = profiles[0]; |
| 374 profile.profile = media::H264PROFILE_MAIN; |
| 375 profile.max_resolution.SetSize(1920, 1088); |
| 376 profile.max_framerate.numerator = 30; |
| 377 profile.max_framerate.denominator = 1; |
| 378 return profiles; |
| 379 } |
| 380 |
| 381 void ExynosVideoEncodeAccelerator::EncodeTask( |
| 382 const scoped_refptr<media::VideoFrame>& frame, bool force_keyframe) { |
| 383 DVLOG(3) << "EncodeTask(): force_keyframe=" << force_keyframe; |
| 384 DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current()); |
| 385 DCHECK_NE(encoder_state_, kUninitialized); |
| 386 |
| 387 if (encoder_state_ == kError) { |
| 388 DVLOG(2) << "EncodeTask(): early out: kError state"; |
| 389 return; |
| 390 } |
| 391 |
| 392 encoder_input_queue_.push_back(frame); |
| 393 EnqueueGsc(); |
| 394 |
| 395 if (force_keyframe) { |
| 396 // TODO(sheu): this presently makes for slightly imprecise encoding |
| 397     // parameter updates. To precisely align the parameter updates with the
| 398 // incoming input frame, we should track the parameters through the GSC |
| 399 // pipeline and only apply them when the MFC input is about to be queued. |
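|          // Ask MFC to encode its next frame as an I-frame (keyframe).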
| 400 struct v4l2_ext_control ctrls[1]; |
| 401 struct v4l2_ext_controls control; |
| 402 memset(&ctrls, 0, sizeof(ctrls)); |
| 403 memset(&control, 0, sizeof(control)); |
| 404 ctrls[0].id = V4L2_CID_MPEG_MFC51_VIDEO_FORCE_FRAME_TYPE; |
| 405 ctrls[0].value = V4L2_MPEG_MFC51_VIDEO_FORCE_FRAME_TYPE_I_FRAME; |
| 406 control.ctrl_class = V4L2_CTRL_CLASS_MPEG; |
| 407 control.count = 1; |
| 408 control.controls = ctrls; |
| 409 IOCTL_OR_ERROR_RETURN(mfc_fd_, VIDIOC_S_EXT_CTRLS, &control); |
| 410 } |
| 411 } |
| 412 |
| 413 void ExynosVideoEncodeAccelerator::UseOutputBitstreamBufferTask( |
| 414 scoped_ptr<BitstreamBufferRef> buffer_ref) { |
| 415 DVLOG(3) << "UseOutputBitstreamBufferTask(): id=" << buffer_ref->id; |
| 416 DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current()); |
| 417 |
| 418 encoder_output_queue_.push_back( |
| 419 linked_ptr<BitstreamBufferRef>(buffer_ref.release())); |
| 420 EnqueueMfc(); |
| 421 |
| 422 if (encoder_state_ == kInitialized) { |
| 423 // Finish setting up our MFC OUTPUT queue. See: Initialize(). |
| 424 // VIDIOC_REQBUFS on OUTPUT queue. |
| 425 if (!CreateMfcInputBuffers()) |
| 426 return; |
| 427 if (!StartDevicePoll()) |
| 428 return; |
| 429 encoder_state_ = kEncoding; |
| 430 } |
| 431 } |
| 432 |
| 433 void ExynosVideoEncodeAccelerator::DestroyTask() { |
| 434 DVLOG(3) << "DestroyTask()"; |
| 435 |
| 436 // DestroyTask() should run regardless of encoder_state_. |
| 437 |
| 438 // Stop streaming and the device_poll_thread_. |
| 439 StopDevicePoll(); |
| 440 |
| 441 // Set our state to kError, and early-out all tasks. |
| 442 encoder_state_ = kError; |
| 443 } |
| 444 |
| 445 void ExynosVideoEncodeAccelerator::ServiceDeviceTask() { |
| 446 DVLOG(3) << "ServiceDeviceTask()"; |
| 447 DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current()); |
| 448 DCHECK_NE(encoder_state_, kUninitialized); |
| 449 DCHECK_NE(encoder_state_, kInitialized); |
| 450 |
| 451 if (encoder_state_ == kError) { |
| 452 DVLOG(2) << "ServiceDeviceTask(): early out: kError state"; |
| 453 return; |
| 454 } |
| 455 |
| 456 DequeueGsc(); |
| 457 DequeueMfc(); |
| 458 EnqueueGsc(); |
| 459 EnqueueMfc(); |
| 460 |
| 461 // Clear the interrupt fd. |
| 462 if (!ClearDevicePollInterrupt()) |
| 463 return; |
| 464 |
| 465 unsigned int poll_fds = 0; |
| 466 // Add GSC fd, if we should poll on it. |
| 467 // GSC has to wait until both input and output buffers are queued. |
| 468 if (gsc_input_buffer_queued_count_ > 0 && gsc_output_buffer_queued_count_ > 0) |
| 469 poll_fds |= kPollGsc; |
| 470 // Add MFC fd, if we should poll on it. |
| 471 // MFC can be polled as soon as either input or output buffers are queued. |
| 472 if (mfc_input_buffer_queued_count_ + mfc_output_buffer_queued_count_ > 0) |
| 473 poll_fds |= kPollMfc; |
| 474 |
| 475 // ServiceDeviceTask() should only ever be scheduled from DevicePollTask(), |
| 476 // so either: |
| 477 // * device_poll_thread_ is running normally |
| 478 // * device_poll_thread_ scheduled us, but then a DestroyTask() shut it down, |
| 479 // in which case we're in kError state, and we should have early-outed |
| 480 // already. |
| 481 DCHECK(device_poll_thread_.message_loop()); |
| 482 // Queue the DevicePollTask() now. |
| 483 device_poll_thread_.message_loop()->PostTask( |
| 484 FROM_HERE, |
| 485 base::Bind(&ExynosVideoEncodeAccelerator::DevicePollTask, |
| 486 base::Unretained(this), |
| 487 poll_fds)); |
| 488 |
| 489 DVLOG(2) << "ServiceDeviceTask(): buffer counts: ENC[" |
| 490 << encoder_input_queue_.size() << "] => GSC[" |
| 491 << gsc_free_input_buffers_.size() << "+" |
| 492 << gsc_input_buffer_queued_count_ << "/" |
| 493 << gsc_input_buffer_map_.size() << "->" |
| 494 << gsc_free_output_buffers_.size() << "+" |
| 495 << gsc_output_buffer_queued_count_ << "/" |
| 496 << gsc_output_buffer_map_.size() << "] => " |
| 497 << mfc_ready_input_buffers_.size() << " => MFC[" |
| 498 << mfc_free_input_buffers_.size() << "+" |
| 499 << mfc_input_buffer_queued_count_ << "/" |
| 500 << mfc_input_buffer_map_.size() << "->" |
| 501 << mfc_free_output_buffers_.size() << "+" |
| 502 << mfc_output_buffer_queued_count_ << "/" |
| 503 << mfc_output_buffer_map_.size() << "] => OUT[" |
| 504 << encoder_output_queue_.size() << "]"; |
| 505 } |
| 506 |
| 507 void ExynosVideoEncodeAccelerator::EnqueueGsc() { |
| 508 DVLOG(3) << "EnqueueGsc()"; |
| 509 DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current()); |
| 510 |
| 511 const int old_gsc_inputs_queued = gsc_input_buffer_queued_count_; |
| 512 while (!encoder_input_queue_.empty() && !gsc_free_input_buffers_.empty()) { |
| 513 if (!EnqueueGscInputRecord()) |
| 514 return; |
| 515 } |
| 516 if (old_gsc_inputs_queued == 0 && gsc_input_buffer_queued_count_ != 0) { |
| 517 // We started up a previously empty queue. |
| 518 // Queue state changed; signal interrupt. |
| 519 if (!SetDevicePollInterrupt()) |
| 520 return; |
| 521 // Start VIDIOC_STREAMON if we haven't yet. |
| 522 if (!gsc_input_streamon_) { |
| 523 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 524 IOCTL_OR_ERROR_RETURN(gsc_fd_, VIDIOC_STREAMON, &type); |
| 525 gsc_input_streamon_ = true; |
| 526 } |
| 527 } |
| 528 |
| 529 // Enqueue a GSC output, only if we need one. GSC output buffers write |
| 530 // directly to MFC input buffers, so we'll have to check for free MFC input |
| 531 // buffers as well. |
| 532 // GSC is liable to race conditions if more than one output buffer is |
| 533 // simultaneously enqueued, so enqueue just one. |
| 534 if (gsc_input_buffer_queued_count_ != 0 && |
| 535 gsc_output_buffer_queued_count_ == 0 && |
| 536 !gsc_free_output_buffers_.empty() && !mfc_free_input_buffers_.empty()) { |
| 537 const int old_gsc_outputs_queued = gsc_output_buffer_queued_count_; |
| 538 if (!EnqueueGscOutputRecord()) |
| 539 return; |
| 540 if (old_gsc_outputs_queued == 0 && gsc_output_buffer_queued_count_ != 0) { |
| 541 // We just started up a previously empty queue. |
| 542 // Queue state changed; signal interrupt. |
| 543 if (!SetDevicePollInterrupt()) |
| 544 return; |
| 545 // Start VIDIOC_STREAMON if we haven't yet. |
| 546 if (!gsc_output_streamon_) { |
| 547 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 548 IOCTL_OR_ERROR_RETURN(gsc_fd_, VIDIOC_STREAMON, &type); |
| 549 gsc_output_streamon_ = true; |
| 550 } |
| 551 } |
| 552 } |
| 553 DCHECK_LE(gsc_output_buffer_queued_count_, 1); |
| 554 } |
| 555 |
| 556 void ExynosVideoEncodeAccelerator::DequeueGsc() { |
| 557 DVLOG(3) << "DequeueGsc()"; |
| 558 DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current()); |
| 559 |
| 560 // Dequeue completed GSC input (VIDEO_OUTPUT) buffers, and recycle to the free |
| 561 // list. |
| 562 struct v4l2_buffer dqbuf; |
| 563 struct v4l2_plane planes[3]; |
| 564 while (gsc_input_buffer_queued_count_ > 0) { |
| 565 DCHECK(gsc_input_streamon_); |
| 566 memset(&dqbuf, 0, sizeof(dqbuf)); |
| 567 memset(&planes, 0, sizeof(planes)); |
| 568 dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 569 dqbuf.memory = V4L2_MEMORY_USERPTR; |
| 570 dqbuf.m.planes = planes; |
| 571 dqbuf.length = arraysize(planes); |
| 572 if (HANDLE_EINTR(ioctl(gsc_fd_, VIDIOC_DQBUF, &dqbuf)) != 0) { |
| 573 if (errno == EAGAIN) { |
| 574 // EAGAIN if we're just out of buffers to dequeue. |
| 575 break; |
| 576 } |
| 577 DPLOG(ERROR) << "DequeueGsc(): ioctl() failed: VIDIOC_DQBUF"; |
| 578 NOTIFY_ERROR(kPlatformFailureError); |
| 579 return; |
| 580 } |
| 581 GscInputRecord& input_record = gsc_input_buffer_map_[dqbuf.index]; |
| 582 DCHECK(input_record.at_device); |
| 583 DCHECK(input_record.frame.get()); |
| 584 input_record.at_device = false; |
| 585 input_record.frame = NULL; |
| 586 gsc_free_input_buffers_.push_back(dqbuf.index); |
| 587 gsc_input_buffer_queued_count_--; |
| 588 } |
| 589 |
| 590 // Dequeue completed GSC output (VIDEO_CAPTURE) buffers, and recycle to the |
| 591 // free list. Queue the corresponding MFC buffer to the GSC->MFC holding |
| 592 // queue. |
| 593 while (gsc_output_buffer_queued_count_ > 0) { |
| 594 DCHECK(gsc_output_streamon_); |
| 595 memset(&dqbuf, 0, sizeof(dqbuf)); |
| 596 memset(&planes, 0, sizeof(planes)); |
| 597 dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 598 dqbuf.memory = V4L2_MEMORY_DMABUF; |
| 599 dqbuf.m.planes = planes; |
| 600 dqbuf.length = 2; |
| 601 if (HANDLE_EINTR(ioctl(gsc_fd_, VIDIOC_DQBUF, &dqbuf)) != 0) { |
| 602 if (errno == EAGAIN) { |
| 603 // EAGAIN if we're just out of buffers to dequeue. |
| 604 break; |
| 605 } |
| 606 DPLOG(ERROR) << "DequeueGsc(): ioctl() failed: VIDIOC_DQBUF"; |
| 607 NOTIFY_ERROR(kPlatformFailureError); |
| 608 return; |
| 609 } |
| 610 GscOutputRecord& output_record = gsc_output_buffer_map_[dqbuf.index]; |
| 611 DCHECK(output_record.at_device); |
| 612 DCHECK(output_record.mfc_input != -1); |
| 613 mfc_ready_input_buffers_.push_back(output_record.mfc_input); |
| 614 output_record.at_device = false; |
| 615 output_record.mfc_input = -1; |
| 616 gsc_free_output_buffers_.push_back(dqbuf.index); |
| 617 gsc_output_buffer_queued_count_--; |
| 618 } |
| 619 } |
|
| 620 void ExynosVideoEncodeAccelerator::EnqueueMfc() {
| 621 DVLOG(3) << "EnqueueMfc()"; |
| 622 DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current()); |
| 623 |
| 624 // Enqueue all the MFC inputs we can. |
| 625 const int old_mfc_inputs_queued = mfc_input_buffer_queued_count_; |
| 626 while (!mfc_ready_input_buffers_.empty()) { |
| 627 if (!EnqueueMfcInputRecord()) |
| 628 return; |
| 629 } |
| 630 if (old_mfc_inputs_queued == 0 && mfc_input_buffer_queued_count_ != 0) { |
| 631 // We just started up a previously empty queue. |
| 632 // Queue state changed; signal interrupt. |
| 633 if (!SetDevicePollInterrupt()) |
| 634 return; |
| 635 // Start VIDIOC_STREAMON if we haven't yet. |
| 636 if (!mfc_input_streamon_) { |
| 637 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 638 IOCTL_OR_ERROR_RETURN(mfc_fd_, VIDIOC_STREAMON, &type); |
| 639 mfc_input_streamon_ = true; |
| 640 } |
| 641 } |
| 642 |
| 643 // Enqueue all the MFC outputs we can. |
| 644 const int old_mfc_outputs_queued = mfc_output_buffer_queued_count_; |
| 645 while (!mfc_free_output_buffers_.empty() && !encoder_output_queue_.empty()) { |
| 646 if (!EnqueueMfcOutputRecord()) |
| 647 return; |
| 648 } |
| 649 if (old_mfc_outputs_queued == 0 && mfc_output_buffer_queued_count_ != 0) { |
| 650 // We just started up a previously empty queue. |
| 651 // Queue state changed; signal interrupt. |
| 652 if (!SetDevicePollInterrupt()) |
| 653 return; |
| 654 // Start VIDIOC_STREAMON if we haven't yet. |
| 655 if (!mfc_output_streamon_) { |
| 656 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 657 IOCTL_OR_ERROR_RETURN(mfc_fd_, VIDIOC_STREAMON, &type); |
| 658 mfc_output_streamon_ = true; |
| 659 } |
| 660 } |
| 661 } |
| 662 |
| 663 void ExynosVideoEncodeAccelerator::DequeueMfc() { |
| 664 DVLOG(3) << "DequeueMfc()"; |
| 665 DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current()); |
| 666 |
| 667 // Dequeue completed MFC input (VIDEO_OUTPUT) buffers, and recycle to the free |
| 668 // list. |
| 669 struct v4l2_buffer dqbuf; |
| 670 struct v4l2_plane planes[2]; |
| 671 while (mfc_input_buffer_queued_count_ > 0) { |
| 672 DCHECK(mfc_input_streamon_); |
| 673 memset(&dqbuf, 0, sizeof(dqbuf)); |
| 674 memset(&planes, 0, sizeof(planes)); |
| 675 dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 676 dqbuf.memory = V4L2_MEMORY_MMAP; |
| 677 dqbuf.m.planes = planes; |
| 678 dqbuf.length = 2; |
| 679 if (HANDLE_EINTR(ioctl(mfc_fd_, VIDIOC_DQBUF, &dqbuf)) != 0) { |
| 680 if (errno == EAGAIN) { |
| 681 // EAGAIN if we're just out of buffers to dequeue. |
| 682 break; |
| 683 } |
| 684 DPLOG(ERROR) << "DequeueMfc(): ioctl() failed: VIDIOC_DQBUF"; |
| 685 NOTIFY_ERROR(kPlatformFailureError); |
| 686 return; |
| 687 } |
| 688 MfcInputRecord& input_record = mfc_input_buffer_map_[dqbuf.index]; |
| 689 DCHECK(input_record.at_device); |
| 690 input_record.at_device = false; |
| 691 mfc_free_input_buffers_.push_back(dqbuf.index); |
| 692 mfc_input_buffer_queued_count_--; |
| 693 } |
| 694 |
| 695 // Dequeue completed MFC output (VIDEO_CAPTURE) buffers, and recycle to the |
| 696 // free list. Notify the client that an output buffer is complete. |
| 697 while (mfc_output_buffer_queued_count_ > 0) { |
| 698 DCHECK(mfc_output_streamon_); |
| 699 memset(&dqbuf, 0, sizeof(dqbuf)); |
| 700 memset(planes, 0, sizeof(planes)); |
| 701 dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 702 dqbuf.memory = V4L2_MEMORY_USERPTR; |
| 703 dqbuf.m.planes = planes; |
| 704 dqbuf.length = 1; |
| 705 if (HANDLE_EINTR(ioctl(mfc_fd_, VIDIOC_DQBUF, &dqbuf)) != 0) { |
| 706 if (errno == EAGAIN) { |
| 707 // EAGAIN if we're just out of buffers to dequeue. |
| 708 break; |
| 709 } |
| 710 DPLOG(ERROR) << "DequeueMfc(): ioctl() failed: VIDIOC_DQBUF"; |
| 711 NOTIFY_ERROR(kPlatformFailureError); |
| 712 return; |
| 713 } |
| 714 const bool key_frame = ((dqbuf.flags & V4L2_BUF_FLAG_KEYFRAME) != 0); |
| 715     size_t output_size = dqbuf.m.planes[0].bytesused;
| 716 MfcOutputRecord& output_record = mfc_output_buffer_map_[dqbuf.index]; |
| 717 DCHECK(output_record.at_device); |
| 718 DCHECK(output_record.buffer_ref.get()); |
| 719 uint8* data = |
| 720 reinterpret_cast<uint8*>(output_record.buffer_ref->shm->memory()); |
| 721 if (stream_header_size_ == 0) { |
| 722 // Assume that the first buffer dequeued is the stream header. |
| 723 stream_header_size_ = output_size; |
| 724 stream_header_.reset(new uint8[stream_header_size_]); |
| 725 memcpy(stream_header_.get(), data, stream_header_size_); |
| 726 } |
| 727     if (key_frame &&
| 728         output_buffer_byte_size_ - stream_header_size_ >= output_size) {
| 729       // Insert stream header before every keyframe; report its size too.
| 730       memmove(data + stream_header_size_, data, output_size);
| 731       memcpy(data, stream_header_.get(), stream_header_size_);
|           output_size += stream_header_size_;
| 732     }
| 733 DVLOG(3) << "DequeueMfc(): returning " |
| 734 "bitstream_buffer_id=" << output_record.buffer_ref->id |
| 735 << ", key_frame=" << key_frame; |
| 736 child_message_loop_proxy_->PostTask( |
| 737 FROM_HERE, |
| 738 base::Bind(&Client::BitstreamBufferReady, |
| 739 client_, |
| 740 output_record.buffer_ref->id, |
| 741                    output_size,
| 742 key_frame)); |
| 743 output_record.at_device = false; |
| 744 output_record.buffer_ref.reset(); |
| 745 mfc_free_output_buffers_.push_back(dqbuf.index); |
| 746 mfc_output_buffer_queued_count_--; |
| 747 } |
| 748 } |
| 749 |
| 750 bool ExynosVideoEncodeAccelerator::EnqueueGscInputRecord() { |
| 751 DVLOG(3) << "EnqueueGscInputRecord()"; |
| 752 DCHECK(!encoder_input_queue_.empty()); |
| 753 DCHECK(!gsc_free_input_buffers_.empty()); |
| 754 |
| 755 // Enqueue a GSC input (VIDEO_OUTPUT) buffer for an input video frame |
| 756 scoped_refptr<media::VideoFrame> frame = encoder_input_queue_.front(); |
| 757 const int gsc_buffer = gsc_free_input_buffers_.back(); |
| 758 GscInputRecord& input_record = gsc_input_buffer_map_[gsc_buffer]; |
| 759 DCHECK(!input_record.at_device); |
| 760 DCHECK(!input_record.frame.get()); |
| 761 struct v4l2_buffer qbuf; |
| 762 struct v4l2_plane qbuf_planes[3]; |
| 763 memset(&qbuf, 0, sizeof(qbuf)); |
| 764 memset(qbuf_planes, 0, sizeof(qbuf_planes)); |
| 765 qbuf.index = gsc_buffer; |
| 766 qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 767 qbuf.memory = V4L2_MEMORY_USERPTR; |
| 768 qbuf.m.planes = qbuf_planes; |
| 769 switch (input_format_fourcc_) { |
| 770 case V4L2_PIX_FMT_RGB32: { |
| 771 qbuf.m.planes[0].bytesused = input_allocated_size_.GetArea() * 4; |
| 772 qbuf.m.planes[0].length = input_allocated_size_.GetArea() * 4; |
| 773 qbuf.m.planes[0].m.userptr = reinterpret_cast<unsigned long>( |
| 774 frame->data(media::VideoFrame::kRGBPlane)); |
| 775 qbuf.length = 1; |
| 776 break; |
| 777 } |
| 778 case V4L2_PIX_FMT_YUV420M: { |
| 779 qbuf.m.planes[0].bytesused = input_allocated_size_.GetArea(); |
| 780 qbuf.m.planes[0].length = input_allocated_size_.GetArea(); |
| 781 qbuf.m.planes[0].m.userptr = reinterpret_cast<unsigned long>( |
| 782 frame->data(media::VideoFrame::kYPlane)); |
| 783 qbuf.m.planes[1].bytesused = input_allocated_size_.GetArea() / 4; |
| 784 qbuf.m.planes[1].length = input_allocated_size_.GetArea() / 4; |
| 785 qbuf.m.planes[1].m.userptr = reinterpret_cast<unsigned long>( |
| 786 frame->data(media::VideoFrame::kUPlane)); |
| 787 qbuf.m.planes[2].bytesused = input_allocated_size_.GetArea() / 4; |
| 788 qbuf.m.planes[2].length = input_allocated_size_.GetArea() / 4; |
| 789 qbuf.m.planes[2].m.userptr = reinterpret_cast<unsigned long>( |
| 790 frame->data(media::VideoFrame::kVPlane)); |
| 791 qbuf.length = 3; |
| 792 break; |
| 793 } |
| 794 default: |
| 795 NOTREACHED(); |
| 796 NOTIFY_ERROR(kIllegalStateError); |
| 797 return false; |
| 798 } |
| 799 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_QBUF, &qbuf); |
| 800 input_record.at_device = true; |
| 801 input_record.frame = frame; |
| 802 encoder_input_queue_.pop_front(); |
| 803 gsc_free_input_buffers_.pop_back(); |
| 804 gsc_input_buffer_queued_count_++; |
| 805 return true; |
| 806 } |
| 807 |
| 808 bool ExynosVideoEncodeAccelerator::EnqueueGscOutputRecord() { |
| 809 DVLOG(3) << "EnqueueGscOutputRecord()"; |
| 810 DCHECK(!gsc_free_output_buffers_.empty()); |
| 811 DCHECK(!mfc_free_input_buffers_.empty()); |
| 812 |
| 813 // Enqueue a GSC output (VIDEO_CAPTURE) buffer. |
| 814 const int gsc_buffer = gsc_free_output_buffers_.back(); |
| 815 const int mfc_buffer = mfc_free_input_buffers_.back(); |
| 816 GscOutputRecord& output_record = gsc_output_buffer_map_[gsc_buffer]; |
| 817 MfcInputRecord& input_record = mfc_input_buffer_map_[mfc_buffer]; |
| 818 DCHECK(!output_record.at_device); |
| 819 DCHECK_EQ(output_record.mfc_input, -1); |
| 820 DCHECK(!input_record.at_device); |
| 821 struct v4l2_buffer qbuf; |
| 822 struct v4l2_plane qbuf_planes[2]; |
| 823 memset(&qbuf, 0, sizeof(qbuf)); |
| 824 memset(qbuf_planes, 0, sizeof(qbuf_planes)); |
| 825 qbuf.index = gsc_buffer; |
| 826 qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 827 qbuf.memory = V4L2_MEMORY_DMABUF; |
| 828 qbuf.m.planes = qbuf_planes; |
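|        // Point the GSC capture planes at the dmabuf fds exported from the
|        // chosen MFC input buffer (NV12M: Y plane and interleaved CbCr plane).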
| 829 qbuf.m.planes[0].m.fd = input_record.fd[0]; |
| 830 qbuf.m.planes[1].m.fd = input_record.fd[1]; |
| 831 qbuf.length = 2; |
| 832 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_QBUF, &qbuf); |
| 833 output_record.at_device = true; |
| 834 output_record.mfc_input = mfc_buffer; |
| 835 mfc_free_input_buffers_.pop_back(); |
| 836 gsc_free_output_buffers_.pop_back(); |
| 837 gsc_output_buffer_queued_count_++; |
| 838 return true; |
| 839 } |
| 840 |
| 841 bool ExynosVideoEncodeAccelerator::EnqueueMfcInputRecord() { |
| 842 DVLOG(3) << "EnqueueMfcInputRecord()"; |
| 843 DCHECK(!mfc_ready_input_buffers_.empty()); |
| 844 |
| 845   // Enqueue an MFC input (VIDEO_OUTPUT) buffer.
| 846 const int mfc_buffer = mfc_ready_input_buffers_.front(); |
| 847 MfcInputRecord& input_record = mfc_input_buffer_map_[mfc_buffer]; |
| 848 DCHECK(!input_record.at_device); |
| 849 struct v4l2_buffer qbuf; |
| 850 struct v4l2_plane qbuf_planes[2]; |
| 851 memset(&qbuf, 0, sizeof(qbuf)); |
| 852 memset(qbuf_planes, 0, sizeof(qbuf_planes)); |
| 853 qbuf.index = mfc_buffer; |
| 854 qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 855 qbuf.memory = V4L2_MEMORY_MMAP; |
| 856 qbuf.m.planes = qbuf_planes; |
| 857 qbuf.length = 2; |
| 858 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_QBUF, &qbuf); |
| 859 input_record.at_device = true; |
| 860 mfc_ready_input_buffers_.pop_front(); |
| 861 mfc_input_buffer_queued_count_++; |
| 862 return true; |
| 863 } |
| 864 |
| 865 bool ExynosVideoEncodeAccelerator::EnqueueMfcOutputRecord() { |
| 866 DVLOG(3) << "EnqueueMfcOutputRecord()"; |
| 867 DCHECK(!mfc_free_output_buffers_.empty()); |
| 868 DCHECK(!encoder_output_queue_.empty()); |
| 869 |
| 870   // Enqueue an MFC output (VIDEO_CAPTURE) buffer.
| 871 linked_ptr<BitstreamBufferRef> output_buffer = encoder_output_queue_.back(); |
| 872 const int mfc_buffer = mfc_free_output_buffers_.back(); |
| 873 MfcOutputRecord& output_record = mfc_output_buffer_map_[mfc_buffer]; |
| 874 DCHECK(!output_record.at_device); |
| 875 DCHECK(!output_record.buffer_ref.get()); |
| 876 struct v4l2_buffer qbuf; |
| 877 struct v4l2_plane qbuf_planes[1]; |
| 878 memset(&qbuf, 0, sizeof(qbuf)); |
| 879 memset(qbuf_planes, 0, sizeof(qbuf_planes)); |
| 880 qbuf.index = mfc_buffer; |
| 881 qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 882 qbuf.memory = V4L2_MEMORY_USERPTR; |
| 883 qbuf.m.planes = qbuf_planes; |
| 884 qbuf.m.planes[0].bytesused = output_buffer->size; |
| 885 qbuf.m.planes[0].length = output_buffer->size; |
| 886 qbuf.m.planes[0].m.userptr = |
| 887 reinterpret_cast<unsigned long>(output_buffer->shm->memory()); |
| 888 qbuf.length = 1; |
| 889 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_QBUF, &qbuf); |
| 890 output_record.at_device = true; |
| 891 output_record.buffer_ref = output_buffer; |
| 892 encoder_output_queue_.pop_back(); |
| 893 mfc_free_output_buffers_.pop_back(); |
| 894 mfc_output_buffer_queued_count_++; |
| 895 return true; |
| 896 } |
| 897 |
| 898 bool ExynosVideoEncodeAccelerator::StartDevicePoll() { |
| 899 DVLOG(3) << "StartDevicePoll()"; |
| 900 DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current()); |
| 901 DCHECK(!device_poll_thread_.IsRunning()); |
| 902 |
| 903 // Start up the device poll thread and schedule its first DevicePollTask(). |
| 904 if (!device_poll_thread_.Start()) { |
| 905 DLOG(ERROR) << "StartDevicePoll(): Device thread failed to start"; |
| 906 NOTIFY_ERROR(kPlatformFailureError); |
| 907 return false; |
| 908 } |
| 909 // Enqueue a poll task with no devices to poll on -- it will wait only on the |
| 910 // interrupt fd. |
| 911 device_poll_thread_.message_loop()->PostTask( |
| 912 FROM_HERE, |
| 913 base::Bind(&ExynosVideoEncodeAccelerator::DevicePollTask, |
| 914 base::Unretained(this), |
| 915 0)); |
| 916 |
| 917 return true; |
| 918 } |
| 919 |
| 920 bool ExynosVideoEncodeAccelerator::StopDevicePoll() { |
| 921 DVLOG(3) << "StopDevicePoll()"; |
| 922 |
| 923 // Signal the DevicePollTask() to stop, and stop the device poll thread. |
| 924 if (!SetDevicePollInterrupt()) |
| 925 return false; |
| 926 device_poll_thread_.Stop(); |
| 927 // Clear the interrupt now, to be sure. |
| 928 if (!ClearDevicePollInterrupt()) |
| 929 return false; |
| 930 |
| 931 // Stop streaming. |
| 932 if (gsc_input_streamon_) { |
| 933 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 934 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_STREAMOFF, &type); |
| 935 } |
| 936 gsc_input_streamon_ = false; |
| 937 if (gsc_output_streamon_) { |
| 938 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 939 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_STREAMOFF, &type); |
| 940 } |
| 941 gsc_output_streamon_ = false; |
| 942 if (mfc_input_streamon_) { |
| 943 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 944 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_STREAMOFF, &type); |
| 945 } |
| 946 mfc_input_streamon_ = false; |
| 947 if (mfc_output_streamon_) { |
| 948 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 949 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_STREAMOFF, &type); |
| 950 } |
| 951 mfc_output_streamon_ = false; |
| 952 |
| 953 // Reset all our accounting info. |
| 954 encoder_input_queue_.clear(); |
| 955 gsc_free_input_buffers_.clear(); |
| 956 for (size_t i = 0; i < gsc_input_buffer_map_.size(); ++i) { |
| 957 GscInputRecord& input_record = gsc_input_buffer_map_[i]; |
| 958 input_record.at_device = false; |
| 959 input_record.frame = NULL; |
| 960 gsc_free_input_buffers_.push_back(i); |
| 961 } |
| 962 gsc_input_buffer_queued_count_ = 0; |
| 963 gsc_free_output_buffers_.clear(); |
| 964 for (size_t i = 0; i < gsc_output_buffer_map_.size(); ++i) { |
| 965 GscOutputRecord& output_record = gsc_output_buffer_map_[i]; |
| 966 output_record.at_device = false; |
| 967 output_record.mfc_input = -1; |
| 968 gsc_free_output_buffers_.push_back(i); |
| 969 } |
| 970 gsc_output_buffer_queued_count_ = 0; |
| 971 mfc_ready_input_buffers_.clear(); |
| 972 mfc_free_input_buffers_.clear(); |
| 973 for (size_t i = 0; i < mfc_input_buffer_map_.size(); ++i) { |
| 974 MfcInputRecord& input_record = mfc_input_buffer_map_[i]; |
| 975 input_record.at_device = false; |
| 976 mfc_free_input_buffers_.push_back(i); |
| 977 } |
| 978 mfc_input_buffer_queued_count_ = 0; |
| 979 mfc_free_output_buffers_.clear(); |
| 980 for (size_t i = 0; i < mfc_output_buffer_map_.size(); ++i) { |
| 981 MfcOutputRecord& output_record = mfc_output_buffer_map_[i]; |
| 982 output_record.at_device = false; |
| 983 output_record.buffer_ref.reset(); |
| 984 mfc_free_output_buffers_.push_back(i); |
| 985 } |
| 986 mfc_output_buffer_queued_count_ = 0; |
| 987 encoder_output_queue_.clear(); |
| 988 |
| 989 DVLOG(3) << "StopDevicePoll(): device poll stopped"; |
| 990 return true; |
| 991 } |
| 992 |
| 993 bool ExynosVideoEncodeAccelerator::SetDevicePollInterrupt() { |
| 994 DVLOG(3) << "SetDevicePollInterrupt()"; |
| 995 |
| 996 // We might get called here if we fail during initialization, in which case we |
| 997 // don't have a file descriptor. |
| 998 if (device_poll_interrupt_fd_ == -1) |
| 999 return true; |
| 1000 |
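|        // Writing to the eventfd increments its counter, making the fd
|        // readable and waking the poll() in DevicePollTask().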
| 1001 const uint64 buf = 1; |
| 1002 if (HANDLE_EINTR((write(device_poll_interrupt_fd_, &buf, sizeof(buf)))) < |
| 1003 static_cast<ssize_t>(sizeof(buf))) { |
| 1004 DPLOG(ERROR) << "SetDevicePollInterrupt(): write() failed"; |
| 1005 NOTIFY_ERROR(kPlatformFailureError); |
| 1006 return false; |
| 1007 } |
| 1008 return true; |
| 1009 } |
| 1010 |
| 1011 bool ExynosVideoEncodeAccelerator::ClearDevicePollInterrupt() { |
| 1012 DVLOG(3) << "ClearDevicePollInterrupt()"; |
| 1013 |
| 1014 // We might get called here if we fail during initialization, in which case we |
| 1015 // don't have a file descriptor. |
| 1016 if (device_poll_interrupt_fd_ == -1) |
| 1017 return true; |
| 1018 |
| 1019 uint64 buf; |
| 1020 if (HANDLE_EINTR(read(device_poll_interrupt_fd_, &buf, sizeof(buf))) < |
| 1021 static_cast<ssize_t>(sizeof(buf))) { |
| 1022 if (errno == EAGAIN) { |
| 1023 // No interrupt flag set, and we're reading nonblocking. Not an error. |
| 1024 return true; |
| 1025 } else { |
| 1026 DPLOG(ERROR) << "ClearDevicePollInterrupt(): read() failed"; |
| 1027 NOTIFY_ERROR(kPlatformFailureError); |
| 1028 return false; |
| 1029 } |
| 1030 } |
| 1031 return true; |
| 1032 } |
| 1033 |
| 1034 void ExynosVideoEncodeAccelerator::DevicePollTask(unsigned int poll_fds) { |
| 1035 DVLOG(3) << "DevicePollTask()"; |
| 1036 DCHECK_EQ(device_poll_thread_.message_loop(), base::MessageLoop::current()); |
| 1037 DCHECK_NE(device_poll_interrupt_fd_, -1); |
| 1038 |
| 1039 // This routine just polls the set of device fds, and schedules a |
| 1040 // ServiceDeviceTask() on encoder_thread_ when processing needs to occur. |
| 1041 // Other threads may notify this task to return early by writing to |
| 1042 // device_poll_interrupt_fd_. |
| 1043 struct pollfd pollfds[3]; |
| 1044 nfds_t nfds; |
| 1045 |
| 1046 // Add device_poll_interrupt_fd_; |
| 1047 pollfds[0].fd = device_poll_interrupt_fd_; |
| 1048 pollfds[0].events = POLLIN | POLLERR; |
| 1049 nfds = 1; |
| 1050 |
| 1051 // Add GSC fd, if we should poll on it. |
| 1052 // GSC has to wait until both input and output buffers are queued. |
| 1053 if (poll_fds & kPollGsc) { |
| 1054 DVLOG(3) << "DevicePollTask(): adding GSC to poll() set"; |
| 1055 pollfds[nfds].fd = gsc_fd_; |
| 1056 pollfds[nfds].events = POLLIN | POLLOUT | POLLERR; |
| 1057 nfds++; |
| 1058 } |
| 1059 if (poll_fds & kPollMfc) { |
| 1060 DVLOG(3) << "DevicePollTask(): adding MFC to poll() set"; |
| 1061 pollfds[nfds].fd = mfc_fd_; |
| 1062 pollfds[nfds].events = POLLIN | POLLOUT | POLLERR; |
| 1063 nfds++; |
| 1064 } |
| 1065 |
| 1066 // Poll it! |
| 1067 if (HANDLE_EINTR(poll(pollfds, nfds, -1)) == -1) { |
| 1068 DPLOG(ERROR) << "DevicePollTask(): poll() failed"; |
| 1069 NOTIFY_ERROR(kPlatformFailureError); |
| 1070 return; |
| 1071 } |
| 1072 |
| 1073 // All processing should happen on ServiceDeviceTask(), since we shouldn't |
| 1074 // touch encoder state from this thread. |
| 1075 encoder_thread_.message_loop()->PostTask( |
| 1076 FROM_HERE, |
| 1077 base::Bind(&ExynosVideoEncodeAccelerator::ServiceDeviceTask, |
| 1078 base::Unretained(this))); |
| 1079 } |
| 1080 |
| 1081 void ExynosVideoEncodeAccelerator::NotifyError(Error error) { |
| 1082 DVLOG(1) << "NotifyError(): error=" << error; |
| 1083 |
| 1084 if (!child_message_loop_proxy_->BelongsToCurrentThread()) { |
| 1085 child_message_loop_proxy_->PostTask( |
| 1086 FROM_HERE, |
| 1087 base::Bind( |
| 1088 &ExynosVideoEncodeAccelerator::NotifyError, weak_this_, error)); |
| 1089 return; |
| 1090 } |
| 1091 |
| 1092 if (client_) { |
| 1093 client_->NotifyError(error); |
| 1094 client_ptr_factory_.InvalidateWeakPtrs(); |
| 1095 } |
| 1096 } |
| 1097 |
| 1098 void ExynosVideoEncodeAccelerator::SetEncoderState(State state) { |
| 1099 DVLOG(3) << "SetEncoderState(): state=" << state; |
| 1100 |
| 1101 // We can touch encoder_state_ only if this is the encoder thread or the |
| 1102 // encoder thread isn't running. |
| 1103 if (encoder_thread_.message_loop() != NULL && |
| 1104 encoder_thread_.message_loop() != base::MessageLoop::current()) { |
| 1105 encoder_thread_.message_loop()->PostTask( |
| 1106 FROM_HERE, |
| 1107 base::Bind(&ExynosVideoEncodeAccelerator::SetEncoderState, |
| 1108 base::Unretained(this), |
| 1109 state)); |
| 1110 } else { |
| 1111 encoder_state_ = state; |
| 1112 } |
| 1113 } |
| 1114 |
| 1115 void ExynosVideoEncodeAccelerator::RequestEncodingParametersChangeTask( |
| 1116 uint32 bitrate, |
| 1117 uint32 framerate) { |
| 1118 DVLOG(3) << "RequestEncodingParametersChangeTask(): bitrate=" << bitrate |
| 1119 << ", framerate=" << framerate; |
|        // This may be called directly from Initialize(), before
|        // encoder_thread_ has started.
| 1120   DCHECK(encoder_thread_.message_loop() == NULL ||
|               encoder_thread_.message_loop() == base::MessageLoop::current());
| 1121 |
| 1122 struct v4l2_ext_control ctrls[1]; |
| 1123 struct v4l2_ext_controls control; |
| 1124 memset(&ctrls, 0, sizeof(ctrls)); |
| 1125 memset(&control, 0, sizeof(control)); |
| 1126 ctrls[0].id = V4L2_CID_MPEG_VIDEO_BITRATE; |
| 1127 ctrls[0].value = bitrate; |
| 1128 control.ctrl_class = V4L2_CTRL_CLASS_MPEG; |
| 1129 control.count = arraysize(ctrls); |
| 1130 control.controls = ctrls; |
| 1131 IOCTL_OR_ERROR_RETURN(mfc_fd_, VIDIOC_S_EXT_CTRLS, &control); |
| 1132 |
| 1133 struct v4l2_streamparm parms; |
| 1134 memset(&parms, 0, sizeof(parms)); |
| 1135 parms.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 1136 // Note that we are provided "frames per second" but V4L2 expects "time per |
| 1137 // frame"; hence we provide the reciprocal of the framerate here. |
| 1138 parms.parm.output.timeperframe.numerator = 1; |
| 1139 parms.parm.output.timeperframe.denominator = framerate; |
| 1140 IOCTL_OR_ERROR_RETURN(mfc_fd_, VIDIOC_S_PARM, &parms); |
| 1141 } |
| 1142 |
| 1143 bool ExynosVideoEncodeAccelerator::CreateGscInputBuffers() { |
| 1144 DVLOG(3) << "CreateGscInputBuffers()"; |
| 1145 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
| 1146 DCHECK_EQ(encoder_state_, kUninitialized); |
| 1147 DCHECK(!gsc_input_streamon_); |
| 1148 |
| 1149 struct v4l2_control control; |
| 1150 memset(&control, 0, sizeof(control)); |
| 1151 control.id = V4L2_CID_ROTATE; |
| 1152 control.value = 0; |
| 1153 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CTRL, &control); |
| 1154 |
| 1155 // HFLIP actually seems to control vertical mirroring for GSC, and vice-versa. |
| 1156 memset(&control, 0, sizeof(control)); |
| 1157 control.id = V4L2_CID_HFLIP; |
| 1158 control.value = 0; |
| 1159 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CTRL, &control); |
| 1160 |
| 1161 memset(&control, 0, sizeof(control)); |
| 1162 control.id = V4L2_CID_VFLIP; |
| 1163 control.value = 0; |
| 1164 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CTRL, &control); |
| 1165 |
| 1166 memset(&control, 0, sizeof(control)); |
| 1167 control.id = V4L2_CID_GLOBAL_ALPHA; |
| 1168 control.value = 255; |
| 1169 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CTRL, &control); |
| 1170 |
| 1171 struct v4l2_format format; |
| 1172 memset(&format, 0, sizeof(format)); |
| 1173 format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 1174 format.fmt.pix_mp.width = input_allocated_size_.width(); |
| 1175 format.fmt.pix_mp.height = input_allocated_size_.height(); |
| 1176 format.fmt.pix_mp.pixelformat = input_format_fourcc_; |
| 1177 switch (input_format_fourcc_) { |
| 1178 case V4L2_PIX_FMT_RGB32: |
| 1179 format.fmt.pix_mp.plane_fmt[0].sizeimage = |
| 1180 input_allocated_size_.GetArea() * 4; |
| 1181 format.fmt.pix_mp.plane_fmt[0].bytesperline = |
| 1182 input_allocated_size_.width() * 4; |
| 1183 format.fmt.pix_mp.num_planes = 1; |
| 1184 break; |
| 1185 case V4L2_PIX_FMT_YUV420M: |
| 1186 format.fmt.pix_mp.plane_fmt[0].sizeimage = |
| 1187 input_allocated_size_.GetArea(); |
| 1188 format.fmt.pix_mp.plane_fmt[0].bytesperline = |
| 1189 input_allocated_size_.width(); |
| 1190 format.fmt.pix_mp.plane_fmt[1].sizeimage = |
| 1191 input_allocated_size_.GetArea() / 4; |
| 1192 format.fmt.pix_mp.plane_fmt[1].bytesperline = |
| 1193 input_allocated_size_.width() / 2; |
| 1194 format.fmt.pix_mp.plane_fmt[2].sizeimage = |
| 1195 input_allocated_size_.GetArea() / 4; |
| 1196 format.fmt.pix_mp.plane_fmt[2].bytesperline = |
| 1197 input_allocated_size_.width() / 2; |
| 1198 format.fmt.pix_mp.num_planes = 3; |
| 1199 break; |
| 1200 default: |
| 1201 NOTREACHED(); |
| 1202 NOTIFY_ERROR(kIllegalStateError); |
| 1203 return false; |
| 1204 } |
| 1205 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_FMT, &format); |
| 1206 |
| 1207 struct v4l2_crop crop; |
| 1208 memset(&crop, 0, sizeof(crop)); |
| 1209 crop.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 1210 crop.c.left = 0; |
| 1211 crop.c.top = 0; |
| 1212 crop.c.width = input_visible_size_.width(); |
| 1213 crop.c.height = input_visible_size_.height(); |
| 1214 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CROP, &crop); |
| 1215 |
| 1216 struct v4l2_requestbuffers reqbufs; |
| 1217 memset(&reqbufs, 0, sizeof(reqbufs)); |
| 1218 reqbufs.count = kGscInputBufferCount; |
| 1219 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 1220 reqbufs.memory = V4L2_MEMORY_USERPTR; |
| 1221 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_REQBUFS, &reqbufs); |
| 1222 |
| 1223 DCHECK(gsc_input_buffer_map_.empty()); |
| 1224 gsc_input_buffer_map_.resize(reqbufs.count); |
| 1225 for (size_t i = 0; i < gsc_input_buffer_map_.size(); ++i) |
| 1226 gsc_free_input_buffers_.push_back(i); |
| 1227 |
| 1228 return true; |
| 1229 } |
| 1230 |
| 1231 bool ExynosVideoEncodeAccelerator::CreateGscOutputBuffers() { |
| 1232 DVLOG(3) << "CreateGscOutputBuffers()"; |
| 1233 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
| 1234 DCHECK_EQ(encoder_state_, kUninitialized); |
| 1235 DCHECK(!gsc_output_streamon_); |
| 1236 |
| 1237 struct v4l2_format format; |
| 1238 memset(&format, 0, sizeof(format)); |
| 1239 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 1240 format.fmt.pix_mp.width = converted_allocated_size_.width(); |
| 1241 format.fmt.pix_mp.height = converted_allocated_size_.height(); |
| 1242 format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_NV12M; |
| 1243 format.fmt.pix_mp.plane_fmt[0].sizeimage = |
| 1244 converted_allocated_size_.GetArea(); |
| 1245 format.fmt.pix_mp.plane_fmt[1].sizeimage = |
| 1246 converted_allocated_size_.GetArea() / 2; |
| 1247 format.fmt.pix_mp.plane_fmt[0].bytesperline = |
| 1248 converted_allocated_size_.width(); |
| 1249 format.fmt.pix_mp.plane_fmt[1].bytesperline = |
| 1250 converted_allocated_size_.width(); |
| 1251 format.fmt.pix_mp.num_planes = 2; |
| 1252 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_FMT, &format); |
| 1253 |
| 1254 struct v4l2_crop crop; |
| 1255 memset(&crop, 0, sizeof(crop)); |
| 1256 crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 1257 crop.c.left = 0; |
| 1258 crop.c.top = 0; |
| 1259 crop.c.width = converted_visible_size_.width(); |
| 1260 crop.c.height = converted_visible_size_.height(); |
| 1261 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CROP, &crop); |
| 1262 |
| 1263 struct v4l2_requestbuffers reqbufs; |
| 1264 memset(&reqbufs, 0, sizeof(reqbufs)); |
| 1265 reqbufs.count = kGscOutputBufferCount; |
| 1266 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 1267 reqbufs.memory = V4L2_MEMORY_DMABUF; |
| 1268 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_REQBUFS, &reqbufs); |
| 1269 |
| 1270 DCHECK(gsc_output_buffer_map_.empty()); |
| 1271 gsc_output_buffer_map_.resize(reqbufs.count); |
| 1272 for (size_t i = 0; i < gsc_output_buffer_map_.size(); ++i) |
| 1273 gsc_free_output_buffers_.push_back(i); |
| 1274 return true; |
| 1275 } |
| 1276 |
| 1277 bool ExynosVideoEncodeAccelerator::SetMfcFormats() { |
| 1278 DVLOG(3) << "SetMfcFormats()"; |
| 1279 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
| 1280 DCHECK(!mfc_input_streamon_); |
| 1281 DCHECK(!mfc_output_streamon_); |
| 1282 |
| 1283 // VIDIOC_S_FMT on OUTPUT queue. |
| 1284 struct v4l2_format format; |
| 1285 memset(&format, 0, sizeof(format)); |
| 1286 format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 1287 format.fmt.pix_mp.width = input_allocated_size_.width(); |
| 1288 format.fmt.pix_mp.height = input_allocated_size_.height(); |
| 1289 format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_NV12M; |
| 1290 format.fmt.pix_mp.num_planes = 2; |
| 1291 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_S_FMT, &format); |
| 1292   // We read directly from GSC, so we rely on the HW not changing our set
| 1293 // size/stride. |
| 1294 DCHECK_EQ(format.fmt.pix_mp.plane_fmt[0].sizeimage, |
| 1295 static_cast<__u32>(input_allocated_size_.GetArea())); |
| 1296 DCHECK_EQ(format.fmt.pix_mp.plane_fmt[0].bytesperline, |
| 1297 static_cast<__u32>(input_allocated_size_.width())); |
| 1298 DCHECK_EQ(format.fmt.pix_mp.plane_fmt[1].sizeimage, |
| 1299 static_cast<__u32>(input_allocated_size_.GetArea() / 2)); |
| 1300 DCHECK_EQ(format.fmt.pix_mp.plane_fmt[1].bytesperline, |
| 1301 static_cast<__u32>(input_allocated_size_.width())); |
| 1302 |
| 1303 struct v4l2_crop crop; |
| 1304 memset(&crop, 0, sizeof(crop)); |
| 1305 crop.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; |
| 1306 crop.c.left = 0; |
| 1307 crop.c.top = 0; |
| 1308 crop.c.width = input_visible_size_.width(); |
| 1309 crop.c.height = input_visible_size_.height(); |
| 1310 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_S_CROP, &crop); |
| 1311 |
| 1312 // VIDIOC_S_FMT on CAPTURE queue. |
| 1313 output_buffer_byte_size_ = kMfcOutputBufferSize; |
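|        // This is the minimum size of the bitstream buffers the client must
|        // provide via UseOutputBitstreamBuffer() (see RequireBitstreamBuffers()).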
| 1314 memset(&format, 0, sizeof(format)); |
| 1315 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 1316 format.fmt.pix_mp.width = output_visible_size_.width(); |
| 1317 format.fmt.pix_mp.height = output_visible_size_.height(); |
| 1318 format.fmt.pix_mp.pixelformat = output_format_fourcc_; |
| 1319 format.fmt.pix_mp.plane_fmt[0].sizeimage = output_buffer_byte_size_; |
| 1320 format.fmt.pix_mp.num_planes = 1; |
| 1321 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_S_FMT, &format); |
| 1322 |
| 1323 struct v4l2_ext_control ctrls[6]; |
| 1324 struct v4l2_ext_controls control; |
| 1325 memset(&ctrls, 0, sizeof(ctrls)); |
| 1326 memset(&control, 0, sizeof(control)); |
| 1327 // No B-frames, for lowest decoding latency. |
| 1328 ctrls[0].id = V4L2_CID_MPEG_VIDEO_B_FRAMES; |
| 1329 ctrls[0].value = 0; |
| 1330 // Enable variable bitrate control. |
| 1331 ctrls[1].id = V4L2_CID_MPEG_VIDEO_FRAME_RC_ENABLE; |
| 1332 ctrls[1].value = 1; |
| 1333 // Enable "loose" variable bitrate. |
| 1334 ctrls[2].id = V4L2_CID_MPEG_MFC51_VIDEO_RC_REACTION_COEFF; |
| 1335 ctrls[2].value = 10; |
| 1336 // Force bitrate control to average over a GOP (for tight bitrate tolerance). |
| 1337 ctrls[3].id = V4L2_CID_MPEG_MFC51_VIDEO_RC_FIXED_TARGET_BIT; |
| 1338 ctrls[3].value = 1; |
| 1339 // Quantization parameter maximum value (for variable bitrate control). |
| 1340 ctrls[4].id = V4L2_CID_MPEG_VIDEO_H264_MAX_QP; |
| 1341 ctrls[4].value = 51; |
| 1342 // Separate stream header so we can cache it and insert into the stream. |
| 1343 ctrls[5].id = V4L2_CID_MPEG_VIDEO_HEADER_MODE; |
| 1344 ctrls[5].value = V4L2_MPEG_VIDEO_HEADER_MODE_SEPARATE; |
| 1345 control.ctrl_class = V4L2_CTRL_CLASS_MPEG; |
| 1346 control.count = arraysize(ctrls); |
| 1347 control.controls = ctrls; |
| 1348 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_S_EXT_CTRLS, &control); |
| 1349 |
| 1350 return true; |
| 1351 } |
| 1352 |
| 1353 bool ExynosVideoEncodeAccelerator::CreateMfcInputBuffers() { |
| 1354 DVLOG(3) << "CreateMfcInputBuffers()"; |
| 1355 // This function runs on encoder_thread_ after output buffers have been |
| 1356 // provided by the client. |
| 1357 DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current()); |
| 1358 DCHECK(!mfc_input_streamon_); |
| 1359 |
| 1360 struct v4l2_requestbuffers reqbufs; |
| 1361 memset(&reqbufs, 0, sizeof(reqbufs)); |
| 1362 reqbufs.count = 1; // Driver will allocate the appropriate number of buffers. |
| 1363 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 1364 reqbufs.memory = V4L2_MEMORY_MMAP; |
| 1365 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_REQBUFS, &reqbufs); |
| 1366 |
| 1367 DCHECK(mfc_input_buffer_map_.empty()); |
| 1368 mfc_input_buffer_map_.resize(reqbufs.count); |
| 1369 for (size_t i = 0; i < mfc_input_buffer_map_.size(); ++i) { |
| 1370 MfcInputRecord& input_record = mfc_input_buffer_map_[i]; |
| 1371 for (int j = 0; j < 2; ++j) { |
| 1372 // Export the DMABUF fd so GSC can write to it. |
| 1373 struct v4l2_exportbuffer expbuf; |
| 1374 memset(&expbuf, 0, sizeof(expbuf)); |
| 1375 expbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 1376 expbuf.index = i; |
| 1377 expbuf.plane = j; |
| 1378 expbuf.flags = O_CLOEXEC; |
| 1379 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_EXPBUF, &expbuf); |
| 1380 input_record.fd[j] = expbuf.fd; |
| 1381 } |
| 1382 mfc_free_input_buffers_.push_back(i); |
| 1383 } |
| 1384 |
| 1385 return true; |
| 1386 } |
| 1387 |
| 1388 bool ExynosVideoEncodeAccelerator::CreateMfcOutputBuffers() { |
| 1389 DVLOG(3) << "CreateMfcOutputBuffers()"; |
| 1390 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
| 1391 DCHECK(!mfc_output_streamon_); |
| 1392 |
| 1393 struct v4l2_requestbuffers reqbufs; |
| 1394 memset(&reqbufs, 0, sizeof(reqbufs)); |
| 1395 reqbufs.count = kMfcOutputBufferCount; |
| 1396 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 1397 reqbufs.memory = V4L2_MEMORY_USERPTR; |
| 1398 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_REQBUFS, &reqbufs); |
| 1399 |
| 1400 DCHECK(mfc_output_buffer_map_.empty()); |
| 1401 mfc_output_buffer_map_.resize(reqbufs.count); |
| 1402 for (size_t i = 0; i < mfc_output_buffer_map_.size(); ++i) |
| 1403 mfc_free_output_buffers_.push_back(i); |
| 1404 |
| 1405 return true; |
| 1406 } |
| 1407 |
| 1408 void ExynosVideoEncodeAccelerator::DestroyGscInputBuffers() { |
| 1409 DVLOG(3) << "DestroyGscInputBuffers()"; |
| 1410 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
| 1411 DCHECK(!gsc_input_streamon_); |
| 1412 |
| 1413 struct v4l2_requestbuffers reqbufs; |
| 1414 memset(&reqbufs, 0, sizeof(reqbufs)); |
| 1415 reqbufs.count = 0; |
| 1416 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 1417 reqbufs.memory = V4L2_MEMORY_USERPTR; |
| 1418 if (HANDLE_EINTR(ioctl(gsc_fd_, VIDIOC_REQBUFS, &reqbufs)) != 0) |
| 1419 DPLOG(ERROR) << "DestroyGscInputBuffers(): ioctl() failed: VIDIOC_REQBUFS"; |
| 1420 |
| 1421 gsc_input_buffer_map_.clear(); |
| 1422 gsc_free_input_buffers_.clear(); |
| 1423 } |
| 1424 |
| 1425 void ExynosVideoEncodeAccelerator::DestroyGscOutputBuffers() { |
| 1426 DVLOG(3) << "DestroyGscOutputBuffers()"; |
| 1427 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
| 1428 DCHECK(!gsc_output_streamon_); |
| 1429 |
| 1430 struct v4l2_requestbuffers reqbufs; |
| 1431 memset(&reqbufs, 0, sizeof(reqbufs)); |
| 1432 reqbufs.count = 0; |
| 1433 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 1434 reqbufs.memory = V4L2_MEMORY_DMABUF; |
| 1435 if (HANDLE_EINTR(ioctl(gsc_fd_, VIDIOC_REQBUFS, &reqbufs)) != 0) |
| 1436 DPLOG(ERROR) << "DestroyGscOutputBuffers(): ioctl() failed: VIDIOC_REQBUFS"; |
| 1437 |
| 1438 gsc_output_buffer_map_.clear(); |
| 1439 gsc_free_output_buffers_.clear(); |
| 1440 } |
| 1441 |
| 1442 void ExynosVideoEncodeAccelerator::DestroyMfcInputBuffers() { |
| 1443 DVLOG(3) << "DestroyMfcInputBuffers()"; |
| 1444 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
| 1445 DCHECK(!mfc_input_streamon_); |
| 1446 |
| 1447 struct v4l2_requestbuffers reqbufs; |
| 1448 memset(&reqbufs, 0, sizeof(reqbufs)); |
| 1449 reqbufs.count = 0; |
| 1450 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 1451 reqbufs.memory = V4L2_MEMORY_MMAP; |
| 1452 if (HANDLE_EINTR(ioctl(mfc_fd_, VIDIOC_REQBUFS, &reqbufs)) != 0) |
| 1453 DPLOG(ERROR) << "DestroyMfcInputBuffers(): ioctl() failed: VIDIOC_REQBUFS"; |
| 1454 |
| 1455 mfc_input_buffer_map_.clear(); |
| 1456 mfc_free_input_buffers_.clear(); |
| 1457 } |
| 1458 |
| 1459 void ExynosVideoEncodeAccelerator::DestroyMfcOutputBuffers() { |
| 1460 DVLOG(3) << "DestroyMfcOutputBuffers()"; |
| 1461 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
| 1462 DCHECK(!mfc_output_streamon_); |
| 1463 |
| 1464 struct v4l2_requestbuffers reqbufs; |
| 1465 memset(&reqbufs, 0, sizeof(reqbufs)); |
| 1466 reqbufs.count = 0; |
| 1467 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 1468 reqbufs.memory = V4L2_MEMORY_USERPTR; |
| 1469 if (HANDLE_EINTR(ioctl(mfc_fd_, VIDIOC_REQBUFS, &reqbufs)) != 0) |
| 1470 DPLOG(ERROR) << "DestroyMfcOutputBuffers(): ioctl() failed: VIDIOC_REQBUFS"; |
| 1471 |
| 1472 mfc_output_buffer_map_.clear(); |
| 1473 mfc_free_output_buffers_.clear(); |
| 1474 } |
| 1475 |
| 1476 } // namespace content |