OLD | NEW |
(Empty) | |
| 1 // Copyright (c) 2013 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. |
| 4 |
| 5 #include "content/common/gpu/media/exynos_video_encode_accelerator.h" |
| 6 |
| 7 #include <dlfcn.h> |
| 8 #include <fcntl.h> |
| 9 #include <libdrm/drm_fourcc.h> |
| 10 #include <linux/videodev2.h> |
| 11 #include <poll.h> |
| 12 #include <sys/eventfd.h> |
| 13 #include <sys/ioctl.h> |
| 14 #include <sys/mman.h> |
| 15 #include "base/callback.h" |
| 16 #include "base/debug/trace_event.h" |
| 17 #include "base/file_util.h" |
| 18 #include "base/message_loop/message_loop_proxy.h" |
| 19 #include "base/posix/eintr_wrapper.h" |
| 20 #include "media/base/bitstream_buffer.h" |
| 21 #include "ui/gl/scoped_binders.h" |
| 22 |
| 23 #define NOTIFY_ERROR(x) \ |
| 24 do { \ |
| 25 SetEncoderState(kError); \ |
| 26 DLOG(ERROR) << "calling NotifyError(): " << x; \ |
| 27 NotifyError(x); \ |
| 28 } while (0) |
| 29 |
| 30 #define IOCTL_OR_ERROR_RETURN(fd, type, arg) \ |
| 31 do { \ |
| 32 if (HANDLE_EINTR(ioctl(fd, type, arg)) != 0) { \
| 33 DPLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \ |
| 34 NOTIFY_ERROR(kPlatformFailureError); \ |
| 35 return; \ |
| 36 } \ |
| 37 } while (0) |
| 38 |
| 39 #define IOCTL_OR_ERROR_RETURN_FALSE(fd, type, arg) \ |
| 40 do { \ |
| 41 if (HANDLE_EINTR(ioctl(fd, type, arg)) != 0) { \
| 42 DPLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \ |
| 43 NOTIFY_ERROR(kPlatformFailureError); \ |
| 44 return false; \ |
| 45 } \ |
| 46 } while (0) |
| 47 |
| 48 namespace content { |
| 49 |
| 50 namespace { |
| 51 |
| 52 const char kExynosGscDevice[] = "/dev/gsc1"; |
| 53 const char kExynosMfcDevice[] = "/dev/mfc-enc"; |
| 54 |
| 55 } // anonymous namespace |
| 56 |
| 57 |
| 58 |
| 59 struct ExynosVideoEncodeAccelerator::BitstreamBufferRef { |
| 60 BitstreamBufferRef(int32 id, |
| 61 scoped_ptr<base::SharedMemory> shm, |
| 62 size_t size) |
| 63 : id(id), |
| 64 shm(shm.Pass()), |
| 65 size(size) {} |
| 66 const int32 id; |
| 67 const scoped_ptr<base::SharedMemory> shm; |
| 68 const size_t size; |
| 69 }; |
| 70 |
| 71 ExynosVideoEncodeAccelerator::GscInputRecord::GscInputRecord() |
| 72 : at_device(false), |
| 73 frame_id(-1), |
| 74 egl_sync(EGL_NO_SYNC_KHR), |
| 75 egl_image(EGL_NO_IMAGE_KHR), |
| 76 texture_id(0) {} |
| 77 |
| 78 ExynosVideoEncodeAccelerator::GscOutputRecord::GscOutputRecord() |
| 79 : at_device(false), |
| 80 mfc_input(-1) { |
| 81 address[0] = address[1] = address[2] = NULL; |
| 82 length[0] = length[1] = length[2] = 0; |
| 83 bytes_used[0] = bytes_used[1] = bytes_used[2] = 0; |
| 84 } |
| 85 |
| 86 ExynosVideoEncodeAccelerator::MfcInputRecord::MfcInputRecord() |
| 87 : at_device(false) { |
| 88 fd[0] = fd[1] = -1; |
| 89 } |
| 90 |
| 91 ExynosVideoEncodeAccelerator::MfcOutputRecord::MfcOutputRecord() |
| 92 : at_device(false), |
| 93 address(NULL), |
| 94 length(0), |
| 95 bytes_used(0) {} |
| 96 |
| 97 ExynosVideoEncodeAccelerator::ExynosVideoEncodeAccelerator( |
| 98 EGLDisplay egl_display, |
| 99 media::VideoEncodeAccelerator::Client* client, |
| 100 const base::Callback<bool(void)>& make_context_current, |
| 101 bool encode_from_backbuffer) |
| 102 : child_message_loop_proxy_(base::MessageLoopProxy::current()), |
| 103 weak_this_ptr_factory_(this), |
| 104 weak_this_(weak_this_ptr_factory_.GetWeakPtr()), |
| 105 client_ptr_factory_(client), |
| 106 client_(client_ptr_factory_.GetWeakPtr()), |
| 107 encoder_thread_("ExynosEncoderThread"), |
| 108 encoder_state_(kUninitialized), |
| 109 output_buffer_byte_size_(0), |
| 110 stream_header_size_(0), |
| 111 do_output_encoding_(false), |
| 112 do_encode_from_backbuffer_(encode_from_backbuffer), |
| 113 gsc_fd_(-1), |
| 114 gsc_input_streamon_(false), |
| 115 gsc_input_buffer_queued_count_(0), |
| 116 gsc_output_streamon_(false), |
| 117 gsc_output_buffer_queued_count_(0), |
| 118 mfc_fd_(-1), |
| 119 mfc_input_streamon_(false), |
| 120 mfc_input_buffer_queued_count_(0), |
| 121 mfc_output_streamon_(false), |
| 122 mfc_output_buffer_queued_count_(0), |
| 123 device_poll_thread_("ExynosDevicePollThread"), |
| 124 device_poll_interrupt_fd_(-1), |
| 125 make_context_current_(make_context_current), |
| 126 egl_display_(egl_display), |
| 127 video_profile_(media::VIDEO_CODEC_PROFILE_UNKNOWN) { |
| 128 DCHECK(client_); |
| 129 } |
| 130 |
| 131 ExynosVideoEncodeAccelerator::~ExynosVideoEncodeAccelerator() { |
| 132 DCHECK(!encoder_thread_.IsRunning()); |
| 133 DCHECK(!device_poll_thread_.IsRunning()); |
| 134 |
| 135 if (device_poll_interrupt_fd_ != -1) { |
| 136 HANDLE_EINTR(close(device_poll_interrupt_fd_)); |
| 137 device_poll_interrupt_fd_ = -1; |
| 138 } |
| 139 if (mfc_fd_ != -1) { |
| 140 DestroyMfcInputBuffers(); |
| 141 DestroyMfcOutputBuffers(); |
| 142 HANDLE_EINTR(close(mfc_fd_)); |
| 143 mfc_fd_ = -1; |
| 144 } |
| 145 if (gsc_fd_ != -1) { |
| 146 DestroyGscInputBuffers(); |
| 147 DestroyGscOutputBuffers(); |
| 148 HANDLE_EINTR(close(gsc_fd_)); |
| 149 gsc_fd_ = -1; |
| 150 } |
| 151 } |
| 152 |
| 153 void ExynosVideoEncodeAccelerator::Initialize( |
| 154 media::VideoCodecProfile profile, |
| 155 const gfx::Size& input_resolution, |
| 156 const gfx::Size& output_resolution, |
| 157 int32 initial_bitrate) { |
| 158 DVLOG(3) << "Initialize()"; |
| 159 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
| 160 DCHECK_EQ(encoder_state_, kUninitialized); |
| 161 |
| 162 video_profile_ = profile; |
| 163 input_visible_size_ = input_resolution; |
| 164 output_visible_size_ = output_resolution; |
| 165 |
| 166 switch (video_profile_) { |
| 167 case media::RAWPROFILE_I420: { |
| 168 do_output_encoding_ = false; |
| 169 break; |
| 170 } |
| 171 case media::H264PROFILE_MAIN: { |
| 172 do_output_encoding_ = true; |
| 173 break; |
| 174 } |
| 175 default: { |
| 176 DLOG(ERROR) << "Initialize(): invalid profile=" << video_profile_; |
| 177 NOTIFY_ERROR(kInvalidArgumentError); |
| 178 return; |
| 179 } |
| 180 } |
| 181 |
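| // Round allocated sizes up to a multiple of 16 (hardware alignment) and the
| // converted visible size up to a multiple of 2 (YUV chroma subsampling).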
| 182 input_allocated_size_.SetSize( |
| 183 (input_visible_size_.width() + 0xF) & ~0xF, |
| 184 (input_visible_size_.height() + 0xF) & ~0xF); |
| 185 converted_visible_size_.SetSize( |
| 186 (output_visible_size_.width() + 0x1) & ~0x1, |
| 187 (output_visible_size_.height() + 0x1) & ~0x1); |
| 188 converted_allocated_size_.SetSize( |
| 189 (converted_visible_size_.width() + 0xF) & ~0xF, |
| 190 (converted_visible_size_.height() + 0xF) & ~0xF); |
| 191 |
| 192 // Output size may be modified by the constraints of the format (e.g. |
| 193 // multiple-of-two for YUV formats) or the hardware. |
| 194 output_visible_size_ = converted_visible_size_; |
| 195 |
| 196 if (!make_context_current_.Run()) { |
| 197 DLOG(ERROR) << "Initialize(): could not make context current"; |
| 198 NOTIFY_ERROR(kPlatformFailureError); |
| 199 return; |
| 200 } |
| 201 |
| 202 if (!gfx::g_driver_egl.ext.b_EGL_KHR_fence_sync) { |
| 203 DLOG(ERROR) << "Initialize(): context does not have " |
| 204 << "EGL_KHR_fence_sync"; |
| 205 NOTIFY_ERROR(kPlatformFailureError); |
| 206 return; |
| 207 } |
| 208 |
| 209 // Open the video devices. |
| 210 DVLOG(2) << "Initialize(): opening GSC device: " << kExynosGscDevice; |
| 211 gsc_fd_ = HANDLE_EINTR(open(kExynosGscDevice, |
| 212 O_RDWR | O_NONBLOCK | O_CLOEXEC)); |
| 213 if (gsc_fd_ == -1) { |
| 214 DPLOG(ERROR) << "Initialize(): could not open GSC device: " |
| 215 << kExynosGscDevice; |
| 216 NOTIFY_ERROR(kPlatformFailureError); |
| 217 return; |
| 218 } |
| 219 |
| 220 // Capabilities check. |
| 221 struct v4l2_capability caps; |
| 222 const __u32 kCapsRequired = |
| 223 V4L2_CAP_VIDEO_CAPTURE_MPLANE | |
| 224 V4L2_CAP_VIDEO_OUTPUT_MPLANE | |
| 225 V4L2_CAP_STREAMING; |
| 226 IOCTL_OR_ERROR_RETURN(gsc_fd_, VIDIOC_QUERYCAP, &caps); |
| 227 if ((caps.capabilities & kCapsRequired) != kCapsRequired) { |
| 228 DLOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP" |
| 229 ", caps check failed: 0x" << std::hex << caps.capabilities; |
| 230 NOTIFY_ERROR(kPlatformFailureError); |
| 231 return; |
| 232 } |
| 233 |
| 234 if (do_output_encoding_) { |
| 235 // Open the video encoder device. |
| 236 mfc_fd_ = HANDLE_EINTR(open(kExynosMfcDevice, |
| 237 O_RDWR | O_NONBLOCK | O_CLOEXEC)); |
| 238 if (mfc_fd_ == -1) { |
| 239 DPLOG(ERROR) << "Initialize(): could not open MFC device: " |
| 240 << kExynosMfcDevice; |
| 241 NOTIFY_ERROR(kPlatformFailureError); |
| 242 return; |
| 243 } |
| 244 |
| 245 IOCTL_OR_ERROR_RETURN(mfc_fd_, VIDIOC_QUERYCAP, &caps); |
| 246 if ((caps.capabilities & kCapsRequired) != kCapsRequired) { |
| 247 DLOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP" |
| 248 ", caps check failed: 0x" << std::hex << caps.capabilities; |
| 249 NOTIFY_ERROR(kPlatformFailureError); |
| 250 return; |
| 251 } |
| 252 } |
| 253 |
| 254 // Create the interrupt fd. |
| 255 DCHECK_EQ(device_poll_interrupt_fd_, -1); |
| 256 device_poll_interrupt_fd_ = eventfd(0, EFD_NONBLOCK | EFD_CLOEXEC); |
| 257 if (device_poll_interrupt_fd_ == -1) { |
| 258 DPLOG(ERROR) << "Initialize(): eventfd() failed"; |
| 259 NOTIFY_ERROR(kPlatformFailureError); |
| 260 return; |
| 261 } |
| 262 |
| 263 DVLOG(2)
| 264 << "Initialize(): input_visible_size_=" |
| 265 << input_visible_size_.width() << "x" |
| 266 << input_visible_size_.height() << ", input_allocated_size_=" |
| 267 << input_allocated_size_.width() << "x" |
| 268 << input_allocated_size_.height() << ", converted_visible_size_=" |
| 269 << converted_visible_size_.width() << "x" |
| 270 << converted_visible_size_.height() << ", converted_allocated_size_=" |
| 271 << converted_allocated_size_.width() << "x" |
| 272 << converted_allocated_size_.height() << ", output_visible_size_=" |
| 273 << output_visible_size_.width() << "x" |
| 274 << output_visible_size_.height(); |
| 275 |
| 276 if (!CreateGscInputBuffers() || !CreateGscOutputBuffers()) |
| 277 return; |
| 278 |
| 279 if (do_output_encoding_) { |
| 280 // MFC setup for encoding is rather particular in ordering. |
| 281 // 1. Set the format (VIDIOC_S_FMT) first on both OUTPUT and CAPTURE queues.
| 282 // 2. VIDIOC_REQBUFS, VIDIOC_QBUF, and VIDIOC_STREAMON on CAPTURE queue. |
| 283 // 3. VIDIOC_REQBUFS (and later VIDIOC_QBUF and VIDIOC_STREAMON) on OUTPUT |
| 284 // queue. |
| 285 |
| 286 if (!SetMfcFormats()) |
| 287 return; |
| 288 |
| 289 // VIDIOC_REQBUFS on CAPTURE queue. |
| 290 if (!CreateMfcOutputBuffers()) |
| 291 return; |
| 292 |
| 293 // VIDIOC_QBUF and VIDIOC_STREAMON on CAPTURE queue. |
| 294 EnqueueMfc(); |
| 295 |
| 296 // VIDIOC_REQBUFS on OUTPUT queue. |
| 297 if (!CreateMfcInputBuffers()) |
| 298 return; |
| 299 |
| 300 SetBitrate(initial_bitrate); |
| 301 } |
| 302 |
| 303 if (!encoder_thread_.Start()) { |
| 304 DLOG(ERROR) << "Initialize(): encoder thread failed to start"; |
| 305 NOTIFY_ERROR(kPlatformFailureError); |
| 306 return; |
| 307 } |
| 308 |
| 309 SetEncoderState(kInitialized); |
| 310 |
| 311 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind( |
| 312 &Client::NotifyInitializeDone, |
| 313 client_)); |
| 314 |
| 315 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind( |
| 316 &Client::RequireBitstreamBuffers, |
| 317 client_, |
| 318 output_visible_size_, |
| 319 output_buffer_byte_size_)); |
| 320 } |
| 321 |
| 322 void ExynosVideoEncodeAccelerator::ReturnFreeGscInputBuffer(int index) { |
| 323 DVLOG(3) << "ReturnFreeGscInputBuffer(): index=" << index; |
| 324 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
| 325 gsc_free_input_buffers_.push_back(index); |
| 326 } |
| 327 |
| 328 void ExynosVideoEncodeAccelerator::Encode( |
| 329 const scoped_refptr<media::VideoFrame>& frame, |
| 330 int32 frame_id, |
| 331 bool force_keyframe) { |
| 332 DVLOG(3) << "Encode(): frame=" << frame.get() << ", frame_id=" << frame_id; |
| 333 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
| 334 |
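| // No free GSC input buffers are available: drop this frame, but still tell
| // the client we are done with it.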
| 335 if (gsc_free_input_buffers_.empty()) { |
| 336 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind( |
| 337 &media::VideoEncodeAccelerator::Client::NotifyEndOfVideoFrame, |
| 338 client_, |
| 339 frame_id)); |
| 340 return; |
| 341 } |
| 342 |
| 343 if (!make_context_current_.Run()) { |
| 344 DLOG(ERROR) << "Encode(): could not make context current"; |
| 345 NOTIFY_ERROR(kPlatformFailureError); |
| 346 return; |
| 347 } |
| 348 |
| 349 const int gsc_buffer = gsc_free_input_buffers_.back(); |
| 350 gsc_free_input_buffers_.pop_back(); |
| 351 GscInputRecord& input_record = gsc_input_buffer_map_[gsc_buffer]; |
| 352 DCHECK(!input_record.at_device); |
| 353 DCHECK_EQ(input_record.frame_id, -1); |
| 354 DCHECK_EQ(input_record.egl_sync, EGL_NO_SYNC_KHR); |
| 355 input_record.frame_id = frame_id; |
| 356 |
| 357 if (frame->format() == media::VideoFrame::NATIVE_EGLSURFACE) { |
| 358 DCHECK_NE(input_record.texture_id, 0U); |
| 359 gfx::ScopedTextureBinder binder(GL_TEXTURE_2D, input_record.texture_id); |
| 360 glCopyTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, 0, 0, |
| 361 input_visible_size_.width(), |
| 362 input_visible_size_.height()); |
| 363 } |
| 364 |
| 365 input_record.egl_sync = |
| 366 eglCreateSyncKHR(egl_display_, EGL_SYNC_FENCE_KHR, NULL); |
| 367 if (input_record.egl_sync == EGL_NO_SYNC_KHR) { |
| 368 DLOG(ERROR) << "Encode(): eglCreateSyncKHR() failed"; |
| 369 NOTIFY_ERROR(kPlatformFailureError); |
| 370 return; |
| 371 } |
| 372 |
| 373 if (frame->format() == media::VideoFrame::NATIVE_EGLSURFACE) { |
| 374 encoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind( |
| 375 &ExynosVideoEncodeAccelerator::EncodeTask, |
| 376 base::Unretained(this), |
| 377 gsc_buffer)); |
| 378 } else { |
| 379 encoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind( |
| 380 &ExynosVideoEncodeAccelerator::CopyFrameAndEncodeTask, |
| 381 base::Unretained(this), |
| 382 frame, |
| 383 gsc_buffer)); |
| 384 } |
| 385 } |
| 386 |
| 387 void ExynosVideoEncodeAccelerator::UseBitstreamBuffer( |
| 388 const media::BitstreamBuffer& buffer) { |
| 389 DVLOG(3) << "UseBitstreamBuffer(): id=" << buffer.id(); |
| 390 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
| 391 |
| 392 scoped_ptr<BitstreamBufferRef> buffer_ref(new BitstreamBufferRef( |
| 393 buffer.id(), |
| 394 scoped_ptr<base::SharedMemory>( |
| 395 new base::SharedMemory(buffer.handle(), false)).Pass(), |
| 396 buffer.size())); |
| 397 if (!buffer_ref->shm->Map(buffer_ref->size)) { |
| 398 DLOG(ERROR) << "UseBitstreamBuffer(): could not map bitstream_buffer"; |
| 399 NOTIFY_ERROR(kPlatformFailureError); |
| 400 return; |
| 401 } |
| 402 |
| 403 encoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind( |
| 404 &ExynosVideoEncodeAccelerator::UseBitstreamBufferTask, |
| 405 base::Unretained(this), |
| 406 base::Passed(&buffer_ref))); |
| 407 } |
| 408 |
| 409 void ExynosVideoEncodeAccelerator::RequestEncodingParameterChange( |
| 410 int32 bitrate) { |
| 411 encoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind( |
| 412 &ExynosVideoEncodeAccelerator::SetBitrate, |
| 413 base::Unretained(this), |
| 414 bitrate)); |
| 415 } |
| 416 |
| 417 void ExynosVideoEncodeAccelerator::Destroy() { |
| 418 DVLOG(3) << "Destroy()"; |
| 419 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
| 420 |
| 421 // We're destroying; cancel all callbacks. |
| 422 client_ptr_factory_.InvalidateWeakPtrs(); |
| 423 |
| 424 // If the encoder thread is running, destroy using posted task. |
| 425 if (encoder_thread_.IsRunning()) { |
| 426 encoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind( |
| 427 &ExynosVideoEncodeAccelerator::DestroyTask, base::Unretained(this))); |
| 428 // DestroyTask() will cause the encoder_thread_ to flush all tasks. |
| 429 encoder_thread_.Stop(); |
| 430 } else { |
| 431 // Otherwise, call the destroy task directly. |
| 432 DestroyTask(); |
| 433 } |
| 434 |
| 435 // Set to kError state just in case. |
| 436 SetEncoderState(kError); |
| 437 |
| 438 delete this; |
| 439 } |
| 440 |
| 441 void ExynosVideoEncodeAccelerator::EncodeTask(int gsc_input_index) { |
| 442 DVLOG(3) << "EncodeTask()"; |
| 443 DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current()); |
| 444 DCHECK_NE(encoder_state_, kUninitialized); |
| 445 const GscInputRecord& input_record = gsc_input_buffer_map_[gsc_input_index]; |
| 446 TRACE_EVENT1("Video encoder", "EVEA::EncodeTask", |
| 447 "frame_id", input_record.frame_id); |
| 448 |
| 449 if (encoder_state_ == kError) { |
| 450 DVLOG(2) << "EncodeTask(): early out: kError state"; |
| 451 return; |
| 452 } |
| 453 |
| 454 if (encoder_state_ == kInitialized) { |
| 455 if (!StartDevicePoll()) |
| 456 return; |
| 457 encoder_state_ = kEncoding; |
| 458 } |
| 459 |
| 460 encoder_input_queue_.push_back(gsc_input_index); |
| 461 EnqueueGsc(); |
| 462 |
| 463 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind( |
| 464 &media::VideoEncodeAccelerator::Client::NotifyEndOfVideoFrame, |
| 465 client_, |
| 466 input_record.frame_id)); |
| 467 } |
| 468 |
| 469 void ExynosVideoEncodeAccelerator::CopyFrameAndEncodeTask( |
| 470 const scoped_refptr<media::VideoFrame>& frame, |
| 471 int gsc_input_index) { |
| 472 DVLOG(3) << "CopyFrameAndEncodeTask()"; |
| 473 DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current()); |
| 474 |
| 475 if (encoder_state_ == kError) { |
| 476 DVLOG(2) << "CopyFrameAndEncodeTask(): early out: kError state"; |
| 477 return; |
| 478 } |
| 479 |
| 480 if (frame->coded_size() != input_visible_size_) { |
| 481 DLOG(ERROR) << "CopyFrameAndEncodeTask(): input size change not supported: "
| 482 << input_visible_size_.width() << "x" |
| 483 << input_visible_size_.height() << " -> " |
| 484 << frame->coded_size().width() << "x" |
| 485 << frame->coded_size().height(); |
| 486 NOTIFY_ERROR(kInvalidArgumentError); |
| 487 return; |
| 488 } |
| 489 |
| 490 NOTIMPLEMENTED(); |
| 491 EncodeTask(gsc_input_index); |
| 492 } |
| 493 |
| 494 void ExynosVideoEncodeAccelerator::UseBitstreamBufferTask( |
| 495 scoped_ptr<BitstreamBufferRef> buffer_ref) { |
| 496 DVLOG(3) << "UseBitstreamBufferTask(): id=" << buffer_ref->id; |
| 497 DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current()); |
| 498 TRACE_EVENT1("Video Encoder", "EVEA::UseBitstreamBufferTask", |
| 499 "buffer", buffer_ref->id); |
| 500 |
| 501 encoder_bitstream_buffers_.push_back(linked_ptr<BitstreamBufferRef>( |
| 502 buffer_ref.release())); |
| 503 |
| 504 ReturnCompleteBuffers(); |
| 505 EnqueueGsc(); |
| 506 EnqueueMfc(); |
| 507 } |
| 508 |
| 509 void ExynosVideoEncodeAccelerator::DestroyTask() { |
| 510 DVLOG(3) << "DestroyTask()"; |
| 511 TRACE_EVENT0("Video Encoder", "EVEA::DestroyTask"); |
| 512 |
| 513 // DestroyTask() should run regardless of encoder_state_. |
| 514 |
| 515 // Stop streaming and the device_poll_thread_. |
| 516 StopDevicePoll(); |
| 517 |
| 518 // Set our state to kError. Just in case. |
| 519 encoder_state_ = kError; |
| 520 } |
| 521 |
| 522 void ExynosVideoEncodeAccelerator::ServiceDeviceTask() { |
| 523 DVLOG(3) << "ServiceDeviceTask()"; |
| 524 DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current()); |
| 525 DCHECK_NE(encoder_state_, kUninitialized); |
| 526 DCHECK_NE(encoder_state_, kInitialized); |
| 527 TRACE_EVENT0("Video Encoder", "EVEA::ServiceDeviceTask"); |
| 528 |
| 529 if (encoder_state_ == kError) { |
| 530 DVLOG(2) << "ServiceDeviceTask(): early out: kError state"; |
| 531 return; |
| 532 } |
| 533 |
| 534 DequeueGsc(); |
| 535 DequeueMfc(); |
| 536 EnqueueGsc(); |
| 537 EnqueueMfc(); |
| 538 |
| 539 // Clear the interrupt fd. |
| 540 if (!ClearDevicePollInterrupt()) |
| 541 return; |
| 542 |
| 543 unsigned int poll_fds = 0; |
| 544 // Add GSC fd, if we should poll on it. |
| 545 // GSC has to wait until both input and output buffers are queued. |
| 546 if (gsc_input_buffer_queued_count_ > 0 && gsc_output_buffer_queued_count_ > 0) |
| 547 poll_fds |= kPollGsc; |
| 548 // Add MFC fd, if we should poll on it. |
| 549 // MFC can be polled as soon as either input or output buffers are queued. |
| 550 if (mfc_input_buffer_queued_count_ + mfc_output_buffer_queued_count_ > 0) |
| 551 poll_fds |= kPollMfc; |
| 552 |
| 553 // ServiceDeviceTask() should only ever be scheduled from DevicePollTask(), |
| 554 // so either: |
| 555 // * device_poll_thread_ is running normally |
| 556 // * device_poll_thread_ scheduled us, but then a DestroyTask() shut it down,
| 557 // in which case we're in kError state and should have early-outed
| 558 // already.
| 559 DCHECK(device_poll_thread_.message_loop()); |
| 560 // Queue the DevicePollTask() now. |
| 561 device_poll_thread_.message_loop()->PostTask(FROM_HERE, base::Bind( |
| 562 &ExynosVideoEncodeAccelerator::DevicePollTask, |
| 563 base::Unretained(this), |
| 564 poll_fds)); |
| 565 |
| 566 DVLOG(1) << "ServiceDeviceTask(): buffer counts: ENC[" |
| 567 << encoder_input_queue_.size() << "] => GSC[" |
| 568 << gsc_free_input_buffers_.size() << "+" |
| 569 << gsc_input_buffer_queued_count_ << "/" |
| 570 << gsc_input_buffer_map_.size() << "->" |
| 571 << gsc_free_output_buffers_.size() << "+" |
| 572 << gsc_output_buffer_queued_count_ << "/" |
| 573 << gsc_output_buffer_map_.size() << "] => " |
| 574 << gsc_output_mfc_input_queue_.size() << " => MFC[" |
| 575 << mfc_free_input_buffers_.size() << "+" |
| 576 << mfc_input_buffer_queued_count_ << "/" |
| 577 << mfc_input_buffer_map_.size() << "->" |
| 578 << mfc_free_output_buffers_.size() << "+" |
| 579 << mfc_output_buffer_queued_count_ << "/" |
| 580 << mfc_output_buffer_map_.size() << "] => OUT[" |
| 581 << encoder_output_queue_.size() << "]"; |
| 582 } |
| 583 |
| 584 void ExynosVideoEncodeAccelerator::EnqueueGsc() { |
| 585 DVLOG(3) << "EnqueueGsc()"; |
| 586 DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current()); |
| 587 TRACE_EVENT0("Video Encoder", "EVEA::EnqueueGsc"); |
| 588 |
| 589 const int old_gsc_inputs_queued = gsc_input_buffer_queued_count_; |
| 590 while (!encoder_input_queue_.empty()) { |
| 591 if (!EnqueueGscInputRecord()) |
| 592 return; |
| 593 } |
| 594 if (old_gsc_inputs_queued == 0 && gsc_input_buffer_queued_count_ != 0) { |
| 595 // We started up a previously empty queue. |
| 596 // Queue state changed; signal interrupt. |
| 597 if (!SetDevicePollInterrupt()) |
| 598 return; |
| 599 // Start VIDIOC_STREAMON if we haven't yet. |
| 600 if (!gsc_input_streamon_) { |
| 601 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 602 IOCTL_OR_ERROR_RETURN(gsc_fd_, VIDIOC_STREAMON, &type); |
| 603 gsc_input_streamon_ = true; |
| 604 } |
| 605 } |
| 606 |
| 607 // Enqueue a GSC output buffer, but only if we need one.
| 608 if (gsc_input_buffer_queued_count_ != 0 && |
| 609 gsc_output_buffer_queued_count_ == 0 && |
| 610 !gsc_free_output_buffers_.empty() && |
| 611 (!do_output_encoding_ || !mfc_free_input_buffers_.empty())) { |
| 612 const int old_gsc_outputs_queued = gsc_output_buffer_queued_count_; |
| 613 if (!EnqueueGscOutputRecord()) |
| 614 return; |
| 615 if (old_gsc_outputs_queued == 0 && gsc_output_buffer_queued_count_ != 0) { |
| 616 // We just started up a previously empty queue. |
| 617 // Queue state changed; signal interrupt. |
| 618 if (!SetDevicePollInterrupt()) |
| 619 return; |
| 620 // Start VIDIOC_STREAMON if we haven't yet. |
| 621 if (!gsc_output_streamon_) { |
| 622 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 623 IOCTL_OR_ERROR_RETURN(gsc_fd_, VIDIOC_STREAMON, &type); |
| 624 gsc_output_streamon_ = true; |
| 625 } |
| 626 } |
| 627 } |
| 628 // Bug check: GSC is liable to race conditions if more than one buffer is |
| 629 // simultaneously queued. |
| 630 DCHECK_GE(1, gsc_output_buffer_queued_count_); |
| 631 } |
| 632 |
| 633 void ExynosVideoEncodeAccelerator::DequeueGsc() { |
| 634 DVLOG(3) << "DequeueGsc()"; |
| 635 DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current()); |
| 636 TRACE_EVENT0("Video Encoder", "EVEA::DequeueGsc"); |
| 637 |
| 638 // Dequeue completed GSC input (VIDEO_OUTPUT) buffers, and recycle to the free |
| 639 // list. |
| 640 struct v4l2_buffer dqbuf; |
| 641 struct v4l2_plane planes[3]; |
| 642 while (gsc_input_buffer_queued_count_ > 0) { |
| 643 DCHECK(gsc_input_streamon_); |
| 644 memset(&dqbuf, 0, sizeof(dqbuf)); |
| 645 dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 646 dqbuf.memory = V4L2_MEMORY_MMAP; |
| 647 if (ioctl(gsc_fd_, VIDIOC_DQBUF, &dqbuf) != 0) { |
| 648 if (errno == EAGAIN) { |
| 649 // EAGAIN if we're just out of buffers to dequeue. |
| 650 break; |
| 651 } |
| 652 DPLOG(ERROR) << "DequeueGsc(): ioctl() failed: VIDIOC_DQBUF"; |
| 653 NOTIFY_ERROR(kPlatformFailureError); |
| 654 return; |
| 655 } |
| 656 GscInputRecord& input_record = gsc_input_buffer_map_[dqbuf.index]; |
| 657 DCHECK(input_record.at_device); |
| 658 gsc_free_input_buffers_.push_back(dqbuf.index); |
| 659 const int32 frame_id = input_record.frame_id; |
| 660 input_record.at_device = false; |
| 661 input_record.frame_id = -1; |
| 662 gsc_input_buffer_queued_count_--; |
| 663 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind( |
| 664 &Client::NotifyEndOfVideoFrame, client_, frame_id)); |
| 665 } |
| 666 |
| 667 // Dequeue completed GSC output (VIDEO_CAPTURE) buffers, and queue to the |
| 668 // completed queue. |
| 669 while (gsc_output_buffer_queued_count_ > 0) { |
| 670 DCHECK(gsc_output_streamon_); |
| 671 memset(&dqbuf, 0, sizeof(dqbuf)); |
| 672 memset(&planes, 0, sizeof(planes)); |
| 673 dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 674 dqbuf.memory = V4L2_MEMORY_MMAP; |
| 675 dqbuf.m.planes = planes; |
| 676 dqbuf.length = 3; |
| 677 if (ioctl(gsc_fd_, VIDIOC_DQBUF, &dqbuf) != 0) { |
| 678 if (errno == EAGAIN) { |
| 679 // EAGAIN if we're just out of buffers to dequeue. |
| 680 break; |
| 681 } |
| 682 DPLOG(ERROR) << "DequeueGsc(): ioctl() failed: VIDIOC_DQBUF"; |
| 683 NOTIFY_ERROR(kPlatformFailureError); |
| 684 return; |
| 685 } |
| 686 GscOutputRecord& output_record = gsc_output_buffer_map_[dqbuf.index]; |
| 687 DCHECK(output_record.at_device); |
| 688 output_record.at_device = false; |
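| // When encoding, GSC wrote the converted frame straight into an MFC input
| // buffer (dma-buf); queue it for MFC and recycle the GSC output record.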
| 689 if (do_output_encoding_) { |
| 690 gsc_output_mfc_input_queue_.push_back(output_record.mfc_input); |
| 691 output_record.mfc_input = -1; |
| 692 gsc_free_output_buffers_.push_back(dqbuf.index); |
| 693 } else { |
| 694 // Don't recycle to its free list yet -- we can't do that until |
| 695 // ReturnCompleteBuffers() finishes copying the output out. |
| 696 output_record.bytes_used[0] = dqbuf.m.planes[0].bytesused; |
| 697 output_record.bytes_used[1] = dqbuf.m.planes[1].bytesused; |
| 698 output_record.bytes_used[2] = dqbuf.m.planes[2].bytesused; |
| 699 encoder_output_queue_.push_back(dqbuf.index); |
| 700 } |
| 701 gsc_output_buffer_queued_count_--; |
| 702 } |
| 703 |
| 704 ReturnCompleteBuffers(); |
| 705 } |
| 706 |
| 707 void ExynosVideoEncodeAccelerator::EnqueueMfc() { |
| 708 DVLOG(3) << "EnqueueMfc()"; |
| 709 TRACE_EVENT0("Video Encoder", "EVEA::EnqueueMfc"); |
| 710 |
| 711 if (!do_output_encoding_) |
| 712 return; |
| 713 |
| 714 // Drain the pipe of completed GSC output buffers. |
| 715 const int old_mfc_inputs_queued = mfc_input_buffer_queued_count_; |
| 716 while (!gsc_output_mfc_input_queue_.empty()) { |
| 717 if (!EnqueueMfcInputRecord()) |
| 718 return; |
| 719 } |
| 720 if (old_mfc_inputs_queued == 0 && mfc_input_buffer_queued_count_ != 0) { |
| 721 // We just started up a previously empty queue. |
| 722 // Queue state changed; signal interrupt. |
| 723 if (!SetDevicePollInterrupt()) |
| 724 return; |
| 725 // Start VIDIOC_STREAMON if we haven't yet. |
| 726 if (!mfc_input_streamon_) { |
| 727 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 728 IOCTL_OR_ERROR_RETURN(mfc_fd_, VIDIOC_STREAMON, &type); |
| 729 mfc_input_streamon_ = true; |
| 730 } |
| 731 } |
| 732 |
| 733 // Enqueue all the MFC outputs we can. |
| 734 const int old_mfc_outputs_queued = mfc_output_buffer_queued_count_; |
| 735 while (!mfc_free_output_buffers_.empty()) { |
| 736 if (!EnqueueMfcOutputRecord()) |
| 737 return; |
| 738 } |
| 739 if (old_mfc_outputs_queued == 0 && mfc_output_buffer_queued_count_ != 0) { |
| 740 // We just started up a previously empty queue. |
| 741 // Queue state changed; signal interrupt. |
| 742 if (!SetDevicePollInterrupt()) |
| 743 return; |
| 744 // Start VIDIOC_STREAMON if we haven't yet. |
| 745 if (!mfc_output_streamon_) { |
| 746 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 747 IOCTL_OR_ERROR_RETURN(mfc_fd_, VIDIOC_STREAMON, &type); |
| 748 mfc_output_streamon_ = true; |
| 749 } |
| 750 } |
| 751 } |
| 752 |
| 753 void ExynosVideoEncodeAccelerator::DequeueMfc() { |
| 754 DVLOG(3) << "DequeueMfc"; |
| 755 TRACE_EVENT0("Video Encoder", "EVEA::DequeueMfc"); |
| 756 |
| 757 // Dequeue completed MFC input (VIDEO_OUTPUT) buffers, and recycle to the free |
| 758 // list. |
| 759 struct v4l2_buffer dqbuf; |
| 760 struct v4l2_plane planes[2]; |
| 761 while (mfc_input_buffer_queued_count_ > 0) { |
| 762 DCHECK(mfc_input_streamon_); |
| 763 memset(&dqbuf, 0, sizeof(dqbuf)); |
| 764 dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 765 dqbuf.memory = V4L2_MEMORY_MMAP; |
| 766 dqbuf.m.planes = planes; |
| 767 dqbuf.length = 2; |
| 768 if (ioctl(mfc_fd_, VIDIOC_DQBUF, &dqbuf) != 0) { |
| 769 if (errno == EAGAIN) { |
| 770 // EAGAIN if we're just out of buffers to dequeue. |
| 771 break; |
| 772 } |
| 773 DPLOG(ERROR) << "DequeueMfc(): ioctl() failed: VIDIOC_DQBUF"; |
| 774 NOTIFY_ERROR(kPlatformFailureError); |
| 775 return; |
| 776 } |
| 777 MfcInputRecord& input_record = mfc_input_buffer_map_[dqbuf.index]; |
| 778 DCHECK(input_record.at_device); |
| 779 input_record.at_device = false; |
| 780 mfc_free_input_buffers_.push_back(dqbuf.index); |
| 781 mfc_input_buffer_queued_count_--; |
| 782 } |
| 783 |
| 784 // Dequeue completed MFC output (VIDEO_CAPTURE) buffers, and queue to the |
| 785 // completed queue. Don't recycle to its free list yet -- we can't do that |
| 786 // until ReturnCompleteBuffers() finishes copying the output out. |
| 787 while (mfc_output_buffer_queued_count_ > 0) { |
| 788 DCHECK(mfc_output_streamon_); |
| 789 memset(&dqbuf, 0, sizeof(dqbuf)); |
| 790 memset(planes, 0, sizeof(planes)); |
| 791 dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 792 dqbuf.memory = V4L2_MEMORY_MMAP; |
| 793 dqbuf.m.planes = planes; |
| 794 dqbuf.length = 1; |
| 795 if (ioctl(mfc_fd_, VIDIOC_DQBUF, &dqbuf) != 0) { |
| 796 if (errno == EAGAIN) { |
| 797 // EAGAIN if we're just out of buffers to dequeue. |
| 798 break; |
| 799 } |
| 800 DPLOG(ERROR) << "DequeueMfc(): ioctl() failed: VIDIOC_DQBUF"; |
| 801 NOTIFY_ERROR(kPlatformFailureError); |
| 802 return; |
| 803 } |
| 804 // Don't recycle to its free list yet -- we can't do that until |
| 805 // ReturnCompleteBuffers() finishes copying the output out. |
| 806 MfcOutputRecord& output_record = mfc_output_buffer_map_[dqbuf.index]; |
| 807 DCHECK(output_record.at_device); |
| 808 output_record.at_device = false; |
| 809 output_record.bytes_used = dqbuf.m.planes[0].bytesused; |
| 810 encoder_output_queue_.push_back(dqbuf.index); |
| 811 mfc_output_buffer_queued_count_--; |
| 812 } |
| 813 } |
| 814 |
| 815 bool ExynosVideoEncodeAccelerator::EnqueueGscInputRecord() { |
| 816 DVLOG(3) << "EnqueueGscInputRecord()"; |
| 817 DCHECK(!encoder_input_queue_.empty()); |
| 818 |
| 819 // Enqueue a GSC input (VIDEO_OUTPUT) buffer for an input video frame |
| 820 const int gsc_buffer = encoder_input_queue_.front(); |
| 821 GscInputRecord& input_record = gsc_input_buffer_map_[gsc_buffer]; |
| 822 DCHECK(!input_record.at_device); |
| 823 if (input_record.egl_sync != EGL_NO_SYNC_KHR) { |
| 824 TRACE_EVENT0( |
| 825 "Video Encoder", |
| 826 "EVEA::EnqueueGscInputRecord: eglClientWaitSyncKHR"); |
| 827 // If we have to wait for completion, wait. |
| 828 eglClientWaitSyncKHR(egl_display_, input_record.egl_sync, 0, |
| 829 EGL_FOREVER_KHR); |
| 830 eglDestroySyncKHR(egl_display_, input_record.egl_sync); |
| 831 input_record.egl_sync = EGL_NO_SYNC_KHR; |
| 832 } |
| 833 struct v4l2_buffer qbuf; |
| 834 struct v4l2_plane qbuf_planes[1]; |
| 835 memset(&qbuf, 0, sizeof(qbuf)); |
| 836 memset(qbuf_planes, 0, sizeof(qbuf_planes)); |
| 837 qbuf.index = gsc_buffer; |
| 838 qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 839 qbuf.timestamp.tv_sec = input_record.frame_id; |
| 840 qbuf.memory = V4L2_MEMORY_MMAP; |
| 841 qbuf.m.planes = qbuf_planes; |
| 842 qbuf.length = 1; |
| 843 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_QBUF, &qbuf); |
| 844 encoder_input_queue_.pop_front(); |
| 845 input_record.at_device = true; |
| 846 gsc_input_buffer_queued_count_++; |
| 847 DVLOG(3) << "EnqueueGscInputRecord(): enqueued buffer=" << gsc_buffer |
| 848 << ", frame_id=" << input_record.frame_id; |
| 849 return true; |
| 850 } |
| 851 |
| 852 bool ExynosVideoEncodeAccelerator::EnqueueGscOutputRecord() { |
| 853 DVLOG(3) << "EnqueueGscOutputRecord()"; |
| 854 DCHECK(!gsc_free_output_buffers_.empty()); |
| 855 |
| 856 // Enqueue a GSC output (VIDEO_CAPTURE) buffer. |
| 857 const int gsc_buffer = gsc_free_output_buffers_.back(); |
| 858 GscOutputRecord& output_record = gsc_output_buffer_map_[gsc_buffer]; |
| 859 DCHECK(!output_record.at_device); |
| 860 DCHECK_EQ(output_record.mfc_input, -1); |
| 861 struct v4l2_buffer qbuf; |
| 862 struct v4l2_plane qbuf_planes[3]; |
| 863 memset(&qbuf, 0, sizeof(qbuf)); |
| 864 memset(qbuf_planes, 0, sizeof(qbuf_planes)); |
| 865 qbuf.index = gsc_buffer; |
| 866 qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 867 qbuf.m.planes = qbuf_planes; |
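| // When encoding, point GSC's output at an MFC input buffer via dma-buf so
| // the converted frame lands there directly; otherwise use the MMAP buffer.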
| 868 if (do_output_encoding_) { |
| 869 DCHECK(!mfc_free_input_buffers_.empty()); |
| 870 qbuf.memory = V4L2_MEMORY_DMABUF; |
| 871 const int mfc_buffer = mfc_free_input_buffers_.back(); |
| 872 mfc_free_input_buffers_.pop_back(); |
| 873 MfcInputRecord& input_record = mfc_input_buffer_map_[mfc_buffer]; |
| 874 DCHECK(!input_record.at_device); |
| 875 output_record.mfc_input = mfc_buffer; |
| 876 qbuf.m.planes[0].m.fd = input_record.fd[0]; |
| 877 qbuf.m.planes[1].m.fd = input_record.fd[1]; |
| 878 qbuf.length = 2; |
| 879 } else { |
| 880 qbuf.memory = V4L2_MEMORY_MMAP; |
| 881 qbuf.length = 3; |
| 882 } |
| 883 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_QBUF, &qbuf); |
| 884 gsc_free_output_buffers_.pop_back(); |
| 885 output_record.at_device = true; |
| 886 gsc_output_buffer_queued_count_++; |
| 887 return true; |
| 888 } |
| 889 |
| 890 bool ExynosVideoEncodeAccelerator::EnqueueMfcInputRecord() { |
| 891 DVLOG(3) << "EnqueueMfcInputRecord()"; |
| 892 DCHECK(do_output_encoding_); |
| 893 DCHECK(!gsc_output_mfc_input_queue_.empty()); |
| 894 |
| 895 // Enqueue an MFC input (VIDEO_OUTPUT) buffer.
| 896 const int mfc_buffer = gsc_output_mfc_input_queue_.front(); |
| 897 MfcInputRecord& input_record = mfc_input_buffer_map_[mfc_buffer]; |
| 898 DCHECK(!input_record.at_device); |
| 899 struct v4l2_buffer qbuf; |
| 900 struct v4l2_plane qbuf_planes[2]; |
| 901 memset(&qbuf, 0, sizeof(qbuf)); |
| 902 memset(qbuf_planes, 0, sizeof(qbuf_planes)); |
| 903 qbuf.index = mfc_buffer; |
| 904 qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 905 qbuf.memory = V4L2_MEMORY_MMAP; |
| 906 qbuf.m.planes = qbuf_planes; |
| 907 qbuf.length = 2; |
| 908 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_QBUF, &qbuf); |
| 909 gsc_output_mfc_input_queue_.pop_front(); |
| 910 input_record.at_device = true; |
| 911 mfc_input_buffer_queued_count_++; |
| 912 return true; |
| 913 } |
| 914 |
| 915 bool ExynosVideoEncodeAccelerator::EnqueueMfcOutputRecord() { |
| 916 DVLOG(3) << "EnqueueMfcOutputRecord()"; |
| 917 DCHECK(do_output_encoding_); |
| 918 DCHECK(!mfc_free_output_buffers_.empty()); |
| 919 |
| 920 // Enqueue an MFC output (VIDEO_CAPTURE) buffer.
| 921 const int mfc_buffer = mfc_free_output_buffers_.back(); |
| 922 MfcOutputRecord& output_record = mfc_output_buffer_map_[mfc_buffer]; |
| 923 DCHECK(!output_record.at_device); |
| 924 DCHECK_EQ(output_record.bytes_used, 0U); |
| 925 struct v4l2_buffer qbuf; |
| 926 struct v4l2_plane qbuf_planes[1]; |
| 927 memset(&qbuf, 0, sizeof(qbuf)); |
| 928 memset(qbuf_planes, 0, sizeof(qbuf_planes)); |
| 929 qbuf.index = mfc_buffer; |
| 930 qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 931 qbuf.memory = V4L2_MEMORY_MMAP; |
| 932 qbuf.m.planes = qbuf_planes; |
| 933 qbuf.length = 1; |
| 934 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_QBUF, &qbuf); |
| 935 mfc_free_output_buffers_.pop_back(); |
| 936 output_record.at_device = true; |
| 937 mfc_output_buffer_queued_count_++; |
| 938 return true; |
| 939 } |
| 940 |
| 941 void ExynosVideoEncodeAccelerator::ReturnCompleteBuffers() { |
| 942 DVLOG(3) << "ReturnCompleteBuffers()"; |
| 943 DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current()); |
| 944 DCHECK_NE(encoder_state_, kUninitialized); |
| 945 |
| 946 while (!encoder_output_queue_.empty() && |
| 947 !encoder_bitstream_buffers_.empty()) { |
| 948 const int output_index = encoder_output_queue_.front(); |
| 949 encoder_output_queue_.pop_front(); |
| 950 scoped_ptr<BitstreamBufferRef> buffer_ref( |
| 951 encoder_bitstream_buffers_.back().release()); |
| 952 encoder_bitstream_buffers_.pop_back(); |
| 953 uint8* data = reinterpret_cast<uint8*>(buffer_ref->shm->memory()); |
| 954 size_t offset = 0; |
| 955 bool key_frame = false; |
| 956 if (do_output_encoding_) { |
| 957 MfcOutputRecord& output_record = mfc_output_buffer_map_[output_index]; |
| 958 CHECK_GE(buffer_ref->size, output_record.bytes_used); |
| 959 const uint8* source = |
| 960 reinterpret_cast<const uint8*>(output_record.address); |
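| // With SEPARATE header mode, the first buffer MFC returns carries only the
| // stream header (SPS/PPS); cache it so it can be prepended to keyframes.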
| 961 if (stream_header_size_ == 0) { |
| 962 stream_header_size_ = output_record.bytes_used; |
| 963 stream_header_.reset(new uint8[stream_header_size_]); |
| 964 memcpy(stream_header_.get(), source, stream_header_size_); |
| 965 } |
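| // source[4] is the NAL unit header following the 00 00 00 01 start code;
| // NAL type 5 is an IDR slice (keyframe), which gets the cached header.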
| 966 if (output_record.bytes_used >= 5) { |
| 967 if ((source[4] & 0x1F) == 0x5) { |
| 968 key_frame = true; |
| 969 memcpy(data, stream_header_.get(), stream_header_size_); |
| 970 data += stream_header_size_; |
| 971 offset += stream_header_size_; |
| 972 } |
| 973 } |
| 974 memcpy(data, source, output_record.bytes_used); |
| 975 offset += output_record.bytes_used; |
| 976 output_record.bytes_used = 0; |
| 977 mfc_free_output_buffers_.push_back(output_index); |
| 978 } else { |
| 979 GscOutputRecord& output_record = gsc_output_buffer_map_[output_index]; |
| 980 // GSC output is 16 pixel-aligned; we may have to trim down to our actual |
| 981 // output size. |
| 982 // Copy the Y plane. |
| 983 const uint8* y_plane = reinterpret_cast<uint8*>(output_record.address[0]); |
| 984 for (int i = 0; i < output_visible_size_.height(); ++i) { |
| 985 memcpy(data, y_plane, output_visible_size_.width()); |
| 986 data += output_visible_size_.width(); |
| 987 y_plane += converted_allocated_size_.width(); |
| 988 } |
| 989 // Copy the U plane. |
| 990 const uint8* u_plane = reinterpret_cast<uint8*>(output_record.address[1]); |
| 991 for (int i = 0; i < output_visible_size_.height() / 2; ++i) { |
| 992 memcpy(data, u_plane, output_visible_size_.width() / 2); |
| 993 data += output_visible_size_.width() / 2; |
| 994 u_plane += converted_allocated_size_.width() / 2; |
| 995 } |
| 996 // Copy the V plane. |
| 997 const uint8* v_plane = reinterpret_cast<uint8*>(output_record.address[2]); |
| 998 for (int i = 0; i < output_visible_size_.height() / 2; ++i) { |
| 999 memcpy(data, v_plane, output_visible_size_.width() / 2); |
| 1000 data += output_visible_size_.width() / 2; |
| 1001 v_plane += converted_allocated_size_.width() / 2; |
| 1002 } |
| 1003 offset = output_visible_size_.GetArea() * 3 / 2; |
| 1004 gsc_free_output_buffers_.push_back(output_index); |
| 1005 } |
| 1006 DVLOG(3) << "ReturnCompleteBuffers(): BitstreamBufferReady(): "
| 1007 << "bitstream_buffer_id=" << buffer_ref->id |
| 1008 << ", size=" << offset; |
| 1009 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind( |
| 1010 &Client::BitstreamBufferReady, |
| 1011 client_, |
| 1012 buffer_ref->id, |
| 1013 offset, |
| 1014 key_frame)); |
| 1015 } |
| 1016 } |
| 1017 |
| 1018 bool ExynosVideoEncodeAccelerator::StartDevicePoll() { |
| 1019 DVLOG(3) << "StartDevicePoll()"; |
| 1020 DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current()); |
| 1021 DCHECK(!device_poll_thread_.IsRunning()); |
| 1022 |
| 1023 // Start up the device poll thread and schedule its first DevicePollTask(). |
| 1024 if (!device_poll_thread_.Start()) { |
| 1025 DLOG(ERROR) << "StartDevicePoll(): Device thread failed to start"; |
| 1026 NOTIFY_ERROR(kPlatformFailureError); |
| 1027 return false; |
| 1028 } |
| 1029 device_poll_thread_.message_loop()->PostTask(FROM_HERE, base::Bind( |
| 1030 &ExynosVideoEncodeAccelerator::DevicePollTask, |
| 1031 base::Unretained(this), |
| 1032 0)); |
| 1033 |
| 1034 return true; |
| 1035 } |
| 1036 |
| 1037 bool ExynosVideoEncodeAccelerator::StopDevicePoll() { |
| 1038 DVLOG(3) << "StopDevicePoll()"; |
| 1039 |
| 1040 // Signal the DevicePollTask() to stop, and stop the device poll thread. |
| 1041 if (!SetDevicePollInterrupt()) |
| 1042 return false; |
| 1043 device_poll_thread_.Stop(); |
| 1044 // Clear the interrupt now, to be sure. |
| 1045 if (!ClearDevicePollInterrupt()) |
| 1046 return false; |
| 1047 |
| 1048 // Stop streaming. |
| 1049 if (gsc_input_streamon_) { |
| 1050 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 1051 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_STREAMOFF, &type); |
| 1052 } |
| 1053 gsc_input_streamon_ = false; |
| 1054 if (gsc_output_streamon_) { |
| 1055 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 1056 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_STREAMOFF, &type); |
| 1057 } |
| 1058 gsc_output_streamon_ = false; |
| 1059 if (mfc_input_streamon_) { |
| 1060 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 1061 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_STREAMOFF, &type); |
| 1062 } |
| 1063 mfc_input_streamon_ = false; |
| 1064 if (mfc_output_streamon_) { |
| 1065 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 1066 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_STREAMOFF, &type); |
| 1067 } |
| 1068 mfc_output_streamon_ = false; |
| 1069 |
| 1070 // Reset all our accounting info. |
| 1071 encoder_input_queue_.clear(); |
| 1072 gsc_free_input_buffers_.clear(); |
| 1073 for (size_t i = 0; i < gsc_input_buffer_map_.size(); ++i) { |
| 1074 gsc_free_input_buffers_.push_back(i); |
| 1075 GscInputRecord& input_record = gsc_input_buffer_map_[i]; |
| 1076 input_record.at_device = false; |
| 1077 input_record.frame_id = -1; |
| 1078 if (input_record.egl_sync) { |
| 1079 eglDestroySyncKHR(egl_display_, input_record.egl_sync); |
| 1080 input_record.egl_sync = EGL_NO_SYNC_KHR; |
| 1081 } |
| 1082 } |
| 1083 gsc_input_buffer_queued_count_ = 0; |
| 1084 gsc_free_output_buffers_.clear(); |
| 1085 for (size_t i = 0; i < gsc_output_buffer_map_.size(); ++i) { |
| 1086 gsc_free_output_buffers_.push_back(i); |
| 1087 GscOutputRecord& output_record = gsc_output_buffer_map_[i]; |
| 1088 output_record.at_device = false; |
| 1089 output_record.mfc_input = -1; |
| 1090 output_record.bytes_used[0] = output_record.bytes_used[1] |
| 1091 = output_record.bytes_used[2] = 0; |
| 1092 } |
| 1093 gsc_output_buffer_queued_count_ = 0; |
| 1094 gsc_output_mfc_input_queue_.clear(); |
| 1095 mfc_free_input_buffers_.clear(); |
| 1096 for (size_t i = 0; i < mfc_input_buffer_map_.size(); ++i) { |
| 1097 mfc_free_input_buffers_.push_back(i); |
| 1098 MfcInputRecord& input_record = mfc_input_buffer_map_[i]; |
| 1099 input_record.at_device = false; |
| 1100 } |
| 1101 mfc_input_buffer_queued_count_ = 0; |
| 1102 mfc_free_output_buffers_.clear(); |
| 1103 for (size_t i = 0; i < mfc_output_buffer_map_.size(); ++i) { |
| 1104 mfc_free_output_buffers_.push_back(i); |
| 1105 MfcOutputRecord& output_record = mfc_output_buffer_map_[i]; |
| 1106 output_record.at_device = false; |
| 1107 output_record.bytes_used = 0; |
| 1108 } |
| 1109 encoder_output_queue_.clear(); |
| 1110 |
| 1111 DVLOG(3) << "StopDevicePoll(): device poll stopped"; |
| 1112 return true; |
| 1113 } |
| 1114 |
| 1115 bool ExynosVideoEncodeAccelerator::SetDevicePollInterrupt() { |
| 1116 DVLOG(3) << "SetDevicePollInterrupt()"; |
| 1117 |
| 1118 const uint64 buf = 1; |
| 1119 if (HANDLE_EINTR(write(device_poll_interrupt_fd_, &buf, sizeof(buf))) == -1) { |
| 1120 DPLOG(ERROR) << "SetDevicePollInterrupt(): write() failed"; |
| 1121 NOTIFY_ERROR(kPlatformFailureError); |
| 1122 return false; |
| 1123 } |
| 1124 return true; |
| 1125 } |
| 1126 |
| 1127 bool ExynosVideoEncodeAccelerator::ClearDevicePollInterrupt() { |
| 1128 DVLOG(3) << "ClearDevicePollInterrupt()"; |
| 1129 |
| 1130 uint64 buf; |
| 1131 if (HANDLE_EINTR(read(device_poll_interrupt_fd_, &buf, sizeof(buf))) == -1) { |
| 1132 if (errno == EAGAIN) { |
| 1133 // No interrupt flag set, and we're reading nonblocking. Not an error. |
| 1134 return true; |
| 1135 } else { |
| 1136 DPLOG(ERROR) << "ClearDevicePollInterrupt(): read() failed"; |
| 1137 NOTIFY_ERROR(kPlatformFailureError); |
| 1138 return false; |
| 1139 } |
| 1140 } |
| 1141 return true; |
| 1142 } |
| 1143 |
| 1144 void ExynosVideoEncodeAccelerator::DevicePollTask(unsigned int poll_fds) { |
| 1145 DVLOG(3) << "DevicePollTask()"; |
| 1146 DCHECK_EQ(device_poll_thread_.message_loop(), base::MessageLoop::current()); |
| 1147 TRACE_EVENT0("Video Encoder", "EVEA::DevicePollTask"); |
| 1148 |
| 1149 // This routine just polls the set of device fds, and schedules a |
| 1150 // ServiceDeviceTask() on encoder_thread_ when processing needs to occur. |
| 1151 // Other threads may notify this task to return early by writing to |
| 1152 // device_poll_interrupt_fd_. |
| 1153 struct pollfd pollfds[3]; |
| 1154 nfds_t nfds; |
| 1155 |
| 1156 // Add device_poll_interrupt_fd_.
| 1157 pollfds[0].fd = device_poll_interrupt_fd_; |
| 1158 pollfds[0].events = POLLIN | POLLERR; |
| 1159 nfds = 1; |
| 1160 |
| 1161 // Add GSC fd, if we should poll on it. |
| 1162 // GSC has to wait until both input and output buffers are queued. |
| 1163 if (poll_fds & kPollGsc) { |
| 1164 DVLOG(3) << "DevicePollTask(): adding GSC to poll() set"; |
| 1165 pollfds[nfds].fd = gsc_fd_; |
| 1166 pollfds[nfds].events = POLLIN | POLLOUT | POLLERR; |
| 1167 nfds++; |
| 1168 } |
| 1169 if (poll_fds & kPollMfc) { |
| 1170 DVLOG(3) << "DevicePollTask(): adding MFC to poll() set"; |
| 1171 pollfds[nfds].fd = mfc_fd_; |
| 1172 pollfds[nfds].events = POLLIN | POLLOUT | POLLERR; |
| 1173 nfds++; |
| 1174 } |
| 1175 |
| 1176 // Poll it! |
| 1177 if (HANDLE_EINTR(poll(pollfds, nfds, -1)) == -1) { |
| 1178 DPLOG(ERROR) << "DevicePollTask(): poll() failed"; |
| 1179 NOTIFY_ERROR(kPlatformFailureError); |
| 1180 return; |
| 1181 } |
| 1182 |
| 1183 // All processing should happen on ServiceDeviceTask(), since we shouldn't |
| 1184 // touch encoder state from this thread. |
| 1185 encoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind( |
| 1186 &ExynosVideoEncodeAccelerator::ServiceDeviceTask, |
| 1187 base::Unretained(this))); |
| 1188 } |
| 1189 |
| 1190 void ExynosVideoEncodeAccelerator::NotifyError(Error error) { |
| 1191 DVLOG(2) << "NotifyError()"; |
| 1192 |
| 1193 if (!child_message_loop_proxy_->BelongsToCurrentThread()) { |
| 1194 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind( |
| 1195 &ExynosVideoEncodeAccelerator::NotifyError, weak_this_, error)); |
| 1196 return; |
| 1197 } |
| 1198 |
| 1199 if (client_) { |
| 1200 client_->NotifyError(error); |
| 1201 client_ptr_factory_.InvalidateWeakPtrs(); |
| 1202 } |
| 1203 } |
| 1204 |
| 1205 void ExynosVideoEncodeAccelerator::SetEncoderState(State state) { |
| 1206 DVLOG(3) << "SetEncoderState(): state=" << state; |
| 1207 |
| 1208 // We can touch encoder_state_ only if this is the encoder thread or the |
| 1209 // encoder thread isn't running. |
| 1210 if (encoder_thread_.message_loop() != NULL && |
| 1211 encoder_thread_.message_loop() != base::MessageLoop::current()) { |
| 1212 encoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind( |
| 1213 &ExynosVideoEncodeAccelerator::SetEncoderState, |
| 1214 base::Unretained(this), state)); |
| 1215 } else { |
| 1216 encoder_state_ = state; |
| 1217 } |
| 1218 } |
| 1219 |
| 1220 bool ExynosVideoEncodeAccelerator::CreateGscInputBuffers() { |
| 1221 DVLOG(3) << "CreateGscInputBuffers()"; |
| 1222 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
| 1223 DCHECK_EQ(encoder_state_, kUninitialized); |
| 1224 DCHECK(!gsc_input_streamon_); |
| 1225 |
| 1226 struct v4l2_control control; |
| 1227 memset(&control, 0, sizeof(control)); |
| 1228 control.id = V4L2_CID_ROTATE; |
| 1229 control.value = 0; |
| 1230 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CTRL, &control); |
| 1231 |
| 1232 // HFLIP actually seems to control vertical mirroring for GSC, and vice-versa. |
| 1233 memset(&control, 0, sizeof(control)); |
| 1234 control.id = V4L2_CID_HFLIP; |
| 1235 control.value = 1; |
| 1236 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CTRL, &control); |
| 1237 |
| 1238 memset(&control, 0, sizeof(control)); |
| 1239 control.id = V4L2_CID_VFLIP; |
| 1240 control.value = 0; |
| 1241 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CTRL, &control); |
| 1242 |
| 1243 memset(&control, 0, sizeof(control)); |
| 1244 control.id = V4L2_CID_GLOBAL_ALPHA; |
| 1245 control.value = 255; |
| 1246 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CTRL, &control); |
| 1247 |
| 1248 struct v4l2_format format; |
| 1249 memset(&format, 0, sizeof(format)); |
| 1250 format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 1251 format.fmt.pix_mp.width = input_allocated_size_.width(); |
| 1252 format.fmt.pix_mp.height = input_allocated_size_.height(); |
| 1253 format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_RGB32; |
| 1254 format.fmt.pix_mp.plane_fmt[0].sizeimage = |
| 1255 input_allocated_size_.GetArea() * 4; |
| 1256 format.fmt.pix_mp.plane_fmt[0].bytesperline = |
| 1257 input_allocated_size_.width() * 4; |
| 1258 format.fmt.pix_mp.num_planes = 1; |
| 1259 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_FMT, &format); |
| 1260 |
| 1261 struct v4l2_crop crop; |
| 1262 memset(&crop, 0, sizeof(crop)); |
| 1263 crop.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 1264 crop.c.left = 0; |
| 1265 crop.c.top = 0; |
| 1266 crop.c.width = input_visible_size_.width(); |
| 1267 crop.c.height = input_visible_size_.height(); |
| 1268 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CROP, &crop); |
| 1269 |
| 1270 struct v4l2_requestbuffers reqbufs; |
| 1271 memset(&reqbufs, 0, sizeof(reqbufs)); |
| 1272 reqbufs.count = kGscInputBufferCount; |
| 1273 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 1274 reqbufs.memory = V4L2_MEMORY_MMAP; |
| 1275 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_REQBUFS, &reqbufs); |
| 1276 |
| 1277 DCHECK(gsc_input_buffer_map_.empty()); |
| 1278 gsc_input_buffer_map_.resize(reqbufs.count); |
| 1279 for (size_t i = 0; i < gsc_input_buffer_map_.size(); ++i) { |
| 1280 GscInputRecord& input_record = gsc_input_buffer_map_[i]; |
| 1281 if (do_encode_from_backbuffer_) { |
| 1282 // We have to export the GSC input buffers as dma-bufs and bind them to
| 1283 // textures so we can glCopyTexSubImage2D() into them.
| 1284 struct v4l2_exportbuffer expbuf; |
| 1285 memset(&expbuf, 0, sizeof(expbuf)); |
| 1286 expbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 1287 expbuf.index = i; |
| 1288 expbuf.plane = 0; |
| 1289 expbuf.flags = O_CLOEXEC; |
| 1290 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_EXPBUF, &expbuf); |
| 1291 file_util::ScopedFD autofd(&expbuf.fd); |
| 1292 |
| 1293 EGLint attrs[13]; |
| 1294 { |
| 1295 size_t j = 0; |
| 1296 attrs[j++] = EGL_WIDTH; |
| 1297 attrs[j++] = input_allocated_size_.width(); |
| 1298 attrs[j++] = EGL_HEIGHT; |
| 1299 attrs[j++] = input_allocated_size_.height(); |
| 1300 attrs[j++] = EGL_LINUX_DRM_FOURCC_EXT; |
| 1301 attrs[j++] = DRM_FORMAT_XRGB8888; |
| 1302 attrs[j++] = EGL_DMA_BUF_PLANE0_FD_EXT; |
| 1303 attrs[j++] = expbuf.fd; |
| 1304 attrs[j++] = EGL_DMA_BUF_PLANE0_OFFSET_EXT; |
| 1305 attrs[j++] = 0; |
| 1306 attrs[j++] = EGL_DMA_BUF_PLANE0_PITCH_EXT; |
| 1307 attrs[j++] = input_allocated_size_.width() * 4; |
| 1308 attrs[j++] = EGL_NONE; |
| 1309 DCHECK_EQ(j, arraysize(attrs)); |
| 1310 } |
| 1311 input_record.egl_image = eglCreateImageKHR( |
| 1312 egl_display_, EGL_NO_CONTEXT, EGL_LINUX_DMA_BUF_EXT, NULL, attrs); |
| 1313 if (input_record.egl_image == EGL_NO_IMAGE_KHR) { |
| 1314 DLOG(ERROR) << "CreateGscInputBuffers(): could not create EGLImageKHR"; |
| 1315 NOTIFY_ERROR(kPlatformFailureError); |
| 1316 return false; |
| 1317 } |
| 1318 glGenTextures(1, &input_record.texture_id); |
| 1319 if (input_record.texture_id == 0) { |
| 1320 DLOG(ERROR) << "CreateGscInputBuffers(): glGenTextures() failed"; |
| 1321 NOTIFY_ERROR(kPlatformFailureError); |
| 1322 return false; |
| 1323 } |
| 1324 gfx::ScopedTextureBinder binder(GL_TEXTURE_2D, input_record.texture_id); |
| 1325 glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, input_record.egl_image); |
| 1326 } |
| 1327 gsc_free_input_buffers_.push_back(i); |
| 1328 } |
| 1329 |
| 1330 return true; |
| 1331 } |
| 1332 |
| 1333 bool ExynosVideoEncodeAccelerator::CreateGscOutputBuffers() { |
| 1334 DVLOG(3) << "CreateGscOutputBuffers()"; |
| 1335 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
| 1336 DCHECK_EQ(encoder_state_, kUninitialized); |
| 1337 DCHECK(!gsc_output_streamon_); |
| 1338 |
| 1339 struct v4l2_format format; |
| 1340 memset(&format, 0, sizeof(format)); |
| 1341 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 1342 format.fmt.pix_mp.width = converted_allocated_size_.width(); |
| 1343 format.fmt.pix_mp.height = converted_allocated_size_.height(); |
| 1344 if (do_output_encoding_) { |
| 1345 format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_NV12M; |
| 1346 format.fmt.pix_mp.plane_fmt[0].sizeimage = |
| 1347 converted_allocated_size_.GetArea(); |
| 1348 format.fmt.pix_mp.plane_fmt[1].sizeimage = |
| 1349 converted_allocated_size_.GetArea() / 2; |
| 1350 format.fmt.pix_mp.plane_fmt[0].bytesperline = |
| 1351 converted_allocated_size_.width(); |
| 1352 format.fmt.pix_mp.plane_fmt[1].bytesperline = |
| 1353 converted_allocated_size_.width(); |
| 1354 format.fmt.pix_mp.num_planes = 2; |
| 1355 } else { |
| 1356 format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_YUV420M; |
| 1357 format.fmt.pix_mp.plane_fmt[0].sizeimage = |
| 1358 converted_allocated_size_.GetArea(); |
| 1359 format.fmt.pix_mp.plane_fmt[1].sizeimage = |
| 1360 converted_allocated_size_.GetArea() / 4; |
| 1361 format.fmt.pix_mp.plane_fmt[2].sizeimage = |
| 1362 converted_allocated_size_.GetArea() / 4; |
| 1363 format.fmt.pix_mp.plane_fmt[0].bytesperline = |
| 1364 converted_allocated_size_.width(); |
| 1365 format.fmt.pix_mp.plane_fmt[1].bytesperline = |
| 1366 converted_allocated_size_.width() / 2; |
| 1367 format.fmt.pix_mp.plane_fmt[2].bytesperline = |
| 1368 converted_allocated_size_.width() / 2; |
| 1369 format.fmt.pix_mp.num_planes = 3; |
| 1370 output_buffer_byte_size_ = output_visible_size_.GetArea() * 3 / 2; |
| 1371 } |
| 1372 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_FMT, &format); |
| 1373 |
| 1374 struct v4l2_crop crop; |
| 1375 memset(&crop, 0, sizeof(crop)); |
| 1376 crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 1377 crop.c.left = 0; |
| 1378 crop.c.top = 0; |
| 1379 crop.c.width = output_visible_size_.width(); |
| 1380 crop.c.height = output_visible_size_.height(); |
| 1381 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CROP, &crop); |
| 1382 |
| 1383 struct v4l2_requestbuffers reqbufs; |
| 1384 memset(&reqbufs, 0, sizeof(reqbufs)); |
| 1385 reqbufs.count = kGscOutputBufferCount; |
| 1386 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 1387 reqbufs.memory = (do_output_encoding_ ? |
| 1388 V4L2_MEMORY_DMABUF : V4L2_MEMORY_MMAP); |
| 1389 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_REQBUFS, &reqbufs); |
| 1390 |
| 1391 DCHECK(gsc_output_buffer_map_.empty()); |
| 1392 gsc_output_buffer_map_.resize(reqbufs.count); |
| 1393 for (size_t i = 0; i < gsc_output_buffer_map_.size(); ++i) { |
| 1394 gsc_free_output_buffers_.push_back(i); |
| 1395 GscOutputRecord& output_record = gsc_output_buffer_map_[i]; |
| 1396 if (!do_output_encoding_) { |
| 1397 // Query for the MEMORY_MMAP pointer. |
| 1398 struct v4l2_plane planes[arraysize(output_record.address)]; |
| 1399 struct v4l2_buffer buffer; |
| 1400 memset(&buffer, 0, sizeof(buffer)); |
| 1401 memset(planes, 0, sizeof(planes)); |
| 1402 buffer.index = i; |
| 1403 buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 1404 buffer.memory = V4L2_MEMORY_MMAP; |
| 1405 buffer.m.planes = planes; |
| 1406 buffer.length = arraysize(output_record.address); |
| 1407 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_QUERYBUF, &buffer); |
| 1408 for (size_t j = 0; j < arraysize(output_record.address); ++j) { |
| 1409 void* address = mmap(NULL, buffer.m.planes[j].length, |
| 1410 PROT_READ | PROT_WRITE, MAP_SHARED, gsc_fd_, |
| 1411 buffer.m.planes[j].m.mem_offset); |
| 1412 if (address == MAP_FAILED) { |
| 1413 DPLOG(ERROR) << "CreateGscOutputBuffers(): mmap() failed"; |
| 1414 return false; |
| 1415 } |
| 1416 output_record.address[j] = address; |
| 1417 output_record.length[j] = buffer.m.planes[j].length; |
| 1418 } |
| 1419 } |
| 1420 } |
| 1421 return true; |
| 1422 } |
| 1423 |
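| // SetMfcFormats() configures the MFC encoder: the OUTPUT (source) queue is set |
| // to NV12M at the GSC-converted size, the CAPTURE (destination) queue to the |
| // compressed format (H.264 here), and the codec controls and frame interval |
| // are programmed. |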
| 1424 bool ExynosVideoEncodeAccelerator::SetMfcFormats() { |
| 1425 DVLOG(3) << "SetMfcFormats()"; |
| 1426 DCHECK(!mfc_input_streamon_); |
| 1427 DCHECK(!mfc_output_streamon_); |
| 1428 DCHECK(do_output_encoding_); |
| 1429 |
| 1430 // VIDIOC_S_FMT on OUTPUT queue. |
| 1431 struct v4l2_format format; |
| 1432 memset(&format, 0, sizeof(format)); |
| 1433 format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 1434 format.fmt.pix_mp.width = converted_allocated_size_.width(); |
| 1435 format.fmt.pix_mp.height = converted_allocated_size_.height(); |
| 1436 format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_NV12M; |
| 1437 format.fmt.pix_mp.num_planes = 2; |
| 1438 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_S_FMT, &format); |
| 1439 // MFC reads its input directly from the GSC output, so we rely on the |
| 1440 // driver not adjusting the size/stride we just set (checked below). |
| 1441 DCHECK_EQ(format.fmt.pix_mp.plane_fmt[0].sizeimage, |
| 1442 static_cast<__u32>(converted_allocated_size_.GetArea())); |
| 1443 DCHECK_EQ(format.fmt.pix_mp.plane_fmt[0].bytesperline, |
| 1444 static_cast<__u32>(converted_allocated_size_.width())); |
| 1445 DCHECK_EQ(format.fmt.pix_mp.plane_fmt[1].sizeimage, |
| 1446 static_cast<__u32>(converted_allocated_size_.GetArea() / 2)); |
| 1447 DCHECK_EQ(format.fmt.pix_mp.plane_fmt[1].bytesperline, |
| 1448 static_cast<__u32>(converted_allocated_size_.width())); |
| 1449 |
| 1450 struct v4l2_crop crop; |
| 1451 memset(&crop, 0, sizeof(crop)); |
| 1452 crop.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; |
| 1453 crop.c.left = 0; |
| 1454 crop.c.top = 0; |
| 1455 crop.c.width = converted_visible_size_.width(); |
| 1456 crop.c.height = converted_visible_size_.height(); |
| 1457 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_S_CROP, &crop); |
| 1458 |
| 1459 // VIDIOC_S_FMT on CAPTURE queue. |
| 1460 output_buffer_byte_size_ = kMfcOutputBufferSize; |
| 1461 __u32 pixelformat = 0; |
| 1462 if (video_profile_ >= media::H264PROFILE_MIN && |
| 1463 video_profile_ <= media::H264PROFILE_MAX) { |
| 1464 pixelformat = V4L2_PIX_FMT_H264; |
| 1465 } else { |
| DLOG(ERROR) << "SetMfcFormats(): unsupported profile=" << video_profile_; |
| return false; |
| } |
| 1466 memset(&format, 0, sizeof(format)); |
| 1467 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 1468 format.fmt.pix_mp.width = output_visible_size_.width(); |
| 1469 format.fmt.pix_mp.height = output_visible_size_.height(); |
| 1470 format.fmt.pix_mp.pixelformat = pixelformat; |
| 1471 format.fmt.pix_mp.plane_fmt[0].sizeimage = output_buffer_byte_size_; |
| 1472 format.fmt.pix_mp.num_planes = 1; |
| 1473 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_S_FMT, &format); |
| 1474 |
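| // Initial encoder controls: no B-frames, frame-level rate control with MFC |
| // reaction coefficient 10, a keyframe every 16 frames, SPS/PPS emitted |
| // separately, max QP 51, and a starting bitrate of 2048000 bps (adjustable |
| // later via SetBitrate()). |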
| 1475 struct v4l2_ext_control ctrls[7]; |
| 1476 struct v4l2_ext_controls control; |
| 1477 memset(&ctrls, 0, sizeof(ctrls)); |
| 1478 memset(&control, 0, sizeof(control)); |
| 1479 ctrls[0].id = V4L2_CID_MPEG_VIDEO_B_FRAMES; |
| 1480 ctrls[0].value = 0; |
| 1481 ctrls[1].id = V4L2_CID_MPEG_VIDEO_FRAME_RC_ENABLE; |
| 1482 ctrls[1].value = 1; |
| 1483 ctrls[2].id = V4L2_CID_MPEG_MFC51_VIDEO_RC_REACTION_COEFF; |
| 1484 ctrls[2].value = 10; |
| 1485 ctrls[3].id = V4L2_CID_MPEG_VIDEO_GOP_SIZE; |
| 1486 ctrls[3].value = 16; |
| 1487 ctrls[4].id = V4L2_CID_MPEG_VIDEO_HEADER_MODE; |
| 1488 // Use SEPARATE header mode: SPS/PPS are delivered in their own bitstream |
| // buffer rather than joined with the first encoded frame. |
| 1489 ctrls[4].value = V4L2_MPEG_VIDEO_HEADER_MODE_SEPARATE; |
| 1490 ctrls[5].id = V4L2_CID_MPEG_VIDEO_H264_MAX_QP; |
| 1491 ctrls[5].value = 51; |
| 1492 ctrls[6].id = V4L2_CID_MPEG_VIDEO_BITRATE; |
| 1493 ctrls[6].value = 2048000; |
| 1494 control.ctrl_class = V4L2_CTRL_CLASS_MPEG; |
| 1495 control.count = arraysize(ctrls); |
| 1496 control.controls = ctrls; |
| 1497 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_S_EXT_CTRLS, &control); |
| 1498 |
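| // V4L2 expects a frame interval (time per frame), not a frame rate, so 1/25 |
| // below requests 25 fps; this assumes the MFC driver follows the standard |
| // timeperframe semantics. |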
| 1499 struct v4l2_streamparm parms; |
| 1500 memset(&parms, 0, sizeof(parms)); |
| 1501 parms.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 1502 parms.parm.output.timeperframe.numerator = 1; |
| 1503 parms.parm.output.timeperframe.denominator = 25; |
| 1504 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_S_PARM, &parms); |
| 1505 |
| 1506 return true; |
| 1507 } |
| 1508 |
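| // CreateMfcInputBuffers() allocates a single MMAP buffer on the MFC OUTPUT |
| // (source) queue and exports each NV12M plane as a dmabuf fd with |
| // VIDIOC_EXPBUF, so the GSC CAPTURE queue (V4L2_MEMORY_DMABUF above) can |
| // write converted frames directly into the encoder's input memory. |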
| 1509 bool ExynosVideoEncodeAccelerator::CreateMfcInputBuffers() { |
| 1510 DVLOG(3) << "CreateMfcInputBuffers()"; |
| 1511 DCHECK(!mfc_input_streamon_); |
| 1512 DCHECK(do_output_encoding_); |
| 1513 |
| 1514 struct v4l2_requestbuffers reqbufs; |
| 1515 memset(&reqbufs, 0, sizeof(reqbufs)); |
| 1516 reqbufs.count = 1; |
| 1517 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 1518 reqbufs.memory = V4L2_MEMORY_MMAP; |
| 1519 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_REQBUFS, &reqbufs); |
| 1520 |
| 1521 DCHECK(mfc_input_buffer_map_.empty()); |
| 1522 mfc_input_buffer_map_.resize(reqbufs.count); |
| 1523 for (size_t i = 0; i < mfc_input_buffer_map_.size(); ++i) { |
| 1524 mfc_free_input_buffers_.push_back(i); |
| 1525 MfcInputRecord& input_record = mfc_input_buffer_map_[i]; |
| 1526 for (size_t j = 0; j < arraysize(input_record.fd); ++j) { |
| 1527 struct v4l2_exportbuffer expbuf; |
| 1528 memset(&expbuf, 0, sizeof(expbuf)); |
| 1529 expbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 1530 expbuf.index = i; |
| 1531 expbuf.plane = j; |
| 1532 expbuf.flags = O_CLOEXEC; |
| 1533 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_EXPBUF, &expbuf); |
| 1534 input_record.fd[j] = expbuf.fd; |
| 1535 } |
| 1536 } |
| 1537 return true; |
| 1538 } |
| 1539 |
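| // CreateMfcOutputBuffers() allocates the MFC CAPTURE (destination) buffers |
| // that receive the encoded bitstream and mmap()s each one so the encoded |
| // data can be copied out to the client's bitstream buffers. |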
| 1540 bool ExynosVideoEncodeAccelerator::CreateMfcOutputBuffers() { |
| 1541 DVLOG(3) << "CreateMfcOutputBuffers()"; |
| 1542 DCHECK(!mfc_output_streamon_); |
| 1543 DCHECK(do_output_encoding_); |
| 1544 |
| 1545 struct v4l2_requestbuffers reqbufs; |
| 1546 memset(&reqbufs, 0, sizeof(reqbufs)); |
| 1547 reqbufs.count = kMfcOutputBufferCount; |
| 1548 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 1549 reqbufs.memory = V4L2_MEMORY_MMAP; |
| 1550 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_REQBUFS, &reqbufs); |
| 1551 |
| 1552 DCHECK(mfc_output_buffer_map_.empty()); |
| 1553 mfc_output_buffer_map_.resize(reqbufs.count); |
| 1554 for (size_t i = 0; i < mfc_output_buffer_map_.size(); ++i) { |
| 1555 mfc_free_output_buffers_.push_back(i); |
| 1556 MfcOutputRecord& output_record = mfc_output_buffer_map_[i]; |
| 1557 // Query for the MEMORY_MMAP pointer. |
| 1558 struct v4l2_plane planes[1]; |
| 1559 struct v4l2_buffer buffer; |
| 1560 memset(&buffer, 0, sizeof(buffer)); |
| 1561 memset(planes, 0, sizeof(planes)); |
| 1562 buffer.index = i; |
| 1563 buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 1564 buffer.memory = V4L2_MEMORY_MMAP; |
| 1565 buffer.m.planes = planes; |
| 1566 buffer.length = 1; |
| 1567 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_QUERYBUF, &buffer); |
| 1568 void* address = mmap(NULL, buffer.m.planes[0].length, |
| 1569 PROT_READ | PROT_WRITE, MAP_SHARED, mfc_fd_, |
| 1570 buffer.m.planes[0].m.mem_offset); |
| 1571 if (address == MAP_FAILED) { |
| 1572 DPLOG(ERROR) << "CreateMfcOutputBuffers(): mmap() failed"; |
| 1573 return false; |
| 1574 } |
| 1575 output_record.address = address; |
| 1576 output_record.length = buffer.m.planes[0].length; |
| 1577 } |
| 1578 return true; |
| 1579 } |
| 1580 |
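| // SetBitrate() adjusts the encoder target bitrate at runtime via |
| // V4L2_CID_MPEG_VIDEO_BITRATE; the MFC driver is expected to apply it to |
| // subsequently queued frames. |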
| 1581 void ExynosVideoEncodeAccelerator::SetBitrate(int32 bitrate) { |
| 1582 DVLOG(3) << "SetBitrate(): bitrate=" << bitrate; |
| 1583 DCHECK(do_output_encoding_); |
| 1584 |
| 1585 struct v4l2_ext_control ctrls[1]; |
| 1586 struct v4l2_ext_controls control; |
| 1587 memset(&ctrls, 0, sizeof(ctrls)); |
| 1588 memset(&control, 0, sizeof(control)); |
| 1589 ctrls[0].id = V4L2_CID_MPEG_VIDEO_BITRATE; |
| 1590 ctrls[0].value = bitrate; |
| 1591 control.ctrl_class = V4L2_CTRL_CLASS_MPEG; |
| 1592 control.count = 1; |
| 1593 control.controls = ctrls; |
| 1594 IOCTL_OR_ERROR_RETURN(mfc_fd_, VIDIOC_S_EXT_CTRLS, &control); |
| 1595 } |
| 1596 |
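| // DestroyGscInputBuffers() releases the EGL sync/image/texture attached to |
| // each GSC input record, then frees the OUTPUT queue buffers with a |
| // zero-count VIDIOC_REQBUFS. |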
| 1597 void ExynosVideoEncodeAccelerator::DestroyGscInputBuffers() { |
| 1598 DVLOG(3) << "DestroyGscInputBuffers()"; |
| 1599 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
| 1600 DCHECK(!gsc_input_streamon_); |
| 1601 |
| 1602 for (size_t i = 0; i < gsc_input_buffer_map_.size(); ++i) { |
| 1603 GscInputRecord& input_record = gsc_input_buffer_map_[i]; |
| 1604 if (input_record.egl_sync != EGL_NO_SYNC_KHR) |
| 1605 eglDestroySyncKHR(egl_display_, input_record.egl_sync); |
| 1606 if (input_record.egl_image != EGL_NO_IMAGE_KHR) |
| 1607 eglDestroyImageKHR(egl_display_, input_record.egl_image); |
| 1608 if (input_record.texture_id != 0) |
| 1609 glDeleteTextures(1, &input_record.texture_id); |
| 1610 } |
| 1611 |
| 1612 struct v4l2_requestbuffers reqbufs; |
| 1613 memset(&reqbufs, 0, sizeof(reqbufs)); |
| 1614 reqbufs.count = 0; |
| 1615 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 1616 reqbufs.memory = V4L2_MEMORY_MMAP; |
| 1617 if (ioctl(gsc_fd_, VIDIOC_REQBUFS, &reqbufs) != 0) |
| 1618 DPLOG(ERROR) << "DestroyGscInputBuffers(): ioctl() failed: VIDIOC_REQBUFS"; |
| 1619 |
| 1620 gsc_input_buffer_map_.clear(); |
| 1621 gsc_free_input_buffers_.clear(); |
| 1622 } |
| 1623 |
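| // DestroyGscOutputBuffers() unmaps any MMAP planes and frees the CAPTURE |
| // queue buffers with a zero-count VIDIOC_REQBUFS. |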
| 1624 void ExynosVideoEncodeAccelerator::DestroyGscOutputBuffers() { |
| 1625 DVLOG(3) << "DestroyGscOutputBuffers()"; |
| 1626 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
| 1627 DCHECK(!gsc_output_streamon_); |
| 1628 |
| 1629 for (size_t i = 0; i < gsc_output_buffer_map_.size(); ++i) { |
| 1630 GscOutputRecord& output_record = gsc_output_buffer_map_[i]; |
| 1631 for (size_t j = 0; j < arraysize(output_record.address); ++j) { |
| 1632 if (output_record.address[j] != NULL) { |
| // munmap() does not return EINTR, so no retry wrapper is needed. |
| 1633 munmap(output_record.address[j], output_record.length[j]); |
| 1634 } |
| 1635 } |
| 1636 } |
| 1637 |
| 1638 struct v4l2_requestbuffers reqbufs; |
| 1639 memset(&reqbufs, 0, sizeof(reqbufs)); |
| 1640 reqbufs.count = 0; |
| 1641 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 1642 reqbufs.memory = do_output_encoding_ ? V4L2_MEMORY_DMABUF : V4L2_MEMORY_MMAP; |
| 1643 if (ioctl(gsc_fd_, VIDIOC_REQBUFS, &reqbufs) != 0) |
| 1644 DPLOG(ERROR) << "DestroyGscOutputBuffers(): ioctl() failed: VIDIOC_REQBUFS"; |
| 1645 |
| 1646 gsc_output_buffer_map_.clear(); |
| 1647 gsc_free_output_buffers_.clear(); |
| 1648 } |
| 1649 |
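| // DestroyMfcInputBuffers() closes the exported dmabuf fds and frees the |
| // OUTPUT queue buffers with a zero-count VIDIOC_REQBUFS. |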
| 1650 void ExynosVideoEncodeAccelerator::DestroyMfcInputBuffers() { |
| 1651 DVLOG(3) << "DestroyMfcInputBuffers()"; |
| 1652 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
| 1653 DCHECK(do_output_encoding_); |
| 1654 DCHECK(!mfc_input_streamon_); |
| 1655 |
| 1656 for (size_t i = 0; i < mfc_input_buffer_map_.size(); ++i) { |
| 1657 MfcInputRecord& input_record = mfc_input_buffer_map_[i]; |
| 1658 for (size_t j = 0; j < arraysize(input_record.fd); ++j) { |
| 1659 if (input_record.fd[j] != -1) { |
| // Do not retry close() on EINTR: Linux closes the fd even when the call is |
| // interrupted, and a retry could close an unrelated, recycled fd. |
| 1660 close(input_record.fd[j]); |
| 1661 } |
| 1662 } |
| 1663 } |
| 1664 |
| 1665 struct v4l2_requestbuffers reqbufs; |
| 1666 memset(&reqbufs, 0, sizeof(reqbufs)); |
| 1667 reqbufs.count = 0; |
| 1668 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 1669 reqbufs.memory = V4L2_MEMORY_MMAP; |
| 1670 if (ioctl(mfc_fd_, VIDIOC_REQBUFS, &reqbufs) != 0) |
| 1671 DPLOG(ERROR) << "DestroyMfcInputBuffers(): ioctl() failed: VIDIOC_REQBUFS"; |
| 1672 |
| 1673 mfc_input_buffer_map_.clear(); |
| 1674 mfc_free_input_buffers_.clear(); |
| 1675 } |
| 1676 |
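| // DestroyMfcOutputBuffers() unmaps the encoded-bitstream buffers and frees |
| // the CAPTURE queue buffers with a zero-count VIDIOC_REQBUFS. |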
| 1677 void ExynosVideoEncodeAccelerator::DestroyMfcOutputBuffers() { |
| 1678 DVLOG(3) << "DestroyMfcOutputBuffers()"; |
| 1679 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
| 1680 DCHECK(do_output_encoding_); |
| 1681 DCHECK(!mfc_output_streamon_); |
| 1682 |
| 1683 for (size_t i = 0; i < mfc_output_buffer_map_.size(); ++i) { |
| 1684 MfcOutputRecord& output_record = mfc_output_buffer_map_[i]; |
| 1685 if (output_record.address != NULL) |
| 1686 munmap(output_record.address, output_record.length); |
| 1687 } |
| 1688 |
| 1689 struct v4l2_requestbuffers reqbufs; |
| 1690 memset(&reqbufs, 0, sizeof(reqbufs)); |
| 1691 reqbufs.count = 0; |
| 1692 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 1693 reqbufs.memory = V4L2_MEMORY_MMAP; |
| 1694 if (ioctl(mfc_fd_, VIDIOC_REQBUFS, &reqbufs) != 0) |
| 1695 DPLOG(ERROR) << "DestroyMfcOutputBuffers(): ioctl() failed: VIDIOC_REQBUFS"; |
| 1696 |
| 1697 mfc_output_buffer_map_.clear(); |
| 1698 mfc_free_output_buffers_.clear(); |
| 1699 } |
| 1700 |
| 1701 } // namespace content |