Chromium Code Reviews| Index: content/common/gpu/media/exynos_video_decode_accelerator.cc |
| diff --git a/content/common/gpu/media/exynos_video_decode_accelerator.cc b/content/common/gpu/media/exynos_video_decode_accelerator.cc |
| new file mode 100644 |
| index 0000000000000000000000000000000000000000..2cf68bec4d23ea691d46fe67af953b8d8177698d |
| --- /dev/null |
| +++ b/content/common/gpu/media/exynos_video_decode_accelerator.cc |
| @@ -0,0 +1,2097 @@ |
| +// Copyright (c) 2012 The Chromium Authors. All rights reserved. |
| +// Use of this source code is governed by a BSD-style license that can be |
| +// found in the LICENSE file. |
| + |
| +#include <dlfcn.h> |
| +#include <errno.h> |
| +#include <fcntl.h> |
| +#include <linux/videodev2.h> |
| +#include <sys/epoll.h> |
| +#include <sys/ioctl.h> |
| +#include <sys/mman.h> |
| + |
| +#include "base/bind.h" |
| +#include "base/message_loop.h" |
| +#include "base/message_loop_proxy.h" |
| +#include "base/shared_memory.h" |
| +#include "content/common/gpu/media/exynos_video_decode_accelerator.h" |
| +#include "ui/gl/gl_bindings.h" |
| +#include "ui/gl/gl_context.h" |
| +#include "ui/gl/gl_context_egl.h" |
| +#include "ui/gl/gl_surface_egl.h" |
| + |
| +namespace content { |
| + |
// Transitions the decoder to the terminal kError state, logs, and notifies
// the client via NotifyError().  Wrapped in do { } while (0) so the macro
// expands as a single statement (safe inside an un-braced if/else).
#define NOTIFY_ERROR(x) \
  do { \
    SetDecoderState(kError); \
    LOG(ERROR) << "calling NotifyError(): " << x; \
    NotifyError(x); \
  } while (0)
| + |
| +#define EXYNOS_MFC_DEVICE "/dev/mfc-dec" |
| +#define EXYNOS_GSC_DEVICE "/dev/gsc1" |
|
Pawel Osciak
2012/11/06 19:26:17
We'd like to be using all gsc nodes available. Cou
sheu
2012/11/08 21:16:31
I did some tracing, and GSC time barely factors in
Pawel Osciak
2012/11/13 18:10:39
Hm, what resolution was that? What was the MFC tim
|
#define EXYNOS_MALI_DRIVER "libmali.so"

// Entry points resolved at runtime from the Mali driver; populated by
// PreSandboxInitialization() / PostSandboxInitialization().  Process-wide,
// hence file-scope statics.
static void* libmali_handle = NULL;
// Vendor extension: retrieves the dma_buf handle backing an EGLImageKHR.
static EGLBoolean(*mali_egl_image_get_buffer_ext_phandle)(
    EGLImageKHR, EGLint*, void*) = NULL;
// EGL_KHR_fence_sync entry points, dlsym()ed from the same driver.
static EGLSyncKHR(*egl_create_sync_khr)(
    EGLDisplay, EGLenum, const EGLint*) = NULL;
static EGLBoolean(*egl_destroy_sync_khr)(
    EGLDisplay, EGLSyncKHR) = NULL;
static EGLint(*egl_client_wait_sync_khr)(
    EGLDisplay, EGLSyncKHR, EGLint, EGLTimeKHR) = NULL;
| + |
// Record of one client bitstream buffer: takes ownership of the SharedMemory
// mapping, and remembers the payload size and the client-assigned input_id.
ExynosVideoDecodeAccelerator::BitstreamBufferRecord::BitstreamBufferRecord(
    base::SharedMemory* shm, size_t size, int32 input_id)
    : shm(shm),
      size(size),
      input_id(input_id) {
}

ExynosVideoDecodeAccelerator::BitstreamBufferRecord::~BitstreamBufferRecord() {
}
| + |
// State of one MFC input (bitstream) buffer.  input_id == -1 means the
// buffer is not associated with any client bitstream buffer.
ExynosVideoDecodeAccelerator::MfcInputRecord::MfcInputRecord()
    : at_device(false),
      offset(NULL),
      length(0),
      bytes_used(0),
      input_id(-1) {
}

ExynosVideoDecodeAccelerator::MfcInputRecord::~MfcInputRecord() {
}
| + |
// State of one MFC output (decoded-frame) buffer.  MFC output is two-plane
// (Y and CbCr), hence the two-element offset/length/bytes_used arrays.
ExynosVideoDecodeAccelerator::MfcOutputRecord::MfcOutputRecord()
    : at_device(false),
      input_id(-1) {
  bytes_used[0] = 0;
  bytes_used[1] = 0;
  offset[0] = NULL;
  offset[1] = NULL;
  length[0] = 0;
  length[1] = 0;
}

ExynosVideoDecodeAccelerator::MfcOutputRecord::~MfcOutputRecord() {
}
| + |
// State of one GSC input buffer.  mfc_output is the index of the MFC output
// buffer currently feeding this GSC input, or -1 if none.
ExynosVideoDecodeAccelerator::GscInputRecord::GscInputRecord()
    : at_device(false),
      mfc_output(-1) {
}

ExynosVideoDecodeAccelerator::GscInputRecord::~GscInputRecord() {
}
| + |
// State of one GSC output buffer: the dma_buf fd and EGLImage it renders
// into, the fence used to wait for client GL reads, and the PictureBuffer id
// the client knows it by.
ExynosVideoDecodeAccelerator::GscOutputRecord::GscOutputRecord()
    : at_device(false),
      at_client(false),
      fd(-1),
      egl_image(EGL_NO_IMAGE_KHR),
      egl_sync(EGL_NO_SYNC_KHR),
      picture_id(-1) {
}

ExynosVideoDecodeAccelerator::GscOutputRecord::~GscOutputRecord() {
}
| + |
| +ExynosVideoDecodeAccelerator::EGLImageKHRArrayRef::EGLImageKHRArrayRef( |
| + EGLDisplay egl_display, EGLImageKHR egl_images[], int egl_image_fds[], |
| + int egl_images_count) |
| + : egl_display(egl_display), |
| + egl_images(egl_images), |
| + egl_image_fds(egl_image_fds), |
| + egl_images_count(egl_images_count) { |
| +} |
| + |
| +ExynosVideoDecodeAccelerator::EGLImageKHRArrayRef::~EGLImageKHRArrayRef() { |
| + DCHECK_EQ(egl_images != NULL, egl_image_fds != NULL); |
| + if (egl_images == NULL) |
| + return; |
| + |
| + for (int i = 0; i < egl_images_count; ++i) { |
| + if (egl_images[i] != EGL_NO_IMAGE_KHR) |
| + eglDestroyImageKHR(egl_display, egl_images[i]); |
| + if (egl_image_fds[i] != -1) |
| + close(egl_image_fds[i]); |
| + } |
| +} |
| + |
// RAII holder for one EGLSyncKHR fence, used to hand the fence from
// ReusePictureBuffer() (child thread) to the decoder thread.  Destroys the
// fence (via the dlsym()ed eglDestroySyncKHR) if it was never consumed.
ExynosVideoDecodeAccelerator::EGLSyncKHRRef::EGLSyncKHRRef(
    EGLDisplay egl_display, EGLSyncKHR egl_sync)
    : egl_display(egl_display),
      egl_sync(egl_sync) {
}

ExynosVideoDecodeAccelerator::EGLSyncKHRRef::~EGLSyncKHRRef() {
  if (egl_sync != EGL_NO_SYNC_KHR)
    egl_destroy_sync_khr(egl_display, egl_sync);
}
| + |
// Constructed on the child (GPU child process) thread; captures that thread's
// message loop proxy so decoder-thread code can post client notifications
// back.  All device fds start closed (-1) and all state zeroed; real setup
// happens in Initialize().
ExynosVideoDecodeAccelerator::ExynosVideoDecodeAccelerator(
    gfx::GLContext* gl_context,
    Client* client,
    const base::Callback<bool(void)>& make_context_current)
    : child_message_loop_proxy_(base::MessageLoopProxy::current()),
      weak_this_(base::AsWeakPtr(this)),
      client_ptr_factory_(client),
      client_(client_ptr_factory_.GetWeakPtr()),
      decoder_thread_("ExynosDecoderThread"),
      decoder_state_(kUninitialized),
      decoder_current_bitstream_buffer_(NULL),
      decoder_current_input_buffer_(-1),
      decoder_decode_buffer_tasks_scheduled_(0),
      decoder_frames_inflight_(0),
      decoder_frames_at_client_(0),
      decoder_flush_notify_requested_(false),
      mfc_fd_(-1),
      mfc_fd_closer_(&mfc_fd_),
      mfc_input_streamon_(false),
      mfc_input_buffer_count_(0),
      mfc_input_buffer_queued_count_(0),
      mfc_output_streamon_(false),
      mfc_output_buffer_count_(0),
      mfc_output_buffer_pixelformat_(0),
      gsc_fd_(-1),
      gsc_fd_closer_(&gsc_fd_),
      gsc_input_streamon_(false),
      gsc_input_buffer_count_(0),
      gsc_output_streamon_(false),
      gsc_output_buffer_count_(0),
      gsc_output_buffer_prepared_count_(0),
      gsc_output_buffer_queued_count_(0),
      frame_buffer_size_(0, 0),
      device_poll_thread_("ExynosDevicePollThread"),
      gl_context_(gl_context),
      make_context_current_(make_context_current),
      egl_context_(EGL_NO_CONTEXT),
      egl_display_(EGL_NO_DISPLAY),
      video_profile_(media::VIDEO_CODEC_PROFILE_UNKNOWN) {
}
| + |
// Destruction is reached only via Destroy(), which tears the buffer maps down
// first; the DCHECKs enforce that ordering.
ExynosVideoDecodeAccelerator::~ExynosVideoDecodeAccelerator() {
  // These maps have members that should be manually destroyed, e.g. file
  // descriptors, mmap() segments, etc.
  DCHECK(mfc_input_buffer_map_.empty());
  DCHECK(mfc_output_buffer_map_.empty());
  DCHECK(gsc_input_buffer_map_.empty());
  DCHECK(gsc_output_buffer_map_.empty());
}
| + |
| +bool ExynosVideoDecodeAccelerator::Initialize( |
| + media::VideoCodecProfile profile) { |
| + DVLOG(3) << "Initialize()"; |
| + DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
| + DCHECK_EQ(decoder_state_, kUninitialized); |
| + |
| + switch (profile) { |
| + case media::H264PROFILE_BASELINE: |
| + DVLOG(2) << "Initialize(): profile H264PROFILE_BASELINE"; |
| + break; |
| + case media::H264PROFILE_MAIN: |
| + DVLOG(2) << "Initialize(): profile H264PROFILE_MAIN"; |
| + break; |
| + case media::H264PROFILE_HIGH: |
| + DVLOG(2) << "Initialize(): profile H264PROFILE_HIGH"; |
| + break; |
| + case media::VP8PROFILE_MAIN: |
| + DVLOG(2) << "Initialize(): profile VP8PROFILE_MAIN"; |
| + break; |
| + default: |
| + DLOG(ERROR) << "Initialize(): unsupported profile=" << profile; |
| + return false; |
| + }; |
| + video_profile_ = profile; |
| + |
| + gfx::GLContextEGL* context_egl = static_cast<gfx::GLContextEGL*>(gl_context_); |
| + static bool sandbox_initialized = PostSandboxInitialization(); |
| + if (!sandbox_initialized) { |
| + DLOG(ERROR) << "Initialize(): PostSandboxInitialization() failed"; |
| + NOTIFY_ERROR(PLATFORM_FAILURE); |
| + return false; |
| + } |
| + |
| + egl_context_ = reinterpret_cast<EGLContext>(context_egl->GetHandle()); |
| + if (egl_context_ == EGL_NO_CONTEXT) { |
| + DLOG(ERROR) << "Initialize(): could not get EGLContext"; |
| + NOTIFY_ERROR(PLATFORM_FAILURE); |
| + return false; |
| + } |
| + egl_display_ = gfx::GLSurfaceEGL::GetHardwareDisplay(); |
| + if (egl_display_ == EGL_NO_DISPLAY) { |
| + DLOG(ERROR) << "Initialize(): could not get EGLDisplay"; |
| + NOTIFY_ERROR(PLATFORM_FAILURE); |
| + return false; |
| + } |
| + |
| + // Open the video devices. |
| + DVLOG(2) << "Initialize(): opening MFC device: " << EXYNOS_MFC_DEVICE; |
| + errno = 0; |
|
Pawel Osciak
2012/11/06 19:26:17
Why zero-out errno? Your are not using it...
sheu
2012/11/08 21:16:31
Paranoia, and DPLOG uses it.
Fine, I'll take it o
|
| + mfc_fd_ = open(EXYNOS_MFC_DEVICE, O_RDWR | O_NONBLOCK); |
| + if (mfc_fd_ == -1) { |
| + DPLOG(ERROR) << "Initialize(): could not open MFC device: " |
| + << EXYNOS_MFC_DEVICE; |
| + NOTIFY_ERROR(PLATFORM_FAILURE); |
| + return false; |
| + } |
| + DVLOG(2) << "Initialize(): opening GSC device: " << EXYNOS_GSC_DEVICE; |
| + errno = 0; |
| + gsc_fd_ = open(EXYNOS_GSC_DEVICE, O_RDWR | O_NONBLOCK); |
| + if (gsc_fd_ == -1) { |
| + DPLOG(ERROR) << "Initialize(): could not open GSC device: " |
| + << EXYNOS_GSC_DEVICE; |
| + NOTIFY_ERROR(PLATFORM_FAILURE); |
| + return false; |
| + } |
| + |
| + // Capabilities check. |
| + struct v4l2_capability caps; |
| + const __u32 kCapsRequired = |
| + V4L2_CAP_VIDEO_CAPTURE_MPLANE | |
| + V4L2_CAP_VIDEO_OUTPUT_MPLANE | |
| + V4L2_CAP_STREAMING; |
| + errno = 0; |
|
Pawel Osciak
2012/11/06 19:26:17
You have quite a lot of those 6 lines everywhere.
sheu
2012/11/08 21:16:31
Done.
|
| + if (ioctl(mfc_fd_, VIDIOC_QUERYCAP, &caps) != 0) { |
| + DPLOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP"; |
| + NOTIFY_ERROR(PLATFORM_FAILURE); |
| + return false; |
| + } else if ((caps.capabilities & kCapsRequired) != kCapsRequired) { |
| + DLOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP" |
| + ", caps check failed: 0x" << std::hex << caps.capabilities; |
| + NOTIFY_ERROR(PLATFORM_FAILURE); |
| + return false; |
| + } |
| + errno = 0; |
| + if (ioctl(gsc_fd_, VIDIOC_QUERYCAP, &caps) != 0) { |
| + DPLOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP"; |
| + NOTIFY_ERROR(PLATFORM_FAILURE); |
| + return false; |
| + } else if ((caps.capabilities & kCapsRequired) != kCapsRequired) { |
| + DLOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP" |
| + ", caps check failed: 0x" << std::hex << caps.capabilities; |
| + NOTIFY_ERROR(PLATFORM_FAILURE); |
| + return false; |
| + } |
| + |
| + // Some random ioctls that Exynos requires. |
| + struct v4l2_control control; |
| + memset(&control, 0, sizeof(control)); |
| + control.id = V4L2_CID_MPEG_MFC51_VIDEO_DECODER_H264_DISPLAY_DELAY; // also VP8 |
| + control.value = 8; // Magic number from Samsung folks. |
| + errno = 0; |
| + if (ioctl(mfc_fd_, VIDIOC_S_CTRL, &control) != 0) { |
| + DPLOG(ERROR) << "Initialize(): ioctl() failed: " |
| + "V4L2_CID_MPEG_MFC51_VIDEO_DECODER_H264_DISPLAY_DELAY"; |
| + NOTIFY_ERROR(PLATFORM_FAILURE); |
| + return false; |
| + } |
| + |
| + if (!make_context_current_.Run()) { |
| + DLOG(ERROR) << "Initialize(): could not make context current"; |
| + NOTIFY_ERROR(PLATFORM_FAILURE); |
| + return false; |
| + } |
| + |
| + if (!CreateMfcInputBuffers()) { |
| + NOTIFY_ERROR(PLATFORM_FAILURE); |
| + return false; |
| + } |
| + |
| + // MFC output format has to be setup before streaming starts. |
| + struct v4l2_format format; |
| + memset(&format, 0, sizeof(format)); |
| + format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| + format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_NV12MT_16X16; |
| + if (ioctl(mfc_fd_, VIDIOC_S_FMT, &format) != 0) { |
| + DPLOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_S_FMT"; |
| + return false; |
| + } |
| + |
| + if (!decoder_thread_.Start()) { |
| + DLOG(ERROR) << "Initialize(): decoder thread failed to start"; |
| + NOTIFY_ERROR(PLATFORM_FAILURE); |
| + return false; |
| + } |
| + |
| + SetDecoderState(kInitialized); |
| + |
| + child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind( |
| + &Client::NotifyInitializeDone, client_)); |
| + return true; |
| +} |
|
Pawel Osciak
2012/11/06 19:26:17
If this function fails somewhere along the way, wi
sheu
2012/11/08 21:16:31
I assume that the client will take care of calling
Pawel Osciak
2012/11/13 18:10:39
Not exactly, in VAVDA, VaapiH264Decoder::Destroy()
sheu
2012/11/17 03:25:08
Ami mentioned that the destructor isn't called exp
Pawel Osciak
2012/11/20 18:36:00
My point is, this is very different from VAVDA. In
|
| + |
// Client entry point (child thread): maps the shared-memory bitstream buffer
// and hands ownership of the mapping to the decoder thread.  On an unmappable
// buffer the error is reported as UNREADABLE_INPUT.
void ExynosVideoDecodeAccelerator::Decode(
    const media::BitstreamBuffer& bitstream_buffer) {
  DVLOG(1) << "Decode(): input_id=" << bitstream_buffer.id()
           << ", size=" << bitstream_buffer.size();
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());

  // read_only=true: the decoder only ever copies out of this mapping.
  scoped_ptr<BitstreamBufferRecord> bitstream_record(new BitstreamBufferRecord(
      new base::SharedMemory(bitstream_buffer.handle(), true),
      bitstream_buffer.size(), bitstream_buffer.id()));
  if (!bitstream_record->shm->Map(bitstream_buffer.size())) {
    DLOG(ERROR) << "Decode(): could not map bitstream_buffer";
    NOTIFY_ERROR(UNREADABLE_INPUT);
    return;
  }
  DVLOG(3) << "Decode(): mapped to addr=" << bitstream_record->shm->memory();

  // DecodeTask() will take care of running a DecodeBufferTask().
  decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
      &ExynosVideoDecodeAccelerator::DecodeTask, base::Unretained(this),
      base::Passed(&bitstream_record)));
}
| + |
| +void ExynosVideoDecodeAccelerator::AssignPictureBuffers( |
| + const std::vector<media::PictureBuffer>& buffers) { |
| + DVLOG(3) << "AssignPictureBuffers(): buffer_count=" << buffers.size(); |
| + DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
| + |
| + if (!make_context_current_.Run()) { |
| + DLOG(ERROR) << "AssignPictureBuffers(): could not make context current"; |
| + NOTIFY_ERROR(PLATFORM_FAILURE); |
| + return; |
| + } |
| + |
| + DCHECK_EQ(gsc_output_buffer_count_, static_cast<int>(buffers.size())); |
| + scoped_ptr<EGLImageKHRArrayRef> egl_images_ref( |
| + new EGLImageKHRArrayRef( |
| + egl_display_, new EGLImageKHR[buffers.size()], |
| + new int[buffers.size()], buffers.size())); |
| + for (int i = 0; i < egl_images_ref->egl_images_count; ++i) { |
| + egl_images_ref->egl_images[i] = EGL_NO_IMAGE_KHR; |
| + egl_images_ref->egl_image_fds[i] = -1; |
| + } |
| + |
| + const static EGLint kImageAttrs[] = { |
| + EGL_IMAGE_PRESERVED_KHR, 0, |
| + EGL_NONE, |
| + }; |
| + Display* x_display = base::MessagePumpForUI::GetDefaultXDisplay(); |
| + glActiveTexture(GL_TEXTURE0); |
| + for (int i = 0; i < egl_images_ref->egl_images_count; ++i) { |
| + // Create the X pixmap and then create an EGLImageKHR from it, so we can |
| + // get dma_buf backing. |
| + Pixmap pixmap = XCreatePixmap(x_display, RootWindow(x_display, 0), |
| + buffers[i].size().width(), buffers[i].size().height(), 32); |
| + if (!pixmap) { |
| + DLOG(ERROR) << "AssignPictureBuffers(): could not create X pixmap"; |
| + NOTIFY_ERROR(PLATFORM_FAILURE); |
| + return; |
| + } |
| + glBindTexture(GL_TEXTURE_2D, buffers[i].texture_id()); |
| + EGLImageKHR egl_image; |
| + egl_image = eglCreateImageKHR( |
| + egl_display_, EGL_NO_CONTEXT, EGL_NATIVE_PIXMAP_KHR, |
| + (EGLClientBuffer)pixmap, kImageAttrs); |
| + // We can free the X pixmap immediately -- according to the |
| + // EGL_KHR_image_base spec, the backing storage does not go away until the |
| + // last referencing EGLImage is destroyed. |
| + XFreePixmap(x_display, pixmap); |
| + if (egl_image == EGL_NO_IMAGE_KHR) { |
| + DLOG(ERROR) << "AssignPictureBuffers(): could not create EGLImageKHR"; |
| + NOTIFY_ERROR(PLATFORM_FAILURE); |
| + return; |
| + } |
| + egl_images_ref->egl_images[i] = egl_image; |
| + int fd; |
| + if (!mali_egl_image_get_buffer_ext_phandle( |
| + egl_images_ref->egl_images[i], NULL, &fd)) { |
| + DLOG(ERROR) << "AssignPictureBuffers(): " |
| + << "could not get EGLImageKHR dmabuf fd"; |
| + NOTIFY_ERROR(PLATFORM_FAILURE); |
| + return; |
| + } |
| + egl_images_ref->egl_image_fds[i] = fd; |
| + glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, egl_image); |
| + } |
| + decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind( |
| + &ExynosVideoDecodeAccelerator::AssignPictureBuffersTask, |
| + base::Unretained(this), base::Passed(&egl_images_ref))); |
| +} |
| + |
// Client entry point (child thread): inserts an EGL fence into the client's
// GL stream so the decoder thread can wait until the client has finished
// reading the texture before recycling the underlying buffer.
void ExynosVideoDecodeAccelerator::ReusePictureBuffer(int32 picture_buffer_id) {
  DVLOG(3) << "ReusePictureBuffer(): picture_buffer_id=" << picture_buffer_id;
  // Must be run on child thread, as we'll insert a sync in the EGL context.
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());

  if (!make_context_current_.Run()) {
    DLOG(ERROR) << "ReusePictureBuffer(): could not make context current";
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return;
  }

  EGLSyncKHR egl_sync;
  egl_sync = egl_create_sync_khr(egl_display_, EGL_SYNC_FENCE_KHR, NULL);
  if (egl_sync == EGL_NO_SYNC_KHR) {
    DLOG(ERROR) << "ReusePictureBuffer(): eglCreateSyncKHR() failed";
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return;
  }

  // Hand the fence to the decoder thread; the ref destroys it if the task
  // never consumes it.
  scoped_ptr<EGLSyncKHRRef> egl_sync_ref(new EGLSyncKHRRef(
      egl_display_, egl_sync));
  decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
      &ExynosVideoDecodeAccelerator::ReusePictureBufferTask,
      base::Unretained(this), picture_buffer_id, base::Passed(&egl_sync_ref)));
}
| + |
// Client entry point (child thread): defers the flush to the decoder thread.
void ExynosVideoDecodeAccelerator::Flush() {
  DVLOG(3) << "Flush()";
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
      &ExynosVideoDecodeAccelerator::FlushTask, base::Unretained(this)));
}
| + |
// Client entry point (child thread): defers the reset to the decoder thread.
void ExynosVideoDecodeAccelerator::Reset() {
  DVLOG(3) << "Reset()";
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
      &ExynosVideoDecodeAccelerator::ResetTask, base::Unretained(this)));
}
| + |
// Client entry point (child thread): tears down the decoder thread, releases
// all device buffers and fds, then deletes |this|.  No client callbacks fire
// after this point (weak pointers are invalidated first).
void ExynosVideoDecodeAccelerator::Destroy() {
  DVLOG(3) << "Destroy()";
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());

  // We're destroying; cancel all callbacks.
  client_ptr_factory_.InvalidateWeakPtrs();

  // If the decoder thread is running, destroy using posted task.
  if (decoder_thread_.message_loop_proxy()->PostTask(FROM_HERE, base::Bind(
      &ExynosVideoDecodeAccelerator::DestroyTask, base::Unretained(this)))) {
    // DestroyTask() will cause the decoder_thread_ to flush all tasks.
    decoder_thread_.Stop();
  } else {
    // Otherwise, call the destroy task directly.
    DestroyTask();
  }

  // Nuke the entire site from orbit -- it's the only way to be sure.
  // Buffer destruction must precede close(): the records hold mmap()ed
  // segments and dma_buf fds tied to the device fds.
  if (gsc_fd_ != -1) {
    DestroyGscInputBuffers();
    DestroyGscOutputBuffers();
    close(gsc_fd_);
    gsc_fd_ = -1;
  }
  if (mfc_fd_ != -1) {
    DestroyMfcInputBuffers();
    DestroyMfcOutputBuffers();
    close(mfc_fd_);
    mfc_fd_ = -1;
  }

  // Decoder thread is stopped by now; safe to touch decoder_state_.
  // Set to kError state just in case.
  SetDecoderState(kError);

  delete this;
}
| + |
| +// static |
| +void ExynosVideoDecodeAccelerator::PreSandboxInitialization() { |
| + DVLOG(3) << "PreSandboxInitialization()"; |
| + errno = 0; |
| + libmali_handle = dlopen(EXYNOS_MALI_DRIVER, RTLD_LAZY | RTLD_LOCAL); |
| + if (libmali_handle == NULL) { |
| + DPLOG(ERROR) << "failed to dlopen() " << EXYNOS_MALI_DRIVER; |
| + } |
| +} |
| + |
| +// static |
| +bool ExynosVideoDecodeAccelerator::PostSandboxInitialization() { |
| + DVLOG(3) << "PostSandboxInitialization()"; |
| + if (libmali_handle == NULL) { |
| + DLOG(ERROR) << "PostSandboxInitialization(): no " << EXYNOS_MALI_DRIVER |
| + " driver handle"; |
| + return false; |
| + } |
| + |
| + errno = 0; |
| + mali_egl_image_get_buffer_ext_phandle = |
| + reinterpret_cast<EGLBoolean(*)(EGLImageKHR, EGLint*, void*)>( |
| + dlsym(libmali_handle, "mali_egl_image_get_buffer_ext_phandle")); |
| + if (mali_egl_image_get_buffer_ext_phandle == NULL) { |
| + DPLOG(ERROR) << "PostSandboxInitialization(): failed to dlsym()" |
| + " mali_egl_image_get_buffer_ext_phandle"; |
| + return false; |
| + } |
| + |
| + errno = 0; |
| + egl_create_sync_khr = |
| + reinterpret_cast<EGLSyncKHR(*)(EGLDisplay, EGLenum, const EGLint*)>( |
| + dlsym(libmali_handle, "eglCreateSyncKHR")); |
| + if (egl_create_sync_khr == NULL) { |
| + DPLOG(ERROR) << "PostSandboxInitialization(): failed to dlsym()" |
| + " eglCreateSyncKHR"; |
| + return false; |
| + } |
| + |
| + errno = 0; |
| + egl_destroy_sync_khr = |
| + reinterpret_cast<EGLBoolean(*)(EGLDisplay, EGLSyncKHR)>( |
| + dlsym(libmali_handle, "eglDestroySyncKHR")); |
| + if (egl_destroy_sync_khr == NULL) { |
| + DPLOG(ERROR) << "PostSandboxInitialization(): failed to dlsym()" |
| + " eglDestroySyncKHR"; |
| + return false; |
| + } |
| + |
| + errno = 0; |
| + egl_client_wait_sync_khr = |
| + reinterpret_cast<EGLint(*)(EGLDisplay, EGLSyncKHR, EGLint, EGLTimeKHR)>( |
| + dlsym(libmali_handle, "eglClientWaitSyncKHR")); |
| + if (egl_client_wait_sync_khr == NULL) { |
| + DPLOG(ERROR) << "PostSandboxInitialization(): failed to dlsym()" |
| + " eglClientWaitSyncKHR"; |
| + return false; |
| + } |
| + |
| + return true; |
| +} |
| + |
// Decoder-thread half of Decode(): queues the mapped bitstream buffer and
// kicks a DecodeBufferTask().  Buffers arriving during reset or after an
// error are dropped (their SharedMemory mapping is released by scoped_ptr).
void ExynosVideoDecodeAccelerator::DecodeTask(
    scoped_ptr<BitstreamBufferRecord> bitstream_record) {
  DVLOG(3) << "DecodeTask(): input_id=" << bitstream_record->input_id;
  DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
  DCHECK_NE(decoder_state_, kUninitialized);

  if (decoder_state_ == kResetting) {
    DVLOG(2) << "DecodeTask(): early out: kResetting state";
    return;
  } else if (decoder_state_ == kError) {
    DVLOG(2) << "DecodeTask(): early out: kError state";
    return;
  }

  // The queue is pushed at the front and popped at the back (FIFO order).
  decoder_input_queue_.push_front(
      linked_ptr<BitstreamBufferRecord>(bitstream_record.release()));
  decoder_decode_buffer_tasks_scheduled_++;
  DecodeBufferTask();
}
| + |
// Processes (part of) the current bitstream buffer on the decoder thread.
// Dispatches to DecodeBufferInitial() until the output format is known, then
// to DecodeBufferContinue().  A false return from either means "retry this
// same buffer later" (e.g. out of input buffers) -- the buffer is only
// completed and returned to the client on a true result.
void ExynosVideoDecodeAccelerator::DecodeBufferTask() {
  DVLOG(3) << "DecodeBufferTask()";
  DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
  DCHECK_NE(decoder_state_, kUninitialized);

  decoder_decode_buffer_tasks_scheduled_--;

  if (decoder_state_ == kResetting) {
    DVLOG(2) << "DecodeBufferTask(): early out: kResetting state";
    return;
  } else if (decoder_state_ == kError) {
    DVLOG(2) << "DecodeBufferTask(): early out: kError state";
    return;
  }

  if (decoder_current_bitstream_buffer_ == NULL) {
    if (decoder_input_queue_.empty()) {
      // We're waiting for a new buffer -- exit without scheduling a new task.
      return;
    }
    // Setup to use the next buffer (queue back is the oldest entry).
    decoder_current_bitstream_buffer_.reset(
        decoder_input_queue_.back().release());
    decoder_input_queue_.pop_back();
    DVLOG(3) << "DecodeBufferTask(): reading input_id="
             << decoder_current_bitstream_buffer_->input_id
             << ", addr=" << decoder_current_bitstream_buffer_->shm->memory()
             << ", size=" << decoder_current_bitstream_buffer_->size;
  }
  bool decode_result = false;
  const void* data = decoder_current_bitstream_buffer_->shm->memory();
  size_t size = decoder_current_bitstream_buffer_->size;
  switch (decoder_state_) {
    case kInitialized:
    case kAfterReset:
      decode_result = DecodeBufferInitial(data, size);
      break;
    case kDecoding:
      decode_result = DecodeBufferContinue(data, size);
      break;
    default:
      NOTIFY_ERROR(ILLEGAL_STATE);
      return;
  }
  if (decoder_state_ == kError) {
    // Failed during decode.
    return;
  } else if (!decode_result) {
    // We might not have failed decode completely, but returned false due to
    // insufficient resources, etc.  Retry this this buffer later; exit without
    // scheduling another task.
    return;
  }

  // Our current bitstream buffer is done; return it.
  int32 input_id = decoder_current_bitstream_buffer_->input_id;
  DVLOG(3) << "DecodeBufferTask(): finished input_id=" << input_id;
  decoder_current_bitstream_buffer_.reset(NULL);
  child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
      &Client::NotifyEndOfBitstreamBuffer, client_, input_id));

  // If we're behind on tasks, schedule another one.
  if (decoder_decode_buffer_tasks_scheduled_ <
      static_cast<int>(decoder_input_queue_.size())) {
    decoder_decode_buffer_tasks_scheduled_++;
    decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
        &ExynosVideoDecodeAccelerator::DecodeBufferTask,
        base::Unretained(this)));
  }
}
| + |
| +bool ExynosVideoDecodeAccelerator::DecodeBufferInitial( |
| + const void* data, size_t size) { |
| + DVLOG(3) << "DecodeBufferInitial(): data=" << data << ", size=" << size; |
| + DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current()); |
| + DCHECK_NE(decoder_state_, kUninitialized); |
| + DCHECK_NE(decoder_state_, kDecoding); |
| + DCHECK(!device_poll_thread_.IsRunning()); |
| + // Initial decode. We haven't been able to get output stream format info yet. |
| + // Get it, and start decoding. |
| + |
| + // Copy in and send to HW. |
| + if (!AppendToInputFrame(data, size) || !FlushInputFrame()) |
| + return false; |
| + |
| + // Recycle buffers. |
| + DequeueMfc(); |
| + |
| + // Check and see if we have format info yet. |
| + struct v4l2_format format; |
| + format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| + errno = 0; |
| + if (ioctl(mfc_fd_, VIDIOC_G_FMT, &format) != 0) { |
| + if (errno == EINVAL) { |
| + // We will get EINVAL if we haven't seen sufficient stream to decode the |
| + // format. Return true and go to the next buffer. |
| + return true; |
| + } else { |
| + DPLOG(ERROR) << "DecodeBufferInitial(): ioctl() failed: VIDIOC_G_FMT"; |
| + NOTIFY_ERROR(PLATFORM_FAILURE); |
| + return false; |
| + } |
| + } |
| + |
| + // Run this initialization only on first startup. |
| + if (decoder_state_ == kInitialized) { |
| + DVLOG(3) << "DecodeBufferInitial(): running one-time initialization"; |
| + // Success! Setup our parameters. |
| + DCHECK_EQ(format.fmt.pix_mp.num_planes, 2); |
|
Pawel Osciak
2012/11/06 19:26:17
I'm wondering if maybe this should be more than a
sheu
2012/11/08 21:16:31
Done.
|
| + // We don't handle midstream resizes right now. |
| + if (!frame_buffer_size_.IsEmpty() && frame_buffer_size_ != |
| + gfx::Size(format.fmt.pix_mp.width, frame_buffer_size_.height())) { |
| + // We don't handle mistream resizes right now. |
| + NOTIMPLEMENTED(); |
| + NOTIFY_ERROR(UNREADABLE_INPUT); |
| + return false; |
| + } |
| + frame_buffer_size_.SetSize( |
| + format.fmt.pix_mp.width, format.fmt.pix_mp.height); |
| + mfc_output_buffer_size_[0] = format.fmt.pix_mp.plane_fmt[0].sizeimage; |
| + mfc_output_buffer_size_[1] = format.fmt.pix_mp.plane_fmt[1].sizeimage; |
| + mfc_output_buffer_pixelformat_ = format.fmt.pix_mp.pixelformat; |
| + |
| + // Create our other buffers. |
| + if (!CreateMfcOutputBuffers() || !CreateGscInputBuffers() || |
| + !CreateGscOutputBuffers()) { |
| + NOTIFY_ERROR(PLATFORM_FAILURE); |
| + return false; |
| + } |
| + } |
| + |
| + // StartDevicePoll will raise the error if there is one. |
| + if (!StartDevicePoll()) { |
| + return false; |
| + } |
| + |
| + decoder_state_ = kDecoding; |
| + |
| + // This buffer contained the header that kicks off decoding of the video |
| + // stream. Return false here so it gets recycled into the next |
| + // DecodeBufferContinue() and the start of actual output stream. |
| + return false; |
| +} |
| + |
| +bool ExynosVideoDecodeAccelerator::DecodeBufferContinue( |
| + const void* data, size_t size) { |
| + DVLOG(3) << "DecodeBufferContinue(): data=" << data << ", size=" << size; |
| + DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current()); |
| + DCHECK_EQ(decoder_state_, kDecoding); |
| + |
| + // We've already setup our output stream parameters, so just keep on truckin'. |
| + return (AppendToInputFrame(data, size) && FlushInputFrame()); |
| +} |
| + |
| +bool ExynosVideoDecodeAccelerator::AppendToInputFrame( |
| + const void* data, size_t size) { |
| + DVLOG(3) << "AppendToInputFrame()"; |
| + DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current()); |
| + // We should have started streaming when we created MFC input buffers. |
| + DCHECK(mfc_input_streamon_); |
| + DCHECK(decoder_state_ == kInitialized || decoder_state_ == kDecoding); |
| + |
| + // Flush if we're too big |
| + if (decoder_current_input_buffer_ != -1) { |
| + MfcInputRecord& input_record = |
| + mfc_input_buffer_map_[decoder_current_input_buffer_]; |
| + if (input_record.bytes_used + size > input_record.length) { |
| + if (!FlushInputFrame()) |
| + return false; |
| + decoder_current_input_buffer_ = -1; |
| + } |
| + } |
| + |
| + // Try to get an available input buffer |
| + if (decoder_current_input_buffer_ == -1) { |
| + if (mfc_free_input_buffers_.empty()) { |
| + // See if we can get more free buffers from HW |
| + DequeueMfc(); |
| + if (mfc_free_input_buffers_.empty()) { |
| + // Nope! |
| + DVLOG(2) << "AppendToInputFrame(): stalled for input buffers"; |
| + return false; |
| + } |
| + } |
| + decoder_current_input_buffer_ = mfc_free_input_buffers_.back(); |
| + mfc_free_input_buffers_.pop_back(); |
| + MfcInputRecord& input_record = |
| + mfc_input_buffer_map_[decoder_current_input_buffer_]; |
| + DCHECK_EQ(input_record.bytes_used, 0); |
| + DCHECK_EQ(input_record.input_id, -1); |
| + DCHECK(decoder_current_bitstream_buffer_ != NULL); |
| + input_record.input_id = decoder_current_bitstream_buffer_->input_id; |
| + } |
| + |
| + // Copy in to the buffer. |
| + MfcInputRecord& input_record = |
| + mfc_input_buffer_map_[decoder_current_input_buffer_]; |
| + if (size > input_record.length - input_record.bytes_used) { |
| + LOG(ERROR) << "AppendToInputFrame(): over-size frame, erroring"; |
| + NOTIFY_ERROR(UNREADABLE_INPUT); |
| + return false; |
| + } |
| + memcpy((char*)input_record.offset + input_record.bytes_used, data, size); |
| + input_record.bytes_used += size; |
| + |
| + return true; |
| +} |
| + |
// Submits the current (partially filled) MFC input buffer to the hardware
// queue.  A no-op (returns true) if there is no current buffer or it is
// empty.  Returns false only if EnqueueMfc() pushed the decoder into kError.
bool ExynosVideoDecodeAccelerator::FlushInputFrame() {
  DVLOG(3) << "FlushInputFrame()";
  DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
  DCHECK(decoder_state_ == kInitialized || decoder_state_ == kDecoding);
  if (decoder_current_input_buffer_ == -1)
    return true;

  MfcInputRecord& input_record =
      mfc_input_buffer_map_[decoder_current_input_buffer_];
  if (input_record.bytes_used == 0)
    return true;

  // Queue it to MFC.
  mfc_input_ready_queue_.push_back(decoder_current_input_buffer_);
  decoder_frames_inflight_++;
  decoder_current_input_buffer_ = -1;
  DVLOG(3) << "FlushInputFrame(): submitting input_id="
           << input_record.input_id;
  // Kick the MFC once since there's new available input for it.
  EnqueueMfc();

  return (decoder_state_ != kError);
}
| + |
// Decoder-thread half of AssignPictureBuffers(): takes ownership of each
// EGLImage/dma_buf fd out of the array ref into the GSC output records and
// marks the buffers free.  Runs even during kResetting so buffers are not
// lost across a reset; only kError aborts (the ref's destructor then cleans
// up the images/fds).
void ExynosVideoDecodeAccelerator::AssignPictureBuffersTask(
    scoped_ptr<EGLImageKHRArrayRef> egl_images_ref) {
  DVLOG(3) << "AssignPictureBuffersTask()";
  DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
  DCHECK_NE(decoder_state_, kUninitialized);

  // We run AssignPictureBuffersTask even if we're in kResetting.
  if (decoder_state_ == kError) {
    DVLOG(2) << "AssignPictureBuffersTask(): early out: kError state";
    return;
  }

  DCHECK_EQ(egl_images_ref->egl_images_count,
            static_cast<int>(gsc_output_buffer_map_.size()));
  for (size_t i = 0; i < gsc_output_buffer_map_.size(); ++i) {
    // We should be blank right now.
    GscOutputRecord& output_record = gsc_output_buffer_map_[i];
    DCHECK_EQ(output_record.fd, -1);
    DCHECK_EQ(output_record.egl_image, EGL_NO_IMAGE_KHR);
    DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
    DCHECK_EQ(output_record.picture_id, -1);
    output_record.fd = egl_images_ref->egl_image_fds[i];
    output_record.egl_image = egl_images_ref->egl_images[i];
    output_record.picture_id = i;

    // Take ownership of the EGLImage and fd.
    egl_images_ref->egl_images[i] = EGL_NO_IMAGE_KHR;
    egl_images_ref->egl_image_fds[i] = -1;
    // And add this buffer to the free list.
    gsc_free_output_buffers_.push_front(i);
  }

  // StartDevicePoll will raise the error if there is one.
  StartDevicePoll();
}
| + |
// Posted by the device-poll thread when either device signals readiness:
// drains completed buffers from MFC and GSC, refills their queues, logs the
// pipeline occupancy, and schedules more decode work if input is backed up.
void ExynosVideoDecodeAccelerator::ServiceDeviceTask() {
  DVLOG(3) << "ServiceDeviceTask()";
  DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
  DCHECK_NE(decoder_state_, kUninitialized);
  DCHECK_NE(decoder_state_, kInitialized);
  DCHECK_NE(decoder_state_, kAfterReset);

  if (decoder_state_ == kResetting) {
    DVLOG(2) << "ServiceDeviceTask(): early out: kResetting state";
    return;
  } else if (decoder_state_ == kError) {
    DVLOG(2) << "ServiceDeviceTask(): early out: kError state";
    return;
  }

  // Dequeue before enqueue so freshly-freed buffers can be re-queued in the
  // same pass.
  DequeueMfc();
  DequeueGsc();
  EnqueueMfc();
  EnqueueGsc();

  // Pipeline occupancy snapshot: decoder input -> MFC in/out -> GSC in/out
  // -> frames held by the client.
  DVLOG(1) << "ServiceDeviceTask(): buffer counts: DEC["
           << decoder_input_queue_.size() << "->"
           << mfc_input_ready_queue_.size() << "] => MFC["
           << mfc_free_input_buffers_.size() << "/"
           << mfc_input_buffer_count_ << "->"
           << mfc_free_output_buffers_.size() << "/"
           << mfc_output_buffer_count_ << "] => "
           << mfc_output_gsc_input_queue_.size() << " => GSC["
           << gsc_free_input_buffers_.size() << "/"
           << gsc_input_buffer_count_ << "->"
           << gsc_free_output_buffers_.size() << "/"
           << gsc_output_buffer_count_ << "] => VDA["
           << decoder_frames_at_client_ << "]";

  // If we're behind on decode, schedule another one.
  if (decoder_decode_buffer_tasks_scheduled_ <
      static_cast<int>(decoder_input_queue_.size())) {
    decoder_decode_buffer_tasks_scheduled_++;
    decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
        &ExynosVideoDecodeAccelerator::DecodeBufferTask,
        base::Unretained(this)));
  }
}
| + |
// Feeds the MFC (decoder) device.  Queues every ready bitstream buffer onto
// the MFC input (VIDEO_OUTPUT) queue, then stocks the MFC output
// (VIDEO_CAPTURE) queue with all available free buffers for decoded frames.
// Streaming is started lazily on each queue the first time a buffer is
// queued, since VIDIOC_STREAMON needs at least one queued buffer to succeed.
void ExynosVideoDecodeAccelerator::EnqueueMfc() {
  DVLOG(3) << "EnqueueMfc()";
  DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
  DCHECK_NE(decoder_state_, kUninitialized);

  // Queue all filled bitstream buffers waiting in the ready queue.
  struct v4l2_buffer qbuf;
  struct v4l2_plane qbuf_planes[2];
  while (!mfc_input_ready_queue_.empty()) {
    // Enqueue the MFC input (VIDEO_OUTPUT) buffer.
    int buffer = mfc_input_ready_queue_.back();
    MfcInputRecord& input_record = mfc_input_buffer_map_[buffer];
    DCHECK(!input_record.at_device);
    memset(&qbuf, 0, sizeof(qbuf));
    memset(qbuf_planes, 0, sizeof(qbuf_planes));
    qbuf.index = buffer;
    qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    // Smuggle the bitstream buffer id through the kernel in the timestamp
    // field; DequeueMfc() reads it back off the matching decoded frame.
    qbuf.timestamp.tv_sec = input_record.input_id;
    qbuf.memory = V4L2_MEMORY_MMAP;
    qbuf.m.planes = qbuf_planes;
    qbuf.m.planes[0].bytesused = input_record.bytes_used;
    qbuf.length = 1;  // Bitstream input is a single plane.
    errno = 0;
    if (ioctl(mfc_fd_, VIDIOC_QBUF, &qbuf) != 0) {
      DPLOG(ERROR) << "EnqueueMfc(): ioctl() failed: VIDIOC_QBUF";
      NOTIFY_ERROR(PLATFORM_FAILURE);
      return;
    }
    input_record.at_device = true;
    mfc_input_ready_queue_.pop_back();
    mfc_input_buffer_queued_count_++;
    DVLOG(3) << "EnqueueMfc(): enqueued input_id=" << input_record.input_id;
    if (!mfc_input_streamon_) {
      // First buffer queued: start streaming the input side.
      __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
      errno = 0;
      if (ioctl(mfc_fd_, VIDIOC_STREAMON, &type) != 0) {
        DPLOG(ERROR) << "EnqueueMfc(): ioctl() failed: VIDIOC_STREAMON";
        NOTIFY_ERROR(PLATFORM_FAILURE);
        return;
      }
      mfc_input_streamon_ = true;
    }
  }

  // Enqueue all the MFC output (VIDEO_CAPTURE) buffers we can.
  while (!mfc_free_output_buffers_.empty()) {
    int buffer = mfc_free_output_buffers_.back();
    MfcOutputRecord& output_record = mfc_output_buffer_map_[buffer];
    DCHECK(!output_record.at_device);
    DCHECK_EQ(output_record.input_id, -1);
    memset(&qbuf, 0, sizeof(qbuf));
    memset(qbuf_planes, 0, sizeof(qbuf_planes));
    qbuf.index = buffer;
    qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    qbuf.memory = V4L2_MEMORY_MMAP;
    qbuf.m.planes = qbuf_planes;
    // Two planes -- presumably luma and interleaved chroma; confirm against
    // the MFC output format negotiated at setup.
    qbuf.length = 2;
    errno = 0;
    if (ioctl(mfc_fd_, VIDIOC_QBUF, &qbuf) != 0) {
      DPLOG(ERROR) << "EnqueueMfc(): ioctl() failed: VIDIOC_QBUF";
      NOTIFY_ERROR(PLATFORM_FAILURE);
      return;
    }
    output_record.at_device = true;
    mfc_free_output_buffers_.pop_back();
    if (!mfc_output_streamon_) {
      // First buffer queued: start streaming the output side.
      __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
      errno = 0;
      if (ioctl(mfc_fd_, VIDIOC_STREAMON, &type) != 0) {
        DPLOG(ERROR) << "EnqueueMfc(): ioctl() failed: VIDIOC_STREAMON";
        NOTIFY_ERROR(PLATFORM_FAILURE);
        return;
      }
      mfc_output_streamon_ = true;
    }
  }
}
| + |
// Reclaims buffers from the MFC (decoder) device: recycles completed input
// (VIDEO_OUTPUT) buffers to the free list, and moves decoded output
// (VIDEO_CAPTURE) frames onto the queue feeding GSC for format conversion.
void ExynosVideoDecodeAccelerator::DequeueMfc() {
  DVLOG(3) << "DequeueMfc()";
  DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
  DCHECK_NE(decoder_state_, kUninitialized);
  DCHECK_NE(decoder_state_, kInitialized);

  // Dequeue completed MFC input (VIDEO_OUTPUT) buffers, and recycle to the free
  // list.  Note that if we ever run completely dry of input buffers on the MFC
  // device, epoll() will return EPOLLERR and our DevicePollLoop() will exit
  // early.  Work around this by never _completely_ draining the MFC input
  // queue.
  struct v4l2_buffer dqbuf;
  struct v4l2_plane planes[2];
  // Accounting invariant: every input buffer is in exactly one of four
  // places -- being filled, ready, free, or queued to the device.
  DCHECK_EQ((decoder_current_input_buffer_ != -1 ? 1 : 0) +
            static_cast<int>(mfc_input_ready_queue_.size()) +
            static_cast<int>(mfc_free_input_buffers_.size()) +
            mfc_input_buffer_queued_count_, mfc_input_buffer_count_);
  while (mfc_input_buffer_queued_count_ > 1) {  // "> 1", not "> 0": see above.
    memset(&dqbuf, 0, sizeof(dqbuf));
    dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    dqbuf.memory = V4L2_MEMORY_MMAP;
    errno = 0;
    if (ioctl(mfc_fd_, VIDIOC_DQBUF, &dqbuf) != 0) {
      if (errno == EAGAIN) {
        // No more completed buffers to dequeue.
        break;
      } else if (errno == EINVAL) {
        // NOTE(review): EINVAL is interpreted as "queue not streaming yet";
        // it can also indicate a malformed request -- confirm against the
        // MFC driver (review suggested also checking the streamon flag).
        // We're not streaming this queue; skip.
        DVLOG(2) << "DequeueMfc(): VIDEO_OUTPUT not streaming, skipping";
        break;
      } else {
        DPLOG(ERROR) << "DequeueMfc(): ioctl() failed: VIDIOC_DQBUF";
        NOTIFY_ERROR(PLATFORM_FAILURE);
        return;
      }
    }
    MfcInputRecord& input_record = mfc_input_buffer_map_[dqbuf.index];
    DCHECK(input_record.at_device);
    input_record.at_device = false;
    input_record.bytes_used = 0;
    input_record.input_id = -1;
    mfc_free_input_buffers_.push_back(dqbuf.index);
    mfc_input_buffer_queued_count_--;
  }

  // Dequeue completed MFC output (VIDEO_CAPTURE) buffers, and queue to the
  // completed queue.
  memset(&dqbuf, 0, sizeof(dqbuf));
  memset(planes, 0, sizeof(planes));
  dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  dqbuf.memory = V4L2_MEMORY_MMAP;
  dqbuf.m.planes = planes;
  dqbuf.length = 2;
  errno = 0;
  while (ioctl(mfc_fd_, VIDIOC_DQBUF, &dqbuf) == 0) {
    MfcOutputRecord& output_record = mfc_output_buffer_map_[dqbuf.index];
    DCHECK(output_record.at_device);
    output_record.at_device = false;
    // input_id was round-tripped through the kernel in the timestamp field
    // by EnqueueMfc(); recover it so the frame can be matched to its input.
    output_record.input_id = dqbuf.timestamp.tv_sec;
    output_record.bytes_used[0] = dqbuf.m.planes[0].bytesused;
    output_record.bytes_used[1] = dqbuf.m.planes[1].bytesused;
    DVLOG(3) << "DequeueMfc(): dequeued input_id=" << dqbuf.timestamp.tv_sec;
    mfc_output_gsc_input_queue_.push_front(dqbuf.index);
    // Re-initialize dqbuf for the next VIDIOC_DQBUF; the kernel overwrites
    // most fields on each call.
    memset(&dqbuf, 0, sizeof(dqbuf));
    memset(planes, 0, sizeof(planes));
    dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    dqbuf.memory = V4L2_MEMORY_MMAP;
    dqbuf.m.planes = planes;
    dqbuf.length = 2;
    errno = 0;
  }
  // The dequeue loop only exits on ioctl failure; classify the errno.
  if (errno == EINVAL) {
    // We're not streaming this queue; skip.
    DVLOG(2) << "DequeueMfc(): VIDEO_CAPTURE not streaming, skipping";
  } else if (errno != EAGAIN) {
    DPLOG(ERROR) << "DequeueMfc(): ioctl() failed: VIDIOC_DQBUF";
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return;
  }
}
| + |
// Feeds the GSC (format converter) device.  For each decoded MFC output
// frame waiting in mfc_output_gsc_input_queue_, first "prepares" a GSC
// output (VIDEO_CAPTURE) dmabuf for the converted frame, then queues the MFC
// frame's planes as a USERPTR GSC input (VIDEO_OUTPUT) buffer.  The MFC
// output buffer is deliberately NOT recycled here; DequeueGsc() returns it
// once GSC has finished reading from it.
void ExynosVideoDecodeAccelerator::EnqueueGsc() {
  DVLOG(3) << "EnqueueGsc()";
  DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
  DCHECK_NE(decoder_state_, kUninitialized);
  DCHECK_NE(decoder_state_, kInitialized);
  DCHECK_NE(decoder_state_, kAfterReset);

  // Drain the pipe of completed MFC output buffers.
  struct v4l2_buffer qbuf;
  struct v4l2_plane qbuf_planes[2];
  DCHECK(gsc_output_streamon_);
  while (!mfc_output_gsc_input_queue_.empty()) {
    if (gsc_free_input_buffers_.empty())
      break;
    // Bug workaround: GSC is liable to race conditions if more than one
    // buffer is simultaneously queued.  (Known driver issue; this limits GSC
    // to one conversion in flight at a time.)
    if (gsc_output_buffer_queued_count_ > 0)
      break;
    if (gsc_output_buffer_prepared_count_ == 0) {
      // Enqueue a GSC output (VIDEO_CAPTURE) buffer for the incoming GSC input
      // buffer.
      if (gsc_free_output_buffers_.empty())
        break;
      int buffer = gsc_free_output_buffers_.back();
      GscOutputRecord& output_record = gsc_output_buffer_map_[buffer];
      DCHECK(!output_record.at_device);
      DCHECK(!output_record.at_client);
      if (output_record.egl_sync != EGL_NO_SYNC_KHR) {
        // The client's GL stack may still be reading this texture; block
        // until its fence signals before letting GSC overwrite the buffer.
        // Note that gsc_free_output_buffers_ is a FIFO queue, so we wait on
        // the least-recently-returned buffer.
        egl_client_wait_sync_khr(egl_display_, output_record.egl_sync, 0,
                                 EGL_FOREVER_KHR);
        egl_destroy_sync_khr(egl_display_, output_record.egl_sync);
        output_record.egl_sync = EGL_NO_SYNC_KHR;
      }
      memset(&qbuf, 0, sizeof(qbuf));
      memset(qbuf_planes, 0, sizeof(qbuf_planes));
      qbuf.index = buffer;
      qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
      qbuf.memory = V4L2_MEMORY_DMABUF;
      qbuf.m.planes = qbuf_planes;
      qbuf.m.planes[0].m.fd = output_record.fd;
      qbuf.length = 1;  // Converted output is a single plane.
      errno = 0;
      if (ioctl(gsc_fd_, VIDIOC_QBUF, &qbuf) != 0) {
        DPLOG(ERROR) << "EnqueueGsc(): ioctl() failed: VIDIOC_QBUF";
        NOTIFY_ERROR(PLATFORM_FAILURE);
        return;
      }
      output_record.at_device = true;
      gsc_free_output_buffers_.pop_back();
      gsc_output_buffer_prepared_count_++;
    }
    // Now enqueue the GSC input (VIDEO_OUTPUT) buffer for the complete MFC
    // output buffer.  We defer requeuing the MFC output buffer to its free
    // list, as the GSC input will be using its data.
    int mfc_buffer, gsc_buffer;
    mfc_buffer = mfc_output_gsc_input_queue_.back();
    gsc_buffer = gsc_free_input_buffers_.back();
    MfcOutputRecord& output_record = mfc_output_buffer_map_[mfc_buffer];
    DCHECK(!output_record.at_device);
    GscInputRecord& input_record = gsc_input_buffer_map_[gsc_buffer];
    DCHECK(!input_record.at_device);
    DCHECK_EQ(input_record.mfc_output, -1);
    memset(&qbuf, 0, sizeof(qbuf));
    memset(qbuf_planes, 0, sizeof(qbuf_planes));
    qbuf.index = gsc_buffer;
    qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    // Forward the input_id through the kernel again (see EnqueueMfc()).
    qbuf.timestamp.tv_sec = output_record.input_id;
    qbuf.memory = V4L2_MEMORY_USERPTR;
    qbuf.m.planes = qbuf_planes;
    qbuf.m.planes[0].bytesused = output_record.bytes_used[0];
    qbuf.m.planes[0].length = mfc_output_buffer_size_[0];
    qbuf.m.planes[0].m.userptr = (unsigned long)output_record.offset[0];
    qbuf.m.planes[1].bytesused = output_record.bytes_used[1];
    qbuf.m.planes[1].length = mfc_output_buffer_size_[1];
    qbuf.m.planes[1].m.userptr = (unsigned long)output_record.offset[1];
    qbuf.length = 2;
    errno = 0;
    if (ioctl(gsc_fd_, VIDIOC_QBUF, &qbuf) != 0) {
      DPLOG(ERROR) << "EnqueueGsc(): ioctl() failed: VIDIOC_QBUF";
      NOTIFY_ERROR(PLATFORM_FAILURE);
      return;
    }
    input_record.at_device = true;
    // Remember which MFC buffer backs this GSC input so DequeueGsc() can
    // recycle it when the conversion completes.
    input_record.mfc_output = mfc_buffer;
    output_record.bytes_used[0] = 0;
    output_record.bytes_used[1] = 0;
    mfc_output_gsc_input_queue_.pop_back();
    gsc_free_input_buffers_.pop_back();
    gsc_output_buffer_prepared_count_--;
    gsc_output_buffer_queued_count_++;
    DVLOG(3) << "EnqueueGsc(): enqueued input_id=" << output_record.input_id;
    if (!gsc_input_streamon_) {
      // First buffer queued: start streaming the GSC input side.
      __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
      errno = 0;
      if (ioctl(gsc_fd_, VIDIOC_STREAMON, &type) != 0) {
        DPLOG(ERROR) << "EnqueueGsc(): ioctl() failed: VIDIOC_STREAMON";
        NOTIFY_ERROR(PLATFORM_FAILURE);
        return;
      }
      gsc_input_streamon_ = true;
    }
  }
}
| + |
// Reclaims buffers from the GSC (format converter) device: recycles finished
// GSC input buffers (and the MFC output buffers backing them), and hands
// converted frames to the client via Client::PictureReady.  Also delivers
// the deferred NotifyFlushDone once the last in-flight frame drains.
void ExynosVideoDecodeAccelerator::DequeueGsc() {
  DVLOG(3) << "DequeueGsc()";
  DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
  DCHECK_NE(decoder_state_, kUninitialized);
  DCHECK_NE(decoder_state_, kInitialized);
  DCHECK_NE(decoder_state_, kAfterReset);

  // Dequeue completed GSC input (VIDEO_OUTPUT) buffers, and recycle to the free
  // list.  Also recycle the corresponding MFC output buffers at this time.
  // Note that if we ever run completely dry of input buffers on the GSC device,
  // epoll() will return EPOLLERR and our DevicePollLoop() will exit early.
  // Work around this by never _completely_ draining the GSC input queue.
  struct v4l2_buffer dqbuf;
  while (gsc_input_buffer_count_ >
      static_cast<int>(gsc_free_input_buffers_.size()) + 1) {
    memset(&dqbuf, 0, sizeof(dqbuf));
    dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    dqbuf.memory = V4L2_MEMORY_DMABUF;
    errno = 0;
    if (ioctl(gsc_fd_, VIDIOC_DQBUF, &dqbuf) != 0) {
      if (errno == EAGAIN) {
        // No more completed buffers to dequeue.
        break;
      } else if (errno == EINVAL) {
        // We're not streaming this queue; skip.
        DVLOG(2) << "DequeueGsc(): VIDEO_OUTPUT not streaming, skipping";
        break;
      } else {
        DPLOG(ERROR) << "DequeueGsc(): ioctl() failed: VIDIOC_DQBUF";
        NOTIFY_ERROR(PLATFORM_FAILURE);
        return;
      }
    }
    GscInputRecord& input_record = gsc_input_buffer_map_[dqbuf.index];
    DCHECK(input_record.at_device);
    input_record.at_device = false;
    // GSC is done reading the MFC frame; the MFC output buffer can now go
    // back to its free list.
    mfc_output_buffer_map_[input_record.mfc_output].input_id = -1;
    mfc_free_output_buffers_.push_back(input_record.mfc_output);
    input_record.mfc_output = -1;
    gsc_free_input_buffers_.push_back(dqbuf.index);
  }

  // Dequeue completed GSC output (VIDEO_CAPTURE) buffers, and send them off to
  // the client.  Don't recycle to its free list yet -- we can't do that until
  // ReusePictureBuffer() returns it to us.
  memset(&dqbuf, 0, sizeof(dqbuf));
  dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  dqbuf.memory = V4L2_MEMORY_DMABUF;
  errno = 0;
  while (ioctl(gsc_fd_, VIDIOC_DQBUF, &dqbuf) == 0) {
    GscOutputRecord& output_record = gsc_output_buffer_map_[dqbuf.index];
    DCHECK(output_record.at_device);
    DCHECK(!output_record.at_client);
    DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
    output_record.at_device = false;
    output_record.at_client = true;
    gsc_output_buffer_queued_count_--;
    // Hand the frame to the client on the child thread; the timestamp still
    // carries the originating bitstream buffer's input_id.
    child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
        &Client::PictureReady, client_, media::Picture(
            output_record.picture_id, dqbuf.timestamp.tv_sec)));
    decoder_frames_inflight_--;
    decoder_frames_at_client_++;
    DVLOG(1) << "DequeueGsc(): dequeued input_id=" << dqbuf.timestamp.tv_sec
             << " as picture_id=" << output_record.picture_id;
    if (decoder_frames_inflight_ == 0 && decoder_flush_notify_requested_) {
      // We were asked for a flush notification, so let's do it.
      child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
          &Client::NotifyFlushDone, client_));
    }
    // Re-initialize dqbuf for the next VIDIOC_DQBUF.
    memset(&dqbuf, 0, sizeof(dqbuf));
    dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    dqbuf.memory = V4L2_MEMORY_DMABUF;
    errno = 0;
  }
  // The dequeue loop only exits on ioctl failure; classify the errno.
  if (errno == EINVAL) {
    // We're not streaming this queue; skip.
    DVLOG(2) << "DequeueGsc(): VIDEO_CAPTURE not streaming, skipping";
  } else if (errno != EAGAIN) {
    DPLOG(ERROR) << "DequeueGsc(): ioctl() failed: VIDIOC_DQBUF";
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return;
  }
}
| + |
| +void ExynosVideoDecodeAccelerator::ReusePictureBufferTask( |
| + int32 picture_buffer_id, scoped_ptr<EGLSyncKHRRef> egl_sync_ref) { |
| + DVLOG(3) << "ReusePictureBufferTask(): picture_buffer_id=" |
| + << picture_buffer_id; |
| + DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current()); |
| + |
| + // We run ReusePictureBufferTask even if we're in kResetting. |
| + if (decoder_state_ == kError) { |
| + DVLOG(2) << "ReusePictureBufferTask(): early out: kError state"; |
| + return; |
| + } |
| + |
| + size_t index; |
| + for (index = 0; index < gsc_output_buffer_map_.size(); ++index) |
| + if (gsc_output_buffer_map_[index].picture_id == picture_buffer_id) |
| + break; |
| + |
| + if (index >= gsc_output_buffer_map_.size()) { |
| + DLOG(ERROR) << "ReusePictureBufferTask(): picture_buffer_id not found"; |
| + NOTIFY_ERROR(INVALID_ARGUMENT); |
| + return; |
| + } |
| + |
| + GscOutputRecord& output_record = gsc_output_buffer_map_[index]; |
| + DCHECK(!output_record.at_device); |
| + DCHECK(output_record.at_client); |
| + output_record.at_client = false; |
| + output_record.egl_sync = egl_sync_ref->egl_sync; |
| + gsc_free_output_buffers_.push_front(index); |
| + decoder_frames_at_client_--; |
| + // Take ownership of the EGLSync. |
| + egl_sync_ref->egl_sync = EGL_NO_SYNC_KHR; |
| + // We got a buffer back, so kick the GSC. |
| + EnqueueGsc(); |
| +} |
| + |
| +void ExynosVideoDecodeAccelerator::FlushTask() { |
| + DVLOG(3) << "FlushTask()"; |
| + // Flush the currently-building frame. |
| + |
| + if (decoder_state_ == kResetting) { |
| + DVLOG(2) << "FlushTask(): early out: kResetting state"; |
| + return; |
| + } else if (decoder_state_ == kError) { |
| + DVLOG(2) << "FlushTask(): early out: kError state"; |
| + return; |
| + } |
| + |
| + FlushInputFrame(); |
| + |
| + if (decoder_frames_inflight_ == 0) { |
| + // If we don't have anything actually queued, we can notify immediatey. |
|
Pawel Osciak
2012/11/06 19:26:17
s/immediatey/immediately
sheu
2012/11/08 21:16:31
Done.
|
| + child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind( |
| + &Client::NotifyFlushDone, client_)); |
| + } else { |
| + // We'll flag that we want a flush-finished notification, and just return. |
| + decoder_flush_notify_requested_ = true; |
| + } |
| +} |
| + |
| +void ExynosVideoDecodeAccelerator::ResetTask() { |
| + DVLOG(3) << "ResetTask()"; |
| + DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current()); |
| + // We stop streaming, but we _don't_ destroy our buffers. |
| + if (!StopDevicePoll()) |
| + return; |
| + |
| + if (decoder_current_bitstream_buffer_ != NULL) { |
| + int input_id; |
| + input_id = decoder_current_bitstream_buffer_->input_id; |
| + decoder_current_bitstream_buffer_.reset(NULL); |
|
Pawel Osciak
2012/11/06 19:26:17
Enough to say reset()
sheu
2012/11/08 21:16:31
Done.
|
| + child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind( |
| + &Client::NotifyEndOfBitstreamBuffer, client_, input_id)); |
| + } |
| + while (!decoder_input_queue_.empty()) { |
| + int input_id; |
| + scoped_ptr<BitstreamBufferRecord> |
| + bitstream_record(decoder_input_queue_.back().release()); |
| + decoder_input_queue_.pop_back(); |
| + input_id = bitstream_record->input_id; |
| + child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind( |
| + &Client::NotifyEndOfBitstreamBuffer, client_, input_id)); |
| + } |
| + |
| + decoder_current_input_buffer_ = -1; |
| + decoder_decode_buffer_tasks_scheduled_ = 0; |
| + decoder_frames_inflight_ = 0; |
| + decoder_flush_notify_requested_ = false; |
| + |
| + // Mark that we're resetting, then enqueue a ResetDoneTask(). All intervening |
| + // jobs will early-out in the kResetting state. |
| + decoder_state_ = kResetting; |
| + decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind( |
| + &ExynosVideoDecodeAccelerator::ResetDoneTask, base::Unretained(this))); |
| +} |
| + |
| +void ExynosVideoDecodeAccelerator::ResetDoneTask() { |
| + DVLOG(3) << "ResetDoneTask()"; |
| + DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current()); |
| + |
| + // Jobs drained, we're finished resetting. |
| + DCHECK_EQ(decoder_state_, kResetting); |
| + decoder_state_ = kAfterReset; |
| + child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind( |
| + &Client::NotifyResetDone, client_)); |
| +} |
| + |
| +void ExynosVideoDecodeAccelerator::DestroyTask() { |
| + DVLOG(3) << "DestroyTask()"; |
| + |
| + // DestroyTask() should run regardless of decoder_state_. |
| + |
| + // Stop streaming and the device_poll_thread_. |
| + StopDevicePoll(); |
| + |
| + decoder_current_bitstream_buffer_.reset(NULL); |
| + decoder_current_input_buffer_ = -1; |
| + decoder_decode_buffer_tasks_scheduled_ = 0; |
| + decoder_frames_inflight_ = 0; |
| + decoder_frames_at_client_ = 0; |
| + decoder_flush_notify_requested_ = false; |
| + decoder_input_queue_.clear(); |
| + |
| + // Set our state to kError. This will cause all subsequent tasks to |
| + // early-exit. |
| + decoder_state_ = kError; |
| +} |
| + |
// Starts the device_poll_thread_ and kicks off DevicePollLoop().  Before the
// poll thread can run, at least one queue per device must be streaming (see
// comment below), and VIDIOC_STREAMON in turn needs a queued buffer -- so
// this primes the MFC and GSC CAPTURE queues with one buffer each.
// Returns true on success or benign early-out; false (after NOTIFY_ERROR)
// on device failure.
bool ExynosVideoDecodeAccelerator::StartDevicePoll() {
  DVLOG(3) << "StartDevicePoll()";
  DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());

  // Early-out if we're already running.
  if (device_poll_thread_.IsRunning()) {
    DVLOG(2) << "StartDevicePoll(): early out: "
             << "device poll thread already running";
    return true;
  }

  // At least one of the OUTPUT or CAPTURE queues for each of MFC and GSC must
  // be in STREAMON state for epoll() not to return EPOLERR.  So:
  // * for MFC, we'll start the CAPTURE queue
  // * for GSC, we'll start the CAPTURE queue
  // STREAMON requires, unfortunately, that we have buffers already queued.
  // We'll need to have the buffers available to queue.  If they aren't
  // available yet, this is not an error; for example, GSC output buffers won't
  // be available until AssignPictureBuffers() is called.
  if ((!mfc_output_streamon_ && mfc_free_output_buffers_.empty()) ||
      gsc_free_output_buffers_.empty()) {
    DVLOG(2) << "StartDevicePoll(): early out: output buffers unavailable";
    return true;
  }

  // The MFC output queue may already have been started when we started
  // enqueuing MFC input buffers.
  struct v4l2_buffer qbuf;
  struct v4l2_plane qbuf_planes[2];
  if (!mfc_output_streamon_) {
    // Queue and start the MFC CAPTURE queue
    int buffer;
    __u32 type;
    memset(&qbuf, 0, sizeof(qbuf));
    memset(qbuf_planes, 0, sizeof(qbuf_planes));
    buffer = mfc_free_output_buffers_.back();
    MfcOutputRecord& output_record = mfc_output_buffer_map_[buffer];
    DCHECK(!output_record.at_device);
    DCHECK_EQ(output_record.input_id, -1);
    qbuf.index = buffer;
    qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    qbuf.memory = V4L2_MEMORY_MMAP;
    qbuf.m.planes = qbuf_planes;
    qbuf.length = 2;
    errno = 0;
    if (ioctl(mfc_fd_, VIDIOC_QBUF, &qbuf) != 0) {
      DPLOG(ERROR) << "StartDevicePoll(): ioctl() failed: VIDIOC_QBUF";
      NOTIFY_ERROR(PLATFORM_FAILURE);
      return false;
    }
    output_record.at_device = true;
    mfc_free_output_buffers_.pop_back();
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    errno = 0;
    if (ioctl(mfc_fd_, VIDIOC_STREAMON, &type) != 0) {
      DPLOG(ERROR) << "StartDevicePoll(): ioctl() failed: VIDIOC_STREAMON";
      NOTIFY_ERROR(PLATFORM_FAILURE);
      return false;
    }
    mfc_output_streamon_ = true;
  }

  // GSC output should not have been started before we received output buffers.
  DCHECK(!gsc_output_streamon_);
  {
    // Queue and start the GSC CAPTURE queue
    int buffer;
    __u32 type;
    memset(&qbuf, 0, sizeof(qbuf));
    memset(qbuf_planes, 0, sizeof(qbuf_planes));
    buffer = gsc_free_output_buffers_.back();
    GscOutputRecord& output_record = gsc_output_buffer_map_[buffer];
    DCHECK(!output_record.at_device);
    DCHECK(!output_record.at_client);
    qbuf.index = buffer;
    qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    qbuf.memory = V4L2_MEMORY_DMABUF;
    qbuf.m.planes = qbuf_planes;
    qbuf.m.planes[0].m.fd = gsc_output_buffer_map_[buffer].fd;
    qbuf.length = 1;
    errno = 0;
    if (ioctl(gsc_fd_, VIDIOC_QBUF, &qbuf) != 0) {
      DPLOG(ERROR) << "StartDevicePoll(): ioctl() failed: VIDIOC_QBUF";
      NOTIFY_ERROR(PLATFORM_FAILURE);
      return false;
    }
    output_record.at_device = true;
    gsc_free_output_buffers_.pop_back();
    // This buffer counts as "prepared" for EnqueueGsc()'s one-at-a-time
    // workaround.
    gsc_output_buffer_prepared_count_++;
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    errno = 0;
    if (ioctl(gsc_fd_, VIDIOC_STREAMON, &type) != 0) {
      DPLOG(ERROR) << "StartDevicePoll(): ioctl() failed: VIDIOC_STREAMON";
      NOTIFY_ERROR(PLATFORM_FAILURE);
      return false;
    }
    gsc_output_streamon_ = true;
  }

  if (!device_poll_thread_.Start()) {
    DLOG(ERROR) << "StartDevicePoll(): Device thread failed to start";
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return false;
  }
  device_poll_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
      &ExynosVideoDecodeAccelerator::DevicePollLoop, base::Unretained(this)));

  return true;
}
| + |
// Stops streaming on all four device queues (which makes DevicePollLoop()
// exit via EPOLLERR), joins the poll thread, and resets all buffer
// accounting to the "everything free" state.  Buffers themselves are NOT
// destroyed, and GSC output buffers still held by the client stay held.
// Returns false (after NOTIFY_ERROR) if any VIDIOC_STREAMOFF fails.
bool ExynosVideoDecodeAccelerator::StopDevicePoll() {
  DVLOG(3) << "StopDevicePoll()";
  DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());

  // Stop streaming on each queue that was started.  VIDIOC_STREAMOFF also
  // dequeues all buffers on the queue as a side effect.
  if (mfc_input_streamon_) {
    __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    errno = 0;
    if (ioctl(mfc_fd_, VIDIOC_STREAMOFF, &type) != 0) {
      DPLOG(ERROR) << "StopDevicePoll(): ioctl() failed: VIDIOC_STREAMOFF";
      NOTIFY_ERROR(PLATFORM_FAILURE);
      return false;
    }
  }
  mfc_input_streamon_ = false;
  if (mfc_output_streamon_) {
    __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    errno = 0;
    if (ioctl(mfc_fd_, VIDIOC_STREAMOFF, &type) != 0) {
      DPLOG(ERROR) << "StopDevicePoll(): ioctl() failed: VIDIOC_STREAMOFF";
      NOTIFY_ERROR(PLATFORM_FAILURE);
      return false;
    }
  }
  mfc_output_streamon_ = false;
  if (gsc_input_streamon_) {
    __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    errno = 0;
    if (ioctl(gsc_fd_, VIDIOC_STREAMOFF, &type) != 0) {
      DPLOG(ERROR) << "StopDevicePoll(): ioctl() failed: VIDIOC_STREAMOFF";
      NOTIFY_ERROR(PLATFORM_FAILURE);
      return false;
    }
  }
  gsc_input_streamon_ = false;
  if (gsc_output_streamon_) {
    __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    errno = 0;
    if (ioctl(gsc_fd_, VIDIOC_STREAMOFF, &type) != 0) {
      DPLOG(ERROR) << "StopDevicePoll(): ioctl() failed: VIDIOC_STREAMOFF";
      NOTIFY_ERROR(PLATFORM_FAILURE);
      return false;
    }
  }
  gsc_output_streamon_ = false;

  // Stopping streaming should cause DevicePollLoop() to exit.  Join the
  // thread.
  device_poll_thread_.Stop();

  // Reset all our accounting info: all buffers are back in our hands, so
  // rebuild every free list from scratch and clear per-buffer state.
  mfc_input_ready_queue_.clear();
  mfc_free_input_buffers_.clear();
  DCHECK_EQ(mfc_input_buffer_count_,
            static_cast<int>(mfc_input_buffer_map_.size()));
  for (size_t i = 0; i < mfc_input_buffer_map_.size(); ++i) {
    mfc_free_input_buffers_.push_back(i);
    mfc_input_buffer_map_[i].at_device = false;
    mfc_input_buffer_map_[i].bytes_used = 0;
    mfc_input_buffer_map_[i].input_id = -1;
  }
  mfc_input_buffer_queued_count_ = 0;
  mfc_free_output_buffers_.clear();
  DCHECK_EQ(mfc_output_buffer_count_,
            static_cast<int>(mfc_output_buffer_map_.size()));
  for (size_t i = 0; i < mfc_output_buffer_map_.size(); ++i) {
    mfc_free_output_buffers_.push_back(i);
    mfc_output_buffer_map_[i].at_device = false;
    mfc_output_buffer_map_[i].input_id = -1;
  }
  mfc_output_gsc_input_queue_.clear();
  gsc_free_input_buffers_.clear();
  DCHECK_EQ(gsc_input_buffer_count_,
            static_cast<int>(gsc_input_buffer_map_.size()));
  for (size_t i = 0; i < gsc_input_buffer_map_.size(); ++i) {
    gsc_free_input_buffers_.push_back(i);
    gsc_input_buffer_map_[i].at_device = false;
    gsc_input_buffer_map_[i].mfc_output = -1;
  }
  gsc_free_output_buffers_.clear();
  DCHECK_EQ(gsc_output_buffer_count_,
            static_cast<int>(gsc_output_buffer_map_.size()));
  for (size_t i = 0; i < gsc_output_buffer_map_.size(); ++i) {
    // Only mark those free that aren't being held by the VDA.
    if (!gsc_output_buffer_map_[i].at_client) {
      gsc_free_output_buffers_.push_back(i);
      gsc_output_buffer_map_[i].at_device = false;
    }
    // Drop any pending client fence; the buffer won't be reused until
    // streaming restarts.
    if (gsc_output_buffer_map_[i].egl_sync != EGL_NO_SYNC_KHR) {
      egl_destroy_sync_khr(egl_display_, gsc_output_buffer_map_[i].egl_sync);
      gsc_output_buffer_map_[i].egl_sync = EGL_NO_SYNC_KHR;
    }
  }
  gsc_output_buffer_prepared_count_ = 0;
  gsc_output_buffer_queued_count_ = 0;

  DVLOG(3) << "StopDevicePoll(): device poll stopped";
  return true;
}
| + |
// Body of device_poll_thread_.  Blocks in epoll() on the MFC and GSC fds and
// posts ServiceDeviceTask() to the decoder thread whenever either device has
// work to hand back.
void ExynosVideoDecodeAccelerator::DevicePollLoop() {
  DVLOG(3) << "DevicePollLoop()";
  DCHECK_EQ(device_poll_thread_.message_loop(), MessageLoop::current());
  // This routine just polls on the V4L2 devices, and notifies the
  // decoder_thread_ when processing needs to occur.  The main loop will
  // terminate when we return an EPOLLERR to epoll() on the device file
  // descriptors, which should occur when the devices are sent VIDIOC_STREAMOFF
  // on their queues.

  int ret;
  int epoll_fd = -1;
  // Ensure the epoll fd is closed on every exit path.
  file_util::ScopedFD epoll_fd_closer(&epoll_fd);
  struct epoll_event event;
  errno = 0;
  epoll_fd = epoll_create(2);  // Size hint only: we watch exactly two fds.
  if (epoll_fd == -1) {
    DPLOG(ERROR) << "DevicePollLoop(): epoll() failed";
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return;
  }
  // EPOLLET (edge-triggered): we only wake on new readiness, so
  // ServiceDeviceTask() must fully drain both devices on each wakeup.
  event.events = EPOLLIN | EPOLLOUT | EPOLLERR | EPOLLET;
  event.data.fd = mfc_fd_;
  errno = 0;
  ret = epoll_ctl(epoll_fd, EPOLL_CTL_ADD, mfc_fd_, &event);
  if (ret != 0) {
    DPLOG(ERROR) << "DevicePollLoop(): epoll_ctl() failed";
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return;
  }
  event.events = EPOLLIN | EPOLLOUT | EPOLLERR | EPOLLET;
  event.data.fd = gsc_fd_;
  errno = 0;
  ret = epoll_ctl(epoll_fd, EPOLL_CTL_ADD, gsc_fd_, &event);
  if (ret != 0) {
    DPLOG(ERROR) << "DevicePollLoop(): epoll_ctl() failed";
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return;
  }

  for (;;) {
    // We epoll() and wait for more interesting things to happen.
    // If we get a VIDIOC_STREAMOFF to the device, epoll() will return EPOLLERR,
    // and we know to exit the loop that way.
    DVLOG(3) << "DevicePollLoop(): epoll()";
    struct epoll_event event;
    // Retry on signal interruption; block indefinitely otherwise.
    do {
      errno = 0;
      ret = epoll_wait(epoll_fd, &event, 1, -1);
    } while (ret < 1 && errno == EINTR);
    if (ret == -1) {
      DPLOG(ERROR) << "DevicePollLoop(): epoll_wait() failed";
      NOTIFY_ERROR(PLATFORM_FAILURE);
      return;
    }
    if (event.data.fd != mfc_fd_ && event.data.fd != gsc_fd_) {
      DLOG(ERROR) << "DevicePollLoop(): epoll() returned unknown fd";
      NOTIFY_ERROR(PLATFORM_FAILURE);
      return;
    }
    if ((event.events & EPOLLERR) != 0) {
      DVLOG(2) << "DevicePollLoop(): epoll() returned EPOLERR for "
               << (event.data.fd == mfc_fd_ ? "mfc_fd_" : "gsc_fd_");
      // Not necessarily an error.  This is the normal exit path after
      // StopDevicePoll() turns streaming off.
      return;
    }
    decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
        &ExynosVideoDecodeAccelerator::ServiceDeviceTask,
        base::Unretained(this)));
  }
}
| + |
| +void ExynosVideoDecodeAccelerator::NotifyError(Error error) { |
| + DVLOG(2) << "NotifyError()"; |
| + |
| + if (!child_message_loop_proxy_->BelongsToCurrentThread()) { |
| + child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind( |
| + &ExynosVideoDecodeAccelerator::NotifyError, weak_this_, error)); |
| + return; |
| + } |
| + |
| + if (client_) { |
| + client_->NotifyError(error); |
| + client_ptr_factory_.InvalidateWeakPtrs(); |
| + } |
| +} |
| + |
| +void ExynosVideoDecodeAccelerator::SetDecoderState(State state) { |
| + DVLOG(3) << "SetDecoderState()"; |
| + |
| + // We can touch decoder_state_ only if this is the decoder thread or the |
| + // decoder thread isn't running. |
| + if (!decoder_thread_.message_loop_proxy()->BelongsToCurrentThread()) { |
| + if (!decoder_thread_.message_loop_proxy()->PostTask(FROM_HERE, base::Bind( |
| + &ExynosVideoDecodeAccelerator::SetDecoderState, |
|
Pawel Osciak
2012/11/06 19:26:17
Indent error?
sheu
2012/11/08 21:16:31
Done.
|
| + base::Unretained(this), state))) { |
| + decoder_state_ = state; |
| + } |
| + } else { |
| + decoder_state_ = state; |
| + } |
| +} |
| + |
| +bool ExynosVideoDecodeAccelerator::CreateMfcInputBuffers() { |
| + DVLOG(3) << "CreateMfcInputBuffers()"; |
| + // We always run this as we prepare to initialize. |
| + DCHECK_EQ(decoder_state_, kUninitialized); |
| + DCHECK(!mfc_input_streamon_); |
| + DCHECK_EQ(mfc_input_buffer_count_, 0); |
| + |
| + __u32 pixelformat = 0; |
| + if (video_profile_ >= media::H264PROFILE_MIN && |
| + video_profile_ <= media::H264PROFILE_MAX) { |
| + pixelformat = V4L2_PIX_FMT_H264; |
| + } else if (video_profile_ >= media::VP8PROFILE_MIN && |
| + video_profile_ <= media::VP8PROFILE_MAX) { |
| + pixelformat = V4L2_PIX_FMT_VP8; |
| + } else { |
| + NOTREACHED(); |
| + } |
| + |
| + int ret; |
| + struct v4l2_format format; |
| + memset(&format, 0, sizeof(format)); |
| + format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| + format.fmt.pix_mp.pixelformat = pixelformat; |
| + format.fmt.pix_mp.plane_fmt[0].sizeimage = kMfcInputBufferMaxSize; |
| + format.fmt.pix_mp.num_planes = 1; |
| + errno = 0; |
| + ret = ioctl(mfc_fd_, VIDIOC_S_FMT, &format); |
| + if (ret != 0) { |
| + DPLOG(ERROR) << "CreateMfcInputBuffers(): ioctl() failed: VIDIOC_S_FMT"; |
| + return false; |
| + } |
| + |
| + struct v4l2_requestbuffers reqbufs; |
| + memset(&reqbufs, 0, sizeof(reqbufs)); |
| + reqbufs.count = kMfcInputBufferCount; |
| + reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| + reqbufs.memory = V4L2_MEMORY_MMAP; |
| + errno = 0; |
| + ret = ioctl(mfc_fd_, VIDIOC_REQBUFS, &reqbufs); |
| + if (ret != 0) { |
| + DPLOG(ERROR) << "CreateMfcInputBuffers(): ioctl() failed: VIDIOC_REQBUFS"; |
| + return false; |
| + } |
| + mfc_input_buffer_count_ = reqbufs.count; |
| + mfc_input_buffer_map_.resize(mfc_input_buffer_count_); |
| + for (int i = 0; i < mfc_input_buffer_count_; ++i) { |
|
Pawel Osciak
2012/11/06 19:26:17
This whole sequence could be extracted into a func
sheu
2012/11/08 21:16:31
The *_buffer_map_ map that we're updating is diffe
Pawel Osciak
2012/11/13 18:10:39
I'd really suggest making them the same class. The
|
| + mfc_free_input_buffers_.push_back(i); |
| + |
| + // Query for the MEMORY_MMAP pointer. |
| + struct v4l2_plane planes[1]; |
| + struct v4l2_buffer buffer; |
| + memset(&buffer, 0, sizeof(buffer)); |
| + memset(planes, 0, sizeof(planes)); |
| + buffer.index = i; |
| + buffer.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| + buffer.memory = V4L2_MEMORY_MMAP; |
| + buffer.m.planes = planes; |
| + buffer.length = 1; |
| + errno = 0; |
| + ret = ioctl(mfc_fd_, VIDIOC_QUERYBUF, &buffer); |
| + if (ret != 0) { |
| + DPLOG(ERROR) << "CreateMfcInputBuffers(): " |
| + << "ioctl() failed: VIDIOC_QUERYBUF"; |
| + return false; |
| + } |
| + errno = 0; |
| + void* offset = mmap(NULL, buffer.m.planes[0].length, |
|
Pawel Osciak
2012/11/06 19:26:17
Nit: calling this an "offset" is a bit misleading,
sheu
2012/11/08 21:16:31
Done.
|
| + PROT_READ | PROT_WRITE, MAP_SHARED, mfc_fd_, |
| + buffer.m.planes[0].m.mem_offset); |
| + if (offset == MAP_FAILED) { |
| + DPLOG(ERROR) << "CreateMfcInputBuffers(): mmap() failed"; |
| + return false; |
| + } |
| + mfc_input_buffer_map_[i].offset = offset; |
| + mfc_input_buffer_map_[i].length = buffer.m.planes[0].length; |
| + } |
| + |
| + return true; |
| +} |
| + |
| +bool ExynosVideoDecodeAccelerator::CreateMfcOutputBuffers() { |
| + DVLOG(3) << "CreateMfcOutputBuffers()"; |
| + DCHECK_EQ(decoder_state_, kInitialized); |
| + DCHECK(!mfc_output_streamon_); |
| + DCHECK_EQ(mfc_output_buffer_count_, 0); |
| + |
| + // Number of MFC output buffers we need. |
| + int ret; |
| + struct v4l2_control ctrl; |
| + memset(&ctrl, 0, sizeof(ctrl)); |
| + ctrl.id = V4L2_CID_MIN_BUFFERS_FOR_CAPTURE; |
| + errno = 0; |
| + ret = ioctl(mfc_fd_, VIDIOC_G_CTRL, &ctrl); |
| + if (ret != 0) { |
| + DPLOG(ERROR) << "CreateMfcOutputBuffers(): ioctl() failed: VIDIOC_G_CTRL"; |
| + return false; |
| + } |
| + |
| + // Output format setup in Initialize(). |
| + |
| + // Allocate the output buffers. |
| + struct v4l2_requestbuffers reqbufs; |
| + memset(&reqbufs, 0, sizeof(reqbufs)); |
| + reqbufs.count = ctrl.value + kMfcOutputBufferExtraCount; |
| + reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| + reqbufs.memory = V4L2_MEMORY_MMAP; |
| + errno = 0; |
| + ret = ioctl(mfc_fd_, VIDIOC_REQBUFS, &reqbufs); |
| + if (ret != 0) { |
| + DPLOG(ERROR) << "CreateMfcOutputBuffers(): ioctl() failed: VIDIOC_REQBUFS"; |
| + return false; |
| + } |
| + |
| + // Fill our free-buffers list, and create DMABUFs from them. |
| + mfc_output_buffer_count_ = reqbufs.count; |
| + mfc_output_buffer_map_.resize(mfc_output_buffer_count_); |
| + for (int i = 0; i < mfc_output_buffer_count_; ++i) { |
| + mfc_free_output_buffers_.push_back(i); |
| + |
| + // Query for the MEMORY_MMAP pointer. |
| + struct v4l2_plane planes[2]; |
| + struct v4l2_buffer buffer; |
| + memset(&buffer, 0, sizeof(buffer)); |
| + memset(planes, 0, sizeof(planes)); |
| + buffer.index = i; |
| + buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| + buffer.memory = V4L2_MEMORY_MMAP; |
| + buffer.m.planes = planes; |
| + buffer.length = 2; |
| + errno = 0; |
| + ret = ioctl(mfc_fd_, VIDIOC_QUERYBUF, &buffer); |
| + if (ret != 0) { |
| + DPLOG(ERROR) << "CreateMfcOutputBuffers(): " |
| + << "ioctl() failed: VIDIOC_QUERYBUF"; |
| + return false; |
| + } |
| + |
| + // Get their user memory for GSC input. |
| + for (int j = 0; j < 2; ++j) { |
| + errno = 0; |
| + void* offset = mmap(NULL, buffer.m.planes[j].length, |
| + PROT_READ | PROT_WRITE, MAP_SHARED, mfc_fd_, |
| + buffer.m.planes[j].m.mem_offset); |
| + if (offset == MAP_FAILED) { |
| + DPLOG(ERROR) << "CreateMfcInputBuffers(): mmap() failed"; |
| + return false; |
| + } |
| + mfc_output_buffer_map_[i].offset[j] = offset; |
| + mfc_output_buffer_map_[i].length[j] = buffer.m.planes[j].length; |
| + } |
| + } |
| + |
| + return true; |
| +} |
| + |
| +bool ExynosVideoDecodeAccelerator::CreateGscInputBuffers() { |
| + DVLOG(3) << "CreateGscInputBuffers()"; |
| + DCHECK_EQ(decoder_state_, kInitialized); |
| + DCHECK(!gsc_input_streamon_); |
| + DCHECK_EQ(gsc_input_buffer_count_, 0); |
| + |
| + int ret; |
| + struct v4l2_format format; |
| + memset(&format, 0, sizeof(format)); |
| + format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| + format.fmt.pix_mp.width = frame_buffer_size_.width(); |
| + format.fmt.pix_mp.height = frame_buffer_size_.height(); |
| + DCHECK_EQ(mfc_output_buffer_pixelformat_, V4L2_PIX_FMT_NV12MT_16X16); |
| + format.fmt.pix_mp.pixelformat = mfc_output_buffer_pixelformat_; |
| + format.fmt.pix_mp.plane_fmt[0].sizeimage = mfc_output_buffer_size_[0]; |
| + format.fmt.pix_mp.plane_fmt[1].sizeimage = mfc_output_buffer_size_[1]; |
| + // NV12MT_16X16 is a tiled format for which bytesperline doesn't make too much |
| + // sense. Convention seems to be to assume 8bpp for these tiled formats. |
| + format.fmt.pix_mp.plane_fmt[0].bytesperline = frame_buffer_size_.width(); |
| + format.fmt.pix_mp.plane_fmt[1].bytesperline = frame_buffer_size_.width(); |
| + format.fmt.pix_mp.num_planes = 2; |
| + errno = 0; |
| + ret = ioctl(gsc_fd_, VIDIOC_S_FMT, &format); |
| + if (ret != 0) { |
| + DPLOG(ERROR) << "CreateGscInputBuffers(): ioctl() failed: VIDIOC_S_FMT"; |
| + return false; |
| + } |
| + |
| + struct v4l2_control control; |
| + memset(&control, 0, sizeof(control)); |
|
Pawel Osciak
2012/11/06 19:26:17
Wrap all of those sequences into DeviceSetCtrl(fd,
sheu
2012/11/08 21:16:31
Possibly, but I replaced these with the IOCTL macr
|
| + control.id = V4L2_CID_ROTATE; |
| + control.value = 0; |
| + errno = 0; |
| + ret = ioctl(gsc_fd_, VIDIOC_S_CTRL, &control); |
| + if (ret != 0) { |
| + DPLOG(ERROR) << "CreateGscInputBuffers(): ioctl() failed: VIDIOC_S_CTRL"; |
| + return false; |
| + } |
| + |
| + memset(&control, 0, sizeof(control)); |
| + control.id = V4L2_CID_HFLIP; |
| + control.value = 0; |
| + errno = 0; |
| + ret = ioctl(gsc_fd_, VIDIOC_S_CTRL, &control); |
| + if (ret != 0) { |
| + DPLOG(ERROR) << "CreateGscInputBuffers(): ioctl() failed: VIDIOC_S_CTRL"; |
| + return false; |
| + } |
| + |
| + memset(&control, 0, sizeof(control)); |
| + control.id = V4L2_CID_VFLIP; |
| + control.value = 0; |
| + errno = 0; |
| + ret = ioctl(gsc_fd_, VIDIOC_S_CTRL, &control); |
| + if (ret != 0) { |
| + DPLOG(ERROR) << "CreateGscInputBuffers(): ioctl() failed: VIDIOC_S_CTRL"; |
| + return false; |
| + } |
| + |
| + memset(&control, 0, sizeof(control)); |
| + control.id = V4L2_CID_GLOBAL_ALPHA; |
| + control.value = 255; |
| + errno = 0; |
| + ret = ioctl(gsc_fd_, VIDIOC_S_CTRL, &control); |
| + if (ret != 0) { |
| + DPLOG(ERROR) << "CreateGscInputBuffers(): ioctl() failed: VIDIOC_S_CTRL"; |
| + return false; |
| + } |
| + |
| + struct v4l2_requestbuffers reqbufs; |
| + memset(&reqbufs, 0, sizeof(reqbufs)); |
| + reqbufs.count = kGscOutputBufferCount; |
| + reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| + reqbufs.memory = V4L2_MEMORY_USERPTR; |
| + errno = 0; |
| + ret = ioctl(gsc_fd_, VIDIOC_REQBUFS, &reqbufs); |
| + if (ret != 0 ) { |
| + DPLOG(ERROR) << "CreateGscInputBuffers(): ioctl() failed: VIDIOC_REQBUFS"; |
| + return false; |
| + } |
| + |
| + gsc_input_buffer_count_ = reqbufs.count; |
| + gsc_input_buffer_map_.resize(gsc_input_buffer_count_); |
| + for (int i = 0; i < gsc_input_buffer_count_; ++i) { |
| + gsc_free_input_buffers_.push_back(i); |
| + gsc_input_buffer_map_[i].mfc_output = -1; |
| + } |
| + |
| + return true; |
| +} |
| + |
| +bool ExynosVideoDecodeAccelerator::CreateGscOutputBuffers() { |
| + DVLOG(3) << "CreateGscOutputBuffers()"; |
| + DCHECK_EQ(decoder_state_, kInitialized); |
| + DCHECK(!gsc_output_streamon_); |
| + DCHECK_EQ(gsc_output_buffer_count_, 0); |
| + |
| + // GSC outputs into the EGLImages we create from the textures we are |
| + // assigned. Assume RGBA8888 format. |
| + int ret; |
| + struct v4l2_format format; |
| + memset(&format, 0, sizeof(format)); |
| + format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| + format.fmt.pix_mp.width = frame_buffer_size_.width(); |
| + format.fmt.pix_mp.height = frame_buffer_size_.height(); |
| + format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_RGB32; |
| + format.fmt.pix_mp.plane_fmt[0].sizeimage = |
| + frame_buffer_size_.width() * frame_buffer_size_.height() * 4; |
| + format.fmt.pix_mp.plane_fmt[0].bytesperline = frame_buffer_size_.width() * 4; |
| + format.fmt.pix_mp.num_planes = 1; |
| + errno = 0; |
| + ret = ioctl(gsc_fd_, VIDIOC_S_FMT, &format); |
| + if (ret != 0) { |
| + DPLOG(ERROR) << "CreateGscOutputBuffers(): ioctl() failed: VIDIOC_S_FMT"; |
| + return false; |
| + } |
| + |
| + struct v4l2_requestbuffers reqbufs; |
| + memset(&reqbufs, 0, sizeof(reqbufs)); |
| + reqbufs.count = kGscOutputBufferCount; |
| + reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| + reqbufs.memory = V4L2_MEMORY_DMABUF; |
| + errno = 0; |
| + ret = ioctl(gsc_fd_, VIDIOC_REQBUFS, &reqbufs); |
| + if (ret != 0) { |
| + DPLOG(ERROR) << "CreateGscOutputBuffers(): ioctl() failed: VIDIOC_REQBUFS"; |
| + return false; |
| + } |
| + |
| + // We don't actually fill in the freelist or the map here. That happens once |
| + // we have actual usable buffers, after AssignPictureBuffers(); |
| + gsc_output_buffer_count_ = reqbufs.count; |
| + gsc_output_buffer_map_.resize(gsc_output_buffer_count_); |
| + |
| + DVLOG(3) << "CreateGscOutputBuffers(): ProvidePictureBuffers(): " |
| + << "buffer_count=" << gsc_output_buffer_count_ |
| + << ", width=" << frame_buffer_size_.width() |
| + << ", height=" << frame_buffer_size_.height(); |
| + child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind( |
| + &Client::ProvidePictureBuffers, client_, gsc_output_buffer_count_, |
| + gfx::Size(frame_buffer_size_.width(), frame_buffer_size_.height()), |
| + GL_TEXTURE_2D)); |
| + |
| + return true; |
| +} |
| + |
| +void ExynosVideoDecodeAccelerator::DestroyMfcInputBuffers() { |
|
Pawel Osciak
2012/11/06 19:26:17
DestroyDeviceBuffers(fd, type, memory, num_planes)
sheu
2012/11/08 21:16:31
Unfortunately each device buffer type has its own
Pawel Osciak
2012/11/13 18:10:39
Please see comment at l.1721.
|
| + DVLOG(3) << "DestroyMfcInputBuffers()"; |
| + DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
| + DCHECK(!mfc_input_streamon_); |
| + |
| + for (size_t i = 0; i < mfc_input_buffer_map_.size(); ++i) { |
| + if (mfc_input_buffer_map_[i].offset != NULL) { |
| + munmap(mfc_input_buffer_map_[i].offset, |
| + mfc_input_buffer_map_[i].length); |
| + } |
| + } |
| + |
| + int ret; |
| + struct v4l2_requestbuffers reqbufs; |
| + memset(&reqbufs, 0, sizeof(reqbufs)); |
| + reqbufs.count = 0; |
| + reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| + reqbufs.memory = V4L2_MEMORY_MMAP; |
| + errno = 0; |
| + ret = ioctl(mfc_fd_, VIDIOC_REQBUFS, &reqbufs); |
| + if (ret != 0) |
| + DPLOG(ERROR) << "DestroyMfcInputBuffers(): ioctl() failed: VIDIOC_REQBUFS"; |
| + |
| + mfc_input_buffer_map_.clear(); |
| + mfc_free_input_buffers_.clear(); |
| + mfc_input_buffer_count_ = 0; |
| +} |
| + |
| +void ExynosVideoDecodeAccelerator::DestroyMfcOutputBuffers() { |
| + DVLOG(3) << "DestroyMfcOutputBuffers()"; |
| + DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
| + DCHECK(!mfc_output_streamon_); |
| + |
| + for (size_t i = 0; i < mfc_output_buffer_map_.size(); ++i) { |
| + if (mfc_output_buffer_map_[i].offset[0] != NULL) |
| + munmap(mfc_output_buffer_map_[i].offset[0], |
| + mfc_output_buffer_map_[i].length[0]); |
| + if (mfc_output_buffer_map_[i].offset[1] != NULL) |
| + munmap(mfc_output_buffer_map_[i].offset[1], |
| + mfc_output_buffer_map_[i].length[1]); |
| + } |
| + |
| + int ret; |
| + struct v4l2_requestbuffers reqbufs; |
| + memset(&reqbufs, 0, sizeof(reqbufs)); |
| + reqbufs.count = 0; |
| + reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| + reqbufs.memory = V4L2_MEMORY_MMAP; |
| + errno = 0; |
| + ret = ioctl(mfc_fd_, VIDIOC_REQBUFS, &reqbufs); |
| + if (ret != 0) |
| + DPLOG(ERROR) << "DestroyMfcOutputBuffers() ioctl() failed: VIDIOC_REQBUFS"; |
| + |
| + mfc_output_buffer_map_.clear(); |
| + mfc_free_output_buffers_.clear(); |
| + mfc_output_buffer_count_ = 0; |
| +} |
| + |
| +void ExynosVideoDecodeAccelerator::DestroyGscInputBuffers() { |
| + DVLOG(3) << "DestroyGscInputBuffers()"; |
| + DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
| + DCHECK(!gsc_input_streamon_); |
| + |
| + int ret; |
| + struct v4l2_requestbuffers reqbufs; |
| + memset(&reqbufs, 0, sizeof(reqbufs)); |
| + reqbufs.count = 0; |
| + reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| + reqbufs.memory = V4L2_MEMORY_DMABUF; |
| + errno = 0; |
| + ret = ioctl(gsc_fd_, VIDIOC_REQBUFS, &reqbufs); |
| + if (ret != 0) |
| + DPLOG(ERROR) << "DestroyGscInputBuffers(): ioctl() failed: VIDIOC_REQBUFS"; |
| + |
| + gsc_input_buffer_map_.clear(); |
| + gsc_free_input_buffers_.clear(); |
| + gsc_input_buffer_count_ = 0; |
| +} |
| + |
| +void ExynosVideoDecodeAccelerator::DestroyGscOutputBuffers() { |
| + DVLOG(3) << "DestroyGscOutputBuffers()"; |
| + DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
| + DCHECK(!gsc_output_streamon_); |
| + |
| + if (gsc_output_buffer_map_.size() != 0) { |
| + if (!make_context_current_.Run()) |
| + DLOG(ERROR) << "DestroyGscOutputBuffers(): " |
| + << "could not make context current"; |
| + |
| + size_t i = 0; |
| + do { |
| + GscOutputRecord& output_record = gsc_output_buffer_map_[i]; |
| + if (output_record.fd != -1) |
| + close(output_record.fd); |
| + if (output_record.egl_image != EGL_NO_IMAGE_KHR) |
| + eglDestroyImageKHR(egl_display_, output_record.egl_image); |
| + if (output_record.egl_sync != EGL_NO_SYNC_KHR) |
| + egl_destroy_sync_khr(egl_display_, output_record.egl_sync); |
| + if (client_) |
| + client_->DismissPictureBuffer(output_record.picture_id); |
| + ++i; |
| + } while (i < gsc_output_buffer_map_.size()); |
| + } |
| + |
| + int ret; |
| + struct v4l2_requestbuffers reqbufs; |
| + memset(&reqbufs, 0, sizeof(reqbufs)); |
| + reqbufs.count = 0; |
| + reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| + reqbufs.memory = V4L2_MEMORY_DMABUF; |
| + errno = 0; |
| + ret = ioctl(gsc_fd_, VIDIOC_REQBUFS, &reqbufs); |
| + if (ret != 0) |
| + DPLOG(ERROR) << "DestroyGscOutputBuffers(): ioctl() failed: VIDIOC_REQBUFS"; |
| + |
| + gsc_output_buffer_map_.clear(); |
| + gsc_free_output_buffers_.clear(); |
| + gsc_output_buffer_count_ = 0; |
| +} |
| + |
| +} // namespace content |