| Index: content/common/gpu/media/exynos_video_encode_accelerator.cc
|
| diff --git a/content/common/gpu/media/exynos_video_encode_accelerator.cc b/content/common/gpu/media/exynos_video_encode_accelerator.cc
|
| new file mode 100644
|
| index 0000000000000000000000000000000000000000..94153cdc9b086e49ae2aacf2358ace8cddbc238c
|
| --- /dev/null
|
| +++ b/content/common/gpu/media/exynos_video_encode_accelerator.cc
|
| @@ -0,0 +1,1701 @@
|
| +// Copyright (c) 2013 The Chromium Authors. All rights reserved.
|
| +// Use of this source code is governed by a BSD-style license that can be
|
| +// found in the LICENSE file.
|
| +
|
| +#include "content/common/gpu/media/exynos_video_encode_accelerator.h"
|
| +
|
| +#include <dlfcn.h>
|
| +#include <fcntl.h>
|
| +#include <libdrm/drm_fourcc.h>
|
| +#include <linux/videodev2.h>
|
| +#include <poll.h>
|
| +#include <sys/eventfd.h>
|
| +#include <sys/ioctl.h>
|
| +#include <sys/mman.h>
|
| +#include "base/callback.h"
|
| +#include "base/debug/trace_event.h"
|
| +#include "base/file_util.h"
|
| +#include "base/message_loop/message_loop_proxy.h"
|
| +#include "base/posix/eintr_wrapper.h"
|
| +#include "media/base/bitstream_buffer.h"
|
| +#include "ui/gl/scoped_binders.h"
|
| +
|
// Latch the encoder into the error state and report |x| to the client.
// Expands in the enclosing member-function scope; requires SetEncoderState()
// and NotifyError() to be visible.
#define NOTIFY_ERROR(x) \
  do { \
    SetEncoderState(kError); \
    DLOG(ERROR) << "calling NotifyError(): " << x; \
    NotifyError(x); \
  } while (0)

// Issue an ioctl, retrying on EINTR; on failure report a platform error and
// return from the enclosing void function.
// NOTE: HANDLE_EINTR must wrap only the ioctl() call itself. It retries while
// the wrapped expression evaluates to -1 with errno == EINTR; wrapping the
// whole "!= 0" comparison (whose value is 0 or 1, never -1) silently defeats
// the retry, so an interrupted ioctl would be reported as a device failure.
#define IOCTL_OR_ERROR_RETURN(fd, type, arg) \
  do { \
    if (HANDLE_EINTR(ioctl(fd, type, arg)) != 0) { \
      DPLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \
      NOTIFY_ERROR(kPlatformFailureError); \
      return; \
    } \
  } while (0)

// Same as IOCTL_OR_ERROR_RETURN, for functions returning bool.
#define IOCTL_OR_ERROR_RETURN_FALSE(fd, type, arg) \
  do { \
    if (HANDLE_EINTR(ioctl(fd, type, arg)) != 0) { \
      DPLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \
      NOTIFY_ERROR(kPlatformFailureError); \
      return false; \
    } \
  } while (0)
|
| +
|
| +namespace content {
|
| +
|
namespace {

// Device nodes for the two Exynos V4L2 devices this class drives:
// the GScaler (color conversion / scaling) and the MFC hardware encoder.
const char kExynosGscDevice[] = "/dev/gsc1";
const char kExynosMfcDevice[] = "/dev/mfc-enc";

}  // anonymous namespace
|
| +
|
| +
|
| +
|
// Tracks a client-provided output bitstream buffer while the encoder owns it.
// The shared memory is mapped by the caller (see UseBitstreamBuffer()) before
// the ref is handed to the encoder thread.
struct ExynosVideoEncodeAccelerator::BitstreamBufferRef {
  BitstreamBufferRef(int32 id,
                     scoped_ptr<base::SharedMemory> shm,
                     size_t size)
      : id(id),
        shm(shm.Pass()),
        size(size) {}
  const int32 id;                            // Client-assigned buffer id.
  const scoped_ptr<base::SharedMemory> shm;  // Owned, mapped shared memory.
  const size_t size;                         // Usable size of |shm| in bytes.
};
|
| +
|
// Bookkeeping for one GSC input (V4L2 VIDEO_OUTPUT) buffer; starts idle with
// no associated frame, EGL sync object, or EGL image.
ExynosVideoEncodeAccelerator::GscInputRecord::GscInputRecord()
    : at_device(false),
      frame_id(-1),
      egl_sync(EGL_NO_SYNC_KHR),
      egl_image(EGL_NO_IMAGE_KHR),
      texture_id(0) {}
|
| +
|
// Bookkeeping for one GSC output (V4L2 VIDEO_CAPTURE) buffer. The three array
// slots correspond to the up-to-three mmap()ed planes of the buffer;
// |mfc_input| links it to an MFC input buffer when transcoding (-1 if none).
ExynosVideoEncodeAccelerator::GscOutputRecord::GscOutputRecord()
    : at_device(false),
      mfc_input(-1) {
  address[0] = address[1] = address[2] = NULL;
  length[0] = length[1] = length[2] = 0;
  bytes_used[0] = bytes_used[1] = bytes_used[2] = 0;
}
|
| +
|
// Bookkeeping for one MFC input (V4L2 VIDEO_OUTPUT) buffer. The two fds are
// per-plane DMABUF handles (Y and interleaved UV), -1 until exported.
ExynosVideoEncodeAccelerator::MfcInputRecord::MfcInputRecord()
    : at_device(false) {
  fd[0] = fd[1] = -1;
}
|
| +
|
// Bookkeeping for one MFC output (V4L2 VIDEO_CAPTURE) buffer holding encoded
// bitstream; single-planar, mmap()ed at |address| for |length| bytes.
ExynosVideoEncodeAccelerator::MfcOutputRecord::MfcOutputRecord()
    : at_device(false),
      address(NULL),
      length(0),
      bytes_used(0) {}
|
| +
|
// Constructed on the child (GPU child process) thread; captures that thread's
// message loop so worker threads can post results back to it. All fds start
// at -1 and are opened lazily in Initialize().
ExynosVideoEncodeAccelerator::ExynosVideoEncodeAccelerator(
    EGLDisplay egl_display,
    media::VideoEncodeAccelerator::Client* client,
    const base::Callback<bool(void)>& make_context_current,
    bool encode_from_backbuffer)
    : child_message_loop_proxy_(base::MessageLoopProxy::current()),
      weak_this_ptr_factory_(this),
      // |weak_this_| is minted here (on the child thread) so it can be safely
      // handed to tasks created later on other threads.
      weak_this_(weak_this_ptr_factory_.GetWeakPtr()),
      client_ptr_factory_(client),
      client_(client_ptr_factory_.GetWeakPtr()),
      encoder_thread_("ExynosEncoderThread"),
      encoder_state_(kUninitialized),
      output_buffer_byte_size_(0),
      stream_header_size_(0),
      do_output_encoding_(false),
      do_encode_from_backbuffer_(encode_from_backbuffer),
      gsc_fd_(-1),
      gsc_input_streamon_(false),
      gsc_input_buffer_queued_count_(0),
      gsc_output_streamon_(false),
      gsc_output_buffer_queued_count_(0),
      mfc_fd_(-1),
      mfc_input_streamon_(false),
      mfc_input_buffer_queued_count_(0),
      mfc_output_streamon_(false),
      mfc_output_buffer_queued_count_(0),
      device_poll_thread_("ExynosDevicePollThread"),
      device_poll_interrupt_fd_(-1),
      make_context_current_(make_context_current),
      egl_display_(egl_display),
      video_profile_(media::VIDEO_CODEC_PROFILE_UNKNOWN) {
  DCHECK(client_);
}
|
| +
|
// Tears down buffers and device fds. Worker threads must already be stopped
// (see Destroy()), since buffer destruction is not thread-safe against them.
ExynosVideoEncodeAccelerator::~ExynosVideoEncodeAccelerator() {
  DCHECK(!encoder_thread_.IsRunning());
  DCHECK(!device_poll_thread_.IsRunning());

  if (device_poll_interrupt_fd_ != -1) {
    // NOTE(review): retrying close() on EINTR is dubious on Linux -- the fd
    // may already be released when close() is interrupted. Confirm against
    // the project's close()-wrapping convention.
    HANDLE_EINTR(close(device_poll_interrupt_fd_));
    device_poll_interrupt_fd_ = -1;
  }
  if (mfc_fd_ != -1) {
    // Buffers must be unmapped/freed before their owning device fd closes.
    DestroyMfcInputBuffers();
    DestroyMfcOutputBuffers();
    HANDLE_EINTR(close(mfc_fd_));
    mfc_fd_ = -1;
  }
  if (gsc_fd_ != -1) {
    DestroyGscInputBuffers();
    DestroyGscOutputBuffers();
    HANDLE_EINTR(close(gsc_fd_));
    gsc_fd_ = -1;
  }
}
|
| +
|
| +void ExynosVideoEncodeAccelerator::Initialize(
|
| + media::VideoCodecProfile profile,
|
| + const gfx::Size& input_resolution,
|
| + const gfx::Size& output_resolution,
|
| + int32 initial_bitrate) {
|
| + DVLOG(3) << "Initialize()";
|
| + DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
|
| + DCHECK_EQ(encoder_state_, kUninitialized);
|
| +
|
| + video_profile_ = profile;
|
| + input_visible_size_ = input_resolution;
|
| + output_visible_size_ = output_resolution;
|
| +
|
| + switch (video_profile_) {
|
| + case media::RAWPROFILE_I420: {
|
| + do_output_encoding_ = false;
|
| + break;
|
| + }
|
| + case media::H264PROFILE_MAIN: {
|
| + do_output_encoding_ = true;
|
| + break;
|
| + }
|
| + default: {
|
| + DLOG(ERROR) << "Initialize(): invalid profile=" << video_profile_;
|
| + NOTIFY_ERROR(kInvalidArgumentError);
|
| + return;
|
| + }
|
| + }
|
| +
|
| + input_allocated_size_.SetSize(
|
| + (input_visible_size_.width() + 0xF) & ~0xF,
|
| + (input_visible_size_.height() + 0xF) & ~0xF);
|
| + converted_visible_size_.SetSize(
|
| + (output_visible_size_.width() + 0x1) & ~0x1,
|
| + (output_visible_size_.height() + 0x1) & ~0x1);
|
| + converted_allocated_size_.SetSize(
|
| + (converted_visible_size_.width() + 0xF) & ~0xF,
|
| + (converted_visible_size_.height() + 0xF) & ~0xF);
|
| +
|
| + // Output size may be modified by the constraints of the format (e.g.
|
| + // multiple-of-two for YUV formats) or the hardware.
|
| + output_visible_size_ = converted_visible_size_;
|
| +
|
| + if (!make_context_current_.Run()) {
|
| + DLOG(ERROR) << "Initialize(): could not make context current";
|
| + NOTIFY_ERROR(kPlatformFailureError);
|
| + return;
|
| + }
|
| +
|
| + if (!gfx::g_driver_egl.ext.b_EGL_KHR_fence_sync) {
|
| + DLOG(ERROR) << "Initialize(): context does not have "
|
| + << "EGL_KHR_fence_sync";
|
| + NOTIFY_ERROR(kPlatformFailureError);
|
| + return;
|
| + }
|
| +
|
| + // Open the video devices.
|
| + DVLOG(2) << "Initialize(): opening GSC device: " << kExynosGscDevice;
|
| + gsc_fd_ = HANDLE_EINTR(open(kExynosGscDevice,
|
| + O_RDWR | O_NONBLOCK | O_CLOEXEC));
|
| + if (gsc_fd_ == -1) {
|
| + DPLOG(ERROR) << "Initialize(): could not open GSC device: "
|
| + << kExynosGscDevice;
|
| + NOTIFY_ERROR(kPlatformFailureError);
|
| + return;
|
| + }
|
| +
|
| + // Capabilities check.
|
| + struct v4l2_capability caps;
|
| + const __u32 kCapsRequired =
|
| + V4L2_CAP_VIDEO_CAPTURE_MPLANE |
|
| + V4L2_CAP_VIDEO_OUTPUT_MPLANE |
|
| + V4L2_CAP_STREAMING;
|
| + IOCTL_OR_ERROR_RETURN(gsc_fd_, VIDIOC_QUERYCAP, &caps);
|
| + if ((caps.capabilities & kCapsRequired) != kCapsRequired) {
|
| + DLOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP"
|
| + ", caps check failed: 0x" << std::hex << caps.capabilities;
|
| + NOTIFY_ERROR(kPlatformFailureError);
|
| + return;
|
| + }
|
| +
|
| + if (do_output_encoding_) {
|
| + // Open the video encoder device.
|
| + mfc_fd_ = HANDLE_EINTR(open(kExynosMfcDevice,
|
| + O_RDWR | O_NONBLOCK | O_CLOEXEC));
|
| + if (mfc_fd_ == -1) {
|
| + DPLOG(ERROR) << "Initialize(): could not open MFC device: "
|
| + << kExynosMfcDevice;
|
| + NOTIFY_ERROR(kPlatformFailureError);
|
| + return;
|
| + }
|
| +
|
| + IOCTL_OR_ERROR_RETURN(mfc_fd_, VIDIOC_QUERYCAP, &caps);
|
| + if ((caps.capabilities & kCapsRequired) != kCapsRequired) {
|
| + DLOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP"
|
| + ", caps check failed: 0x" << std::hex << caps.capabilities;
|
| + NOTIFY_ERROR(kPlatformFailureError);
|
| + return;
|
| + }
|
| + }
|
| +
|
| + // Create the interrupt fd.
|
| + DCHECK_EQ(device_poll_interrupt_fd_, -1);
|
| + device_poll_interrupt_fd_ = eventfd(0, EFD_NONBLOCK | EFD_CLOEXEC);
|
| + if (device_poll_interrupt_fd_ == -1) {
|
| + DPLOG(ERROR) << "Initialize(): eventfd() failed";
|
| + NOTIFY_ERROR(kPlatformFailureError);
|
| + return;
|
| + }
|
| +
|
| + DLOG(ERROR)
|
| + << "Initialize(): input_visible_size_="
|
| + << input_visible_size_.width() << "x"
|
| + << input_visible_size_.height() << ", input_allocated_size_="
|
| + << input_allocated_size_.width() << "x"
|
| + << input_allocated_size_.height() << ", converted_visible_size_="
|
| + << converted_visible_size_.width() << "x"
|
| + << converted_visible_size_.height() << ", converted_allocated_size_="
|
| + << converted_allocated_size_.width() << "x"
|
| + << converted_allocated_size_.height() << ", output_visible_size_="
|
| + << output_visible_size_.width() << "x"
|
| + << output_visible_size_.height();
|
| +
|
| + if (!CreateGscInputBuffers() || !CreateGscOutputBuffers())
|
| + return;
|
| +
|
| + if (do_output_encoding_) {
|
| + // MFC setup for encoding is rather particular in ordering.
|
| + // 1. Format (VIDIOC_S_FMT) set first on OUTPUT and CPATURE queues.
|
| + // 2. VIDIOC_REQBUFS, VIDIOC_QBUF, and VIDIOC_STREAMON on CAPTURE queue.
|
| + // 3. VIDIOC_REQBUFS (and later VIDIOC_QBUF and VIDIOC_STREAMON) on OUTPUT
|
| + // queue.
|
| +
|
| + if (!SetMfcFormats())
|
| + return;
|
| +
|
| + // VIDIOC_REQBUFS on CAPTURE queue.
|
| + if (!CreateMfcOutputBuffers())
|
| + return;
|
| +
|
| + // VIDIOC_QBUF and VIDIOC_STREAMON on CAPTURE queue.
|
| + EnqueueMfc();
|
| +
|
| + // VIDIOC_REQBUFS on OUTPUT queue.
|
| + if (!CreateMfcInputBuffers())
|
| + return;
|
| +
|
| + SetBitrate(initial_bitrate);
|
| + }
|
| +
|
| + if (!encoder_thread_.Start()) {
|
| + DLOG(ERROR) << "Initialize(): encoder thread failed to start";
|
| + NOTIFY_ERROR(kPlatformFailureError);
|
| + return;
|
| + }
|
| +
|
| + SetEncoderState(kInitialized);
|
| +
|
| + child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
|
| + &Client::NotifyInitializeDone,
|
| + client_));
|
| +
|
| + child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
|
| + &Client::RequireBitstreamBuffers,
|
| + client_,
|
| + output_visible_size_,
|
| + output_buffer_byte_size_));
|
| +}
|
| +
|
// Recycles GSC input buffer |index| onto the free list. Child-thread only.
void ExynosVideoEncodeAccelerator::ReturnFreeGscInputBuffer(int index) {
  DVLOG(3) << "ReturnFreeGscInputBuffer(): index=" << index;
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  gsc_free_input_buffers_.push_back(index);
}
|
| +
|
// Accepts one input frame for encoding. Runs on the child thread. If no GSC
// input buffer is free the frame is silently dropped (the client is still
// notified the frame is done). Otherwise a GL copy (for native-surface
// frames) plus an EGL fence is issued before handing off to the encoder
// thread. NOTE(review): |force_keyframe| is currently unused here -- confirm
// whether keyframe requests are handled elsewhere.
void ExynosVideoEncodeAccelerator::Encode(
    const scoped_refptr<media::VideoFrame>& frame,
    int32 frame_id,
    bool force_keyframe) {
  DVLOG(3) << "Encode(): frame=" << frame.get() << ", frame_id=" << frame_id;
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());

  // Out of input buffers: drop the frame but still report it consumed.
  if (gsc_free_input_buffers_.empty()) {
    child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
        &media::VideoEncodeAccelerator::Client::NotifyEndOfVideoFrame,
        client_,
        frame_id));
    return;
  }

  if (!make_context_current_.Run()) {
    DLOG(ERROR) << "Encode(): could not make context current";
    NOTIFY_ERROR(kPlatformFailureError);
    return;
  }

  // Claim a free GSC input buffer and associate it with this frame.
  const int gsc_buffer = gsc_free_input_buffers_.back();
  gsc_free_input_buffers_.pop_back();
  GscInputRecord& input_record = gsc_input_buffer_map_[gsc_buffer];
  DCHECK(!input_record.at_device);
  DCHECK_EQ(input_record.frame_id, -1);
  DCHECK_EQ(input_record.egl_sync, EGL_NO_SYNC_KHR);
  input_record.frame_id = frame_id;

  // Native EGL-surface frames: snapshot the current surface contents into
  // this record's texture (which backs the GSC input buffer).
  if (frame->format() == media::VideoFrame::NATIVE_EGLSURFACE) {
    DCHECK_NE(input_record.texture_id, 0U);
    gfx::ScopedTextureBinder binder(GL_TEXTURE_2D, input_record.texture_id);
    glCopyTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, 0, 0,
                        input_visible_size_.width(),
                        input_visible_size_.height());
  }

  // Fence so the encoder thread can wait for the GL work to complete before
  // the device reads the buffer.
  input_record.egl_sync =
      eglCreateSyncKHR(egl_display_, EGL_SYNC_FENCE_KHR, NULL);
  if (input_record.egl_sync == EGL_NO_SYNC_KHR) {
    DLOG(ERROR) << "Encode(): eglCreateSyncKHR() failed";
    NOTIFY_ERROR(kPlatformFailureError);
    return;
  }

  // Hand off: native frames go straight to EncodeTask(); other formats need
  // a memory copy first (CopyFrameAndEncodeTask, currently unimplemented).
  if (frame->format() == media::VideoFrame::NATIVE_EGLSURFACE) {
    encoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
        &ExynosVideoEncodeAccelerator::EncodeTask,
        base::Unretained(this),
        gsc_buffer));
  } else {
    encoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
        &ExynosVideoEncodeAccelerator::CopyFrameAndEncodeTask,
        base::Unretained(this),
        frame,
        gsc_buffer));
  }
}
|
| +
|
// Accepts an output buffer from the client. Maps its shared memory on the
// child thread, then passes ownership of the mapped ref to the encoder
// thread for filling.
void ExynosVideoEncodeAccelerator::UseBitstreamBuffer(
    const media::BitstreamBuffer& buffer) {
  DVLOG(3) << "UseBitstreamBuffer(): id=" << buffer.id();
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());

  scoped_ptr<BitstreamBufferRef> buffer_ref(new BitstreamBufferRef(
      buffer.id(),
      scoped_ptr<base::SharedMemory>(
          new base::SharedMemory(buffer.handle(), false)).Pass(),
      buffer.size()));
  if (!buffer_ref->shm->Map(buffer_ref->size)) {
    DLOG(ERROR) << "UseBitstreamBuffer(): could not map bitstream_buffer";
    NOTIFY_ERROR(kPlatformFailureError);
    return;
  }

  encoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
      &ExynosVideoEncodeAccelerator::UseBitstreamBufferTask,
      base::Unretained(this),
      base::Passed(&buffer_ref)));
}
|
| +
|
// Requests a bitrate change; the actual VIDIOC_S_CTRL happens in SetBitrate()
// on the encoder thread. May be called from any thread (no thread DCHECK).
void ExynosVideoEncodeAccelerator::RequestEncodingParameterChange(
    int32 bitrate) {
  encoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
      &ExynosVideoEncodeAccelerator::SetBitrate,
      base::Unretained(this),
      bitrate));
}
|
| +
|
// Synchronously shuts down the worker threads and self-deletes. After this
// returns, no further client callbacks are delivered (weak client pointers
// are invalidated first).
void ExynosVideoEncodeAccelerator::Destroy() {
  DVLOG(3) << "Destroy()";
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());

  // We're destroying; cancel all callbacks.
  client_ptr_factory_.InvalidateWeakPtrs();

  // If the encoder thread is running, destroy using posted task.
  if (encoder_thread_.IsRunning()) {
    encoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
        &ExynosVideoEncodeAccelerator::DestroyTask, base::Unretained(this)));
    // DestroyTask() will cause the encoder_thread_ to flush all tasks.
    // Stop() blocks until the thread has drained, so DestroyTask() has
    // completed by the time it returns.
    encoder_thread_.Stop();
  } else {
    // Otherwise, call the destroy task directly.
    DestroyTask();
  }

  // Set to kError state just in case.
  SetEncoderState(kError);

  // Safe: both worker threads are stopped, so the destructor's thread
  // DCHECKs hold and no task can still reference |this|.
  delete this;
}
|
| +
|
| +void ExynosVideoEncodeAccelerator::EncodeTask(int gsc_input_index) {
|
| + DVLOG(3) << "EncodeTask()";
|
| + DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());
|
| + DCHECK_NE(encoder_state_, kUninitialized);
|
| + const GscInputRecord& input_record = gsc_input_buffer_map_[gsc_input_index];
|
| + TRACE_EVENT1("Video encoder", "EVEA::EncodeTask",
|
| + "frame_id", input_record.frame_id);
|
| +
|
| + if (encoder_state_ == kError) {
|
| + DVLOG(2) << "EncodeTask(): early out: kError state";
|
| + return;
|
| + }
|
| +
|
| + if (encoder_state_ == kInitialized) {
|
| + if (!StartDevicePoll())
|
| + return;
|
| + encoder_state_ = kEncoding;
|
| + }
|
| +
|
| + encoder_input_queue_.push_back(gsc_input_index);
|
| + EnqueueGsc();
|
| +
|
| + child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
|
| + &media::VideoEncodeAccelerator::Client::NotifyEndOfVideoFrame,
|
| + client_,
|
| + input_record.frame_id));
|
| +}
|
| +
|
| +void ExynosVideoEncodeAccelerator::CopyFrameAndEncodeTask(
|
| + const scoped_refptr<media::VideoFrame>& frame,
|
| + int gsc_input_index) {
|
| + DVLOG(3) << "CopyFrameAndEncodeTask()";
|
| + DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());
|
| +
|
| + if (encoder_state_ == kError) {
|
| + DVLOG(2) << "CopyFrameAndEncodeTask(): early out: kError state";
|
| + return;
|
| + }
|
| +
|
| + if (frame->coded_size() != input_visible_size_) {
|
| + DLOG(ERROR) << "EncodeFrameTask(): input size change not supported: "
|
| + << input_visible_size_.width() << "x"
|
| + << input_visible_size_.height() << " -> "
|
| + << frame->coded_size().width() << "x"
|
| + << frame->coded_size().height();
|
| + NOTIFY_ERROR(kInvalidArgumentError);
|
| + return;
|
| + }
|
| +
|
| + NOTIMPLEMENTED();
|
| + EncodeTask(gsc_input_index);
|
| +}
|
| +
|
// Encoder-thread continuation of UseBitstreamBuffer(): stashes the mapped
// output buffer, then kicks the pipeline in case output was blocked waiting
// for a client buffer.
void ExynosVideoEncodeAccelerator::UseBitstreamBufferTask(
    scoped_ptr<BitstreamBufferRef> buffer_ref) {
  DVLOG(3) << "UseBitstreamBufferTask(): id=" << buffer_ref->id;
  DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());
  TRACE_EVENT1("Video Encoder", "EVEA::UseBitstreamBufferTask",
               "buffer", buffer_ref->id);

  encoder_bitstream_buffers_.push_back(linked_ptr<BitstreamBufferRef>(
      buffer_ref.release()));

  // Newly available client buffer may let completed output drain and free
  // device buffers for re-queueing.
  ReturnCompleteBuffers();
  EnqueueGsc();
  EnqueueMfc();
}
|
| +
|
// Encoder-thread half of Destroy(): stops streaming and the poll thread,
// then latches kError so any still-queued tasks early-out.
void ExynosVideoEncodeAccelerator::DestroyTask() {
  DVLOG(3) << "DestroyTask()";
  TRACE_EVENT0("Video Encoder", "EVEA::DestroyTask");

  // DestroyTask() should run regardless of encoder_state_.

  // Stop streaming and the device_poll_thread_.
  StopDevicePoll();

  // Set our state to kError. Just in case.
  encoder_state_ = kError;
}
|
| +
|
// Scheduled by DevicePollTask() whenever a device (or the interrupt eventfd)
// becomes ready: drains completed buffers, refills the device queues, then
// re-arms the poll with a mask describing which devices have work pending.
void ExynosVideoEncodeAccelerator::ServiceDeviceTask() {
  DVLOG(3) << "ServiceDeviceTask()";
  DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());
  DCHECK_NE(encoder_state_, kUninitialized);
  DCHECK_NE(encoder_state_, kInitialized);
  TRACE_EVENT0("Video Encoder", "EVEA::ServiceDeviceTask");

  if (encoder_state_ == kError) {
    DVLOG(2) << "ServiceDeviceTask(): early out: kError state";
    return;
  }

  // Dequeue before enqueue so freshly-freed buffers can be re-queued in the
  // same pass.
  DequeueGsc();
  DequeueMfc();
  EnqueueGsc();
  EnqueueMfc();

  // Clear the interrupt fd.
  if (!ClearDevicePollInterrupt())
    return;

  unsigned int poll_fds = 0;
  // Add GSC fd, if we should poll on it.
  // GSC has to wait until both input and output buffers are queued.
  if (gsc_input_buffer_queued_count_ > 0 && gsc_output_buffer_queued_count_ > 0)
    poll_fds |= kPollGsc;
  // Add MFC fd, if we should poll on it.
  // MFC can be polled as soon as either input or output buffers are queued.
  if (mfc_input_buffer_queued_count_ + mfc_output_buffer_queued_count_ > 0)
    poll_fds |= kPollMfc;

  // ServiceDeviceTask() should only ever be scheduled from DevicePollTask(),
  // so either:
  // * device_poll_thread_ is running normally
  // * device_poll_thread_ scheduled us, but then a ResetTask() or DestroyTask()
  //   shut it down, in which case we're either in kError state, and we should
  //   have early-outed already.
  DCHECK(device_poll_thread_.message_loop());
  // Queue the DevicePollTask() now.
  device_poll_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
      &ExynosVideoEncodeAccelerator::DevicePollTask,
      base::Unretained(this),
      poll_fds));

  // Pipeline snapshot: free+queued/total at each stage, for debugging stalls.
  DVLOG(1) << "ServiceDeviceTask(): buffer counts: ENC["
           << encoder_input_queue_.size() << "] => GSC["
           << gsc_free_input_buffers_.size() << "+"
           << gsc_input_buffer_queued_count_ << "/"
           << gsc_input_buffer_map_.size() << "->"
           << gsc_free_output_buffers_.size() << "+"
           << gsc_output_buffer_queued_count_ << "/"
           << gsc_output_buffer_map_.size() << "] => "
           << gsc_output_mfc_input_queue_.size() << " => MFC["
           << mfc_free_input_buffers_.size() << "+"
           << mfc_input_buffer_queued_count_ << "/"
           << mfc_input_buffer_map_.size() << "->"
           << mfc_free_output_buffers_.size() << "+"
           << mfc_output_buffer_queued_count_ << "/"
           << mfc_output_buffer_map_.size() << "] => OUT["
           << encoder_output_queue_.size() << "]";
}
|
| +
|
// Queues pending work to the GSC: all waiting input frames, and at most one
// output buffer. Starts streaming and wakes the poll thread when a queue
// transitions from empty to non-empty.
void ExynosVideoEncodeAccelerator::EnqueueGsc() {
  DVLOG(3) << "EnqueueGsc()";
  DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());
  TRACE_EVENT0("Video Encoder", "EVEA::EnqueueGsc");

  const int old_gsc_inputs_queued = gsc_input_buffer_queued_count_;
  while (!encoder_input_queue_.empty()) {
    if (!EnqueueGscInputRecord())
      return;
  }
  if (old_gsc_inputs_queued == 0 && gsc_input_buffer_queued_count_ != 0) {
    // We started up a previously empty queue.
    // Queue state changed; signal interrupt.
    if (!SetDevicePollInterrupt())
      return;
    // Start VIDIOC_STREAMON if we haven't yet.
    if (!gsc_input_streamon_) {
      __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
      IOCTL_OR_ERROR_RETURN(gsc_fd_, VIDIOC_STREAMON, &type);
      gsc_input_streamon_ = true;
    }
  }

  // Enqueue a GSC output, only if we need one.
  // When encoding, an output also requires a free MFC input to link to.
  if (gsc_input_buffer_queued_count_ != 0 &&
      gsc_output_buffer_queued_count_ == 0 &&
      !gsc_free_output_buffers_.empty() &&
      (!do_output_encoding_ || !mfc_free_input_buffers_.empty())) {
    const int old_gsc_outputs_queued = gsc_output_buffer_queued_count_;
    if (!EnqueueGscOutputRecord())
      return;
    if (old_gsc_outputs_queued == 0 && gsc_output_buffer_queued_count_ != 0) {
      // We just started up a previously empty queue.
      // Queue state changed; signal interrupt.
      if (!SetDevicePollInterrupt())
        return;
      // Start VIDIOC_STREAMON if we haven't yet.
      if (!gsc_output_streamon_) {
        __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
        IOCTL_OR_ERROR_RETURN(gsc_fd_, VIDIOC_STREAMON, &type);
        gsc_output_streamon_ = true;
      }
    }
  }
  // Bug check: GSC is liable to race conditions if more than one buffer is
  // simultaneously queued.  (DCHECK_GE(a, b) asserts a >= b, i.e. at most one
  // output queued.)
  DCHECK_GE(1, gsc_output_buffer_queued_count_);
}
|
| +
|
| +void ExynosVideoEncodeAccelerator::DequeueGsc() {
|
| + DVLOG(3) << "DequeueGsc()";
|
| + DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());
|
| + TRACE_EVENT0("Video Encoder", "EVEA::DequeueGsc");
|
| +
|
| + // Dequeue completed GSC input (VIDEO_OUTPUT) buffers, and recycle to the free
|
| + // list.
|
| + struct v4l2_buffer dqbuf;
|
| + struct v4l2_plane planes[3];
|
| + while (gsc_input_buffer_queued_count_ > 0) {
|
| + DCHECK(gsc_input_streamon_);
|
| + memset(&dqbuf, 0, sizeof(dqbuf));
|
| + dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
|
| + dqbuf.memory = V4L2_MEMORY_MMAP;
|
| + if (ioctl(gsc_fd_, VIDIOC_DQBUF, &dqbuf) != 0) {
|
| + if (errno == EAGAIN) {
|
| + // EAGAIN if we're just out of buffers to dequeue.
|
| + break;
|
| + }
|
| + DPLOG(ERROR) << "DequeueGsc(): ioctl() failed: VIDIOC_DQBUF";
|
| + NOTIFY_ERROR(kPlatformFailureError);
|
| + return;
|
| + }
|
| + GscInputRecord& input_record = gsc_input_buffer_map_[dqbuf.index];
|
| + DCHECK(input_record.at_device);
|
| + gsc_free_input_buffers_.push_back(dqbuf.index);
|
| + const int32 frame_id = input_record.frame_id;
|
| + input_record.at_device = false;
|
| + input_record.frame_id = -1;
|
| + gsc_input_buffer_queued_count_--;
|
| + child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
|
| + &Client::NotifyEndOfVideoFrame, client_, frame_id));
|
| + }
|
| +
|
| + // Dequeue completed GSC output (VIDEO_CAPTURE) buffers, and queue to the
|
| + // completed queue.
|
| + while (gsc_output_buffer_queued_count_ > 0) {
|
| + DCHECK(gsc_output_streamon_);
|
| + memset(&dqbuf, 0, sizeof(dqbuf));
|
| + memset(&planes, 0, sizeof(planes));
|
| + dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
|
| + dqbuf.memory = V4L2_MEMORY_MMAP;
|
| + dqbuf.m.planes = planes;
|
| + dqbuf.length = 3;
|
| + if (ioctl(gsc_fd_, VIDIOC_DQBUF, &dqbuf) != 0) {
|
| + if (errno == EAGAIN) {
|
| + // EAGAIN if we're just out of buffers to dequeue.
|
| + break;
|
| + }
|
| + DPLOG(ERROR) << "DequeueGsc(): ioctl() failed: VIDIOC_DQBUF";
|
| + NOTIFY_ERROR(kPlatformFailureError);
|
| + return;
|
| + }
|
| + GscOutputRecord& output_record = gsc_output_buffer_map_[dqbuf.index];
|
| + DCHECK(output_record.at_device);
|
| + output_record.at_device = false;
|
| + if (do_output_encoding_) {
|
| + gsc_output_mfc_input_queue_.push_back(output_record.mfc_input);
|
| + output_record.mfc_input = -1;
|
| + gsc_free_output_buffers_.push_back(dqbuf.index);
|
| + } else {
|
| + // Don't recycle to its free list yet -- we can't do that until
|
| + // ReturnCompleteBuffers() finishes copying the output out.
|
| + output_record.bytes_used[0] = dqbuf.m.planes[0].bytesused;
|
| + output_record.bytes_used[1] = dqbuf.m.planes[1].bytesused;
|
| + output_record.bytes_used[2] = dqbuf.m.planes[2].bytesused;
|
| + encoder_output_queue_.push_back(dqbuf.index);
|
| + }
|
| + gsc_output_buffer_queued_count_--;
|
| + }
|
| +
|
| + ReturnCompleteBuffers();
|
| +}
|
| +
|
// Queues pending work to the MFC encoder: all converted frames waiting in
// gsc_output_mfc_input_queue_, plus every free output (bitstream) buffer.
// Starts streaming / wakes the poll thread on empty-to-non-empty transitions.
// No-op unless this instance is actually encoding.
void ExynosVideoEncodeAccelerator::EnqueueMfc() {
  DVLOG(3) << "EnqueueMfc()";
  TRACE_EVENT0("Video Encoder", "EVEA::EnqueueMfc");

  if (!do_output_encoding_)
    return;

  // Drain the pipe of completed GSC output buffers.
  const int old_mfc_inputs_queued = mfc_input_buffer_queued_count_;
  while (!gsc_output_mfc_input_queue_.empty()) {
    if (!EnqueueMfcInputRecord())
      return;
  }
  if (old_mfc_inputs_queued == 0 && mfc_input_buffer_queued_count_ != 0) {
    // We just started up a previously empty queue.
    // Queue state changed; signal interrupt.
    if (!SetDevicePollInterrupt())
      return;
    // Start VIDIOC_STREAMON if we haven't yet.
    if (!mfc_input_streamon_) {
      __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
      IOCTL_OR_ERROR_RETURN(mfc_fd_, VIDIOC_STREAMON, &type);
      mfc_input_streamon_ = true;
    }
  }

  // Enqueue all the MFC outputs we can.
  const int old_mfc_outputs_queued = mfc_output_buffer_queued_count_;
  while (!mfc_free_output_buffers_.empty()) {
    if (!EnqueueMfcOutputRecord())
      return;
  }
  if (old_mfc_outputs_queued == 0 && mfc_output_buffer_queued_count_ != 0) {
    // We just started up a previously empty queue.
    // Queue state changed; signal interrupt.
    if (!SetDevicePollInterrupt())
      return;
    // Start VIDIOC_STREAMON if we haven't yet.
    if (!mfc_output_streamon_) {
      __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
      IOCTL_OR_ERROR_RETURN(mfc_fd_, VIDIOC_STREAMON, &type);
      mfc_output_streamon_ = true;
    }
  }
}
|
| +
|
| +void ExynosVideoEncodeAccelerator::DequeueMfc() {
|
| + DVLOG(3) << "DequeueMfc";
|
| + TRACE_EVENT0("Video Encoder", "EVEA::DequeueMfc");
|
| +
|
| + // Dequeue completed MFC input (VIDEO_OUTPUT) buffers, and recycle to the free
|
| + // list.
|
| + struct v4l2_buffer dqbuf;
|
| + struct v4l2_plane planes[2];
|
| + while (mfc_input_buffer_queued_count_ > 0) {
|
| + DCHECK(mfc_input_streamon_);
|
| + memset(&dqbuf, 0, sizeof(dqbuf));
|
| + dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
|
| + dqbuf.memory = V4L2_MEMORY_MMAP;
|
| + dqbuf.m.planes = planes;
|
| + dqbuf.length = 2;
|
| + if (ioctl(mfc_fd_, VIDIOC_DQBUF, &dqbuf) != 0) {
|
| + if (errno == EAGAIN) {
|
| + // EAGAIN if we're just out of buffers to dequeue.
|
| + break;
|
| + }
|
| + DPLOG(ERROR) << "DequeueMfc(): ioctl() failed: VIDIOC_DQBUF";
|
| + NOTIFY_ERROR(kPlatformFailureError);
|
| + return;
|
| + }
|
| + MfcInputRecord& input_record = mfc_input_buffer_map_[dqbuf.index];
|
| + DCHECK(input_record.at_device);
|
| + input_record.at_device = false;
|
| + mfc_free_input_buffers_.push_back(dqbuf.index);
|
| + mfc_input_buffer_queued_count_--;
|
| + }
|
| +
|
| + // Dequeue completed MFC output (VIDEO_CAPTURE) buffers, and queue to the
|
| + // completed queue. Don't recycle to its free list yet -- we can't do that
|
| + // until ReturnCompleteBuffers() finishes copying the output out.
|
| + while (mfc_output_buffer_queued_count_ > 0) {
|
| + DCHECK(mfc_output_streamon_);
|
| + memset(&dqbuf, 0, sizeof(dqbuf));
|
| + memset(planes, 0, sizeof(planes));
|
| + dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
|
| + dqbuf.memory = V4L2_MEMORY_MMAP;
|
| + dqbuf.m.planes = planes;
|
| + dqbuf.length = 1;
|
| + if (ioctl(mfc_fd_, VIDIOC_DQBUF, &dqbuf) != 0) {
|
| + if (errno == EAGAIN) {
|
| + // EAGAIN if we're just out of buffers to dequeue.
|
| + break;
|
| + }
|
| + DPLOG(ERROR) << "DequeueMfc(): ioctl() failed: VIDIOC_DQBUF";
|
| + NOTIFY_ERROR(kPlatformFailureError);
|
| + return;
|
| + }
|
| + // Don't recycle to its free list yet -- we can't do that until
|
| + // ReturnCompleteBuffers() finishes copying the output out.
|
| + MfcOutputRecord& output_record = mfc_output_buffer_map_[dqbuf.index];
|
| + DCHECK(output_record.at_device);
|
| + output_record.at_device = false;
|
| + output_record.bytes_used = dqbuf.m.planes[0].bytesused;
|
| + encoder_output_queue_.push_back(dqbuf.index);
|
| + mfc_output_buffer_queued_count_--;
|
| + }
|
| +}
|
| +
|
// Queues the front of encoder_input_queue_ to the GSC input (VIDEO_OUTPUT)
// queue, first waiting out any EGL fence guarding the frame's GL copy.
// Returns false (after NOTIFY_ERROR) on ioctl failure.
bool ExynosVideoEncodeAccelerator::EnqueueGscInputRecord() {
  DVLOG(3) << "EnqueueGscInputRecord()";
  DCHECK(!encoder_input_queue_.empty());

  // Enqueue a GSC input (VIDEO_OUTPUT) buffer for an input video frame.
  const int gsc_buffer = encoder_input_queue_.front();
  GscInputRecord& input_record = gsc_input_buffer_map_[gsc_buffer];
  DCHECK(!input_record.at_device);
  if (input_record.egl_sync != EGL_NO_SYNC_KHR) {
    TRACE_EVENT0(
        "Video Encoder",
        "EVEA::EnqueueGscInputRecord: eglClientWaitSyncKHR");
    // If we have to wait for completion, wait.  Blocks this (encoder) thread
    // until the GL producer has finished writing the buffer.
    eglClientWaitSyncKHR(egl_display_, input_record.egl_sync, 0,
                         EGL_FOREVER_KHR);
    eglDestroySyncKHR(egl_display_, input_record.egl_sync);
    input_record.egl_sync = EGL_NO_SYNC_KHR;
  }
  struct v4l2_buffer qbuf;
  struct v4l2_plane qbuf_planes[1];
  memset(&qbuf, 0, sizeof(qbuf));
  memset(qbuf_planes, 0, sizeof(qbuf_planes));
  qbuf.index = gsc_buffer;
  qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  // Stash the frame id in the timestamp so it survives the round trip
  // through the device.
  qbuf.timestamp.tv_sec = input_record.frame_id;
  qbuf.memory = V4L2_MEMORY_MMAP;
  qbuf.m.planes = qbuf_planes;
  qbuf.length = 1;
  IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_QBUF, &qbuf);
  encoder_input_queue_.pop_front();
  input_record.at_device = true;
  gsc_input_buffer_queued_count_++;
  DVLOG(3) << "EnqueueGscInputRecord(): enqueued buffer=" << gsc_buffer
           << ", frame_id=" << input_record.frame_id;
  return true;
}
|
| +
|
// Queues one free GSC output (VIDEO_CAPTURE) buffer. When encoding, the GSC
// writes directly into an MFC input buffer via its exported DMABUF fds
// (zero-copy handoff); otherwise the GSC's own mmap()ed planes are used.
// Returns false (after NOTIFY_ERROR) on ioctl failure.
bool ExynosVideoEncodeAccelerator::EnqueueGscOutputRecord() {
  DVLOG(3) << "EnqueueGscOutputRecord()";
  DCHECK(!gsc_free_output_buffers_.empty());

  // Enqueue a GSC output (VIDEO_CAPTURE) buffer.
  const int gsc_buffer = gsc_free_output_buffers_.back();
  GscOutputRecord& output_record = gsc_output_buffer_map_[gsc_buffer];
  DCHECK(!output_record.at_device);
  DCHECK_EQ(output_record.mfc_input, -1);
  struct v4l2_buffer qbuf;
  struct v4l2_plane qbuf_planes[3];
  memset(&qbuf, 0, sizeof(qbuf));
  memset(qbuf_planes, 0, sizeof(qbuf_planes));
  qbuf.index = gsc_buffer;
  qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  qbuf.m.planes = qbuf_planes;
  if (do_output_encoding_) {
    // Caller (EnqueueGsc) guarantees an MFC input is available.
    DCHECK(!mfc_free_input_buffers_.empty());
    qbuf.memory = V4L2_MEMORY_DMABUF;
    const int mfc_buffer = mfc_free_input_buffers_.back();
    mfc_free_input_buffers_.pop_back();
    MfcInputRecord& input_record = mfc_input_buffer_map_[mfc_buffer];
    DCHECK(!input_record.at_device);
    output_record.mfc_input = mfc_buffer;
    qbuf.m.planes[0].m.fd = input_record.fd[0];
    qbuf.m.planes[1].m.fd = input_record.fd[1];
    qbuf.length = 2;
  } else {
    qbuf.memory = V4L2_MEMORY_MMAP;
    qbuf.length = 3;
  }
  IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_QBUF, &qbuf);
  gsc_free_output_buffers_.pop_back();
  output_record.at_device = true;
  gsc_output_buffer_queued_count_++;
  return true;
}
|
| +
|
bool ExynosVideoEncodeAccelerator::EnqueueMfcInputRecord() {
  DVLOG(3) << "EnqueueMfcInputRecord()";
  DCHECK(do_output_encoding_);
  DCHECK(!gsc_output_mfc_input_queue_.empty());

  // Enqueue a MFC input (VIDEO_OUTPUT) buffer.
  // The buffer already contains a converted frame: GSC wrote into it via the
  // shared dmabufs set up in EnqueueGscOutputRecord().
  const int mfc_buffer = gsc_output_mfc_input_queue_.front();
  MfcInputRecord& input_record = mfc_input_buffer_map_[mfc_buffer];
  DCHECK(!input_record.at_device);
  struct v4l2_buffer qbuf;
  struct v4l2_plane qbuf_planes[2];
  memset(&qbuf, 0, sizeof(qbuf));
  memset(qbuf_planes, 0, sizeof(qbuf_planes));
  qbuf.index = mfc_buffer;
  qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  qbuf.memory = V4L2_MEMORY_MMAP;
  qbuf.m.planes = qbuf_planes;
  qbuf.length = 2;  // Two planes for the NV12M frame (Y + CbCr).
  IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_QBUF, &qbuf);
  // Update bookkeeping only after the QBUF succeeds.
  gsc_output_mfc_input_queue_.pop_front();
  input_record.at_device = true;
  mfc_input_buffer_queued_count_++;
  return true;
}
|
| +
|
bool ExynosVideoEncodeAccelerator::EnqueueMfcOutputRecord() {
  DVLOG(3) << "EnqueueMfcOutputRecord()";
  DCHECK(do_output_encoding_);
  DCHECK(!mfc_free_output_buffers_.empty());

  // Enqueue a MFC output (VIDEO_CAPTURE) buffer.
  // The device fills this buffer with encoded bitstream data.
  const int mfc_buffer = mfc_free_output_buffers_.back();
  MfcOutputRecord& output_record = mfc_output_buffer_map_[mfc_buffer];
  DCHECK(!output_record.at_device);
  // bytes_used must have been consumed/reset by ReturnCompleteBuffers().
  DCHECK_EQ(output_record.bytes_used, 0U);
  struct v4l2_buffer qbuf;
  struct v4l2_plane qbuf_planes[1];
  memset(&qbuf, 0, sizeof(qbuf));
  memset(qbuf_planes, 0, sizeof(qbuf_planes));
  qbuf.index = mfc_buffer;
  qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  qbuf.memory = V4L2_MEMORY_MMAP;
  qbuf.m.planes = qbuf_planes;
  qbuf.length = 1;  // Encoded streams use a single plane.
  IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_QBUF, &qbuf);
  // Update bookkeeping only after the QBUF succeeds.
  mfc_free_output_buffers_.pop_back();
  output_record.at_device = true;
  mfc_output_buffer_queued_count_++;
  return true;
}
|
| +
|
| +void ExynosVideoEncodeAccelerator::ReturnCompleteBuffers() {
|
| + DVLOG(3) << "ReturnCompleteBuffers()";
|
| + DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());
|
| + DCHECK_NE(encoder_state_, kUninitialized);
|
| +
|
| + while (!encoder_output_queue_.empty() &&
|
| + !encoder_bitstream_buffers_.empty()) {
|
| + const int output_index = encoder_output_queue_.front();
|
| + encoder_output_queue_.pop_front();
|
| + scoped_ptr<BitstreamBufferRef> buffer_ref(
|
| + encoder_bitstream_buffers_.back().release());
|
| + encoder_bitstream_buffers_.pop_back();
|
| + uint8* data = reinterpret_cast<uint8*>(buffer_ref->shm->memory());
|
| + size_t offset = 0;
|
| + bool key_frame = false;
|
| + if (do_output_encoding_) {
|
| + MfcOutputRecord& output_record = mfc_output_buffer_map_[output_index];
|
| + CHECK_GE(buffer_ref->size, output_record.bytes_used);
|
| + const uint8* source =
|
| + reinterpret_cast<const uint8*>(output_record.address);
|
| + if (stream_header_size_ == 0) {
|
| + stream_header_size_ = output_record.bytes_used;
|
| + stream_header_.reset(new uint8[stream_header_size_]);
|
| + memcpy(stream_header_.get(), source, stream_header_size_);
|
| + }
|
| + if (output_record.bytes_used >= 5) {
|
| + if ((source[4] & 0x1F) == 0x5) {
|
| + key_frame = true;
|
| + memcpy(data, stream_header_.get(), stream_header_size_);
|
| + data += stream_header_size_;
|
| + offset += stream_header_size_;
|
| + }
|
| + }
|
| + memcpy(data, source, output_record.bytes_used);
|
| + offset += output_record.bytes_used;
|
| + output_record.bytes_used = 0;
|
| + mfc_free_output_buffers_.push_back(output_index);
|
| + } else {
|
| + GscOutputRecord& output_record = gsc_output_buffer_map_[output_index];
|
| + // GSC output is 16 pixel-aligned; we may have to trim down to our actual
|
| + // output size.
|
| + // Copy the Y plane.
|
| + const uint8* y_plane = reinterpret_cast<uint8*>(output_record.address[0]);
|
| + for (int i = 0; i < output_visible_size_.height(); ++i) {
|
| + memcpy(data, y_plane, output_visible_size_.width());
|
| + data += output_visible_size_.width();
|
| + y_plane += converted_allocated_size_.width();
|
| + }
|
| + // Copy the U plane.
|
| + const uint8* u_plane = reinterpret_cast<uint8*>(output_record.address[1]);
|
| + for (int i = 0; i < output_visible_size_.height() / 2; ++i) {
|
| + memcpy(data, u_plane, output_visible_size_.width() / 2);
|
| + data += output_visible_size_.width() / 2;
|
| + u_plane += converted_allocated_size_.width() / 2;
|
| + }
|
| + // Copy the V plane.
|
| + const uint8* v_plane = reinterpret_cast<uint8*>(output_record.address[2]);
|
| + for (int i = 0; i < output_visible_size_.height() / 2; ++i) {
|
| + memcpy(data, v_plane, output_visible_size_.width() / 2);
|
| + data += output_visible_size_.width() / 2;
|
| + v_plane += converted_allocated_size_.width() / 2;
|
| + }
|
| + offset = output_visible_size_.GetArea() * 3 / 2;
|
| + gsc_free_output_buffers_.push_back(output_index);
|
| + }
|
| + DLOG(ERROR) << "ReturnCompleteBuffers(): BitstreamBufferReady(): "
|
| + << "bitstream_buffer_id=" << buffer_ref->id
|
| + << ", size=" << offset;
|
| + child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
|
| + &Client::BitstreamBufferReady,
|
| + client_,
|
| + buffer_ref->id,
|
| + offset,
|
| + key_frame));
|
| + }
|
| +}
|
| +
|
| +bool ExynosVideoEncodeAccelerator::StartDevicePoll() {
|
| + DVLOG(3) << "StartDevicePoll()";
|
| + DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());
|
| + DCHECK(!device_poll_thread_.IsRunning());
|
| +
|
| + // Start up the device poll thread and schedule its first DevicePollTask().
|
| + if (!device_poll_thread_.Start()) {
|
| + DLOG(ERROR) << "StartDevicePoll(): Device thread failed to start";
|
| + NOTIFY_ERROR(kPlatformFailureError);
|
| + return false;
|
| + }
|
| + device_poll_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
|
| + &ExynosVideoEncodeAccelerator::DevicePollTask,
|
| + base::Unretained(this),
|
| + 0));
|
| +
|
| + return true;
|
| +}
|
| +
|
bool ExynosVideoEncodeAccelerator::StopDevicePoll() {
  DVLOG(3) << "StopDevicePoll()";

  // Signal the DevicePollTask() to stop, and stop the device poll thread.
  if (!SetDevicePollInterrupt())
    return false;
  device_poll_thread_.Stop();
  // Clear the interrupt now, to be sure.
  if (!ClearDevicePollInterrupt())
    return false;

  // Stop streaming.  VIDIOC_STREAMOFF implicitly dequeues all buffers at the
  // device, so the accounting reset below is consistent with driver state.
  if (gsc_input_streamon_) {
    __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_STREAMOFF, &type);
  }
  gsc_input_streamon_ = false;
  if (gsc_output_streamon_) {
    __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_STREAMOFF, &type);
  }
  gsc_output_streamon_ = false;
  if (mfc_input_streamon_) {
    __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_STREAMOFF, &type);
  }
  mfc_input_streamon_ = false;
  if (mfc_output_streamon_) {
    __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_STREAMOFF, &type);
  }
  mfc_output_streamon_ = false;

  // Reset all our accounting info.  Every buffer index goes back onto its
  // free list, and any in-flight per-buffer state (EGL syncs, MFC pairings,
  // byte counts) is discarded.
  encoder_input_queue_.clear();
  gsc_free_input_buffers_.clear();
  for (size_t i = 0; i < gsc_input_buffer_map_.size(); ++i) {
    gsc_free_input_buffers_.push_back(i);
    GscInputRecord& input_record = gsc_input_buffer_map_[i];
    input_record.at_device = false;
    input_record.frame_id = -1;
    // Destroy any sync object still pending for this input frame.
    if (input_record.egl_sync) {
      eglDestroySyncKHR(egl_display_, input_record.egl_sync);
      input_record.egl_sync = EGL_NO_SYNC_KHR;
    }
  }
  gsc_input_buffer_queued_count_ = 0;
  gsc_free_output_buffers_.clear();
  for (size_t i = 0; i < gsc_output_buffer_map_.size(); ++i) {
    gsc_free_output_buffers_.push_back(i);
    GscOutputRecord& output_record = gsc_output_buffer_map_[i];
    output_record.at_device = false;
    output_record.mfc_input = -1;
    output_record.bytes_used[0] = output_record.bytes_used[1]
        = output_record.bytes_used[2] = 0;
  }
  gsc_output_buffer_queued_count_ = 0;
  gsc_output_mfc_input_queue_.clear();
  mfc_free_input_buffers_.clear();
  for (size_t i = 0; i < mfc_input_buffer_map_.size(); ++i) {
    mfc_free_input_buffers_.push_back(i);
    MfcInputRecord& input_record = mfc_input_buffer_map_[i];
    input_record.at_device = false;
  }
  mfc_input_buffer_queued_count_ = 0;
  mfc_free_output_buffers_.clear();
  for (size_t i = 0; i < mfc_output_buffer_map_.size(); ++i) {
    mfc_free_output_buffers_.push_back(i);
    MfcOutputRecord& output_record = mfc_output_buffer_map_[i];
    output_record.at_device = false;
    output_record.bytes_used = 0;
  }
  encoder_output_queue_.clear();

  DVLOG(3) << "StopDevicePoll(): device poll stopped";
  return true;
}
|
| +
|
| +bool ExynosVideoEncodeAccelerator::SetDevicePollInterrupt() {
|
| + DVLOG(3) << "SetDevicePollInterrupt()";
|
| +
|
| + const uint64 buf = 1;
|
| + if (HANDLE_EINTR(write(device_poll_interrupt_fd_, &buf, sizeof(buf))) == -1) {
|
| + DPLOG(ERROR) << "SetDevicePollInterrupt(): write() failed";
|
| + NOTIFY_ERROR(kPlatformFailureError);
|
| + return false;
|
| + }
|
| + return true;
|
| +}
|
| +
|
| +bool ExynosVideoEncodeAccelerator::ClearDevicePollInterrupt() {
|
| + DVLOG(3) << "ClearDevicePollInterrupt()";
|
| +
|
| + uint64 buf;
|
| + if (HANDLE_EINTR(read(device_poll_interrupt_fd_, &buf, sizeof(buf))) == -1) {
|
| + if (errno == EAGAIN) {
|
| + // No interrupt flag set, and we're reading nonblocking. Not an error.
|
| + return true;
|
| + } else {
|
| + DPLOG(ERROR) << "ClearDevicePollInterrupt(): read() failed";
|
| + NOTIFY_ERROR(kPlatformFailureError);
|
| + return false;
|
| + }
|
| + }
|
| + return true;
|
| +}
|
| +
|
void ExynosVideoEncodeAccelerator::DevicePollTask(unsigned int poll_fds) {
  DVLOG(3) << "DevicePollTask()";
  DCHECK_EQ(device_poll_thread_.message_loop(), base::MessageLoop::current());
  TRACE_EVENT0("Video Encoder", "EVEA::DevicePollTask");

  // This routine just polls the set of device fds, and schedules a
  // ServiceDeviceTask() on encoder_thread_ when processing needs to occur.
  // Other threads may notify this task to return early by writing to
  // device_poll_interrupt_fd_.
  // poll_fds is a bitmask of kPollGsc/kPollMfc selecting which device fds to
  // watch in addition to the interrupt eventfd.
  struct pollfd pollfds[3];
  nfds_t nfds;

  // Add device_poll_interrupt_fd_;
  pollfds[0].fd = device_poll_interrupt_fd_;
  pollfds[0].events = POLLIN | POLLERR;
  nfds = 1;

  // Add GSC fd, if we should poll on it.
  // GSC has to wait until both input and output buffers are queued.
  if (poll_fds & kPollGsc) {
    DVLOG(3) << "DevicePollTask(): adding GSC to poll() set";
    pollfds[nfds].fd = gsc_fd_;
    pollfds[nfds].events = POLLIN | POLLOUT | POLLERR;
    nfds++;
  }
  if (poll_fds & kPollMfc) {
    DVLOG(3) << "DevicePollTask(): adding MFC to poll() set";
    pollfds[nfds].fd = mfc_fd_;
    pollfds[nfds].events = POLLIN | POLLOUT | POLLERR;
    nfds++;
  }

  // Poll it!  Timeout of -1 blocks until an fd becomes ready or the interrupt
  // eventfd is written.
  if (HANDLE_EINTR(poll(pollfds, nfds, -1)) == -1) {
    DPLOG(ERROR) << "DevicePollTask(): poll() failed";
    NOTIFY_ERROR(kPlatformFailureError);
    return;
  }

  // All processing should happen on ServiceDeviceTask(), since we shouldn't
  // touch encoder state from this thread.
  encoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
      &ExynosVideoEncodeAccelerator::ServiceDeviceTask,
      base::Unretained(this)));
}
|
| +
|
| +void ExynosVideoEncodeAccelerator::NotifyError(Error error) {
|
| + DVLOG(2) << "NotifyError()";
|
| +
|
| + if (!child_message_loop_proxy_->BelongsToCurrentThread()) {
|
| + child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
|
| + &ExynosVideoEncodeAccelerator::NotifyError, weak_this_, error));
|
| + return;
|
| + }
|
| +
|
| + if (client_) {
|
| + client_->NotifyError(error);
|
| + client_ptr_factory_.InvalidateWeakPtrs();
|
| + }
|
| +}
|
| +
|
| +void ExynosVideoEncodeAccelerator::SetEncoderState(State state) {
|
| + DVLOG(3) << "SetEncoderState(): state=" << state;
|
| +
|
| + // We can touch encoder_state_ only if this is the encoder thread or the
|
| + // encoder thread isn't running.
|
| + if (encoder_thread_.message_loop() != NULL &&
|
| + encoder_thread_.message_loop() != base::MessageLoop::current()) {
|
| + encoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
|
| + &ExynosVideoEncodeAccelerator::SetEncoderState,
|
| + base::Unretained(this), state));
|
| + } else {
|
| + encoder_state_ = state;
|
| + }
|
| +}
|
| +
|
// Configures the GSC input (VIDEO_OUTPUT) queue for RGB32 frames and
// allocates its MMAP buffers.  When encoding from the backbuffer, each buffer
// is additionally exported as a dmabuf and wrapped in an EGLImage-backed GL
// texture so frames can be copied in with glCopyTexSubImage2D().
bool ExynosVideoEncodeAccelerator::CreateGscInputBuffers() {
  DVLOG(3) << "CreateGscInputBuffers()";
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  DCHECK_EQ(encoder_state_, kUninitialized);
  DCHECK(!gsc_input_streamon_);

  // No rotation.
  struct v4l2_control control;
  memset(&control, 0, sizeof(control));
  control.id = V4L2_CID_ROTATE;
  control.value = 0;
  IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CTRL, &control);

  // HFLIP actually seems to control vertical mirroring for GSC, and vice-versa.
  memset(&control, 0, sizeof(control));
  control.id = V4L2_CID_HFLIP;
  control.value = 1;
  IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CTRL, &control);

  memset(&control, 0, sizeof(control));
  control.id = V4L2_CID_VFLIP;
  control.value = 0;
  IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CTRL, &control);

  // Fully opaque output.
  memset(&control, 0, sizeof(control));
  control.id = V4L2_CID_GLOBAL_ALPHA;
  control.value = 255;
  IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CTRL, &control);

  // Single-plane RGB32 at the allocated (padded) size; 4 bytes per pixel.
  struct v4l2_format format;
  memset(&format, 0, sizeof(format));
  format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  format.fmt.pix_mp.width = input_allocated_size_.width();
  format.fmt.pix_mp.height = input_allocated_size_.height();
  format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_RGB32;
  format.fmt.pix_mp.plane_fmt[0].sizeimage =
      input_allocated_size_.GetArea() * 4;
  format.fmt.pix_mp.plane_fmt[0].bytesperline =
      input_allocated_size_.width() * 4;
  format.fmt.pix_mp.num_planes = 1;
  IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_FMT, &format);

  // Crop to the visible region of the (padded) input frame.
  struct v4l2_crop crop;
  memset(&crop, 0, sizeof(crop));
  crop.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  crop.c.left = 0;
  crop.c.top = 0;
  crop.c.width = input_visible_size_.width();
  crop.c.height = input_visible_size_.height();
  IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CROP, &crop);

  struct v4l2_requestbuffers reqbufs;
  memset(&reqbufs, 0, sizeof(reqbufs));
  reqbufs.count = kGscInputBufferCount;
  reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  reqbufs.memory = V4L2_MEMORY_MMAP;
  IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_REQBUFS, &reqbufs);

  DCHECK(gsc_input_buffer_map_.empty());
  gsc_input_buffer_map_.resize(reqbufs.count);
  for (size_t i = 0; i < gsc_input_buffer_map_.size(); ++i) {
    GscInputRecord& input_record = gsc_input_buffer_map_[i];
    if (do_encode_from_backbuffer_) {
      // We have to export textures from the GSC input buffers so we can
      // glCopyTexSubImage2D() to them.
      struct v4l2_exportbuffer expbuf;
      memset(&expbuf, 0, sizeof(expbuf));
      expbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
      expbuf.index = i;
      expbuf.plane = 0;
      expbuf.flags = O_CLOEXEC;
      IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_EXPBUF, &expbuf);
      // Close the exported fd when this iteration ends.  The EGL
      // implementation takes its own reference to the dmabuf during
      // eglCreateImageKHR(), per EGL_EXT_image_dma_buf_import.
      file_util::ScopedFD autofd(&expbuf.fd);

      // Describe the dmabuf to EGL: single XRGB8888 plane, allocated
      // (padded) dimensions, tightly packed 4-byte pixels.
      EGLint attrs[13];
      {
        size_t j = 0;
        attrs[j++] = EGL_WIDTH;
        attrs[j++] = input_allocated_size_.width();
        attrs[j++] = EGL_HEIGHT;
        attrs[j++] = input_allocated_size_.height();
        attrs[j++] = EGL_LINUX_DRM_FOURCC_EXT;
        attrs[j++] = DRM_FORMAT_XRGB8888;
        attrs[j++] = EGL_DMA_BUF_PLANE0_FD_EXT;
        attrs[j++] = expbuf.fd;
        attrs[j++] = EGL_DMA_BUF_PLANE0_OFFSET_EXT;
        attrs[j++] = 0;
        attrs[j++] = EGL_DMA_BUF_PLANE0_PITCH_EXT;
        attrs[j++] = input_allocated_size_.width() * 4;
        attrs[j++] = EGL_NONE;
        DCHECK_EQ(j, arraysize(attrs));
      }
      input_record.egl_image = eglCreateImageKHR(
          egl_display_, EGL_NO_CONTEXT, EGL_LINUX_DMA_BUF_EXT, NULL, attrs);
      if (input_record.egl_image == EGL_NO_IMAGE_KHR) {
        DLOG(ERROR) << "CreateGscInputBuffers(): could not create EGLImageKHR";
        NOTIFY_ERROR(kPlatformFailureError);
        return false;
      }
      glGenTextures(1, &input_record.texture_id);
      if (input_record.texture_id == 0) {
        DLOG(ERROR) << "CreateGscInputBuffers(): glGenTextures() failed";
        NOTIFY_ERROR(kPlatformFailureError);
        return false;
      }
      // Bind the EGLImage as the texture's storage; the binder restores the
      // previous GL_TEXTURE_2D binding on scope exit.
      gfx::ScopedTextureBinder binder(GL_TEXTURE_2D, input_record.texture_id);
      glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, input_record.egl_image);
    }
    gsc_free_input_buffers_.push_back(i);
  }

  return true;
}
|
| +
|
// Configures the GSC output (VIDEO_CAPTURE) queue and allocates its buffers.
// With MFC encoding downstream, the format is NV12M and the buffers are
// imported MFC dmabufs (no mapping here); otherwise YUV420M MMAP buffers are
// allocated and mapped so ReturnCompleteBuffers() can copy planes out.
bool ExynosVideoEncodeAccelerator::CreateGscOutputBuffers() {
  DVLOG(3) << "CreateGscOutputBuffers()";
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  DCHECK_EQ(encoder_state_, kUninitialized);
  DCHECK(!gsc_output_streamon_);

  struct v4l2_format format;
  memset(&format, 0, sizeof(format));
  format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  format.fmt.pix_mp.width = converted_allocated_size_.width();
  format.fmt.pix_mp.height = converted_allocated_size_.height();
  if (do_output_encoding_) {
    // Two-plane NV12M: full-size Y plane plus half-size interleaved CbCr.
    format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_NV12M;
    format.fmt.pix_mp.plane_fmt[0].sizeimage =
        converted_allocated_size_.GetArea();
    format.fmt.pix_mp.plane_fmt[1].sizeimage =
        converted_allocated_size_.GetArea() / 2;
    format.fmt.pix_mp.plane_fmt[0].bytesperline =
        converted_allocated_size_.width();
    format.fmt.pix_mp.plane_fmt[1].bytesperline =
        converted_allocated_size_.width();
    format.fmt.pix_mp.num_planes = 2;
  } else {
    // Three-plane YUV420M: Y plus quarter-size U and V planes.
    format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_YUV420M;
    format.fmt.pix_mp.plane_fmt[0].sizeimage =
        converted_allocated_size_.GetArea();
    format.fmt.pix_mp.plane_fmt[1].sizeimage =
        converted_allocated_size_.GetArea() / 4;
    format.fmt.pix_mp.plane_fmt[2].sizeimage =
        converted_allocated_size_.GetArea() / 4;
    format.fmt.pix_mp.plane_fmt[0].bytesperline =
        converted_allocated_size_.width();
    format.fmt.pix_mp.plane_fmt[1].bytesperline =
        converted_allocated_size_.width() / 2;
    format.fmt.pix_mp.plane_fmt[2].bytesperline =
        converted_allocated_size_.width() / 2;
    format.fmt.pix_mp.num_planes = 3;
    // Without MFC, the client receives raw trimmed YUV420 frames directly.
    output_buffer_byte_size_ = output_visible_size_.GetArea() * 3 / 2;
  }
  IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_FMT, &format);

  struct v4l2_crop crop;
  memset(&crop, 0, sizeof(crop));
  crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  crop.c.left = 0;
  crop.c.top = 0;
  crop.c.width = output_visible_size_.width();
  crop.c.height = output_visible_size_.height();
  IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CROP, &crop);

  struct v4l2_requestbuffers reqbufs;
  memset(&reqbufs, 0, sizeof(reqbufs));
  reqbufs.count = kGscOutputBufferCount;
  reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  // DMABUF when GSC feeds MFC directly; MMAP when we read the planes here.
  reqbufs.memory = (do_output_encoding_ ?
      V4L2_MEMORY_DMABUF : V4L2_MEMORY_MMAP);
  IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_REQBUFS, &reqbufs);

  DCHECK(gsc_output_buffer_map_.empty());
  gsc_output_buffer_map_.resize(reqbufs.count);
  for (size_t i = 0; i < gsc_output_buffer_map_.size(); ++i) {
    gsc_free_output_buffers_.push_back(i);
    GscOutputRecord& output_record = gsc_output_buffer_map_[i];
    if (!do_output_encoding_) {
      // Query for the MEMORY_MMAP pointer.
      struct v4l2_plane planes[arraysize(output_record.address)];
      struct v4l2_buffer buffer;
      memset(&buffer, 0, sizeof(buffer));
      memset(planes, 0, sizeof(planes));
      buffer.index = i;
      buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
      buffer.memory = V4L2_MEMORY_MMAP;
      buffer.m.planes = planes;
      buffer.length = arraysize(output_record.address);
      IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_QUERYBUF, &buffer);
      for (size_t j = 0; j < arraysize(output_record.address); ++j) {
        void* address = mmap(NULL, buffer.m.planes[j].length,
                             PROT_READ | PROT_WRITE, MAP_SHARED, gsc_fd_,
                             buffer.m.planes[j].m.mem_offset);
        if (address == MAP_FAILED) {
          // NOTE(review): unlike the ioctl failures above, this path returns
          // false without NOTIFY_ERROR() -- confirm the caller reports the
          // failure to the client.
          DPLOG(ERROR) << "CreateGscOutputBuffers(): mmap() failed";
          return false;
        }
        output_record.address[j] = address;
        output_record.length[j] = buffer.m.planes[j].length;
      }
    }
  }
  return true;
}
|
| +
|
// Configures both MFC queues (raw NV12M in, H.264 bitstream out) and the
// encoder's rate-control/GOP/header-mode controls.
bool ExynosVideoEncodeAccelerator::SetMfcFormats() {
  DVLOG(3) << "SetMfcFormats()";
  DCHECK(!mfc_input_streamon_);
  DCHECK(!mfc_output_streamon_);
  DCHECK(do_output_encoding_);

  // VIDIOC_S_FMT on OUTPUT queue.
  struct v4l2_format format;
  memset(&format, 0, sizeof(format));
  format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  format.fmt.pix_mp.width = converted_allocated_size_.width();
  format.fmt.pix_mp.height = converted_allocated_size_.height();
  format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_NV12M;
  format.fmt.pix_mp.num_planes = 2;
  IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_S_FMT, &format);
  // We read direct from GSC, so we rely on the HW not changing our set
  // size/stride.
  DCHECK_EQ(format.fmt.pix_mp.plane_fmt[0].sizeimage,
            static_cast<__u32>(converted_allocated_size_.GetArea()));
  DCHECK_EQ(format.fmt.pix_mp.plane_fmt[0].bytesperline,
            static_cast<__u32>(converted_allocated_size_.width()));
  DCHECK_EQ(format.fmt.pix_mp.plane_fmt[1].sizeimage,
            static_cast<__u32>(converted_allocated_size_.GetArea() / 2));
  DCHECK_EQ(format.fmt.pix_mp.plane_fmt[1].bytesperline,
            static_cast<__u32>(converted_allocated_size_.width()));

  // NOTE(review): this crop uses the single-planar V4L2_BUF_TYPE_VIDEO_OUTPUT
  // while every other crop/format in this file uses the *_MPLANE type --
  // confirm this is what the MFC driver expects and not a copy-paste slip.
  struct v4l2_crop crop;
  memset(&crop, 0, sizeof(crop));
  crop.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
  crop.c.left = 0;
  crop.c.top = 0;
  crop.c.width = converted_visible_size_.width();
  crop.c.height = converted_visible_size_.height();
  IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_S_CROP, &crop);

  // VIDIOC_S_FMT on CAPTURE queue.
  output_buffer_byte_size_ = kMfcOutputBufferSize;
  __u32 pixelformat = 0;
  // Only H.264 profiles are mapped to a coded pixel format here; any other
  // profile leaves pixelformat at 0 and S_FMT decides whether to reject it.
  if (video_profile_ >= media::H264PROFILE_MIN &&
      video_profile_ <= media::H264PROFILE_MAX) {
    pixelformat = V4L2_PIX_FMT_H264;
  }
  memset(&format, 0, sizeof(format));
  format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  format.fmt.pix_mp.width = output_visible_size_.width();
  format.fmt.pix_mp.height = output_visible_size_.height();
  format.fmt.pix_mp.pixelformat = pixelformat;
  format.fmt.pix_mp.plane_fmt[0].sizeimage = output_buffer_byte_size_;
  format.fmt.pix_mp.num_planes = 1;
  IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_S_FMT, &format);

  // Encoder controls: no B-frames, frame-level rate control with reaction
  // coefficient 10, GOP of 16, SPS/PPS emitted as a separate buffer (relied
  // upon by ReturnCompleteBuffers() to capture the stream header), QP cap at
  // the H.264 maximum, and an initial bitrate (adjustable later via
  // SetBitrate()).
  struct v4l2_ext_control ctrls[7];
  struct v4l2_ext_controls control;
  memset(&ctrls, 0, sizeof(ctrls));
  memset(&control, 0, sizeof(control));
  ctrls[0].id = V4L2_CID_MPEG_VIDEO_B_FRAMES;
  ctrls[0].value = 0;
  ctrls[1].id = V4L2_CID_MPEG_VIDEO_FRAME_RC_ENABLE;
  ctrls[1].value = 1;
  ctrls[2].id = V4L2_CID_MPEG_MFC51_VIDEO_RC_REACTION_COEFF;
  ctrls[2].value = 10;
  ctrls[3].id = V4L2_CID_MPEG_VIDEO_GOP_SIZE;
  ctrls[3].value = 16;
  ctrls[4].id = V4L2_CID_MPEG_VIDEO_HEADER_MODE;
  //ctrls[4].value = V4L2_MPEG_VIDEO_HEADER_MODE_JOINED_WITH_1ST_FRAME;
  ctrls[4].value = V4L2_MPEG_VIDEO_HEADER_MODE_SEPARATE;
  ctrls[5].id = V4L2_CID_MPEG_VIDEO_H264_MAX_QP;
  ctrls[5].value = 51;
  ctrls[6].id = V4L2_CID_MPEG_VIDEO_BITRATE;
  ctrls[6].value = 2048000;
  control.ctrl_class = V4L2_CTRL_CLASS_MPEG;
  control.count = arraysize(ctrls);
  control.controls = ctrls;
  IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_S_EXT_CTRLS, &control);

  // NOTE(review): timeperframe is 1000/25, i.e. not the usual 1/fps encoding
  // -- presumably an MFC-driver-specific convention; verify against the
  // kernel driver before changing.
  struct v4l2_streamparm parms;
  memset(&parms, 0, sizeof(parms));
  parms.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  parms.parm.output.timeperframe.numerator = 1000;
  parms.parm.output.timeperframe.denominator = 25;
  IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_S_PARM, &parms);

  return true;
}
|
| +
|
// Allocates the MFC input (VIDEO_OUTPUT) buffers and exports each plane as a
// dmabuf fd; those fds are handed to GSC's capture queue so GSC writes
// directly into MFC input memory.
bool ExynosVideoEncodeAccelerator::CreateMfcInputBuffers() {
  DVLOG(3) << "CreateMfcInputBuffers()";
  DCHECK(!mfc_input_streamon_);
  DCHECK(do_output_encoding_);

  struct v4l2_requestbuffers reqbufs;
  memset(&reqbufs, 0, sizeof(reqbufs));
  // NOTE(review): only a single input buffer is requested -- presumably
  // intentional (GSC and MFC run in lockstep on the shared buffer), but it
  // serializes the pipeline; confirm before tuning.
  reqbufs.count = 1;
  reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  reqbufs.memory = V4L2_MEMORY_MMAP;
  IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_REQBUFS, &reqbufs);

  DCHECK(mfc_input_buffer_map_.empty());
  mfc_input_buffer_map_.resize(reqbufs.count);
  for (size_t i = 0; i < mfc_input_buffer_map_.size(); ++i) {
    mfc_free_input_buffers_.push_back(i);
    MfcInputRecord& input_record = mfc_input_buffer_map_[i];
    // Export both NV12M planes (Y and CbCr) as dmabuf fds.  The fds are kept
    // in the record and closed by DestroyMfcInputBuffers().
    for (size_t j = 0; j < 2; ++j) {
      struct v4l2_exportbuffer expbuf;
      memset(&expbuf, 0, sizeof(expbuf));
      expbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
      expbuf.index = i;
      expbuf.plane = j;
      expbuf.flags = O_CLOEXEC;
      IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_EXPBUF, &expbuf);
      input_record.fd[j] = expbuf.fd;
    }
  }
  return true;
}
|
| +
|
// Allocates the MFC output (VIDEO_CAPTURE) bitstream buffers and maps each
// one so ReturnCompleteBuffers() can copy encoded data to the client.
bool ExynosVideoEncodeAccelerator::CreateMfcOutputBuffers() {
  DVLOG(3) << "CreateMfcOutputBuffers()";
  DCHECK(!mfc_output_streamon_);
  DCHECK(do_output_encoding_);

  struct v4l2_requestbuffers reqbufs;
  memset(&reqbufs, 0, sizeof(reqbufs));
  reqbufs.count = kMfcOutputBufferCount;
  reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  reqbufs.memory = V4L2_MEMORY_MMAP;
  IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_REQBUFS, &reqbufs);

  DCHECK(mfc_output_buffer_map_.empty());
  mfc_output_buffer_map_.resize(reqbufs.count);
  for (size_t i = 0; i < mfc_output_buffer_map_.size(); ++i) {
    mfc_free_output_buffers_.push_back(i);
    MfcOutputRecord& output_record = mfc_output_buffer_map_[i];
    // Query for the MEMORY_MMAP pointer.
    struct v4l2_plane planes[1];
    struct v4l2_buffer buffer;
    memset(&buffer, 0, sizeof(buffer));
    memset(planes, 0, sizeof(planes));
    buffer.index = i;
    buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    buffer.memory = V4L2_MEMORY_MMAP;
    buffer.m.planes = planes;
    buffer.length = 1;
    IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_QUERYBUF, &buffer);
    // Map the single bitstream plane; unmapped by DestroyMfcOutputBuffers().
    void* address = mmap(NULL, buffer.m.planes[0].length,
                         PROT_READ | PROT_WRITE, MAP_SHARED, mfc_fd_,
                         buffer.m.planes[0].m.mem_offset);
    if (address == MAP_FAILED) {
      // NOTE(review): returns false without NOTIFY_ERROR(), unlike the ioctl
      // failure paths above -- confirm the caller reports this to the client.
      DPLOG(ERROR) << "CreateMfcOutputBuffers(): mmap() failed";
      return false;
    }
    output_record.address = address;
    output_record.length = buffer.m.planes[0].length;
  }
  return true;
}
|
| +
|
| +void ExynosVideoEncodeAccelerator::SetBitrate(int32 bitrate) {
|
| + DVLOG(3) << "SetBitrate(): bitrate=" << bitrate;
|
| + DCHECK(do_output_encoding_);
|
| +
|
| + struct v4l2_ext_control ctrls[1];
|
| + struct v4l2_ext_controls control;
|
| + memset(&ctrls, 0, sizeof(ctrls));
|
| + memset(&control, 0, sizeof(control));
|
| + ctrls[0].id = V4L2_CID_MPEG_VIDEO_BITRATE;
|
| + ctrls[0].value = bitrate;
|
| + control.ctrl_class = V4L2_CTRL_CLASS_MPEG;
|
| + control.count = 1;
|
| + control.controls = ctrls;
|
| + IOCTL_OR_ERROR_RETURN(mfc_fd_, VIDIOC_S_EXT_CTRLS, &control);
|
| +}
|
| +
|
// Releases the GSC input buffers: destroys per-buffer EGL sync objects,
// EGLImages and GL textures, then frees the driver buffers with a
// zero-count REQBUFS.
void ExynosVideoEncodeAccelerator::DestroyGscInputBuffers() {
  DVLOG(3) << "DestroyGscInputBuffers()";
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  DCHECK(!gsc_input_streamon_);

  for (size_t i = 0; i < gsc_input_buffer_map_.size(); ++i) {
    GscInputRecord& input_record = gsc_input_buffer_map_[i];
    if (input_record.egl_sync != EGL_NO_SYNC_KHR)
      eglDestroySyncKHR(egl_display_, input_record.egl_sync);
    if (input_record.egl_image != EGL_NO_IMAGE_KHR)
      eglDestroyImageKHR(egl_display_, input_record.egl_image);
    if (input_record.texture_id != 0)
      glDeleteTextures(1, &input_record.texture_id);
  }

  // Free the buffers at the driver by requesting a count of zero.  Failure
  // here is logged but not fatal: we're tearing down anyway.
  struct v4l2_requestbuffers reqbufs;
  memset(&reqbufs, 0, sizeof(reqbufs));
  reqbufs.count = 0;
  reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  reqbufs.memory = V4L2_MEMORY_MMAP;
  if (ioctl(gsc_fd_, VIDIOC_REQBUFS, &reqbufs) != 0)
    DPLOG(ERROR) << "DestroyGscInputBuffers(): ioctl() failed: VIDIOC_REQBUFS";

  gsc_input_buffer_map_.clear();
  gsc_free_input_buffers_.clear();
}
|
| +
|
// Releases the GSC output buffers: unmaps any MMAP'd planes (only present in
// the non-MFC configuration), then frees the driver buffers.
void ExynosVideoEncodeAccelerator::DestroyGscOutputBuffers() {
  DVLOG(3) << "DestroyGscOutputBuffers()";
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  DCHECK(!gsc_output_streamon_);

  for (size_t i = 0; i < gsc_output_buffer_map_.size(); ++i) {
    GscOutputRecord& output_record = gsc_output_buffer_map_[i];
    for (size_t j = 0; j < arraysize(output_record.address); ++j) {
      if (output_record.address[j] != NULL) {
        // munmap() doesn't return EINTR on Linux, so the wrapper is a no-op
        // here; kept for consistency with the rest of the file.
        HANDLE_EINTR(munmap(output_record.address[j], output_record.length[j]));
      }
    }
  }

  // Free the buffers at the driver; memory type must match the allocation in
  // CreateGscOutputBuffers().  Failure is logged but not fatal during
  // teardown.
  struct v4l2_requestbuffers reqbufs;
  memset(&reqbufs, 0, sizeof(reqbufs));
  reqbufs.count = 0;
  reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  reqbufs.memory = do_output_encoding_ ? V4L2_MEMORY_DMABUF : V4L2_MEMORY_MMAP;
  if (ioctl(gsc_fd_, VIDIOC_REQBUFS, &reqbufs) != 0)
    DPLOG(ERROR) << "DestroyGscOutputBuffers(): ioctl() failed: VIDIOC_REQBUFS";

  gsc_output_buffer_map_.clear();
  gsc_free_output_buffers_.clear();
}
|
| +
|
| +void ExynosVideoEncodeAccelerator::DestroyMfcInputBuffers() {
|
| + DVLOG(3) << "DestroyMfcInputBuffers()";
|
| + DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
|
| + DCHECK(do_output_encoding_);
|
| + DCHECK(!mfc_input_streamon_);
|
| +
|
| + for (size_t i = 0; i < mfc_input_buffer_map_.size(); ++i) {
|
| + MfcInputRecord& input_record = mfc_input_buffer_map_[i];
|
| + for (size_t j = 0; j < arraysize(input_record.fd); ++j) {
|
| + if (input_record.fd[j] != -1) {
|
| + HANDLE_EINTR(close(input_record.fd[j]));
|
| + }
|
| + }
|
| + }
|
| +
|
| + struct v4l2_requestbuffers reqbufs;
|
| + memset(&reqbufs, 0, sizeof(reqbufs));
|
| + reqbufs.count = 0;
|
| + reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
|
| + reqbufs.memory = V4L2_MEMORY_MMAP;
|
| + if (ioctl(mfc_fd_, VIDIOC_REQBUFS, &reqbufs) != 0)
|
| + DPLOG(ERROR) << "DestroyMfcInputBuffers(): ioctl() failed: VIDIOC_REQBUFS";
|
| +
|
| + mfc_input_buffer_map_.clear();
|
| + mfc_free_input_buffers_.clear();
|
| +}
|
| +
|
// Releases the MFC output buffers: unmaps each bitstream plane, then frees
// the driver buffers with a zero-count REQBUFS.
void ExynosVideoEncodeAccelerator::DestroyMfcOutputBuffers() {
  DVLOG(3) << "DestroyMfcOutputBuffers()";
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  DCHECK(do_output_encoding_);
  DCHECK(!mfc_output_streamon_);

  for (size_t i = 0; i < mfc_output_buffer_map_.size(); ++i) {
    MfcOutputRecord& output_record = mfc_output_buffer_map_[i];
    // munmap() doesn't return EINTR on Linux; the wrapper is a harmless
    // no-op kept for consistency with the rest of the file.
    if (output_record.address != NULL)
      HANDLE_EINTR(munmap(output_record.address, output_record.length));
  }

  // Free the buffers at the driver.  Failure is logged but not fatal during
  // teardown.
  struct v4l2_requestbuffers reqbufs;
  memset(&reqbufs, 0, sizeof(reqbufs));
  reqbufs.count = 0;
  reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  reqbufs.memory = V4L2_MEMORY_MMAP;
  if (ioctl(mfc_fd_, VIDIOC_REQBUFS, &reqbufs) != 0)
    DPLOG(ERROR) << "DestroyMfcOutputBuffers(): ioctl() failed: VIDIOC_REQBUFS";

  mfc_output_buffer_map_.clear();
  mfc_free_output_buffers_.clear();
}
|
| +
|
| +} // namespace content
|
|
|