Chromium Code Reviews

Unified Diff: media/video/capture/linux/v4l2_video_capture_delegate.cc

Issue 967793002: Linux Video Capture: Add V4L2VideoCaptureDelegate{Single,Multi}Plane. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: magjed@ comments Created 5 years, 9 months ago
Index: media/video/capture/linux/v4l2_video_capture_delegate.cc
diff --git a/media/video/capture/linux/v4l2_video_capture_delegate.cc b/media/video/capture/linux/v4l2_video_capture_delegate.cc
new file mode 100644
index 0000000000000000000000000000000000000000..c1f5f7322080b62ac23b36417318201967f8e37e
--- /dev/null
+++ b/media/video/capture/linux/v4l2_video_capture_delegate.cc
@@ -0,0 +1,647 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/video/capture/linux/v4l2_video_capture_delegate.h"
+
+#include <poll.h>
+#include <sys/fcntl.h>
+#include <sys/ioctl.h>
+#include <sys/mman.h>
+
+#include "base/bind.h"
+#include "base/files/file_enumerator.h"
+#include "base/posix/eintr_wrapper.h"
+#include "base/strings/stringprintf.h"
+#include "media/base/bind_to_current_loop.h"
+#include "media/video/capture/linux/video_capture_device_linux.h"
+
+namespace media {
+
+// Max number of video buffers VideoCaptureDeviceLinux can allocate.
+const uint32_t kMaxVideoBuffers = 2;
+// Timeout in milliseconds that the capture thread blocks waiting for a frame
+// from the hardware.
+const int kCaptureTimeoutMs = 200;
+// The number of continuous timeouts tolerated before being treated as an
+// error.
+const int kContinuousTimeoutLimit = 10;
+// MJPEG is preferred if the requested width or height is larger than this.
+const int kMjpegWidth = 640;
+const int kMjpegHeight = 480;
+// Typical framerate, in frames per second.
+const int kTypicalFramerate = 30;
+
+// V4L2 color formats supported by V4L2CaptureDelegateSinglePlane. This list is
+// ordered by precedence of use.
+static const uint32_t kSinglePlaneSupportedFormats[] = {
+ V4L2_PIX_FMT_YUV420,
+ V4L2_PIX_FMT_YUYV,
+ V4L2_PIX_FMT_UYVY};
+
+// List of supported formats and their respective number of planes
+// (sub-buffers) for V4L2CaptureDelegateMultiPlane.
+static const struct {
+ uint32_t fourcc;
+ size_t num_planes;
+} kMultiPlaneSupportedFormats[] = {
+ {V4L2_PIX_FMT_YUV420M, 3}
+ // TODO(mcasas): add V4L2_PIX_FMT_YVU420M when available in bots.
+};
+
+// Returns the input fourcc as a std::string four char representation.
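+// For example, with the standard V4L2 little-endian packing
+// ('Y' | 'U' << 8 | 'Y' << 16 | 'V' << 24), V4L2_PIX_FMT_YUYV yields "YUYV".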
+static std::string FourccToString(uint32_t fourcc) {
+ return base::StringPrintf("%c%c%c%c", fourcc & 0xFF, (fourcc >> 8) & 0xFF,
+ (fourcc >> 16) & 0xFF, (fourcc >> 24) & 0xFF);
+}
+
+static std::list<uint32_t> GetListOfUsableFourCcsSinglePlane() {
+ return std::list<uint32_t>(
+ kSinglePlaneSupportedFormats,
+ kSinglePlaneSupportedFormats + arraysize(kSinglePlaneSupportedFormats));
+}
+
+static size_t GetNumPlanesForFourCc(uint32_t fourcc) {
+ for (const auto& fourcc_and_pixel_format : kMultiPlaneSupportedFormats) {
+ if (fourcc_and_pixel_format.fourcc == fourcc)
+ return fourcc_and_pixel_format.num_planes;
+ }
+ NOTREACHED() << "Unknown fourcc " << FourccToString(fourcc);
+ return 0;
+}
+
+static std::list<uint32_t> GetListOfUsableFourCcsMultiPlane() {
+ std::list<uint32_t> supported_formats;
+ for (const auto& i : kMultiPlaneSupportedFormats)
+ supported_formats.push_back(i.fourcc);
+ return supported_formats;
+}
+
+// Class keeping track of SPLANE/MPLANE V4L2 buffers, mmap()ed via Init() and
+// munmap()ed on destruction. Destruction is identical for S/MPLANE, but
+// construction (mapping) is not, so Init() is implemented in derived classes.
+// Internally it holds a vector of planes, which for SPLANE contains a single
+// element.
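+// Intended lifecycle (as used by AllocateVideoBuffer() below): create via
+// CreateBufferTracker(), Init() it with the buffer returned by
+// VIDIOC_QUERYBUF, then access the mapped memory through GetPlaneStart() and
+// GetPlaneLength() until the last reference goes away.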
+class V4L2VideoCaptureDelegate::BufferTracker
+ : public base::RefCounted<BufferTracker> {
+ public:
+ struct Plane {
Pawel Osciak 2015/03/13 09:52:52 This doesn't need to be public.
mcasas 2015/03/14 03:36:11 Done, see below.
+ void* start;
+ size_t length;
+ };
+ virtual bool Init(int fd, const v4l2_buffer& buffer) = 0;
Pawel Osciak 2015/03/13 09:52:53 Documentation please.
mcasas 2015/03/14 03:36:11 Done.
+
+ uint8_t* const GetPlaneStart(size_t plane) const {
+ return static_cast<uint8_t* const>(planes_[plane].start);
+ }
+ size_t GetPlaneLength(size_t plane) const { return planes_[plane].length; }
+
+ std::vector<Plane>& planes() { return planes_; }
Pawel Osciak 2015/03/13 09:52:52 As mentioned before, this shouldn't be public non-
mcasas 2015/03/14 03:36:11 (Cont'd from elsewhere) Since the only non-const o
+
+ protected:
+ friend class base::RefCounted<BufferTracker>;
+ virtual ~BufferTracker();
+
+ private:
+ std::vector<Plane> planes_;
+};
+
+// V4L2 specifics for SPLANE API.
+class V4L2CaptureDelegateSinglePlane final : public V4L2VideoCaptureDelegate {
+ public:
+ V4L2CaptureDelegateSinglePlane(
+ const VideoCaptureDevice::Name& device_name,
+ const scoped_refptr<base::SingleThreadTaskRunner>& v4l2_task_runner,
+ int power_line_frequency)
+ : V4L2VideoCaptureDelegate(device_name,
+ v4l2_task_runner,
+ power_line_frequency) {}
+
+ private:
+ // BufferTracker derivation to implement construction semantics for SPLANE.
+ class BufferTrackerSPlane final : public BufferTracker {
+ public:
+ bool Init(int fd, const v4l2_buffer& buffer) override;
+
+ private:
+ ~BufferTrackerSPlane() override {};
Pawel Osciak 2015/03/13 09:52:52 s/;//
mcasas 2015/03/14 03:36:12 Done.
+ };
+
+ ~V4L2CaptureDelegateSinglePlane() override {};
Pawel Osciak 2015/03/13 09:52:53 s/;//
mcasas 2015/03/14 03:36:11 Done.
+
+ // V4L2VideoCaptureDelegate virtual methods implementation.
+ scoped_refptr<BufferTracker> CreateBufferTracker() override;
+ bool FillV4L2Format(v4l2_format* format,
+ uint32_t width,
+ uint32_t height,
+ uint32_t pixelformat_fourcc) override;
+ void FinishFillingV4L2Buffer(v4l2_buffer* buffer) const override;
+ void SendBuffer(const scoped_refptr<BufferTracker>& buffer) override;
+};
+
+// V4L2 specifics for MPLANE API.
+class V4L2CaptureDelegateMultiPlane final : public V4L2VideoCaptureDelegate {
+ public:
+ V4L2CaptureDelegateMultiPlane(
+ const VideoCaptureDevice::Name& device_name,
+ const scoped_refptr<base::SingleThreadTaskRunner>& v4l2_task_runner,
+ int power_line_frequency)
+ : V4L2VideoCaptureDelegate(device_name,
+ v4l2_task_runner,
+ power_line_frequency),
+ fourcc_(0),
+ num_planes_(0) {}
+
+ private:
+ // BufferTracker derivation to implement construction semantics for MPLANE.
+ class BufferTrackerMPlane final : public BufferTracker {
+ public:
+ bool Init(int fd, const v4l2_buffer& buffer) override;
+
+ private:
+ ~BufferTrackerMPlane() override {};
Pawel Osciak 2015/03/13 09:52:53 s/;//
mcasas 2015/03/14 03:36:11 Done.
+ };
+
+ ~V4L2CaptureDelegateMultiPlane() override {};
Pawel Osciak 2015/03/13 09:52:53 s/;//
mcasas 2015/03/14 03:36:11 Done.
+
+ // V4L2VideoCaptureDelegate virtual methods implementation.
+ scoped_refptr<BufferTracker> CreateBufferTracker() override;
+ bool FillV4L2Format(v4l2_format* format,
+ uint32_t width,
+ uint32_t height,
+ uint32_t pixelformat_fourcc) override;
+ void FinishFillingV4L2Buffer(v4l2_buffer* buffer) const override;
+ void SendBuffer(const scoped_refptr<BufferTracker>& buffer) override;
+
+ // Actual pixel format and number of planes, known after FillV4L2Format().
+ uint32_t fourcc_;
Pawel Osciak 2015/03/13 09:52:52 We don't seem to be using it anywhere but in FillV
mcasas 2015/03/14 03:36:12 Well spotted, removed.
+ size_t num_planes_;
Pawel Osciak 2015/03/13 09:52:52 Better use v4l2_plane_.size() and remove this, als
mcasas 2015/03/14 03:36:11 I'm answering this in l.607 comments.
+
+ // scoped_ptr allocating and tracking one v4l2_plane struct per plane; these
+ // are needed inside v4l2_buffer when using the MPLANE API.
+ scoped_ptr<struct v4l2_plane[]> v4l2_plane_;
+};
+
+// static
+scoped_refptr<V4L2VideoCaptureDelegate>
+V4L2VideoCaptureDelegate::CreateV4L2VideoCaptureDelegate(
+ const VideoCaptureDevice::Name& device_name,
+ const scoped_refptr<base::SingleThreadTaskRunner>& v4l2_task_runner,
+ int power_line_frequency) {
+ switch (device_name.capture_api_type()) {
+ case VideoCaptureDevice::Name::V4L2_SINGLE_PLANE:
+ return make_scoped_refptr(new V4L2CaptureDelegateSinglePlane(
+ device_name, v4l2_task_runner, power_line_frequency));
+ case VideoCaptureDevice::Name::V4L2_MULTI_PLANE:
+ return make_scoped_refptr(new V4L2CaptureDelegateMultiPlane(
+ device_name, v4l2_task_runner, power_line_frequency));
+ default:
+ NOTIMPLEMENTED() << "Unknown V4L2 capture API type";
+ return scoped_refptr<V4L2VideoCaptureDelegate>();
+ }
+}
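A minimal usage sketch (hypothetical caller code; the real call site is expected to be VideoCaptureDeviceLinux, which is not part of this file): the owning device creates the delegate and then invokes its methods on |v4l2_task_runner|, e.g.

  scoped_refptr<V4L2VideoCaptureDelegate> delegate =
      V4L2VideoCaptureDelegate::CreateV4L2VideoCaptureDelegate(
          device_name, v4l2_task_runner, V4L2_CID_POWER_LINE_FREQUENCY_50HZ);
  v4l2_task_runner->PostTask(
      FROM_HERE,
      base::Bind(&V4L2VideoCaptureDelegate::AllocateAndStart, delegate,
                 640, 480, 30.0f, base::Passed(&client)));

where |client| is a scoped_ptr<VideoCaptureDevice::Client> owned by the caller.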
+
+// static
+VideoPixelFormat V4L2VideoCaptureDelegate::V4l2FourCcToChromiumPixelFormat(
+ uint32_t v4l2_fourcc) {
+ const struct {
+ uint32_t fourcc;
+ VideoPixelFormat pixel_format;
+ } kFourCcAndChromiumPixelFormats[] = {
+ {V4L2_PIX_FMT_YUV420M, PIXEL_FORMAT_I420},
Pawel Osciak 2015/03/13 09:52:52 Could we please have one array instead of three (k
mcasas 2015/03/14 03:36:11 I think it mixes things that pertain to different
+ {V4L2_PIX_FMT_YUV420, PIXEL_FORMAT_I420},
+ {V4L2_PIX_FMT_YUYV, PIXEL_FORMAT_YUY2},
+ {V4L2_PIX_FMT_UYVY, PIXEL_FORMAT_UYVY},
+ {V4L2_PIX_FMT_MJPEG, PIXEL_FORMAT_MJPEG},
+ {V4L2_PIX_FMT_JPEG, PIXEL_FORMAT_MJPEG},
+ };
+ for (const auto& fourcc_and_pixel_format : kFourCcAndChromiumPixelFormats) {
+ if (fourcc_and_pixel_format.fourcc == v4l2_fourcc)
+ return fourcc_and_pixel_format.pixel_format;
+ }
+ DVLOG(1) << "Unsupported pixel format: " << FourccToString(v4l2_fourcc);
+ return PIXEL_FORMAT_UNKNOWN;
+}
+
+// static
+std::list<uint32_t> V4L2VideoCaptureDelegate::GetListOfUsableFourCcs(
+ bool prefer_mjpeg) {
+ std::list<uint32_t> singleplane_formats = GetListOfUsableFourCcsSinglePlane();
+ std::list<uint32_t> multiplane_formats = GetListOfUsableFourCcsMultiPlane();
+ multiplane_formats.insert(multiplane_formats.end(),
+ singleplane_formats.begin(),
+ singleplane_formats.end());
+ // Add MJPEG to the front or the back of the list depending on |prefer_mjpeg|.
+ if (prefer_mjpeg)
+ multiplane_formats.insert(multiplane_formats.begin(), V4L2_PIX_FMT_MJPEG);
+ else
+ multiplane_formats.insert(multiplane_formats.end(), V4L2_PIX_FMT_MJPEG);
+
+ // Field reports indicate that JPEG works as MJPEG on some gspca webcams.
+ // Make it the least preferred format.
+ multiplane_formats.push_back(V4L2_PIX_FMT_JPEG);
+
+ return multiplane_formats;
+}
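For reference, tracing the insertions above gives the following precedence order (V4L2_PIX_FMT_ prefixes omitted):

  prefer_mjpeg == true:  MJPEG, YUV420M, YUV420, YUYV, UYVY, JPEG
  prefer_mjpeg == false: YUV420M, YUV420, YUYV, UYVY, MJPEG, JPEG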
+
+V4L2VideoCaptureDelegate::BufferTracker::~BufferTracker() {
+ for (const auto& plane : planes_) {
+ if (plane.start == NULL)
Pawel Osciak 2015/03/13 09:52:53 s/NULL/nullptr/
mcasas 2015/03/14 03:36:11 Done.
+ continue;
+ const int result = munmap(plane.start, plane.length);
+ PLOG_IF(ERROR, result < 0) << "Error munmap()ing V4L2 buffer";
+ }
+}
+
+V4L2VideoCaptureDelegate::V4L2VideoCaptureDelegate(
+ const VideoCaptureDevice::Name& device_name,
+ const scoped_refptr<base::SingleThreadTaskRunner>& v4l2_task_runner,
+ int power_line_frequency)
+ : capture_type_((device_name.capture_api_type() ==
+ VideoCaptureDevice::Name::V4L2_SINGLE_PLANE)
+ ? V4L2_BUF_TYPE_VIDEO_CAPTURE
+ : V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE),
+ v4l2_task_runner_(v4l2_task_runner),
+ device_name_(device_name),
+ power_line_frequency_(power_line_frequency),
+ is_capturing_(false),
+ timeout_count_(0),
+ rotation_(0) {
+}
+
+void V4L2VideoCaptureDelegate::AllocateAndStart(
+ int width,
+ int height,
+ float frame_rate,
+ scoped_ptr<VideoCaptureDevice::Client> client) {
+ DCHECK(v4l2_task_runner_->BelongsToCurrentThread());
+ DCHECK(client);
+ client_ = client.Pass();
+
+ // The camera needs to be opened with O_RDWR since Linux kernel 3.3.
+ device_fd_.reset(HANDLE_EINTR(open(device_name_.id().c_str(), O_RDWR)));
+ if (!device_fd_.is_valid()) {
+ SetErrorState("Failed to open V4L2 device driver file.");
+ return;
+ }
+
+ v4l2_capability cap = {};
+ if (!((HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QUERYCAP, &cap)) == 0) &&
+ ((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE ||
+ cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE) &&
+ !(cap.capabilities & V4L2_CAP_VIDEO_OUTPUT) &&
+ !(cap.capabilities & V4L2_CAP_VIDEO_OUTPUT_MPLANE)))) {
+ device_fd_.reset();
+ SetErrorState("This is not a V4L2 video capture device");
+ return;
+ }
+
+ // Get supported video formats in preferred order.
+ // For large resolutions, favour mjpeg over raw formats.
+ const std::list<uint32_t>& desired_v4l2_formats =
+ GetListOfUsableFourCcs(width > kMjpegWidth || height > kMjpegHeight);
+ std::list<uint32_t>::const_iterator best = desired_v4l2_formats.end();
+
+ v4l2_fmtdesc fmtdesc = {};
+ fmtdesc.type = capture_type_;
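+ // Enumerate all device formats; each match narrows the search range
+ // [desired_v4l2_formats.begin(), best), so |best| only ever moves towards
+ // the front, i.e. towards higher-precedence formats.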
+ for (; HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_ENUM_FMT, &fmtdesc)) == 0;
+ ++fmtdesc.index) {
+ best = std::find(desired_v4l2_formats.begin(), best, fmtdesc.pixelformat);
+ }
+ if (best == desired_v4l2_formats.end()) {
+ SetErrorState("Failed to find a supported camera format.");
+ return;
+ }
+
+ DVLOG(1) << "Chosen pixel format is " << FourccToString(*best);
+
+ v4l2_format video_fmt = {};
+ video_fmt.type = capture_type_;
+ if (!FillV4L2Format(&video_fmt, width, height, *best)) {
+ SetErrorState("Failed filling in V4L2 Format");
+ return;
+ }
+
+ if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_FMT, &video_fmt)) < 0) {
+ SetErrorState("Failed to set video capture format");
+ return;
+ }
+ const VideoPixelFormat pixel_format =
+ V4l2FourCcToChromiumPixelFormat(video_fmt.fmt.pix.pixelformat);
+ if (pixel_format == PIXEL_FORMAT_UNKNOWN) {
+ SetErrorState("Unsupported pixel format");
+ return;
+ }
+
+ // Set capture framerate in the form of capture interval.
+ v4l2_streamparm streamparm = {};
+ streamparm.type = capture_type_;
+ // The following line checks that the driver knows about framerate get/set.
+ if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_G_PARM, &streamparm)) >= 0) {
+ // Now check if the device is able to accept a capture framerate setting.
+ if (streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) {
+ // |frame_rate| is a float; approximate it with a fraction.
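+ // The requested frame interval is numerator/denominator seconds, e.g.
+ // kFrameRatePrecision / (30 * kFrameRatePrecision) == 1/30 s for 30 fps.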
+ streamparm.parm.capture.timeperframe.numerator =
+ media::kFrameRatePrecision;
+ streamparm.parm.capture.timeperframe.denominator =
+ (frame_rate) ? (frame_rate * media::kFrameRatePrecision)
+ : (kTypicalFramerate * media::kFrameRatePrecision);
+
+ if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_PARM, &streamparm)) <
+ 0) {
+ SetErrorState("Failed to set camera framerate");
+ return;
+ }
+ DVLOG(2) << "Actual camera driverframerate: "
+ << streamparm.parm.capture.timeperframe.denominator << "/"
+ << streamparm.parm.capture.timeperframe.numerator;
+ }
+ }
+ // TODO(mcasas): what should be done if the camera driver does not allow
+ // framerate configuration, or the actual one is different from the desired?
+
+ // Set anti-banding/anti-flicker to 50/60Hz. This may fail because the
+ // operation is not supported (|errno| == EINVAL in that case) or due to a
+ // plain failure.
+ if ((power_line_frequency_ == V4L2_CID_POWER_LINE_FREQUENCY_50HZ) ||
+ (power_line_frequency_ == V4L2_CID_POWER_LINE_FREQUENCY_60HZ)) {
Pawel Osciak 2015/03/13 09:52:53 What if it's V4L2_CID_POWER_LINE_FREQUENCY_AUTO? W
mcasas 2015/03/14 03:36:11 Done.
+ struct v4l2_control control = {};
+ control.id = V4L2_CID_POWER_LINE_FREQUENCY;
+ control.value = power_line_frequency_;
+ HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_CTRL, &control));
Pawel Osciak 2015/03/13 09:52:53 Better DVLOG on failure at least, even if not fata
mcasas 2015/03/14 03:36:11 Done.
+ }
+
+ capture_format_.frame_size.SetSize(video_fmt.fmt.pix.width,
+ video_fmt.fmt.pix.height);
+ capture_format_.frame_rate = frame_rate;
+ capture_format_.pixel_format = pixel_format;
+
+ v4l2_requestbuffers r_buffer = {};
+ r_buffer.type = capture_type_;
+ r_buffer.memory = V4L2_MEMORY_MMAP;
+ r_buffer.count = kMaxVideoBuffers;
+ if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_REQBUFS, &r_buffer)) < 0) {
+ SetErrorState("Error requesting MMAP buffers from V4L2");
+ return;
+ }
+ DCHECK_EQ(r_buffer.count, kMaxVideoBuffers);
+ for (unsigned int i = 0; i < r_buffer.count; ++i) {
+ if (!AllocateVideoBuffer(i)) {
+ SetErrorState("Allocate buffer failed");
+ return;
+ }
+ }
+
+ if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_STREAMON, &capture_type_))
+ < 0) {
+ SetErrorState("VIDIOC_STREAMON failed");
+ return;
+ }
+
+ is_capturing_ = true;
+ // Post task to start fetching frames from v4l2.
+ v4l2_task_runner_->PostTask(
+ FROM_HERE, base::Bind(&V4L2VideoCaptureDelegate::DoCapture, this));
+}
+
+void V4L2VideoCaptureDelegate::StopAndDeAllocate() {
+ DCHECK(v4l2_task_runner_->BelongsToCurrentThread());
+ // The order is important: stop streaming, clear |buffer_tracker_pool_|
+ // (thus munmap()ing the v4l2_buffers), and then return the buffers to the OS.
+ if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_STREAMOFF, &capture_type_))
+ < 0) {
+ SetErrorState("VIDIOC_STREAMOFF failed");
+ return;
+ }
+
+ buffer_tracker_pool_.clear();
+
+ v4l2_requestbuffers r_buffer = {};
+ r_buffer.type = capture_type_;
+ r_buffer.memory = V4L2_MEMORY_MMAP;
+ r_buffer.count = 0;
+ if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_REQBUFS, &r_buffer)) < 0)
+ SetErrorState("Failed to VIDIOC_REQBUFS with count = 0");
+
+ // At this point we can close the device.
+ // This is also needed for correctly changing settings later via VIDIOC_S_FMT.
+ device_fd_.reset();
+ is_capturing_ = false;
+ client_.reset();
+}
+
+void V4L2VideoCaptureDelegate::SetRotation(int rotation) {
+ DCHECK(v4l2_task_runner_->BelongsToCurrentThread());
+ DCHECK(rotation >= 0 && rotation < 360 && rotation % 90 == 0);
+ rotation_ = rotation;
+}
+
+bool V4L2VideoCaptureDelegate::AllocateVideoBuffer(int index) {
Pawel Osciak 2015/03/13 09:52:52 This is not allocating, but mapping and queuing on
mcasas 2015/03/14 03:36:11 Done.
+ v4l2_buffer buffer = {};
Pawel Osciak 2015/03/13 09:52:53 s/ = {};/;/ Fill...() already memsets it.
mcasas 2015/03/14 03:36:11 Done.
+ FillV4L2Buffer(&buffer, index);
+
+ if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QUERYBUF, &buffer)) < 0) {
+ DLOG(ERROR) << "Error querying status of a MMAP V4L2 buffer";
+ return false;
+ }
+
+ const scoped_refptr<BufferTracker>& buffer_tracker = CreateBufferTracker();
+ if (!buffer_tracker->Init(device_fd_.get(), buffer)) {
+ DLOG(ERROR) << "Error creating BufferTracker";
+ return false;
+ }
+ buffer_tracker_pool_.push_back(buffer_tracker);
+
+ // Enqueue the buffer in the driver's incoming queue.
+ if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QBUF, &buffer)) < 0) {
+ DLOG(ERROR) << "Error enqueuing a V4L2 buffer back into the driver";
+ return false;
+ }
+ return true;
+}
+
+void V4L2VideoCaptureDelegate::FillV4L2Buffer(v4l2_buffer* buffer,
+ int i) const {
+ memset(buffer, 0, sizeof(*buffer));
+ buffer->memory = V4L2_MEMORY_MMAP;
+ buffer->index = i;
+ FinishFillingV4L2Buffer(buffer);
+}
+
+void V4L2VideoCaptureDelegate::DoCapture() {
+ DCHECK(v4l2_task_runner_->BelongsToCurrentThread());
+ if (!is_capturing_)
+ return;
+
+ pollfd device_pfd = {};
+ device_pfd.fd = device_fd_.get();
+ device_pfd.events = POLLIN;
+ const int result = HANDLE_EINTR(poll(&device_pfd, 1, kCaptureTimeoutMs));
+ if (result < 0) {
+ SetErrorState("Poll failed");
+ return;
+ }
+ // Check if poll() timed out; track the number of consecutive timeouts and
+ // report an error if there are too many in a row.
+ if (result == 0) {
+ timeout_count_++;
+ if (timeout_count_ >= kContinuousTimeoutLimit) {
+ SetErrorState("Multiple continuous timeouts while read-polling.");
+ timeout_count_ = 0;
+ return;
+ }
+ } else {
+ timeout_count_ = 0;
+ }
+
+ // Dequeue, send and re-enqueue a buffer if the driver has filled one in.
+ if (device_pfd.revents & POLLIN) {
+ v4l2_buffer buffer = {};
Pawel Osciak 2015/03/13 09:52:53 You could replace lines 496-500 with FillV4L2Buffe
mcasas 2015/03/14 03:36:11 Done.
+ buffer.type = capture_type_;
+ buffer.memory = V4L2_MEMORY_MMAP;
+ buffer.index = 0;
+ FinishFillingV4L2Buffer(&buffer);
+
+ if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_DQBUF, &buffer)) < 0) {
+ SetErrorState("Failed to dequeue capture buffer");
+ return;
+ }
+
+ SendBuffer(buffer_tracker_pool_[buffer.index]);
+
+ if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QBUF, &buffer)) < 0) {
+ SetErrorState("Failed to enqueue capture buffer");
+ return;
+ }
+ }
+
+ v4l2_task_runner_->PostTask(
+ FROM_HERE, base::Bind(&V4L2VideoCaptureDelegate::DoCapture, this));
+}
+
+void V4L2VideoCaptureDelegate::SetErrorState(const std::string& reason) {
+ DCHECK(v4l2_task_runner_->BelongsToCurrentThread());
+ is_capturing_ = false;
+ client_->OnError(reason);
+}
+
+V4L2VideoCaptureDelegate::~V4L2VideoCaptureDelegate() {
+}
+
+scoped_refptr<V4L2VideoCaptureDelegate::BufferTracker>
+V4L2CaptureDelegateSinglePlane::CreateBufferTracker() {
+ return make_scoped_refptr(new BufferTrackerSPlane());
+}
+
+bool V4L2CaptureDelegateSinglePlane::FillV4L2Format(
+ v4l2_format* format,
+ uint32_t width,
+ uint32_t height,
+ uint32_t pixelformat_fourcc) {
+ format->fmt.pix.width = width;
+ format->fmt.pix.height = height;
+ format->fmt.pix.pixelformat = pixelformat_fourcc;
+ return true;
+}
+
+void V4L2CaptureDelegateSinglePlane::FinishFillingV4L2Buffer(
+ v4l2_buffer* buffer) const {
+ buffer->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+}
+
+void V4L2CaptureDelegateSinglePlane::SendBuffer(
+ const scoped_refptr<BufferTracker>& buffer) {
+ BufferTrackerSPlane* const buffer_tracker =
+ reinterpret_cast<BufferTrackerSPlane*>(buffer.get());
Pawel Osciak 2015/03/13 09:52:53 Downcasting shouldn't be needed. GetPlaneStart/Len
mcasas 2015/03/14 03:36:11 Done.
+ client()->OnIncomingCapturedData(
+ buffer_tracker->GetPlaneStart(0),
+ buffer_tracker->GetPlaneLength(0),
+ capture_format(),
+ rotation(),
+ base::TimeTicks::Now());
+}
+
+bool V4L2CaptureDelegateSinglePlane::BufferTrackerSPlane::Init(
+ int fd,
+ const v4l2_buffer& buffer) {
+ Plane plane;
+ // Some devices require mmap() to be called with both READ and WRITE.
+ // See http://crbug.com/178582.
+ plane.start = mmap(NULL, buffer.length, PROT_READ | PROT_WRITE, MAP_SHARED,
+ fd, buffer.m.offset);
+ if (plane.start == MAP_FAILED) {
+ DLOG(ERROR) << "Error mmap()ing a V4L2 buffer into userspace";
Pawel Osciak 2015/03/13 09:52:52 plane.start = nullptr;
mcasas 2015/03/14 03:36:12 Actually unneeded given the change in BufferTrack
+ return false;
+ }
+ plane.length = buffer.length;
+ planes().push_back(plane);
+ return true;
+}
+
+scoped_refptr<V4L2VideoCaptureDelegate::BufferTracker>
+V4L2CaptureDelegateMultiPlane::CreateBufferTracker() {
+ return make_scoped_refptr(new BufferTrackerMPlane());
+}
+
+bool V4L2CaptureDelegateMultiPlane::FillV4L2Format(
+ v4l2_format* format,
+ uint32_t width,
+ uint32_t height,
+ uint32_t pixelformat_fourcc) {
+ format->fmt.pix_mp.width = width;
+ format->fmt.pix_mp.height = height;
+
+ fourcc_ = pixelformat_fourcc;
+ format->fmt.pix_mp.pixelformat = fourcc_;
+
+ num_planes_ = GetNumPlanesForFourCc(fourcc_);
+ if (num_planes_ == 0u)
+ return false;
+ DCHECK_LE(num_planes_, static_cast<unsigned long>(VIDEO_MAX_PLANES));
+ format->fmt.pix_mp.num_planes = num_planes_;
+
+ v4l2_plane_.reset(new v4l2_plane[num_planes_]);
+ return true;
+}
+
+void V4L2CaptureDelegateMultiPlane::FinishFillingV4L2Buffer(
+ v4l2_buffer* buffer) const {
+ buffer->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+ buffer->length = num_planes_;
Pawel Osciak 2015/03/13 09:52:53 v4l2_plane_.size()
mcasas 2015/03/14 03:36:11 Actually it's not that simple. We need to have an
Pawel Osciak 2015/03/17 11:05:25 Oh, I forgot that you were using this as a scoped_
+ buffer->m.planes = v4l2_plane_.get();
+}
+
+void V4L2CaptureDelegateMultiPlane::SendBuffer(
+ const scoped_refptr<BufferTracker>& buffer) {
+ DCHECK_EQ(capture_format().pixel_format, PIXEL_FORMAT_I420);
+
+ BufferTrackerMPlane* const buffer_tracker =
+ reinterpret_cast<BufferTrackerMPlane*>(buffer.get());
Pawel Osciak 2015/03/13 09:52:53 Please remove downcast.
mcasas 2015/03/14 03:36:12 Done.
+ client()->OnIncomingCapturedYuvData(buffer_tracker->GetPlaneStart(0),
Pawel Osciak 2015/03/13 09:52:52 S_FMT may adjust values for plane sizes, resolutio
mcasas 2015/03/14 03:36:11 It's totally accepted that the device might, and o
Pawel Osciak 2015/03/17 11:05:25 My bad, I misread the code, you are actually using
+ buffer_tracker->GetPlaneStart(1),
+ buffer_tracker->GetPlaneStart(2),
+ buffer_tracker->GetPlaneLength(0),
+ buffer_tracker->GetPlaneLength(1),
+ buffer_tracker->GetPlaneLength(2),
+ capture_format(),
+ rotation(),
+ base::TimeTicks::Now());
+}
+
+bool V4L2CaptureDelegateMultiPlane::BufferTrackerMPlane::Init(
+ int fd,
+ const v4l2_buffer& buffer) {
+ for (size_t p = 0; p < buffer.length; ++p) {
+ Plane plane;
+ plane.start = mmap(NULL, buffer.m.planes[p].length, PROT_READ | PROT_WRITE,
+ MAP_SHARED, fd, buffer.m.planes[p].m.mem_offset);
+ if (plane.start == MAP_FAILED) {
+ DLOG(ERROR) << "Error mmap()ing a V4L2 buffer into userspace";
+ plane.start = nullptr;
+ return false;
+ }
+ plane.length = buffer.m.planes[p].length;
+ DVLOG(3) << "Mmap()ed plane #" << p << ", length " << plane.length << "B";
+ planes().push_back(plane);
+ }
+ return true;
+}
+
+} // namespace media
