OLD | NEW |
---|---|
1 // Copyright 2015 The Chromium Authors. All rights reserved. | |
2 // Use of this source code is governed by a BSD-style license that can be | |
3 // found in the LICENSE file. | |
4 | |
5 #include "media/video/capture/linux/v4l2_video_capture_delegate.h" | |
6 | |
7 #include <poll.h> | |
8 #include <sys/fcntl.h> | |
9 #include <sys/ioctl.h> | |
10 #include <sys/mman.h> | |
11 | |
12 #include "base/bind.h" | |
13 #include "base/files/file_enumerator.h" | |
14 #include "base/posix/eintr_wrapper.h" | |
15 #include "base/strings/stringprintf.h" | |
16 #include "media/base/bind_to_current_loop.h" | |
17 #include "media/video/capture/linux/video_capture_device_linux.h" | |
18 | |
19 namespace media { | |
20 | |
21 // Max number of video buffers VideoCaptureDeviceLinux can allocate. | |
22 const uint32_t kMaxVideoBuffers = 2; | |
23 // Timeout in milliseconds that v4l2_thread_ blocks waiting for a frame from the hw. | |
24 const int kCaptureTimeoutMs = 200; | |
25 // The number of continuous timeouts tolerated before being treated as an error. | |
26 const int kContinuousTimeoutLimit = 10; | |
27 // MJPEG is preferred if the requested width or height is larger than this. | |
28 const int kMjpegWidth = 640; | |
29 const int kMjpegHeight = 480; | |
30 // Typical framerate, in fps | |
31 const int kTypicalFramerate = 30; | |
32 | |
33 // V4L2 color formats supported by V4L2CaptureDelegateSinglePlane. This list is | |
34 // ordered by precedence of use. | |
35 static const uint32_t kSinglePlaneSupportedFormats[] = { | |
36 V4L2_PIX_FMT_YUV420, | |
37 V4L2_PIX_FMT_YUYV, | |
38 V4L2_PIX_FMT_UYVY, | |
39 // According to field reports, JPEG works as MJPEG on some gspca webcams. | |
40 V4L2_PIX_FMT_JPEG}; | |
41 | |
42 // List of supported formats and their respective amount of sub-buffers for | |
43 // V4L2CaptureDelegateMultiPlane. | |
44 static const struct { | |
45 uint32_t fourcc; | |
46 size_t num_planes; | |
47 } kMultiPlaneSupportedFormats[] = { | |
48 {V4L2_PIX_FMT_YUV420M, 3} | |
49 // TODO(mcasas): add V4L2_PIX_FMT_YVU420M when available in bots. | |
50 }; | |
51 | |
52 // Returns the input fourcc as a std::string four char representation. | |
53 static std::string FourccToString(uint32_t fourcc) { | |
54 return base::StringPrintf("%c%c%c%c", fourcc & 0xFF, (fourcc >> 8) & 0xFF, | |
55 (fourcc >> 16) & 0xFF, (fourcc >> 24) & 0xFF); | |
56 } | |
57 | |
58 static std::list<uint32_t> GetListOfUsableFourCcsSinglePlane() { | |
59 return std::list<uint32_t>( | |
60 kSinglePlaneSupportedFormats, | |
61 kSinglePlaneSupportedFormats + arraysize(kSinglePlaneSupportedFormats)); | |
62 } | |
63 | |
64 static size_t GetNumPlanesForFourCc(uint32_t fourcc) { | |
65 for (const auto& fourcc_and_pixel_format : kMultiPlaneSupportedFormats) { | |
66 if (fourcc_and_pixel_format.fourcc == fourcc) | |
67 return fourcc_and_pixel_format.num_planes; | |
68 } | |
69 NOTREACHED() << "Unknown fourcc " << FourccToString(fourcc); | |
70 return 0; | |
71 } | |
72 | |
73 static std::list<uint32_t> GetListOfUsableFourCcsMultiPlane() { | |
74 std::list<uint32_t> supported_formats; | |
75 for (const auto& i : kMultiPlaneSupportedFormats) | |
76 supported_formats.push_back(i.fourcc); | |
77 return supported_formats; | |
78 } | |
79 | |
80 // V4L2 specifics for SPLANE API. | |
81 class V4L2CaptureDelegateSinglePlane final : public V4L2VideoCaptureDelegate { | |
82 public: | |
83 V4L2CaptureDelegateSinglePlane( | |
84 const VideoCaptureDevice::Name& device_name, | |
85 const scoped_refptr<base::SingleThreadTaskRunner>& v4l2_task_runner, | |
86 int power_line_frequency) | |
87 : V4L2VideoCaptureDelegate(device_name, | |
88 v4l2_task_runner, | |
89 power_line_frequency) {} | |
90 | |
91 private: | |
92 // BufferTracker derivation to implement construction semantics for SPLANE. | |
93 class BufferTrackerSPlane final : public BufferTracker { | |
94 public: | |
95 BufferTrackerSPlane(int fd, const v4l2_buffer& buffer); | |
Pawel Osciak
2015/03/06 10:43:54
We need Initialize() methods as I mention elsewhere.
mcasas
2015/03/09 21:23:56
Done.
| |
96 | |
97 private: | |
98 ~BufferTrackerSPlane() override {}; | |
99 }; | |
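A rough sketch of the two-phase construction requested in the thread above, assuming the mmap() work moves out of the constructor into a fallible Init(); the Init() name and bool return are assumptions, not part of this patch:

// Hypothetical BufferTrackerSPlane with a trivial constructor and an Init()
// that maps the buffer and reports failure instead of silently returning.
class BufferTrackerSPlane final : public BufferTracker {
 public:
  BufferTrackerSPlane() {}
  bool Init(int fd, const v4l2_buffer& buffer) {
    // Some devices require mmap() with both READ and WRITE (crbug.com/178582).
    void* const start = mmap(NULL, buffer.length, PROT_READ | PROT_WRITE,
                             MAP_SHARED, fd, buffer.m.offset);
    if (start == MAP_FAILED)
      return false;
    scoped_ptr<Plane> plane(new Plane());
    plane->start = start;
    plane->length = buffer.length;
    planes().push_back(plane.Pass());
    return true;
  }

 private:
  ~BufferTrackerSPlane() override {}
};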
100 | |
101 ~V4L2CaptureDelegateSinglePlane() override {}; | |
102 | |
103 // V4L2VideoCaptureDelegate virtual methods implementation. | |
104 scoped_refptr<BufferTracker> CreateBufferTracker( | |
105 int fd, | |
106 const v4l2_buffer& buffer) override; | |
107 void FillV4L2Format(v4l2_format* format, | |
108 uint32_t width, | |
109 uint32_t height, | |
110 uint32_t pixelformat_fourcc) override; | |
111 void FinishFillingV4L2Buffer(v4l2_buffer* buffer) override {} | |
112 void SendBuffer(const v4l2_buffer& buffer) override; | |
113 }; | |
114 | |
115 // V4L2 specifics for MPLANE API. | |
116 class V4L2CaptureDelegateMultiPlane final : public V4L2VideoCaptureDelegate { | |
117 public: | |
118 V4L2CaptureDelegateMultiPlane( | |
119 const VideoCaptureDevice::Name& device_name, | |
120 const scoped_refptr<base::SingleThreadTaskRunner>& v4l2_task_runner, | |
121 int power_line_frequency) | |
122 : V4L2VideoCaptureDelegate(device_name, | |
123 v4l2_task_runner, | |
124 power_line_frequency), | |
125 fourcc_(0), | |
126 num_planes_(0) {} | |
127 | |
128 private: | |
129 // BufferTracker derivation to implement construction semantics for MPLANE. | |
130 class BufferTrackerMPlane final : public BufferTracker { | |
131 public: | |
132 BufferTrackerMPlane(int fd, const v4l2_buffer& buffer); | |
133 | |
134 private: | |
135 ~BufferTrackerMPlane() override {}; | |
136 }; | |
137 | |
138 ~V4L2CaptureDelegateMultiPlane() override {}; | |
139 | |
140 // V4L2VideoCaptureDelegate virtual methods implementation. | |
141 scoped_refptr<BufferTracker> CreateBufferTracker( | |
142 int fd, | |
143 const v4l2_buffer& buffer) override; | |
144 void FillV4L2Format(v4l2_format* format, | |
145 uint32_t width, | |
146 uint32_t height, | |
147 uint32_t pixelformat_fourcc) override; | |
148 void FinishFillingV4L2Buffer(v4l2_buffer* buffer) override; | |
149 void SendBuffer(const v4l2_buffer& buffer) override; | |
150 | |
151 // Actual pixel format and number of planes, known after FillV4L2Format(). | |
152 uint32_t fourcc_; | |
153 size_t num_planes_; | |
154 | |
155 // Scoped_ptr to allocate and track as many v4l2_plane structs as planes, | |
156 // needed inside v4l2_buffer. | |
157 scoped_ptr<struct v4l2_plane[]> v4l2_plane_; | |
158 }; | |
159 | |
160 // static | |
161 scoped_refptr<V4L2VideoCaptureDelegate> | |
162 V4L2VideoCaptureDelegate::CreateV4L2VideoCaptureDelegate( | |
163 const VideoCaptureDevice::Name& device_name, | |
164 const VideoPixelFormat pixel_format, | |
165 const scoped_refptr<base::SingleThreadTaskRunner>& v4l2_task_runner, | |
166 int power_line_frequency) { | |
167 switch (device_name.capture_api_type()) { | |
168 case VideoCaptureDevice::Name::V4L2_SINGLE_PLANE: | |
169 return make_scoped_refptr(new V4L2CaptureDelegateSinglePlane( | |
170 device_name, v4l2_task_runner, power_line_frequency)); | |
171 case VideoCaptureDevice::Name::V4L2_MULTI_PLANE: | |
172 if (pixel_format != PIXEL_FORMAT_I420) | |
Pawel Osciak
2015/03/06 10:43:54
This may bitrot. Please instead look up pixel_format
mcasas
2015/03/09 21:23:56
I think this early-bail-out is not really needed
a
Pawel Osciak
2015/03/13 09:52:52
Acknowledged.
| |
173 return scoped_refptr<V4L2VideoCaptureDelegate>(); | |
174 return make_scoped_refptr(new V4L2CaptureDelegateMultiPlane( | |
175 device_name, v4l2_task_runner, power_line_frequency)); | |
176 default: | |
177 NOTIMPLEMENTED() << "Unknown V4L2 capture API type"; | |
178 return scoped_refptr<V4L2VideoCaptureDelegate>(); | |
179 } | |
180 } | |
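Regarding the bitrot concern raised above on the V4L2_MULTI_PLANE branch, one table-driven alternative to hard-coding PIXEL_FORMAT_I420 might look like the sketch below; the helper name IsMultiPlanePixelFormatSupported() is illustrative only:

// Hypothetical check: accept any pixel format that maps to a fourcc in
// kMultiPlaneSupportedFormats, so new entries do not require edits here.
static bool IsMultiPlanePixelFormatSupported(VideoPixelFormat pixel_format) {
  for (const auto& entry : kMultiPlaneSupportedFormats) {
    if (V4L2VideoCaptureDelegate::V4l2FourCcToChromiumPixelFormat(
            entry.fourcc) == pixel_format) {
      return true;
    }
  }
  return false;
}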
181 | |
182 // static | |
183 VideoPixelFormat V4L2VideoCaptureDelegate::V4l2FourCcToChromiumPixelFormat( | |
184 uint32_t v4l2_fourcc) { | |
185 const struct { | |
186 uint32_t fourcc; | |
187 VideoPixelFormat pixel_format; | |
188 } kFourCcAndChromiumPixelFormats[] = { | |
189 {V4L2_PIX_FMT_YUV420M, PIXEL_FORMAT_I420}, | |
190 {V4L2_PIX_FMT_YUV420, PIXEL_FORMAT_I420}, | |
191 {V4L2_PIX_FMT_YUYV, PIXEL_FORMAT_YUY2}, | |
192 {V4L2_PIX_FMT_UYVY, PIXEL_FORMAT_UYVY}, | |
193 {V4L2_PIX_FMT_MJPEG, PIXEL_FORMAT_MJPEG}, | |
194 {V4L2_PIX_FMT_JPEG, PIXEL_FORMAT_MJPEG}, | |
195 }; | |
196 for (const auto& fourcc_and_pixel_format : kFourCcAndChromiumPixelFormats) { | |
197 if (fourcc_and_pixel_format.fourcc == v4l2_fourcc) | |
198 return fourcc_and_pixel_format.pixel_format; | |
199 } | |
200 DVLOG(1) << "Unsupported pixel format: " << FourccToString(v4l2_fourcc); | |
201 return PIXEL_FORMAT_UNKNOWN; | |
202 } | |
203 | |
204 // static | |
205 std::list<uint32_t> V4L2VideoCaptureDelegate::GetListOfUsableFourCss( | |
emircan
2015/03/04 02:47:47
/s/FourCss/FourCcs/
mcasas
2015/03/09 21:23:56
Done.
| |
206 bool prefer_mjpeg) { | |
207 std::list<uint32_t> singleplane_formats = GetListOfUsableFourCcsSinglePlane(); | |
208 std::list<uint32_t> multiplane_formats = GetListOfUsableFourCcsMultiPlane(); | |
209 multiplane_formats.insert(multiplane_formats.end(), | |
210 singleplane_formats.begin(), | |
211 singleplane_formats.end()); | |
212 // Add MJPEG to the front or the back of the list depending on |prefer_mjpeg|. | |
213 multiplane_formats.insert( | |
Pawel Osciak
2015/03/06 10:43:54
if (prefer_mjpeg)
multiplane_formats.push_front(
mcasas
2015/03/09 21:23:56
Done.
| |
214 (prefer_mjpeg ? multiplane_formats.begin() : multiplane_formats.end()), | |
215 V4L2_PIX_FMT_MJPEG); | |
Pawel Osciak
2015/03/06 10:43:55
This is different from existing code, which always
mcasas
2015/03/09 21:23:56
Done.
| |
216 return multiplane_formats; | |
217 } | |
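For reference, the shape of the prefer_mjpeg handling suggested in the thread above, equivalent to the insert() call it replaces:

// Sketch of the suggested rewrite: MJPEG goes to the front of the list when
// preferred, to the back otherwise.
if (prefer_mjpeg)
  multiplane_formats.push_front(V4L2_PIX_FMT_MJPEG);
else
  multiplane_formats.push_back(V4L2_PIX_FMT_MJPEG);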
218 | |
219 V4L2VideoCaptureDelegate::BufferTracker::BufferTracker( | |
Pawel Osciak
2015/03/06 10:43:55
This is not needed?
mcasas
2015/03/09 21:23:56
Needed due to:
[chromium-style] Complex class/struct needs an explicit out-of-line constructor.
| |
220 int fd, | |
221 const v4l2_buffer& buffer) { | |
222 } | |
223 | |
224 V4L2VideoCaptureDelegate::BufferTracker::~BufferTracker() { | |
225 for (const auto& plane : planes_) { | |
226 if (plane->start == NULL) | |
227 continue; | |
228 const int result = munmap(plane->start, plane->length); | |
229 PLOG_IF(ERROR, result < 0) << "Error munmap()ing V4L2 buffer"; | |
230 } | |
231 } | |
232 | |
233 V4L2VideoCaptureDelegate::V4L2VideoCaptureDelegate( | |
234 const VideoCaptureDevice::Name& device_name, | |
235 const scoped_refptr<base::SingleThreadTaskRunner>& v4l2_task_runner, | |
236 int power_line_frequency) | |
237 : capture_type_((device_name.capture_api_type() == | |
238 VideoCaptureDevice::Name::V4L2_SINGLE_PLANE) | |
239 ? V4L2_BUF_TYPE_VIDEO_CAPTURE | |
240 : V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE), | |
241 v4l2_task_runner_(v4l2_task_runner), | |
242 device_name_(device_name), | |
243 power_line_frequency_(power_line_frequency), | |
244 is_capturing_(false), | |
245 timeout_count_(0), | |
246 rotation_(0) { | |
247 } | |
248 | |
249 void V4L2VideoCaptureDelegate::AllocateAndStart( | |
250 int width, | |
251 int height, | |
252 float frame_rate, | |
253 scoped_ptr<VideoCaptureDevice::Client> client) { | |
254 DCHECK(v4l2_task_runner_->BelongsToCurrentThread()); | |
255 DCHECK(client); | |
256 client_ = client.Pass(); | |
257 | |
258 // Need to open camera with O_RDWR after Linux kernel 3.3. | |
259 device_fd_.reset(HANDLE_EINTR(open(device_name_.id().c_str(), O_RDWR))); | |
260 if (!device_fd_.is_valid()) { | |
261 SetErrorState("Failed to open V4L2 device driver file."); | |
262 return; | |
263 } | |
264 | |
265 v4l2_capability cap = {}; | |
266 if (!((HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QUERYCAP, &cap)) == 0) && | |
267 ((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE || | |
268 cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE) && | |
269 !(cap.capabilities & V4L2_CAP_VIDEO_OUTPUT) && | |
270 !(cap.capabilities & V4L2_CAP_VIDEO_OUTPUT_MPLANE)))) { | |
271 device_fd_.reset(); | |
272 SetErrorState("This is not a V4L2 video capture device"); | |
273 return; | |
274 } | |
275 | |
276 // Get supported video formats in preferred order. | |
277 // For large resolutions, favour mjpeg over raw formats. | |
278 const std::list<uint32_t>& desired_v4l2_formats = | |
279 GetListOfUsableFourCss(width > kMjpegWidth || height > kMjpegHeight); | |
280 std::list<uint32_t>::const_iterator best = desired_v4l2_formats.end(); | |
281 | |
282 v4l2_fmtdesc fmtdesc = {}; | |
283 fmtdesc.type = capture_type_; | |
284 for (; HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_ENUM_FMT, &fmtdesc)) == 0; | |
285 ++fmtdesc.index) { | |
286 best = std::find(desired_v4l2_formats.begin(), best, fmtdesc.pixelformat); | |
Pawel Osciak
2015/03/06 10:43:55
Maybe I'm missing something, but I think this will
mcasas
2015/03/09 21:23:56
According to the fact that it works and the list
o
Pawel Osciak
2015/03/13 09:52:52
Acknowledged and true. Sorry.
| |
287 } | |
emircan
2015/03/04 02:47:47
You can end the iteration when (best == desired_v4l2_formats.begin()).
mcasas
2015/03/09 21:23:56
That would be an overoptimization, wouldn't it?
M
| |
288 if (best == desired_v4l2_formats.end()) { | |
289 SetErrorState("Failed to find a supported camera format."); | |
290 return; | |
291 } | |
292 | |
293 DVLOG(1) << "chosen pixel format is " << FourccToString(*best); | |
294 | |
295 v4l2_format video_fmt = {}; | |
296 video_fmt.type = capture_type_; | |
297 FillV4L2Format(&video_fmt, width, height, *best); | |
298 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_FMT, &video_fmt)) < 0) { | |
299 SetErrorState("Failed to set video capture format"); | |
300 return; | |
301 } | |
302 | |
303 // Set capture framerate in the form of capture interval. | |
304 v4l2_streamparm streamparm = {}; | |
305 streamparm.type = capture_type_; | |
306 // The following line checks that the driver knows about framerate get/set. | |
307 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_G_PARM, &streamparm)) >= 0) { | |
308 // Now check if the device is able to accept a capture framerate set. | |
309 if (streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) { | |
310 // |frame_rate| is float, approximate by a fraction. | |
311 streamparm.parm.capture.timeperframe.numerator = | |
312 media::kFrameRatePrecision; | |
313 streamparm.parm.capture.timeperframe.denominator = | |
314 (frame_rate) ? (frame_rate * media::kFrameRatePrecision) | |
315 : (kTypicalFramerate * media::kFrameRatePrecision); | |
316 | |
317 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_PARM, &streamparm)) < | |
318 0) { | |
319 SetErrorState("Failed to set camera framerate"); | |
320 return; | |
321 } | |
322 DVLOG(2) << "Actual camera driver framerate: " | |
323 << streamparm.parm.capture.timeperframe.denominator << "/" | |
324 << streamparm.parm.capture.timeperframe.numerator; | |
325 } | |
326 } | |
327 // TODO(mcasas): what should be done if the camera driver does not allow | |
328 // framerate configuration, or the actual one is different from the desired? | |
329 | |
330 // Set anti-banding/anti-flicker to 50/60Hz. May fail due to not supported | |
331 // operation (|errno| == EINVAL in this case) or plain failure. | |
332 if ((power_line_frequency_ == V4L2_CID_POWER_LINE_FREQUENCY_50HZ) || | |
333 (power_line_frequency_ == V4L2_CID_POWER_LINE_FREQUENCY_60HZ)) { | |
334 struct v4l2_control control = {}; | |
335 control.id = V4L2_CID_POWER_LINE_FREQUENCY; | |
336 control.value = power_line_frequency_; | |
337 HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_CTRL, &control)); | |
338 } | |
339 | |
340 capture_format_.frame_size.SetSize(video_fmt.fmt.pix.width, | |
341 video_fmt.fmt.pix.height); | |
342 capture_format_.frame_rate = frame_rate; | |
343 capture_format_.pixel_format = | |
344 V4l2FourCcToChromiumPixelFormat(video_fmt.fmt.pix.pixelformat); | |
Pawel Osciak
2015/03/06 10:43:55
This needs to be checked for success, we don't want PIXEL_FORMAT_UNKNOWN.
mcasas
2015/03/09 21:23:56
Done, but moved a bit farther above.
| |
345 | |
346 if (!AllocateVideoBuffers()) { | |
347 SetErrorState("Allocate buffer failed"); | |
348 return; | |
349 } | |
350 | |
351 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_STREAMON, &capture_type_)) | |
352 < 0) { | |
353 SetErrorState("VIDIOC_STREAMON failed"); | |
354 return; | |
355 } | |
356 | |
357 is_capturing_ = true; | |
358 // Post task to start fetching frames from v4l2. | |
359 v4l2_task_runner_->PostTask( | |
360 FROM_HERE, base::Bind(&V4L2VideoCaptureDelegate::DoCapture, this)); | |
361 } | |
362 | |
363 void V4L2VideoCaptureDelegate::StopAndDeAllocate() { | |
364 DCHECK(v4l2_task_runner_->BelongsToCurrentThread()); | |
365 // The order is important: stop streaming, clear |buffer_pool_|, | |
366 // thus munmap()ing the v4l2_buffers, and then return them to the OS. | |
367 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_STREAMOFF, &capture_type_)) | |
368 < 0) { | |
369 SetErrorState("VIDIOC_STREAMOFF failed"); | |
370 return; | |
371 } | |
372 | |
373 buffer_tracker_pool_.clear(); | |
374 | |
375 v4l2_requestbuffers r_buffer = {}; | |
376 r_buffer.type = capture_type_; | |
377 r_buffer.memory = V4L2_MEMORY_MMAP; | |
378 r_buffer.count = 0; | |
379 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_REQBUFS, &r_buffer)) < 0) | |
380 SetErrorState("Failed to VIDIOC_REQBUFS with count = 0"); | |
381 | |
382 // At this point we can close the device. | |
383 // This is also needed for correctly changing settings later via VIDIOC_S_FMT. | |
384 device_fd_.reset(); | |
385 is_capturing_ = false; | |
386 client_.reset(); | |
387 } | |
388 | |
389 void V4L2VideoCaptureDelegate::SetRotation(int rotation) { | |
390 DCHECK(v4l2_task_runner_->BelongsToCurrentThread()); | |
391 DCHECK(rotation >= 0 && rotation < 360 && rotation % 90 == 0); | |
392 rotation_ = rotation; | |
393 } | |
394 | |
395 bool V4L2VideoCaptureDelegate::AllocateVideoBuffers() { | |
396 v4l2_requestbuffers r_buffer = {}; | |
397 r_buffer.type = capture_type_; | |
398 r_buffer.memory = V4L2_MEMORY_MMAP; | |
399 r_buffer.count = kMaxVideoBuffers; | |
400 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_REQBUFS, &r_buffer)) < 0) { | |
401 DLOG(ERROR) << "Error requesting MMAP buffers from V4L2"; | |
402 return false; | |
403 } | |
404 DCHECK_EQ(r_buffer.count, kMaxVideoBuffers); | |
405 r_buffer.count = std::min(r_buffer.count, kMaxVideoBuffers); | |
406 for (unsigned int i = 0; i < r_buffer.count; ++i) { | |
407 v4l2_buffer buffer = {}; | |
408 buffer.type = capture_type_; | |
409 buffer.memory = V4L2_MEMORY_MMAP; | |
410 buffer.index = i; | |
411 FinishFillingV4L2Buffer(&buffer); | |
412 | |
413 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QUERYBUF, &buffer)) < 0) { | |
414 DLOG(ERROR) << "Error querying status of a MMAP V4L2 buffer"; | |
415 return false; | |
416 } | |
Pawel Osciak
2015/03/06 10:43:54
As described in the .h, this all could go to CreateBufferTracker().
mcasas
2015/03/09 21:23:56
Acknowledged.
| |
417 | |
418 buffer_tracker_pool_.push_back(CreateBufferTracker(device_fd_.get(), | |
419 buffer)); | |
420 | |
421 // Enqueue the buffer in the driver's incoming queue. | |
422 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QBUF, &buffer)) < 0) { | |
423 DLOG(ERROR) << "Error enqueuing a V4L2 buffer back into the driver"; | |
424 return false; | |
425 } | |
426 } | |
427 return true; | |
428 } | |
429 | |
430 void V4L2VideoCaptureDelegate::DoCapture() { | |
431 DCHECK(v4l2_task_runner_->BelongsToCurrentThread()); | |
432 if (!is_capturing_) | |
433 return; | |
434 | |
435 pollfd device_pfd = {}; | |
436 device_pfd.fd = device_fd_.get(); | |
437 device_pfd.events = POLLIN; | |
438 const int result = HANDLE_EINTR(poll(&device_pfd, 1, kCaptureTimeoutMs)); | |
439 if (result < 0) { | |
440 SetErrorState("Poll failed"); | |
441 return; | |
442 } | |
443 // Check if poll() timed out; track the amount of times it did in a row and | |
444 // throw an error if it times out too many times. | |
445 if (result == 0) { | |
446 timeout_count_++; | |
447 if (timeout_count_ >= kContinuousTimeoutLimit) { | |
448 SetErrorState("Multiple continuous timeouts while read-polling."); | |
449 timeout_count_ = 0; | |
450 return; | |
451 } | |
452 } else { | |
453 timeout_count_ = 0; | |
454 } | |
455 | |
456 // Dequeue, send and re-enqueue a buffer if the driver has filled one in. | |
457 if (device_pfd.revents & POLLIN) { | |
458 v4l2_buffer buffer = {}; | |
459 buffer.type = capture_type_; | |
460 buffer.memory = V4L2_MEMORY_MMAP; | |
461 buffer.index = 0; | |
462 FinishFillingV4L2Buffer(&buffer); | |
Pawel Osciak
2015/03/06 10:43:54
This would just be FillV4L2Buffer().
mcasas
2015/03/09 21:23:56
Done.
| |
463 | |
464 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_DQBUF, &buffer)) < 0) { | |
465 SetErrorState("Failed to dequeue capture buffer"); | |
466 return; | |
467 } | |
468 | |
469 SendBuffer(buffer); | |
470 | |
471 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QBUF, &buffer)) < 0) { | |
472 SetErrorState("Failed to enqueue capture buffer"); | |
473 return; | |
474 } | |
475 } | |
476 | |
477 v4l2_task_runner_->PostTask( | |
478 FROM_HERE, base::Bind(&V4L2VideoCaptureDelegate::DoCapture, this)); | |
479 } | |
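A sketch of the FillV4L2Buffer() consolidation mentioned in the DoCapture() thread above; the exact signature is an assumption:

// Hypothetical helper gathering the common v4l2_buffer setup used by both
// AllocateVideoBuffers() and DoCapture(), with the plane-specific part kept
// in the virtual FinishFillingV4L2Buffer().
void V4L2VideoCaptureDelegate::FillV4L2Buffer(v4l2_buffer* buffer, int index) {
  *buffer = v4l2_buffer();  // Zero-initialize, as the "= {}" callers do.
  buffer->type = capture_type_;
  buffer->memory = V4L2_MEMORY_MMAP;
  buffer->index = index;
  FinishFillingV4L2Buffer(buffer);
}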
480 | |
481 void V4L2VideoCaptureDelegate::SetErrorState(const std::string& reason) { | |
482 DCHECK(v4l2_task_runner_->BelongsToCurrentThread()); | |
483 is_capturing_ = false; | |
484 client_->OnError(reason); | |
485 } | |
486 | |
487 V4L2VideoCaptureDelegate::~V4L2VideoCaptureDelegate() { | |
488 } | |
489 | |
490 scoped_refptr<V4L2VideoCaptureDelegate::BufferTracker> | |
491 V4L2CaptureDelegateSinglePlane::CreateBufferTracker(int fd, | |
492 const v4l2_buffer& buffer) { | |
493 return make_scoped_refptr(new BufferTrackerSPlane(fd, buffer)); | |
494 } | |
495 | |
496 void V4L2CaptureDelegateSinglePlane::FillV4L2Format( | |
497 v4l2_format* format, | |
498 uint32_t width, | |
499 uint32_t height, | |
500 uint32_t pixelformat_fourcc) { | |
501 format->fmt.pix.width = width; | |
502 format->fmt.pix.height = height; | |
503 format->fmt.pix.pixelformat = pixelformat_fourcc; | |
504 } | |
505 | |
506 void V4L2CaptureDelegateSinglePlane::SendBuffer(const v4l2_buffer& buffer) { | |
507 BufferTrackerSPlane* const buffer_tracker = | |
508 reinterpret_cast<BufferTrackerSPlane*>( | |
509 buffer_tracker_pool()[buffer.index].get()); | |
510 DCHECK_EQ(buffer_tracker->planes()[0]->length, buffer.length); | |
511 client()->OnIncomingCapturedData( | |
512 static_cast<uint8*>(buffer_tracker->planes()[0]->start), | |
513 buffer_tracker->planes()[0]->length, | |
514 capture_format(), | |
515 rotation(), | |
516 base::TimeTicks::Now()); | |
517 } | |
518 | |
519 V4L2CaptureDelegateSinglePlane::BufferTrackerSPlane::BufferTrackerSPlane( | |
520 int fd, | |
521 const v4l2_buffer& buffer) | |
522 : BufferTracker(fd, buffer) { | |
523 scoped_ptr<Plane> plane(new Plane()); | |
524 // Some devices require mmap() to be called with both READ and WRITE. | |
525 // See http://crbug.com/178582. | |
526 plane->start = mmap(NULL, buffer.length, PROT_READ | PROT_WRITE, MAP_SHARED, | |
527 fd, buffer.m.offset); | |
528 if (plane->start == MAP_FAILED) { | |
529 DLOG(ERROR) << "Error mmap()ing a V4L2 buffer into userspace"; | |
530 return; | |
531 } | |
532 plane->length = buffer.length; | |
533 planes().push_back(plane.Pass()); | |
534 } | |
535 | |
536 scoped_refptr<V4L2VideoCaptureDelegate::BufferTracker> | |
537 V4L2CaptureDelegateMultiPlane::CreateBufferTracker(int fd, | |
538 const v4l2_buffer& buffer) { | |
539 DCHECK_LE(buffer.length, num_planes_); | |
540 return make_scoped_refptr(new BufferTrackerMPlane(fd, buffer)); | |
541 } | |
542 | |
543 void V4L2CaptureDelegateMultiPlane::FillV4L2Format( | |
544 v4l2_format* format, | |
545 uint32_t width, | |
546 uint32_t height, | |
547 uint32_t pixelformat_fourcc) { | |
548 format->fmt.pix_mp.width = width; | |
549 format->fmt.pix_mp.height = height; | |
550 | |
551 fourcc_ = pixelformat_fourcc; | |
552 format->fmt.pix_mp.pixelformat = fourcc_; | |
553 | |
554 num_planes_ = GetNumPlanesForFourCc(fourcc_); | |
555 DCHECK_GT(num_planes_, 0u); | |
556 DCHECK_LE(num_planes_, static_cast<unsigned long>(VIDEO_MAX_PLANES)); | |
557 format->fmt.pix_mp.num_planes = num_planes_; | |
558 | |
559 v4l2_plane_.reset(new v4l2_plane[num_planes_]); | |
Pawel Osciak
2015/03/06 10:43:55
We still don't want to call new foo[0] if GetNumPlanesForFourCc() returns 0.
mcasas
2015/03/09 21:23:56
Done. I return a bool (false) if this goes south
a
| |
560 } | |
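A sketch of the guard described above ("return a bool (false) if this goes south"), assuming FillV4L2Format() is changed to report failure:

// Hypothetical bool-returning FillV4L2Format() for the MPLANE delegate,
// bailing out before allocating a zero-length v4l2_plane array.
bool V4L2CaptureDelegateMultiPlane::FillV4L2Format(
    v4l2_format* format,
    uint32_t width,
    uint32_t height,
    uint32_t pixelformat_fourcc) {
  format->fmt.pix_mp.width = width;
  format->fmt.pix_mp.height = height;
  fourcc_ = pixelformat_fourcc;
  format->fmt.pix_mp.pixelformat = fourcc_;

  num_planes_ = GetNumPlanesForFourCc(fourcc_);
  if (num_planes_ == 0u ||
      num_planes_ > static_cast<size_t>(VIDEO_MAX_PLANES)) {
    return false;
  }
  format->fmt.pix_mp.num_planes = num_planes_;
  v4l2_plane_.reset(new v4l2_plane[num_planes_]);
  return true;
}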
561 | |
562 void V4L2CaptureDelegateMultiPlane::FinishFillingV4L2Buffer( | |
563 v4l2_buffer* buffer) { | |
564 buffer->length = num_planes_; | |
565 buffer->m.planes = v4l2_plane_.get(); | |
566 } | |
567 | |
568 void V4L2CaptureDelegateMultiPlane::SendBuffer(const v4l2_buffer& buffer) { | |
569 DCHECK_EQ(capture_format().pixel_format, PIXEL_FORMAT_I420); | |
570 | |
571 BufferTrackerMPlane* const buffer_tracker = | |
572 reinterpret_cast<BufferTrackerMPlane*>( | |
573 buffer_tracker_pool()[buffer.index].get()); | |
574 | |
575 client()->OnIncomingCapturedYuvData( | |
576 static_cast<uint8*>(buffer_tracker->planes()[0]->start), | |
577 static_cast<uint8*>(buffer_tracker->planes()[1]->start), | |
578 static_cast<uint8*>(buffer_tracker->planes()[2]->start), | |
579 buffer_tracker->planes()[0]->length, | |
580 buffer_tracker->planes()[1]->length, | |
581 buffer_tracker->planes()[2]->length, | |
582 capture_format(), | |
583 rotation(), | |
584 base::TimeTicks::Now()); | |
585 } | |
586 | |
587 V4L2CaptureDelegateMultiPlane::BufferTrackerMPlane::BufferTrackerMPlane( | |
588 int fd, | |
589 const v4l2_buffer& buffer) | |
590 : BufferTracker(fd, buffer) { | |
591 for (size_t p = 0; p < buffer.length; ++p) { | |
592 scoped_ptr<Plane> plane(new Plane()); | |
593 plane->start = mmap(NULL, buffer.m.planes[p].length, PROT_READ | PROT_WRITE, | |
594 MAP_SHARED, fd, buffer.m.planes[p].m.mem_offset); | |
595 if (plane->start == MAP_FAILED) { | |
596 DLOG(ERROR) << "Error mmap()ing a V4L2 buffer into userspace"; | |
597 return; | |
Pawel Osciak
2015/03/06 10:43:55
This way you'd still use the plane as normal if the mmap() failed.
mcasas
2015/03/09 21:23:56
Done.
| |
598 } | |
599 plane->length = buffer.m.planes[p].length; | |
600 DVLOG(3) << "Mmap()ed plane #" << p << ", length " << plane->length << "B"; | |
601 planes().push_back(plane.Pass()); | |
602 } | |
603 } | |
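Following the thread above about not using a partially mapped buffer, the MPLANE mapping loop as a fallible Init(), mirroring the single-plane sketch earlier (again, the Init() name and bool return are assumptions):

// Hypothetical BufferTrackerMPlane::Init(): stop at the first mmap() failure
// so a buffer with missing planes is never handed to the client.
bool Init(int fd, const v4l2_buffer& buffer) {
  for (size_t p = 0; p < buffer.length; ++p) {
    void* const start =
        mmap(NULL, buffer.m.planes[p].length, PROT_READ | PROT_WRITE,
             MAP_SHARED, fd, buffer.m.planes[p].m.mem_offset);
    if (start == MAP_FAILED)
      return false;
    scoped_ptr<Plane> plane(new Plane());
    plane->start = start;
    plane->length = buffer.m.planes[p].length;
    DVLOG(3) << "Mmap()ed plane #" << p << ", length " << plane->length << "B";
    planes().push_back(plane.Pass());
  }
  return true;
}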
604 | |
605 } // namespace media | |