Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(290)

Side by Side Diff: content/browser/renderer_host/media/video_capture_controller.cc

Issue 967793002: Linux Video Capture: Add V4L2VideoCaptureDelegate{Single,Multi}Plane. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Created 5 years, 9 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "content/browser/renderer_host/media/video_capture_controller.h" 5 #include "content/browser/renderer_host/media/video_capture_controller.h"
6 6
7 #include <map> 7 #include <map>
8 #include <set> 8 #include <set>
9 9
10 #include "base/bind.h" 10 #include "base/bind.h"
(...skipping 10 matching lines...) Expand all
21 #include "media/base/video_frame.h" 21 #include "media/base/video_frame.h"
22 #include "media/base/video_util.h" 22 #include "media/base/video_util.h"
23 #include "media/base/yuv_convert.h" 23 #include "media/base/yuv_convert.h"
24 #include "third_party/libyuv/include/libyuv.h" 24 #include "third_party/libyuv/include/libyuv.h"
25 25
26 #if !defined(OS_ANDROID) 26 #if !defined(OS_ANDROID)
27 #include "content/browser/compositor/image_transport_factory.h" 27 #include "content/browser/compositor/image_transport_factory.h"
28 #endif 28 #endif
29 29
30 using media::VideoCaptureFormat; 30 using media::VideoCaptureFormat;
31 using media::VideoFrame;
31 32
32 namespace content { 33 namespace content {
33 34
34 namespace { 35 namespace {
35 36
36 static const int kInfiniteRatio = 99999; 37 static const int kInfiniteRatio = 99999;
37 38
38 #define UMA_HISTOGRAM_ASPECT_RATIO(name, width, height) \ 39 #define UMA_HISTOGRAM_ASPECT_RATIO(name, width, height) \
39 UMA_HISTOGRAM_SPARSE_SLOWLY( \ 40 UMA_HISTOGRAM_SPARSE_SLOWLY( \
40 name, \ 41 name, \
(...skipping 19 matching lines...) Expand all
60 61
61 private: 62 private:
62 ~AutoReleaseBuffer() override { pool_->RelinquishProducerReservation(id_); } 63 ~AutoReleaseBuffer() override { pool_->RelinquishProducerReservation(id_); }
63 64
64 const scoped_refptr<VideoCaptureBufferPool> pool_; 65 const scoped_refptr<VideoCaptureBufferPool> pool_;
65 const int id_; 66 const int id_;
66 void* const data_; 67 void* const data_;
67 const size_t size_; 68 const size_t size_;
68 }; 69 };
69 70
70 class SyncPointClientImpl : public media::VideoFrame::SyncPointClient { 71 class SyncPointClientImpl : public VideoFrame::SyncPointClient {
71 public: 72 public:
72 explicit SyncPointClientImpl(GLHelper* gl_helper) : gl_helper_(gl_helper) {} 73 explicit SyncPointClientImpl(GLHelper* gl_helper) : gl_helper_(gl_helper) {}
73 ~SyncPointClientImpl() override {} 74 ~SyncPointClientImpl() override {}
74 uint32 InsertSyncPoint() override { return gl_helper_->InsertSyncPoint(); } 75 uint32 InsertSyncPoint() override { return gl_helper_->InsertSyncPoint(); }
75 void WaitSyncPoint(uint32 sync_point) override { 76 void WaitSyncPoint(uint32 sync_point) override {
76 gl_helper_->WaitSyncPoint(sync_point); 77 gl_helper_->WaitSyncPoint(sync_point);
77 } 78 }
78 79
79 private: 80 private:
80 GLHelper* gl_helper_; 81 GLHelper* gl_helper_;
81 }; 82 };
82 83
83 void ReturnVideoFrame(const scoped_refptr<media::VideoFrame>& video_frame, 84 void ReturnVideoFrame(const scoped_refptr<VideoFrame>& video_frame,
84 uint32 sync_point) { 85 uint32 sync_point) {
85 DCHECK_CURRENTLY_ON(BrowserThread::UI); 86 DCHECK_CURRENTLY_ON(BrowserThread::UI);
86 #if defined(OS_ANDROID) 87 #if defined(OS_ANDROID)
87 NOTREACHED(); 88 NOTREACHED();
88 #else 89 #else
89 GLHelper* gl_helper = ImageTransportFactory::GetInstance()->GetGLHelper(); 90 GLHelper* gl_helper = ImageTransportFactory::GetInstance()->GetGLHelper();
90 // UpdateReleaseSyncPoint() creates a new sync_point using |gl_helper|, so 91 // UpdateReleaseSyncPoint() creates a new sync_point using |gl_helper|, so
 91 // wait on the given |sync_point| using |gl_helper|. 92 // wait on the given |sync_point| using |gl_helper|.
92 if (gl_helper) { 93 if (gl_helper) {
93 gl_helper->WaitSyncPoint(sync_point); 94 gl_helper->WaitSyncPoint(sync_point);
(...skipping 28 matching lines...) Expand all
122 // Handle to the render process that will receive the capture buffers. 123 // Handle to the render process that will receive the capture buffers.
123 const base::ProcessHandle render_process_handle; 124 const base::ProcessHandle render_process_handle;
124 const media::VideoCaptureSessionId session_id; 125 const media::VideoCaptureSessionId session_id;
125 const media::VideoCaptureParams parameters; 126 const media::VideoCaptureParams parameters;
126 127
127 // Buffers that are currently known to this client. 128 // Buffers that are currently known to this client.
128 std::set<int> known_buffers; 129 std::set<int> known_buffers;
129 130
130 // Buffers currently held by this client, and syncpoint callback to call when 131 // Buffers currently held by this client, and syncpoint callback to call when
131 // they are returned from the client. 132 // they are returned from the client.
132 typedef std::map<int, scoped_refptr<media::VideoFrame> > ActiveBufferMap; 133 typedef std::map<int, scoped_refptr<VideoFrame> > ActiveBufferMap;
133 ActiveBufferMap active_buffers; 134 ActiveBufferMap active_buffers;
134 135
135 // State of capture session, controlled by VideoCaptureManager directly. This 136 // State of capture session, controlled by VideoCaptureManager directly. This
136 // transitions to true as soon as StopSession() occurs, at which point the 137 // transitions to true as soon as StopSession() occurs, at which point the
137 // client is sent an OnEnded() event. However, because the client retains a 138 // client is sent an OnEnded() event. However, because the client retains a
138 // VideoCaptureController* pointer, its ControllerClient entry lives on until 139 // VideoCaptureController* pointer, its ControllerClient entry lives on until
139 // it unregisters itself via RemoveClient(), which may happen asynchronously. 140 // it unregisters itself via RemoveClient(), which may happen asynchronously.
140 // 141 //
141 // TODO(nick): If we changed the semantics of VideoCaptureHost so that 142 // TODO(nick): If we changed the semantics of VideoCaptureHost so that
142 // OnEnded() events were processed synchronously (with the RemoveClient() done 143 // OnEnded() events were processed synchronously (with the RemoveClient() done
(...skipping 19 matching lines...) Expand all
162 public: 163 public:
163 explicit VideoCaptureDeviceClient( 164 explicit VideoCaptureDeviceClient(
164 const base::WeakPtr<VideoCaptureController>& controller, 165 const base::WeakPtr<VideoCaptureController>& controller,
165 const scoped_refptr<VideoCaptureBufferPool>& buffer_pool); 166 const scoped_refptr<VideoCaptureBufferPool>& buffer_pool);
166 ~VideoCaptureDeviceClient() override; 167 ~VideoCaptureDeviceClient() override;
167 168
168 // VideoCaptureDevice::Client implementation. 169 // VideoCaptureDevice::Client implementation.
169 void OnIncomingCapturedData(const uint8* data, 170 void OnIncomingCapturedData(const uint8* data,
170 int length, 171 int length,
171 const VideoCaptureFormat& frame_format, 172 const VideoCaptureFormat& frame_format,
172 int rotation, 173 int clockwise_rotation,
173 const base::TimeTicks& timestamp) override; 174 const base::TimeTicks& timestamp) override;
174 scoped_refptr<Buffer> ReserveOutputBuffer(media::VideoFrame::Format format, 175 void OnIncomingCapturedYuvData(uint8* y_data,
176 uint8* u_data,
177 uint8* v_data,
178 int y_length,
179 int u_length,
180 int v_length,
181 const VideoCaptureFormat& frame_format,
182 int clockwise_rotation,
183 const base::TimeTicks& timestamp) override;
184 scoped_refptr<Buffer> ReserveOutputBuffer(VideoFrame::Format format,
175 const gfx::Size& size) override; 185 const gfx::Size& size) override;
176 void OnIncomingCapturedVideoFrame( 186 void OnIncomingCapturedVideoFrame(
177 const scoped_refptr<Buffer>& buffer, 187 const scoped_refptr<Buffer>& buffer,
178 const scoped_refptr<media::VideoFrame>& frame, 188 const scoped_refptr<VideoFrame>& frame,
179 const base::TimeTicks& timestamp) override; 189 const base::TimeTicks& timestamp) override;
180 void OnError(const std::string& reason) override; 190 void OnError(const std::string& reason) override;
181 void OnLog(const std::string& message) override; 191 void OnLog(const std::string& message) override;
182 192
183 private: 193 private:
184 // The controller to which we post events. 194 // The controller to which we post events.
185 const base::WeakPtr<VideoCaptureController> controller_; 195 const base::WeakPtr<VideoCaptureController> controller_;
186 196
187 // The pool of shared-memory buffers used for capturing. 197 // The pool of shared-memory buffers used for capturing.
188 const scoped_refptr<VideoCaptureBufferPool> buffer_pool_; 198 const scoped_refptr<VideoCaptureBufferPool> buffer_pool_;
(...skipping 130 matching lines...) Expand 10 before | Expand all | Expand 10 after
319 ControllerClient* client = FindClient(id, event_handler, controller_clients_); 329 ControllerClient* client = FindClient(id, event_handler, controller_clients_);
320 330
321 // If this buffer is not held by this client, or this client doesn't exist 331 // If this buffer is not held by this client, or this client doesn't exist
322 // in controller, do nothing. 332 // in controller, do nothing.
323 ControllerClient::ActiveBufferMap::iterator iter; 333 ControllerClient::ActiveBufferMap::iterator iter;
324 if (!client || (iter = client->active_buffers.find(buffer_id)) == 334 if (!client || (iter = client->active_buffers.find(buffer_id)) ==
325 client->active_buffers.end()) { 335 client->active_buffers.end()) {
326 NOTREACHED(); 336 NOTREACHED();
327 return; 337 return;
328 } 338 }
329 scoped_refptr<media::VideoFrame> frame = iter->second; 339 scoped_refptr<VideoFrame> frame = iter->second;
330 client->active_buffers.erase(iter); 340 client->active_buffers.erase(iter);
331 buffer_pool_->RelinquishConsumerHold(buffer_id, 1); 341 buffer_pool_->RelinquishConsumerHold(buffer_id, 1);
332 342
333 #if defined(OS_ANDROID) 343 #if defined(OS_ANDROID)
334 DCHECK_EQ(0u, sync_point); 344 DCHECK_EQ(0u, sync_point);
335 #endif 345 #endif
336 if (sync_point) 346 if (sync_point)
337 BrowserThread::PostTask(BrowserThread::UI, 347 BrowserThread::PostTask(BrowserThread::UI,
338 FROM_HERE, 348 FROM_HERE,
339 base::Bind(&ReturnVideoFrame, frame, sync_point)); 349 base::Bind(&ReturnVideoFrame, frame, sync_point));
340 } 350 }
341 351
342 const media::VideoCaptureFormat& 352 const media::VideoCaptureFormat&
343 VideoCaptureController::GetVideoCaptureFormat() const { 353 VideoCaptureController::GetVideoCaptureFormat() const {
344 DCHECK_CURRENTLY_ON(BrowserThread::IO); 354 DCHECK_CURRENTLY_ON(BrowserThread::IO);
345 return video_capture_format_; 355 return video_capture_format_;
346 } 356 }
347 357
348 scoped_refptr<media::VideoCaptureDevice::Client::Buffer> 358 scoped_refptr<media::VideoCaptureDevice::Client::Buffer>
349 VideoCaptureController::VideoCaptureDeviceClient::ReserveOutputBuffer( 359 VideoCaptureController::VideoCaptureDeviceClient::ReserveOutputBuffer(
350 media::VideoFrame::Format format, 360 VideoFrame::Format format,
351 const gfx::Size& dimensions) { 361 const gfx::Size& dimensions) {
352 size_t frame_bytes = 0; 362 size_t frame_bytes = 0;
353 if (format == media::VideoFrame::NATIVE_TEXTURE) { 363 if (format == VideoFrame::NATIVE_TEXTURE) {
354 DCHECK_EQ(dimensions.width(), 0); 364 DCHECK_EQ(dimensions.width(), 0);
355 DCHECK_EQ(dimensions.height(), 0); 365 DCHECK_EQ(dimensions.height(), 0);
356 } else { 366 } else {
357 // The capture pipeline expects I420 for now. 367 // The capture pipeline expects I420 for now.
358 DCHECK_EQ(format, media::VideoFrame::I420) 368 DCHECK_EQ(format, VideoFrame::I420)
359 << " Non-I420 output buffer format " << format << " requested"; 369 << " Non-I420 output buffer format " << format << " requested";
360 frame_bytes = media::VideoFrame::AllocationSize(format, dimensions); 370 frame_bytes = VideoFrame::AllocationSize(format, dimensions);
361 } 371 }
362 372
363 int buffer_id_to_drop = VideoCaptureBufferPool::kInvalidId; 373 int buffer_id_to_drop = VideoCaptureBufferPool::kInvalidId;
364 int buffer_id = 374 int buffer_id =
365 buffer_pool_->ReserveForProducer(frame_bytes, &buffer_id_to_drop); 375 buffer_pool_->ReserveForProducer(frame_bytes, &buffer_id_to_drop);
366 if (buffer_id == VideoCaptureBufferPool::kInvalidId) 376 if (buffer_id == VideoCaptureBufferPool::kInvalidId)
367 return NULL; 377 return NULL;
368 void* data; 378 void* data;
369 size_t size; 379 size_t size;
370 buffer_pool_->GetBufferInfo(buffer_id, &data, &size); 380 buffer_pool_->GetBufferInfo(buffer_id, &data, &size);
371 381
372 scoped_refptr<media::VideoCaptureDevice::Client::Buffer> output_buffer( 382 scoped_refptr<media::VideoCaptureDevice::Client::Buffer> output_buffer(
373 new AutoReleaseBuffer(buffer_pool_, buffer_id, data, size)); 383 new AutoReleaseBuffer(buffer_pool_, buffer_id, data, size));
374 384
375 if (buffer_id_to_drop != VideoCaptureBufferPool::kInvalidId) { 385 if (buffer_id_to_drop != VideoCaptureBufferPool::kInvalidId) {
376 BrowserThread::PostTask(BrowserThread::IO, 386 BrowserThread::PostTask(BrowserThread::IO,
377 FROM_HERE, 387 FROM_HERE,
378 base::Bind(&VideoCaptureController::DoBufferDestroyedOnIOThread, 388 base::Bind(&VideoCaptureController::DoBufferDestroyedOnIOThread,
379 controller_, buffer_id_to_drop)); 389 controller_, buffer_id_to_drop));
380 } 390 }
381 391
382 return output_buffer; 392 return output_buffer;
383 } 393 }
384 394
385 void VideoCaptureController::VideoCaptureDeviceClient::OnIncomingCapturedData( 395 void VideoCaptureController::VideoCaptureDeviceClient::OnIncomingCapturedData(
386 const uint8* data, 396 const uint8* data,
387 int length, 397 int length,
388 const VideoCaptureFormat& frame_format, 398 const VideoCaptureFormat& frame_format,
389 int rotation, 399 int clockwise_rotation,
390 const base::TimeTicks& timestamp) { 400 const base::TimeTicks& timestamp) {
391 TRACE_EVENT0("video", "VideoCaptureController::OnIncomingCapturedData"); 401 TRACE_EVENT0("video", "VideoCaptureController::OnIncomingCapturedData");
392 402
393 if (last_captured_pixel_format_ != frame_format.pixel_format) { 403 if (last_captured_pixel_format_ != frame_format.pixel_format) {
394 OnLog("Pixel format: " + media::VideoCaptureFormat::PixelFormatToString( 404 OnLog("Pixel format: " + media::VideoCaptureFormat::PixelFormatToString(
395 frame_format.pixel_format)); 405 frame_format.pixel_format));
396 last_captured_pixel_format_ = frame_format.pixel_format; 406 last_captured_pixel_format_ = frame_format.pixel_format;
397 } 407 }
398 408
399 if (!frame_format.IsValid()) 409 if (!frame_format.IsValid())
400 return; 410 return;
401 411
 402 // Chopped pixels in width/height in case video capture device has odd 412 // |chopped_{width,height}| and |new_unrotated_{width,height}| are the lowest
403 // numbers for width/height. 413 // bit decomposition of {width, height}, grabbing the odd and even parts.
404 int chopped_width = 0; 414 const int chopped_width = frame_format.frame_size.width() & 1;
405 int chopped_height = 0; 415 const int chopped_height = frame_format.frame_size.height() & 1;
406 int new_unrotated_width = frame_format.frame_size.width(); 416 const int new_unrotated_width = frame_format.frame_size.width() & ~1;
407 int new_unrotated_height = frame_format.frame_size.height(); 417 const int new_unrotated_height = frame_format.frame_size.height() & ~1;
408
409 if (new_unrotated_width & 1) {
410 --new_unrotated_width;
411 chopped_width = 1;
412 }
413 if (new_unrotated_height & 1) {
414 --new_unrotated_height;
415 chopped_height = 1;
416 }
417 418
418 int destination_width = new_unrotated_width; 419 int destination_width = new_unrotated_width;
419 int destination_height = new_unrotated_height; 420 int destination_height = new_unrotated_height;
420 if (rotation == 90 || rotation == 270) { 421 if (clockwise_rotation == 90 || clockwise_rotation == 270) {
421 destination_width = new_unrotated_height; 422 destination_width = new_unrotated_height;
422 destination_height = new_unrotated_width; 423 destination_height = new_unrotated_width;
423 } 424 }
425
426 DLOG_IF(WARNING, (clockwise_rotation % 90))
427 << " Rotation must be a multiple of 90 and is " << clockwise_rotation;
428 libyuv::RotationMode rotation_mode = libyuv::kRotate0;
429 if (clockwise_rotation == 90)
430 rotation_mode = libyuv::kRotate90;
431 else if (clockwise_rotation == 180)
432 rotation_mode = libyuv::kRotate180;
433 else if (clockwise_rotation == 270)
434 rotation_mode = libyuv::kRotate270;
emircan 2015/03/04 02:47:47 Consider a switch statement.
mcasas 2015/03/09 21:23:56 Acknowledged.
435
424 const gfx::Size dimensions(destination_width, destination_height); 436 const gfx::Size dimensions(destination_width, destination_height);
425 if (!media::VideoFrame::IsValidConfig(media::VideoFrame::I420, 437 if (!VideoFrame::IsValidConfig(VideoFrame::I420, dimensions,
426 dimensions, 438 gfx::Rect(dimensions), dimensions)) {
427 gfx::Rect(dimensions),
428 dimensions)) {
429 return; 439 return;
430 } 440 }
431 441
432 scoped_refptr<Buffer> buffer = ReserveOutputBuffer(media::VideoFrame::I420, 442 scoped_refptr<Buffer> buffer = ReserveOutputBuffer(VideoFrame::I420,
433 dimensions); 443 dimensions);
434 if (!buffer.get()) 444 if (!buffer.get())
435 return; 445 return;
436 uint8* yplane = NULL;
437 bool flip = false; 446 bool flip = false;
438 yplane = reinterpret_cast<uint8*>(buffer->data()); 447 uint8* const yplane = reinterpret_cast<uint8*>(buffer->data());
439 uint8* uplane = 448 uint8* const uplane =
440 yplane + 449 yplane + VideoFrame::PlaneAllocationSize(VideoFrame::I420,
441 media::VideoFrame::PlaneAllocationSize( 450 VideoFrame::kYPlane, dimensions);
442 media::VideoFrame::I420, media::VideoFrame::kYPlane, dimensions); 451 uint8* const vplane =
443 uint8* vplane = 452 uplane + VideoFrame::PlaneAllocationSize(VideoFrame::I420,
444 uplane + 453 VideoFrame::kUPlane, dimensions);
445 media::VideoFrame::PlaneAllocationSize( 454 const int yplane_stride = dimensions.width();
446 media::VideoFrame::I420, media::VideoFrame::kUPlane, dimensions); 455 const int uv_plane_stride = yplane_stride / 2;
447 int yplane_stride = dimensions.width(); 456 const int crop_x = 0;
448 int uv_plane_stride = yplane_stride / 2; 457 const int crop_y = 0;
449 int crop_x = 0; 458
450 int crop_y = 0;
451 libyuv::FourCC origin_colorspace = libyuv::FOURCC_ANY; 459 libyuv::FourCC origin_colorspace = libyuv::FOURCC_ANY;
452
453 libyuv::RotationMode rotation_mode = libyuv::kRotate0;
454 if (rotation == 90)
455 rotation_mode = libyuv::kRotate90;
456 else if (rotation == 180)
457 rotation_mode = libyuv::kRotate180;
458 else if (rotation == 270)
459 rotation_mode = libyuv::kRotate270;
460
461 switch (frame_format.pixel_format) { 460 switch (frame_format.pixel_format) {
462 case media::PIXEL_FORMAT_UNKNOWN: // Color format not set. 461 case media::PIXEL_FORMAT_UNKNOWN: // Color format not set.
463 break; 462 break;
464 case media::PIXEL_FORMAT_I420: 463 case media::PIXEL_FORMAT_I420:
465 DCHECK(!chopped_width && !chopped_height); 464 DCHECK(!chopped_width && !chopped_height);
466 origin_colorspace = libyuv::FOURCC_I420; 465 origin_colorspace = libyuv::FOURCC_I420;
467 break; 466 break;
468 case media::PIXEL_FORMAT_YV12: 467 case media::PIXEL_FORMAT_YV12:
469 DCHECK(!chopped_width && !chopped_height); 468 DCHECK(!chopped_width && !chopped_height);
470 origin_colorspace = libyuv::FOURCC_YV12; 469 origin_colorspace = libyuv::FOURCC_YV12;
(...skipping 48 matching lines...) Expand 10 before | Expand all | Expand 10 after
519 uv_plane_stride, 518 uv_plane_stride,
520 crop_x, 519 crop_x,
521 crop_y, 520 crop_y,
522 frame_format.frame_size.width(), 521 frame_format.frame_size.width(),
523 (flip ? -frame_format.frame_size.height() : 522 (flip ? -frame_format.frame_size.height() :
524 frame_format.frame_size.height()), 523 frame_format.frame_size.height()),
525 new_unrotated_width, 524 new_unrotated_width,
526 new_unrotated_height, 525 new_unrotated_height,
527 rotation_mode, 526 rotation_mode,
528 origin_colorspace) != 0) { 527 origin_colorspace) != 0) {
529 DLOG(WARNING) << "Failed to convert buffer from" 528 DLOG(WARNING) << "Failed to convert buffer to I420 from "
530 << media::VideoCaptureFormat::PixelFormatToString( 529 << VideoCaptureFormat::PixelFormatToString(frame_format.pixel_format);
531 frame_format.pixel_format)
532 << "to I420.";
533 return; 530 return;
534 } 531 }
535 scoped_refptr<media::VideoFrame> frame = 532 scoped_refptr<VideoFrame> frame =
536 media::VideoFrame::WrapExternalPackedMemory( 533 VideoFrame::WrapExternalPackedMemory(
537 media::VideoFrame::I420, 534 VideoFrame::I420,
538 dimensions, 535 dimensions,
539 gfx::Rect(dimensions), 536 gfx::Rect(dimensions),
540 dimensions, 537 dimensions,
541 yplane, 538 yplane,
542 media::VideoFrame::AllocationSize(media::VideoFrame::I420, 539 VideoFrame::AllocationSize(VideoFrame::I420, dimensions),
543 dimensions),
544 base::SharedMemory::NULLHandle(), 540 base::SharedMemory::NULLHandle(),
545 0, 541 0,
546 base::TimeDelta(), 542 base::TimeDelta(),
547 base::Closure()); 543 base::Closure());
548 DCHECK(frame.get()); 544 DCHECK(frame.get());
549 frame->metadata()->SetDouble(media::VideoFrameMetadata::FRAME_RATE, 545 frame->metadata()->SetDouble(media::VideoFrameMetadata::FRAME_RATE,
550 frame_format.frame_rate); 546 frame_format.frame_rate);
551 547
552 BrowserThread::PostTask( 548 BrowserThread::PostTask(
553 BrowserThread::IO, 549 BrowserThread::IO,
554 FROM_HERE, 550 FROM_HERE,
555 base::Bind( 551 base::Bind(
556 &VideoCaptureController::DoIncomingCapturedVideoFrameOnIOThread, 552 &VideoCaptureController::DoIncomingCapturedVideoFrameOnIOThread,
557 controller_, 553 controller_,
558 buffer, 554 buffer,
559 frame, 555 frame,
560 timestamp)); 556 timestamp));
561 } 557 }
562 558
563 void 559 void
560 VideoCaptureController::VideoCaptureDeviceClient::OnIncomingCapturedYuvData(
561 uint8* y_data,
562 uint8* u_data,
563 uint8* v_data,
564 int y_length,
565 int u_length,
566 int v_length,
567 const VideoCaptureFormat& frame_format,
568 int clockwise_rotation,
569 const base::TimeTicks& timestamp) {
570 TRACE_EVENT0("video", "VideoCaptureController::OnIncomingCapturedYuvData");
571 DCHECK_EQ(frame_format.pixel_format, media::PIXEL_FORMAT_I420);
572 DCHECK_EQ(clockwise_rotation, 0) << "Rotation not supported";
573 DCHECK_GE(static_cast<size_t>(y_length + u_length + v_length),
574 frame_format.ImageAllocationSize());
575
576 scoped_refptr<Buffer> buffer = ReserveOutputBuffer(VideoFrame::I420,
emircan 2015/03/04 02:47:47 Consider using a scoped_ptr and passing ownership.
mcasas 2015/03/09 21:23:56 Good idea. Please add a bug to investigate this.
577 frame_format.frame_size);
578 if (!buffer.get())
579 return;
580
581 // Blit (copy) here from y,u,v into buffer.data()). Needed so we can return
582 // the parameter buffer synchronously to the driver.
583 const size_t y_stride = y_length / frame_format.frame_size.height();
584 const size_t u_stride = u_length / frame_format.frame_size.height() / 2;
585 const size_t v_stride = v_length / frame_format.frame_size.height() / 2;
586 const size_t y_plane_size = VideoFrame::PlaneAllocationSize(VideoFrame::I420,
587 VideoFrame::kYPlane, frame_format.frame_size);
588 const size_t u_plane_size = VideoFrame::PlaneAllocationSize(VideoFrame::I420,
589 VideoFrame::kUPlane, frame_format.frame_size);
590 uint8* const dst_y = reinterpret_cast<uint8*>(buffer->data());
591 uint8* const dst_u = dst_y + y_plane_size;
592 uint8* const dst_v = dst_u + u_plane_size;
593
594 libyuv::CopyPlane(y_data, y_stride, dst_y, y_stride,
595 frame_format.frame_size.width(), frame_format.frame_size.height());
596 libyuv::CopyPlane(u_data, u_stride, dst_u, u_stride,
597 frame_format.frame_size.width() / 2,frame_format.frame_size.height() / 2);
598 libyuv::CopyPlane(v_data, v_stride, dst_v, v_stride,
599 frame_format.frame_size.width() / 2,frame_format.frame_size.height() / 2);
600
601 scoped_refptr<VideoFrame> video_frame = VideoFrame::WrapExternalYuvData(
602 VideoFrame::I420, frame_format.frame_size,
603 gfx::Rect(frame_format.frame_size), frame_format.frame_size, y_stride,
604 u_stride, v_stride, dst_y, dst_u, dst_v, base::TimeDelta(),
605 base::Closure());
606 DCHECK(video_frame.get());
607
608 BrowserThread::PostTask(
609 BrowserThread::IO,
610 FROM_HERE,
611 base::Bind(
612 &VideoCaptureController::DoIncomingCapturedVideoFrameOnIOThread,
613 controller_,
614 buffer,
615 //frame_format,
616 video_frame,
617 timestamp));
618 };
619
620 void
564 VideoCaptureController::VideoCaptureDeviceClient::OnIncomingCapturedVideoFrame( 621 VideoCaptureController::VideoCaptureDeviceClient::OnIncomingCapturedVideoFrame(
565 const scoped_refptr<Buffer>& buffer, 622 const scoped_refptr<Buffer>& buffer,
566 const scoped_refptr<media::VideoFrame>& frame, 623 const scoped_refptr<VideoFrame>& frame,
567 const base::TimeTicks& timestamp) { 624 const base::TimeTicks& timestamp) {
568 BrowserThread::PostTask( 625 BrowserThread::PostTask(
569 BrowserThread::IO, 626 BrowserThread::IO,
570 FROM_HERE, 627 FROM_HERE,
571 base::Bind( 628 base::Bind(
572 &VideoCaptureController::DoIncomingCapturedVideoFrameOnIOThread, 629 &VideoCaptureController::DoIncomingCapturedVideoFrameOnIOThread,
573 controller_, 630 controller_,
574 buffer, 631 buffer,
575 frame, 632 frame,
576 timestamp)); 633 timestamp));
(...skipping 18 matching lines...) Expand all
595 MediaStreamManager::SendMessageToNativeLog("Video capture: " + message); 652 MediaStreamManager::SendMessageToNativeLog("Video capture: " + message);
596 } 653 }
597 654
598 VideoCaptureController::~VideoCaptureController() { 655 VideoCaptureController::~VideoCaptureController() {
599 STLDeleteContainerPointers(controller_clients_.begin(), 656 STLDeleteContainerPointers(controller_clients_.begin(),
600 controller_clients_.end()); 657 controller_clients_.end());
601 } 658 }
602 659
603 void VideoCaptureController::DoIncomingCapturedVideoFrameOnIOThread( 660 void VideoCaptureController::DoIncomingCapturedVideoFrameOnIOThread(
604 const scoped_refptr<media::VideoCaptureDevice::Client::Buffer>& buffer, 661 const scoped_refptr<media::VideoCaptureDevice::Client::Buffer>& buffer,
605 const scoped_refptr<media::VideoFrame>& frame, 662 const scoped_refptr<VideoFrame>& frame,
606 const base::TimeTicks& timestamp) { 663 const base::TimeTicks& timestamp) {
607 DCHECK_CURRENTLY_ON(BrowserThread::IO); 664 DCHECK_CURRENTLY_ON(BrowserThread::IO);
608 DCHECK_NE(buffer->id(), VideoCaptureBufferPool::kInvalidId); 665 DCHECK_NE(buffer->id(), VideoCaptureBufferPool::kInvalidId);
609 666
610 int count = 0; 667 int count = 0;
611 if (state_ == VIDEO_CAPTURE_STATE_STARTED) { 668 if (state_ == VIDEO_CAPTURE_STATE_STARTED) {
612 if (!frame->metadata()->HasKey(media::VideoFrameMetadata::FRAME_RATE)) { 669 if (!frame->metadata()->HasKey(media::VideoFrameMetadata::FRAME_RATE)) {
613 frame->metadata()->SetDouble(media::VideoFrameMetadata::FRAME_RATE, 670 frame->metadata()->SetDouble(media::VideoFrameMetadata::FRAME_RATE,
614 video_capture_format_.frame_rate); 671 video_capture_format_.frame_rate);
615 } 672 }
(...skipping 134 matching lines...) Expand 10 before | Expand all | Expand 10 after
750 DCHECK_CURRENTLY_ON(BrowserThread::IO); 807 DCHECK_CURRENTLY_ON(BrowserThread::IO);
751 int active_client_count = 0; 808 int active_client_count = 0;
752 for (ControllerClient* client : controller_clients_) { 809 for (ControllerClient* client : controller_clients_) {
753 if (!client->paused) 810 if (!client->paused)
754 ++active_client_count; 811 ++active_client_count;
755 } 812 }
756 return active_client_count; 813 return active_client_count;
757 } 814 }
758 815
759 } // namespace content 816 } // namespace content
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698