Index: content/browser/renderer_host/media/video_capture_controller.cc |
diff --git a/content/browser/renderer_host/media/video_capture_controller.cc b/content/browser/renderer_host/media/video_capture_controller.cc |
index 3fbb2dcf37a64d0cfbab7f0a41404d6aa6ca69db..bafbde9cc2561126b7ffb73535dd61c69e30f62b 100644 |
--- a/content/browser/renderer_host/media/video_capture_controller.cc |
+++ b/content/browser/renderer_host/media/video_capture_controller.cc |
@@ -28,6 +28,7 @@ |
#endif |
using media::VideoCaptureFormat; |
+using media::VideoFrame; |
namespace content { |
@@ -67,7 +68,7 @@ class AutoReleaseBuffer : public media::VideoCaptureDevice::Client::Buffer { |
const size_t size_; |
}; |
-class SyncPointClientImpl : public media::VideoFrame::SyncPointClient { |
+class SyncPointClientImpl : public VideoFrame::SyncPointClient { |
public: |
explicit SyncPointClientImpl(GLHelper* gl_helper) : gl_helper_(gl_helper) {} |
~SyncPointClientImpl() override {} |
@@ -80,7 +81,7 @@ class SyncPointClientImpl : public media::VideoFrame::SyncPointClient { |
GLHelper* gl_helper_; |
}; |
-void ReturnVideoFrame(const scoped_refptr<media::VideoFrame>& video_frame, |
+void ReturnVideoFrame(const scoped_refptr<VideoFrame>& video_frame, |
uint32 sync_point) { |
DCHECK_CURRENTLY_ON(BrowserThread::UI); |
#if defined(OS_ANDROID) |
@@ -129,7 +130,7 @@ struct VideoCaptureController::ControllerClient { |
// Buffers currently held by this client, and syncpoint callback to call when |
// they are returned from the client. |
- typedef std::map<int, scoped_refptr<media::VideoFrame> > ActiveBufferMap; |
+ typedef std::map<int, scoped_refptr<VideoFrame> > ActiveBufferMap; |
ActiveBufferMap active_buffers; |
// State of capture session, controlled by VideoCaptureManager directly. This |
@@ -169,13 +170,22 @@ class VideoCaptureController::VideoCaptureDeviceClient |
void OnIncomingCapturedData(const uint8* data, |
int length, |
const VideoCaptureFormat& frame_format, |
- int rotation, |
+ int clockwise_rotation, |
const base::TimeTicks& timestamp) override; |
- scoped_refptr<Buffer> ReserveOutputBuffer(media::VideoFrame::Format format, |
+ void OnIncomingCapturedYuvData(uint8* y_data, |
+ uint8* u_data, |
+ uint8* v_data, |
+ int y_length, |
+ int u_length, |
+ int v_length, |
+ const VideoCaptureFormat& frame_format, |
+ int clockwise_rotation, |
+ const base::TimeTicks& timestamp) override; |
+ scoped_refptr<Buffer> ReserveOutputBuffer(VideoFrame::Format format, |
const gfx::Size& size) override; |
void OnIncomingCapturedVideoFrame( |
const scoped_refptr<Buffer>& buffer, |
- const scoped_refptr<media::VideoFrame>& frame, |
+ const scoped_refptr<VideoFrame>& frame, |
const base::TimeTicks& timestamp) override; |
void OnError(const std::string& reason) override; |
void OnLog(const std::string& message) override; |
@@ -326,7 +336,7 @@ void VideoCaptureController::ReturnBuffer( |
NOTREACHED(); |
return; |
} |
- scoped_refptr<media::VideoFrame> frame = iter->second; |
+ scoped_refptr<VideoFrame> frame = iter->second; |
client->active_buffers.erase(iter); |
buffer_pool_->RelinquishConsumerHold(buffer_id, 1); |
@@ -347,17 +357,17 @@ VideoCaptureController::GetVideoCaptureFormat() const { |
scoped_refptr<media::VideoCaptureDevice::Client::Buffer> |
VideoCaptureController::VideoCaptureDeviceClient::ReserveOutputBuffer( |
- media::VideoFrame::Format format, |
+ VideoFrame::Format format, |
const gfx::Size& dimensions) { |
size_t frame_bytes = 0; |
- if (format == media::VideoFrame::NATIVE_TEXTURE) { |
+ if (format == VideoFrame::NATIVE_TEXTURE) { |
DCHECK_EQ(dimensions.width(), 0); |
DCHECK_EQ(dimensions.height(), 0); |
} else { |
// The capture pipeline expects I420 for now. |
- DCHECK_EQ(format, media::VideoFrame::I420) |
+ DCHECK_EQ(format, VideoFrame::I420) |
<< " Non-I420 output buffer format " << format << " requested"; |
- frame_bytes = media::VideoFrame::AllocationSize(format, dimensions); |
+ frame_bytes = VideoFrame::AllocationSize(format, dimensions); |
} |
int buffer_id_to_drop = VideoCaptureBufferPool::kInvalidId; |
@@ -386,7 +396,7 @@ void VideoCaptureController::VideoCaptureDeviceClient::OnIncomingCapturedData( |
const uint8* data, |
int length, |
const VideoCaptureFormat& frame_format, |
- int rotation, |
+ int clockwise_rotation, |
const base::TimeTicks& timestamp) { |
TRACE_EVENT0("video", "VideoCaptureController::OnIncomingCapturedData"); |
@@ -399,65 +409,54 @@ void VideoCaptureController::VideoCaptureDeviceClient::OnIncomingCapturedData( |
if (!frame_format.IsValid()) |
return; |
- // Chopped pixels in width/height in case video capture device has odd |
- // numbers for width/height. |
- int chopped_width = 0; |
- int chopped_height = 0; |
- int new_unrotated_width = frame_format.frame_size.width(); |
- int new_unrotated_height = frame_format.frame_size.height(); |
- |
- if (new_unrotated_width & 1) { |
- --new_unrotated_width; |
- chopped_width = 1; |
- } |
- if (new_unrotated_height & 1) { |
- --new_unrotated_height; |
- chopped_height = 1; |
- } |
+ // |chopped_{width,height}| and |new_unrotated_{width,height}| decompose |
+ // {width, height} into their odd lowest bit and the remaining even part. |
+ const int chopped_width = frame_format.frame_size.width() & 1; |
+ const int chopped_height = frame_format.frame_size.height() & 1; |
+ const int new_unrotated_width = frame_format.frame_size.width() & ~1; |
+ const int new_unrotated_height = frame_format.frame_size.height() & ~1; |
int destination_width = new_unrotated_width; |
int destination_height = new_unrotated_height; |
- if (rotation == 90 || rotation == 270) { |
+ if (clockwise_rotation == 90 || clockwise_rotation == 270) { |
destination_width = new_unrotated_height; |
destination_height = new_unrotated_width; |
} |
+ |
+ DLOG_IF(WARNING, (clockwise_rotation % 90)) |
+ << " Rotation must be a multiple of 90 and is " << clockwise_rotation; |
+ libyuv::RotationMode rotation_mode = libyuv::kRotate0; |
+ if (clockwise_rotation == 90) |
+ rotation_mode = libyuv::kRotate90; |
+ else if (clockwise_rotation == 180) |
+ rotation_mode = libyuv::kRotate180; |
+ else if (clockwise_rotation == 270) |
+ rotation_mode = libyuv::kRotate270; |
emircan 2015/03/04 02:47:47:
  Consider a switch statement.
mcasas 2015/03/09 21:23:56:
  Acknowledged.
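For reference, a minimal sketch of the switch-based mapping emircan suggests above. It is illustrative only and not part of the uploaded patch; it assumes the libyuv and logging headers this file already pulls in:

    // Illustrative only (not in this CL): switch-based rotation mapping.
    libyuv::RotationMode rotation_mode = libyuv::kRotate0;
    switch (clockwise_rotation) {
      case 0:   rotation_mode = libyuv::kRotate0;   break;
      case 90:  rotation_mode = libyuv::kRotate90;  break;
      case 180: rotation_mode = libyuv::kRotate180; break;
      case 270: rotation_mode = libyuv::kRotate270; break;
      default:  NOTREACHED() << "Invalid rotation: " << clockwise_rotation;
    }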
+ |
const gfx::Size dimensions(destination_width, destination_height); |
- if (!media::VideoFrame::IsValidConfig(media::VideoFrame::I420, |
- dimensions, |
- gfx::Rect(dimensions), |
- dimensions)) { |
+ if (!VideoFrame::IsValidConfig(VideoFrame::I420, dimensions, |
+ gfx::Rect(dimensions), dimensions)) { |
return; |
} |
- scoped_refptr<Buffer> buffer = ReserveOutputBuffer(media::VideoFrame::I420, |
+ scoped_refptr<Buffer> buffer = ReserveOutputBuffer(VideoFrame::I420, |
dimensions); |
if (!buffer.get()) |
return; |
- uint8* yplane = NULL; |
bool flip = false; |
- yplane = reinterpret_cast<uint8*>(buffer->data()); |
- uint8* uplane = |
- yplane + |
- media::VideoFrame::PlaneAllocationSize( |
- media::VideoFrame::I420, media::VideoFrame::kYPlane, dimensions); |
- uint8* vplane = |
- uplane + |
- media::VideoFrame::PlaneAllocationSize( |
- media::VideoFrame::I420, media::VideoFrame::kUPlane, dimensions); |
- int yplane_stride = dimensions.width(); |
- int uv_plane_stride = yplane_stride / 2; |
- int crop_x = 0; |
- int crop_y = 0; |
- libyuv::FourCC origin_colorspace = libyuv::FOURCC_ANY; |
- |
- libyuv::RotationMode rotation_mode = libyuv::kRotate0; |
- if (rotation == 90) |
- rotation_mode = libyuv::kRotate90; |
- else if (rotation == 180) |
- rotation_mode = libyuv::kRotate180; |
- else if (rotation == 270) |
- rotation_mode = libyuv::kRotate270; |
+ uint8* const yplane = reinterpret_cast<uint8*>(buffer->data()); |
+ uint8* const uplane = |
+ yplane + VideoFrame::PlaneAllocationSize(VideoFrame::I420, |
+ VideoFrame::kYPlane, dimensions); |
+ uint8* const vplane = |
+ uplane + VideoFrame::PlaneAllocationSize(VideoFrame::I420, |
+ VideoFrame::kUPlane, dimensions); |
+ const int yplane_stride = dimensions.width(); |
+ const int uv_plane_stride = yplane_stride / 2; |
+ const int crop_x = 0; |
+ const int crop_y = 0; |
+ libyuv::FourCC origin_colorspace = libyuv::FOURCC_ANY; |
switch (frame_format.pixel_format) { |
case media::PIXEL_FORMAT_UNKNOWN: // Color format not set. |
break; |
@@ -526,21 +525,18 @@ void VideoCaptureController::VideoCaptureDeviceClient::OnIncomingCapturedData( |
new_unrotated_height, |
rotation_mode, |
origin_colorspace) != 0) { |
- DLOG(WARNING) << "Failed to convert buffer from" |
- << media::VideoCaptureFormat::PixelFormatToString( |
- frame_format.pixel_format) |
- << "to I420."; |
+ DLOG(WARNING) << "Failed to convert buffer to I420 from " |
+ << VideoCaptureFormat::PixelFormatToString(frame_format.pixel_format); |
return; |
} |
- scoped_refptr<media::VideoFrame> frame = |
- media::VideoFrame::WrapExternalPackedMemory( |
- media::VideoFrame::I420, |
+ scoped_refptr<VideoFrame> frame = |
+ VideoFrame::WrapExternalPackedMemory( |
+ VideoFrame::I420, |
dimensions, |
gfx::Rect(dimensions), |
dimensions, |
yplane, |
- media::VideoFrame::AllocationSize(media::VideoFrame::I420, |
- dimensions), |
+ VideoFrame::AllocationSize(VideoFrame::I420, dimensions), |
base::SharedMemory::NULLHandle(), |
0, |
base::TimeDelta(), |
@@ -561,9 +557,70 @@ void VideoCaptureController::VideoCaptureDeviceClient::OnIncomingCapturedData( |
} |
void |
+VideoCaptureController::VideoCaptureDeviceClient::OnIncomingCapturedYuvData( |
+ uint8* y_data, |
+ uint8* u_data, |
+ uint8* v_data, |
+ int y_length, |
+ int u_length, |
+ int v_length, |
+ const VideoCaptureFormat& frame_format, |
+ int clockwise_rotation, |
+ const base::TimeTicks& timestamp) { |
+ TRACE_EVENT0("video", "VideoCaptureController::OnIncomingCapturedYuvData"); |
+ DCHECK_EQ(frame_format.pixel_format, media::PIXEL_FORMAT_I420); |
+ DCHECK_EQ(clockwise_rotation, 0) << "Rotation not supported"; |
+ DCHECK_GE(static_cast<size_t>(y_length + u_length + v_length), |
+ frame_format.ImageAllocationSize()); |
+ |
+ scoped_refptr<Buffer> buffer = ReserveOutputBuffer(VideoFrame::I420, |
emircan 2015/03/04 02:47:47:
  Consider using a scoped_ptr and passing ownership.
mcasas 2015/03/09 21:23:56:
  Good idea. Please add a bug to investigate this.
(A rough sketch of this ownership-passing idea follows the new function below.)
+ frame_format.frame_size); |
+ if (!buffer.get()) |
+ return; |
+ |
+ // Blit (copy) here from y, u, v into buffer->data(). Needed so we can return |
+ // the parameter buffer synchronously to the driver. |
+ const size_t y_stride = y_length / frame_format.frame_size.height(); |
+ const size_t u_stride = u_length / (frame_format.frame_size.height() / 2); |
+ const size_t v_stride = v_length / (frame_format.frame_size.height() / 2); |
+ const size_t y_plane_size = VideoFrame::PlaneAllocationSize(VideoFrame::I420, |
+ VideoFrame::kYPlane, frame_format.frame_size); |
+ const size_t u_plane_size = VideoFrame::PlaneAllocationSize(VideoFrame::I420, |
+ VideoFrame::kUPlane, frame_format.frame_size); |
+ uint8* const dst_y = reinterpret_cast<uint8*>(buffer->data()); |
+ uint8* const dst_u = dst_y + y_plane_size; |
+ uint8* const dst_v = dst_u + u_plane_size; |
+ |
+ libyuv::CopyPlane(y_data, y_stride, dst_y, y_stride, |
+ frame_format.frame_size.width(), frame_format.frame_size.height()); |
+ libyuv::CopyPlane(u_data, u_stride, dst_u, u_stride, |
+ frame_format.frame_size.width() / 2, frame_format.frame_size.height() / 2); |
+ libyuv::CopyPlane(v_data, v_stride, dst_v, v_stride, |
+ frame_format.frame_size.width() / 2, frame_format.frame_size.height() / 2); |
+ |
+ scoped_refptr<VideoFrame> video_frame = VideoFrame::WrapExternalYuvData( |
+ VideoFrame::I420, frame_format.frame_size, |
+ gfx::Rect(frame_format.frame_size), frame_format.frame_size, y_stride, |
+ u_stride, v_stride, dst_y, dst_u, dst_v, base::TimeDelta(), |
+ base::Closure()); |
+ DCHECK(video_frame.get()); |
+ |
+ BrowserThread::PostTask( |
+ BrowserThread::IO, |
+ FROM_HERE, |
+ base::Bind( |
+ &VideoCaptureController::DoIncomingCapturedVideoFrameOnIOThread, |
+ controller_, |
+ buffer, |
+ video_frame, |
+ timestamp)); |
+} |
+ |
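As a reference for the scoped_ptr discussion above, here is a rough sketch of how this call site could pass buffer ownership instead of relying on ref-counting. The scoped_ptr-returning ReserveOutputBuffer() and a DoIncomingCapturedVideoFrameOnIOThread() overload taking ownership are hypothetical; neither this patch nor the current interface provides them:

    // Hypothetical sketch only; the CL keeps the scoped_refptr-based interface.
    scoped_ptr<Buffer> buffer =
        ReserveOutputBuffer(VideoFrame::I420, frame_format.frame_size);
    if (!buffer.get())
      return;

    // ... blit the Y/U/V planes into |buffer| exactly as in the function above ...

    // base::Passed() moves |buffer| into the posted task, making the IO thread
    // the sole owner, so no consumer hold on the buffer pool is needed.
    BrowserThread::PostTask(
        BrowserThread::IO, FROM_HERE,
        base::Bind(
            &VideoCaptureController::DoIncomingCapturedVideoFrameOnIOThread,
            controller_, base::Passed(&buffer), video_frame, timestamp));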
+void |
VideoCaptureController::VideoCaptureDeviceClient::OnIncomingCapturedVideoFrame( |
const scoped_refptr<Buffer>& buffer, |
- const scoped_refptr<media::VideoFrame>& frame, |
+ const scoped_refptr<VideoFrame>& frame, |
const base::TimeTicks& timestamp) { |
BrowserThread::PostTask( |
BrowserThread::IO, |
@@ -602,7 +659,7 @@ VideoCaptureController::~VideoCaptureController() { |
void VideoCaptureController::DoIncomingCapturedVideoFrameOnIOThread( |
const scoped_refptr<media::VideoCaptureDevice::Client::Buffer>& buffer, |
- const scoped_refptr<media::VideoFrame>& frame, |
+ const scoped_refptr<VideoFrame>& frame, |
const base::TimeTicks& timestamp) { |
DCHECK_CURRENTLY_ON(BrowserThread::IO); |
DCHECK_NE(buffer->id(), VideoCaptureBufferPool::kInvalidId); |