Index: content/renderer/media/rtc_video_capturer.cc
diff --git a/content/renderer/media/rtc_video_capturer.cc b/content/renderer/media/rtc_video_capturer.cc
index b9acc0ebc075fc16beeb0b3ef5fd6ad41eba81f4..2fcd57416b515b2cbe40302f4a2bc709ec2896b0 100644
--- a/content/renderer/media/rtc_video_capturer.cc
+++ b/content/renderer/media/rtc_video_capturer.cc
@@ -40,8 +40,9 @@ cricket::CaptureState RtcVideoCapturer::Start(
SetCaptureFormat(&capture_format);
state_ = VIDEO_CAPTURE_STATE_STARTED;
- start_time_ = base::Time::Now();
- delegate_->StartCapture(cap,
+ start_time_delta_ = base::Time::Now() - base::Time::UnixEpoch();
+ delegate_->StartCapture(
+ cap,
base::Bind(&RtcVideoCapturer::OnFrameCaptured, base::Unretained(this)),
base::Bind(&RtcVideoCapturer::OnStateChange, base::Unretained(this)));
// Update the desired aspect ratio so that later the video frame can be
@@ -95,34 +96,37 @@ bool RtcVideoCapturer::GetBestCaptureFormat(const cricket::VideoFormat& desired,
}
void RtcVideoCapturer::OnFrameCaptured(
- const media::VideoCapture::VideoFrameBuffer& buf) {
+ const scoped_refptr<media::VideoFrame>& frame) {
// Currently, |fourcc| is always I420.
- cricket::CapturedFrame frame;
- frame.width = buf.width;
- frame.height = buf.height;
- frame.fourcc = cricket::FOURCC_I420;
- frame.data_size = buf.buffer_size;
+ cricket::CapturedFrame cricket_frame;
+ cricket_frame.width = frame->coded_size().width();
+ cricket_frame.height = frame->coded_size().height();
+ cricket_frame.fourcc = cricket::FOURCC_I420;
// cricket::CapturedFrame time is in nanoseconds.
- frame.elapsed_time = (buf.timestamp - start_time_).InMicroseconds() *
+ cricket_frame.elapsed_time =
+ (frame->GetTimestamp() - start_time_delta_).InMicroseconds() *
base::Time::kNanosecondsPerMicrosecond;
- frame.time_stamp =
- (buf.timestamp - base::Time::UnixEpoch()).InMicroseconds() *
- base::Time::kNanosecondsPerMicrosecond;
- frame.data = buf.memory_pointer;
- frame.pixel_height = 1;
- frame.pixel_width = 1;
-
- TRACE_EVENT_INSTANT2("rtc_video_capturer",
- "OnFrameCaptured",
- TRACE_EVENT_SCOPE_THREAD,
- "elapsed time",
- frame.elapsed_time,
- "timestamp_ms",
- frame.time_stamp / talk_base::kNumNanosecsPerMillisec);
+ cricket_frame.time_stamp = frame->GetTimestamp().InMicroseconds() *
+ base::Time::kNanosecondsPerMicrosecond;
+ // TODO(sheu): we assume contiguous layout of image planes.
+ cricket_frame.data = frame->data(0);
+ cricket_frame.data_size =
+ media::VideoFrame::AllocationSize(frame->format(), frame->coded_size());
+ cricket_frame.pixel_height = 1;
+ cricket_frame.pixel_width = 1;
+
+ TRACE_EVENT_INSTANT2(
+ "rtc_video_capturer",
+ "OnFrameCaptured",
+ TRACE_EVENT_SCOPE_THREAD,
+ "elapsed time",
+ cricket_frame.elapsed_time,
+ "timestamp_ms",
+ cricket_frame.time_stamp / talk_base::kNumNanosecsPerMillisec);
// This signals to libJingle that a new VideoFrame is available.
// libJingle has no assumptions on what thread this signal comes from.
- SignalFrameCaptured(this, &frame);
+ SignalFrameCaptured(this, &cricket_frame);
}
void RtcVideoCapturer::OnStateChange(