Index: content/renderer/media/rtc_video_encoder.cc |
diff --git a/content/renderer/media/rtc_video_encoder.cc b/content/renderer/media/rtc_video_encoder.cc |
new file mode 100644 |
index 0000000000000000000000000000000000000000..da455d9bd5945e1634ca7b02ae904e2d14377f4b |
--- /dev/null |
+++ b/content/renderer/media/rtc_video_encoder.cc |
@@ -0,0 +1,677 @@ |
+// Copyright 2013 The Chromium Authors. All rights reserved. |
+// Use of this source code is governed by a BSD-style license that can be |
+// found in the LICENSE file. |
+ |
+#include "content/renderer/media/rtc_video_encoder.h" |
+ |
+#include "base/bind.h" |
+#include "base/location.h" |
+#include "base/logging.h" |
+#include "base/memory/scoped_vector.h" |
+#include "base/message_loop/message_loop_proxy.h" |
+#include "base/synchronization/waitable_event.h" |
+#include "content/renderer/media/renderer_gpu_video_accelerator_factories.h" |
+#include "media/base/bitstream_buffer.h" |
+#include "media/base/video_frame.h" |
+#include "media/filters/gpu_video_accelerator_factories.h" |
+#include "media/video/video_encode_accelerator.h" |
+ |
+#define NOTIFY_ERROR(x) \ |
+ do { \ |
+ DLOG(ERROR) << "calling NotifyError(): " << x; \ |
+ NotifyError(x); \ |
+ } while (0) |
+ |
+namespace content { |
+ |
+// This private class of RTCVideoEncoder does the actual work of communicating |
+// with a media::VideoEncodeAccelerator for handling video encoding. It can |
+// be created on any thread, but should subsequently be posted to (and Destroy() |
+// called on) a single thread. Callbacks to RTCVideoEncoder are posted to the |
+// thread on which the instance was constructed. |
+// |
+// This class separates state related to the thread that RTCVideoEncoder |
+// operates on (presently the libjingle worker thread) from the thread that |
+// |gpu_factories_| provides for accelerator operations (presently the media |
+// thread). The RTCVideoEncoder class can be deleted directly by WebRTC, while |
+// RTCVideoEncoder::Impl stays around long enough to properly shut down the VEA. |
+class RTCVideoEncoder::Impl |
+    : public media::VideoEncodeAccelerator::Client, |
+      public base::RefCountedThreadSafe<RTCVideoEncoder::Impl> { |
+ public: |
+  Impl( |
+      const base::WeakPtr<RTCVideoEncoder>& weak_encoder, |
+      const scoped_refptr<RendererGpuVideoAcceleratorFactories>& gpu_factories); |
+ |
+  // Create the VEA and call Initialize() on it. Called once per instantiation, |
+  // and then the instance is bound forevermore to whichever thread made the |
+  // call. |
+  // RTCVideoEncoder expects to be able to call this function synchronously from |
+  // its own thread, hence the |async_waiter| and |async_retval| arguments. |
+  void CreateAndInitializeVEA(const gfx::Size& input_visible_size, |
+                              int32 bitrate, |
+                              media::VideoCodecProfile profile, |
+                              base::WaitableEvent* async_waiter, |
+                              int32_t* async_retval); |
+  // Enqueue a frame from WebRTC for encoding. |
+  // RTCVideoEncoder expects to be able to call this function synchronously from |
+  // its own thread, hence the |async_waiter| and |async_retval| arguments. |
+  void Enqueue(const webrtc::I420VideoFrame* input_frame, |
+               bool force_keyframe, |
+               base::WaitableEvent* async_waiter, |
+               int32_t* async_retval); |
+ |
+  // RTCVideoEncoder is given a buffer to be passed to WebRTC through the |
+  // RTCVideoEncoder::ReturnEncodedImage() function. When that is complete, |
+  // the buffer is returned to Impl by its index using this function. |
+  void UseOutputBitstreamBufferId(int32 bitstream_buffer_id); |
+ |
+  // Request encoding parameter change for the underlying encoder. |
+  void RequestEncodingParametersChange(int32 bitrate, |
+                                       uint32 framerate_num, |
+                                       uint32 framerate_denom); |
+ |
+  // Destroy this Impl's encoder. The destructor is not explicitly called, as |
+  // Impl is a base::RefCountedThreadSafe. |
+  void Destroy(); |
+ |
+  // media::VideoEncodeAccelerator::Client implementation. |
+  virtual void NotifyInitializeDone() OVERRIDE; |
+  virtual void RequireBitstreamBuffers(int input_count, |
+                                       const gfx::Size& input_coded_size, |
+                                       size_t output_buffer_size) OVERRIDE; |
+  virtual void BitstreamBufferReady(int32 bitstream_buffer_id, |
+                                    size_t payload_size, |
+                                    bool key_frame) OVERRIDE; |
+  virtual void NotifyError(media::VideoEncodeAccelerator::Error error) OVERRIDE; |
+ |
+ private: |
+  friend class base::RefCountedThreadSafe<Impl>; |
+ |
+  enum { |
+    kInputBufferExtraCount = 1, // The number of input buffers allocated, more |
+                                // than what is requested by |
+                                // VEA::RequireBitstreamBuffers(). |
+    kOutputBufferCount = 3, |
+  }; |
+ |
+  virtual ~Impl(); |
+ |
+  // Perform encoding on an input frame from the input queue. |
+  void EncodeOneFrame(); |
+ |
+  // Notify that an input frame is finished for encoding. |index| is the index |
+  // of the completed frame in |input_buffers_|. |
+  void EncodeFrameFinished(int index); |
+ |
+  // Set up/signal |async_waiter_| and |async_retval_|; see declarations below. |
+  void RegisterAsyncWaiter(base::WaitableEvent* waiter, int32_t* retval); |
+  void SignalAsyncWaiter(int32_t retval); |
+ |
+  // Enforces that, after construction, all members are touched on the single |
+  // thread this Impl was posted to (detached in the constructor, bound on |
+  // first use). |
+  base::ThreadChecker thread_checker_; |
+ |
+  // Weak pointer to the parent RTCVideoEncoder, for posting back VEA::Client |
+  // notifications. |
+  const base::WeakPtr<RTCVideoEncoder> weak_encoder_; |
+ |
+  // The message loop on which to post callbacks to |weak_encoder_|. |
+  const scoped_refptr<base::MessageLoopProxy> encoder_message_loop_proxy_; |
+ |
+  // Factory for creating VEAs, shared memory buffers, etc. |
+  const scoped_refptr<RendererGpuVideoAcceleratorFactories> gpu_factories_; |
+ |
+  // webrtc::VideoEncoder expects InitEncode() and Encode() to be synchronous. |
+  // Do this by waiting on the |async_waiter_| and returning the return value in |
+  // |async_retval_| when initialization completes, encoding completes, or |
+  // an error occurs. |
+  base::WaitableEvent* async_waiter_; |
+  int32_t* async_retval_; |
+ |
+  // The underlying VEA to perform encoding on. |
+  scoped_ptr<media::VideoEncodeAccelerator> video_encoder_; |
+ |
+  // Next input frame. Since there is at most one next frame, a single-element |
+  // queue is sufficient. |
+  const webrtc::I420VideoFrame* input_next_frame_; |
+ |
+  // Whether to encode a keyframe next. |
+  bool input_next_frame_keyframe_; |
+ |
+  // Frame sizes. |
+  gfx::Size input_frame_coded_size_; |
+  gfx::Size input_visible_size_; |
+ |
+  // Shared memory buffers for input/output with the VEA. |
+  ScopedVector<base::SharedMemory> input_buffers_; |
+  ScopedVector<base::SharedMemory> output_buffers_; |
+ |
+  // Input buffers ready to be filled with input from Encode(). As a LIFO since |
+  // we don't care about ordering. |
+  std::vector<int> input_buffers_free_; |
+ |
+  // Timestamp of first frame returned from encoder. We calculate subsequent |
+  // capture times as deltas from this base. |
+  base::Time time_base_; |
+ |
+  DISALLOW_COPY_AND_ASSIGN(Impl); |
+}; |
+ |
+RTCVideoEncoder::Impl::Impl( |
+    const base::WeakPtr<RTCVideoEncoder>& weak_encoder, |
+    const scoped_refptr<RendererGpuVideoAcceleratorFactories>& gpu_factories) |
+    : weak_encoder_(weak_encoder), |
+      encoder_message_loop_proxy_(base::MessageLoopProxy::current()), |
+      gpu_factories_(gpu_factories), |
+      async_waiter_(NULL), |
+      async_retval_(NULL), |
+      input_next_frame_(NULL), |
+      input_next_frame_keyframe_(false) { |
+  // Construction may happen on any thread; detach here so that the first call |
+  // after construction (expected on the |gpu_factories_| thread) binds |
+  // |thread_checker_| to that thread. |
+  thread_checker_.DetachFromThread(); |
+} |
+ |
+void RTCVideoEncoder::Impl::CreateAndInitializeVEA( |
+    const gfx::Size& input_visible_size, |
+    int32 bitrate, |
+    media::VideoCodecProfile profile, |
+    base::WaitableEvent* async_waiter, |
+    int32_t* async_retval) { |
+  DVLOG(3) << "Impl::CreateAndInitializeVEA()"; |
+  DCHECK(thread_checker_.CalledOnValidThread()); |
+ |
+  // The waiter is signaled later: from NotifyInitializeDone() on success, or |
+  // from NotifyError() (via the NOTIFY_ERROR paths below) on failure. |
+  RegisterAsyncWaiter(async_waiter, async_retval); |
+ |
+  // Check for overflow converting bitrate (kilobits/sec) to bits/sec. |
+  if (bitrate > kint32max / 1000) { |
+    NOTIFY_ERROR(media::VideoEncodeAccelerator::kInvalidArgumentError); |
+    return; |
+  } |
+ |
+  video_encoder_ = gpu_factories_->CreateVideoEncodeAccelerator(this).Pass(); |
+  if (!video_encoder_) { |
+    NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError); |
+    return; |
+  } |
+  input_visible_size_ = input_visible_size; |
+  video_encoder_->Initialize( |
+      media::VideoFrame::I420, input_visible_size_, profile, bitrate * 1000); |
+} |
+ |
+void RTCVideoEncoder::Impl::Enqueue(const webrtc::I420VideoFrame* input_frame, |
+                                    bool force_keyframe, |
+                                    base::WaitableEvent* async_waiter, |
+                                    int32_t* async_retval) { |
+  DVLOG(3) << "Impl::Enqueue()"; |
+  DCHECK(thread_checker_.CalledOnValidThread()); |
+  DCHECK(!input_next_frame_); |
+ |
+  RegisterAsyncWaiter(async_waiter, async_retval); |
+  input_next_frame_ = input_frame; |
+  input_next_frame_keyframe_ = force_keyframe; |
+ |
+  // If no input buffer is free, the frame stays queued in |input_next_frame_| |
+  // and the waiter is not signaled until EncodeFrameFinished() recycles a |
+  // buffer and calls EncodeOneFrame() -- i.e. the synchronous Encode() caller |
+  // blocks until an input buffer frees up. |
+  if (!input_buffers_free_.empty()) |
+    EncodeOneFrame(); |
+} |
+ |
+void RTCVideoEncoder::Impl::UseOutputBitstreamBufferId( |
+    int32 bitstream_buffer_id) { |
+  // Recycle an output buffer back to the VEA after RTCVideoEncoder has |
+  // finished delivering its contents to WebRTC. The VEA may already have |
+  // been torn down (Destroy()/NotifyError()), hence the null check. |
+  // Note: log message fixed to match the actual function name (previously |
+  // said "UseOutputBitstreamBufferIndex"). |
+  DVLOG(3) << "Impl::UseOutputBitstreamBufferId(): " |
+              "bitstream_buffer_id=" << bitstream_buffer_id; |
+  DCHECK(thread_checker_.CalledOnValidThread()); |
+  if (video_encoder_) { |
+    video_encoder_->UseOutputBitstreamBuffer(media::BitstreamBuffer( |
+        bitstream_buffer_id, |
+        output_buffers_[bitstream_buffer_id]->handle(), |
+        output_buffers_[bitstream_buffer_id]->mapped_size())); |
+  } |
+} |
+ |
+void RTCVideoEncoder::Impl::RequestEncodingParametersChange( |
+    int32 bitrate, |
+    uint32 framerate_num, |
+    uint32 framerate_denom) { |
+  DVLOG(3) << "Impl::RequestEncodingParametersChange(): bitrate=" << bitrate |
+           << ", frame_rate=" << framerate_num << "/" << framerate_denom; |
+  DCHECK(thread_checker_.CalledOnValidThread()); |
+ |
+  // |bitrate| is in kilobits/sec, matching CreateAndInitializeVEA(). |
+  // Check for overflow converting bitrate (kilobits/sec) to bits/sec. |
+  if (bitrate > kint32max / 1000) { |
+    NOTIFY_ERROR(media::VideoEncodeAccelerator::kInvalidArgumentError); |
+    return; |
+  } |
+ |
+  // The VEA may already have been torn down; silently drop the request then. |
+  if (video_encoder_) |
+    video_encoder_->RequestEncodingParametersChange( |
+        bitrate * 1000, framerate_num, framerate_denom); |
+} |
+ |
+void RTCVideoEncoder::Impl::Destroy() { |
+  DVLOG(3) << "Impl::Destroy()"; |
+  DCHECK(thread_checker_.CalledOnValidThread()); |
+  // release() before Destroy(): Destroy() is the VEA's own teardown path (and |
+  // is expected to delete the VEA), so the scoped_ptr must give up ownership |
+  // to avoid a double delete. |
+  if (video_encoder_) |
+    video_encoder_.release()->Destroy(); |
+} |
+ |
+void RTCVideoEncoder::Impl::NotifyInitializeDone() { |
+  DVLOG(3) << "Impl::NotifyInitializeDone()"; |
+  DCHECK(thread_checker_.CalledOnValidThread()); |
+  // Initialization succeeded: unblock the InitEncode() call waiting in |
+  // CreateAndInitializeVEA()'s registered waiter. |
+  SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK); |
+} |
+ |
+void RTCVideoEncoder::Impl::RequireBitstreamBuffers( |
+    int input_count, |
+    const gfx::Size& input_coded_size, |
+    size_t output_buffer_size) { |
+  DVLOG(3) << "Impl::RequireBitstreamBuffers(): input_count=" << input_count |
+           << ", input_coded_size=" << input_coded_size.ToString() |
+           << ", output_buffer_size=" << output_buffer_size; |
+  DCHECK(thread_checker_.CalledOnValidThread()); |
+ |
+  // The VEA may already have been torn down by Destroy()/NotifyError(). |
+  if (!video_encoder_) |
+    return; |
+ |
+  input_frame_coded_size_ = input_coded_size; |
+ |
+  // Allocate |input_count| + kInputBufferExtraCount input buffers, each sized |
+  // for an I420 frame at the coded size (12 bits/pixel => area * 3 / 2). |
+  for (int i = 0; i < input_count + kInputBufferExtraCount; ++i) { |
+    base::SharedMemory* shm = |
+        gpu_factories_->CreateSharedMemory(input_coded_size.GetArea() * 3 / 2); |
+    if (!shm) { |
+      DLOG(ERROR) << "Impl::RequireBitstreamBuffers(): " |
+                     "failed to create input buffer " << i; |
+      NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError); |
+      return; |
+    } |
+    input_buffers_.push_back(shm); |
+    input_buffers_free_.push_back(i); |
+  } |
+ |
+  for (int i = 0; i < kOutputBufferCount; ++i) { |
+    base::SharedMemory* shm = |
+        gpu_factories_->CreateSharedMemory(output_buffer_size); |
+    if (!shm) { |
+      DLOG(ERROR) << "Impl::RequireBitstreamBuffers(): " |
+                     "failed to create output buffer " << i; |
+      NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError); |
+      return; |
+    } |
+    output_buffers_.push_back(shm); |
+  } |
+ |
+  // Immediately provide all output buffers to the VEA. |
+  for (size_t i = 0; i < output_buffers_.size(); ++i) { |
+    video_encoder_->UseOutputBitstreamBuffer(media::BitstreamBuffer( |
+        i, output_buffers_[i]->handle(), output_buffers_[i]->mapped_size())); |
+  } |
+} |
+ |
+void RTCVideoEncoder::Impl::BitstreamBufferReady(int32 bitstream_buffer_id, |
+                                                 size_t payload_size, |
+                                                 bool key_frame) { |
+  DVLOG(3) << "Impl::BitstreamBufferReady(): " |
+              "bitstream_buffer_id=" << bitstream_buffer_id |
+           << ", payload_size=" << payload_size |
+           << ", key_frame=" << key_frame; |
+  DCHECK(thread_checker_.CalledOnValidThread()); |
+ |
+  // Validate VEA-supplied values before indexing into |output_buffers_| or |
+  // exposing |payload_size| bytes to WebRTC. |
+  if (bitstream_buffer_id < 0 || |
+      bitstream_buffer_id >= static_cast<int>(output_buffers_.size())) { |
+    DLOG(ERROR) << "Impl::BitstreamBufferReady(): invalid bitstream_buffer_id=" |
+                << bitstream_buffer_id; |
+    NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError); |
+    return; |
+  } |
+  base::SharedMemory* output_buffer = output_buffers_[bitstream_buffer_id]; |
+  if (payload_size > output_buffer->mapped_size()) { |
+    DLOG(ERROR) << "Impl::BitstreamBufferReady(): invalid payload_size=" |
+                << payload_size; |
+    NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError); |
+    return; |
+  } |
+ |
+  // Capture time is approximated as the wall-clock delta from the first frame |
+  // returned by the encoder (|time_base_|). |
+  const base::Time now = base::Time::Now(); |
+  if (time_base_.is_null()) |
+    time_base_ = now; |
+  const base::TimeDelta delta = now - time_base_; |
+ |
+  // EncodedImage(buffer, length, size): wraps the shared memory in place; the |
+  // buffer is only recycled to the VEA after WebRTC is done with it (see |
+  // UseOutputBitstreamBufferId()). |
+  scoped_ptr<webrtc::EncodedImage> image(new webrtc::EncodedImage( |
+      reinterpret_cast<uint8_t*>(output_buffer->memory()), |
+      payload_size, |
+      output_buffer->mapped_size())); |
+  image->_encodedWidth = input_visible_size_.width(); |
+  image->_encodedHeight = input_visible_size_.height(); |
+  // Convert capture time to 90 kHz RTP timestamp. |
+  image->_timeStamp = (delta * 90000).InSeconds(); |
+  image->capture_time_ms_ = delta.InMilliseconds(); |
+  image->_frameType = (key_frame ? webrtc::kKeyFrame : webrtc::kDeltaFrame); |
+  image->_completeFrame = true; |
+ |
+  // Hand the image to RTCVideoEncoder on its own thread; it will deliver to |
+  // WebRTC and post the buffer id back via UseOutputBitstreamBufferId(). |
+  encoder_message_loop_proxy_->PostTask( |
+      FROM_HERE, |
+      base::Bind(&RTCVideoEncoder::ReturnEncodedImage, |
+                 weak_encoder_, |
+                 make_scoped_refptr(this), |
+                 base::Passed(&image), |
+                 bitstream_buffer_id)); |
+} |
+ |
+void RTCVideoEncoder::Impl::NotifyError( |
+    media::VideoEncodeAccelerator::Error error) { |
+  DVLOG(3) << "Impl::NotifyError(): error=" << error; |
+  DCHECK(thread_checker_.CalledOnValidThread()); |
+  // Map the VEA error onto the WebRTC status codes callers understand. |
+  int32_t retval; |
+  switch (error) { |
+    case media::VideoEncodeAccelerator::kInvalidArgumentError: |
+      retval = WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
+      break; |
+    default: |
+      retval = WEBRTC_VIDEO_CODEC_ERROR; |
+  } |
+ |
+  // Tear down the VEA (release() as Destroy() is the self-deleting path; see |
+  // Impl::Destroy()). |
+  if (video_encoder_) |
+    video_encoder_.release()->Destroy(); |
+ |
+  // If a synchronous InitEncode()/Encode() caller is blocked on the waiter, |
+  // fail that call directly; otherwise post the error to RTCVideoEncoder on |
+  // its own thread. |
+  if (async_waiter_) { |
+    SignalAsyncWaiter(retval); |
+  } else { |
+    encoder_message_loop_proxy_->PostTask( |
+        FROM_HERE, |
+        base::Bind(&RTCVideoEncoder::NotifyError, |
+                   weak_encoder_, |
+                   make_scoped_refptr(this), |
+                   retval)); |
+  } |
+} |
+ |
+// By destruction time the VEA must already have been torn down via Destroy() |
+// or NotifyError() (both of which release |video_encoder_|). |
+RTCVideoEncoder::Impl::~Impl() { DCHECK(!video_encoder_); } |
+ |
+void RTCVideoEncoder::Impl::EncodeOneFrame() { |
+  DVLOG(3) << "Impl::EncodeOneFrame()"; |
+  DCHECK(thread_checker_.CalledOnValidThread()); |
+  DCHECK(input_next_frame_); |
+  DCHECK(!input_buffers_free_.empty()); |
+ |
+  // If the VEA is already gone, the frame is silently dropped but the waiter |
+  // is still signaled OK below so the Encode() caller does not hang. |
+  if (video_encoder_) { |
+    const int index = input_buffers_free_.back(); |
+    base::SharedMemory* input_buffer = input_buffers_[index]; |
+ |
+    // Do a strided copy of the input frame to match the input requirements for |
+    // the encoder. |
+    // TODO(sheu): support zero-copy from WebRTC. http://crbug.com/269312 |
+    // Destination rows use the coded width as stride; rows are copied only up |
+    // to the source frame's height. NOTE(review): if the coded size exceeds |
+    // the visible size, padding rows/columns are left uninitialized -- |
+    // presumably acceptable for the encoder; confirm. |
+    const uint8_t* src = input_next_frame_->buffer(webrtc::kYPlane); |
+    uint8* dst = reinterpret_cast<uint8*>(input_buffer->memory()); |
+    uint8* const y_dst = dst; |
+    int width = input_frame_coded_size_.width(); |
+    int stride = input_next_frame_->stride(webrtc::kYPlane); |
+    for (int i = 0; i < input_next_frame_->height(); ++i) { |
+      memcpy(dst, src, width); |
+      src += stride; |
+      dst += width; |
+    } |
+    // U and V planes are half-resolution in both dimensions (I420). |
+    src = input_next_frame_->buffer(webrtc::kUPlane); |
+    width = input_frame_coded_size_.width() / 2; |
+    stride = input_next_frame_->stride(webrtc::kUPlane); |
+    uint8* const u_dst = dst; |
+    for (int i = 0; i < input_next_frame_->height() / 2; ++i) { |
+      memcpy(dst, src, width); |
+      src += stride; |
+      dst += width; |
+    } |
+    src = input_next_frame_->buffer(webrtc::kVPlane); |
+    width = input_frame_coded_size_.width() / 2; |
+    stride = input_next_frame_->stride(webrtc::kVPlane); |
+    uint8* const v_dst = dst; |
+    for (int i = 0; i < input_next_frame_->height() / 2; ++i) { |
+      memcpy(dst, src, width); |
+      src += stride; |
+      dst += width; |
+    } |
+ |
+    // Wrap the shared memory planes in a media::VideoFrame; |
+    // EncodeFrameFinished() runs when the VEA is done with the buffer and |
+    // recycles |index| into |input_buffers_free_|. |
+    scoped_refptr<media::VideoFrame> frame = |
+        media::VideoFrame::WrapExternalYuvData( |
+            media::VideoFrame::I420, |
+            input_frame_coded_size_, |
+            gfx::Rect(input_visible_size_), |
+            input_visible_size_, |
+            input_frame_coded_size_.width(), |
+            input_frame_coded_size_.width() / 2, |
+            input_frame_coded_size_.width() / 2, |
+            y_dst, |
+            u_dst, |
+            v_dst, |
+            base::TimeDelta(), |
+            input_buffer->handle(), |
+            base::Bind( |
+                &RTCVideoEncoder::Impl::EncodeFrameFinished, this, index)); |
+    video_encoder_->Encode(frame, input_next_frame_keyframe_); |
+    input_buffers_free_.pop_back(); |
+  } |
+ |
+  input_next_frame_ = NULL; |
+  input_next_frame_keyframe_ = false; |
+ |
+  // NOTE(review): the waiter is signaled as soon as the frame is submitted |
+  // (not when encoding completes) -- flagged in the review thread below. |
+  SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK); |
Ami GONE FROM CHROMIUM
2013/08/08 23:08:19
I'm vaguely worried that this will send the client
sheu
2013/08/09 00:15:24
Done.
 |
+} |
+ |
+void RTCVideoEncoder::Impl::EncodeFrameFinished(int index) { |
+  DVLOG(3) << "Impl::EncodeFrameFinished(): index=" << index; |
+  DCHECK(thread_checker_.CalledOnValidThread()); |
+  DCHECK_GE(index, 0); |
+  DCHECK_LT(index, static_cast<int>(input_buffers_.size())); |
+  // Recycle the input buffer; if Enqueue() left a frame pending for lack of a |
+  // free buffer, encode it now (which also signals the blocked waiter). |
+  input_buffers_free_.push_back(index); |
+  if (input_next_frame_) |
+    EncodeOneFrame(); |
+} |
+ |
+void RTCVideoEncoder::Impl::RegisterAsyncWaiter(base::WaitableEvent* waiter, |
+                                                int32_t* retval) { |
+  DCHECK(thread_checker_.CalledOnValidThread()); |
+  // Only one synchronous operation may be outstanding at a time. |
+  DCHECK(!async_waiter_); |
+  DCHECK(!async_retval_); |
+  async_waiter_ = waiter; |
+  async_retval_ = retval; |
+} |
+ |
+void RTCVideoEncoder::Impl::SignalAsyncWaiter(int32_t retval) { |
+  DCHECK(thread_checker_.CalledOnValidThread()); |
+  // Write the return value before Signal(): the blocked caller reads it as |
+  // soon as the event fires. Clear both pointers so the next operation can |
+  // register its own waiter. |
+  *async_retval_ = retval; |
+  async_waiter_->Signal(); |
+  async_retval_ = NULL; |
+  async_waiter_ = NULL; |
+} |
+ |
+#undef NOTIFY_ERROR |
+ |
+//////////////////////////////////////////////////////////////////////////////// |
+// |
+// RTCVideoEncoder |
+// |
+//////////////////////////////////////////////////////////////////////////////// |
+ |
+RTCVideoEncoder::RTCVideoEncoder( |
+    webrtc::VideoCodecType type, |
+    media::VideoCodecProfile profile, |
+    const scoped_refptr<RendererGpuVideoAcceleratorFactories>& gpu_factories) |
+    : video_codec_type_(type), |
+      video_codec_profile_(profile), |
+      gpu_factories_(gpu_factories), |
+      weak_this_factory_(this), |
+      weak_this_(weak_this_factory_.GetWeakPtr()), |
+      encoded_image_callback_(NULL), |
+      impl_status_(WEBRTC_VIDEO_CODEC_UNINITIALIZED) { |
+  DVLOG(1) << "RTCVideoEncoder(): profile=" << profile; |
+} |
+ |
+RTCVideoEncoder::~RTCVideoEncoder() { |
+  DCHECK(thread_checker_.CalledOnValidThread()); |
+  // Release() posts Impl::Destroy() to the accelerator thread and drops |
+  // |impl_|; Impl outlives us (RefCountedThreadSafe) to shut down the VEA. |
+  Release(); |
+  DCHECK(!impl_); |
+} |
+ |
+int32_t RTCVideoEncoder::InitEncode(const webrtc::VideoCodec* codec_settings, |
+                                    int32_t number_of_cores, |
+                                    uint32_t max_payload_size) { |
+  DVLOG(1) << "InitEncode(): codecType=" << codec_settings->codecType |
+           << ", width=" << codec_settings->width |
+           << ", height=" << codec_settings->height |
+           << ", startBitrate=" << codec_settings->startBitrate; |
+  DCHECK(thread_checker_.CalledOnValidThread()); |
+  DCHECK(!impl_); |
+ |
+  // Create the Impl and initialize the VEA on the accelerator thread, then |
+  // block until initialization succeeds or fails. |startBitrate| is in |
+  // kilobits/sec (Impl converts to bits/sec). |
+  impl_ = new Impl(weak_this_, gpu_factories_); |
+  base::WaitableEvent initialization_waiter(true, false); |
+  int32_t initialization_retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
+  gpu_factories_->GetMessageLoop()->PostTask( |
+      FROM_HERE, |
+      base::Bind(&RTCVideoEncoder::Impl::CreateAndInitializeVEA, |
+                 impl_, |
+                 gfx::Size(codec_settings->width, codec_settings->height), |
+                 codec_settings->startBitrate, |
+                 video_codec_profile_, |
+                 &initialization_waiter, |
+                 &initialization_retval)); |
+ |
+  // webrtc::VideoEncoder expects this call to be synchronous. |
+  initialization_waiter.Wait(); |
+  return initialization_retval; |
+} |
+ |
+int32_t RTCVideoEncoder::Encode( |
+    const webrtc::I420VideoFrame& input_image, |
+    const webrtc::CodecSpecificInfo* codec_specific_info, |
+    const std::vector<webrtc::VideoFrameType>* frame_types) { |
+  DVLOG(3) << "Encode()"; |
+  // TODO(sheu): figure out why this check fails. |
+  // DCHECK(thread_checker_.CalledOnValidThread()); |
+  // After an error, report the saved status instead of encoding. |
+  if (!impl_) { |
+    DVLOG(3) << "Encode(): returning impl_status_=" << impl_status_; |
+    return impl_status_; |
+  } |
+ |
+  // Passing &input_image across threads is safe only because we block on |
+  // |encode_waiter| below, keeping the frame alive for the duration of the |
+  // call. |
+  base::WaitableEvent encode_waiter(true, false); |
+  int32_t encode_retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
+  gpu_factories_->GetMessageLoop()->PostTask( |
+      FROM_HERE, |
+      base::Bind(&RTCVideoEncoder::Impl::Enqueue, |
+                 impl_, |
+                 &input_image, |
+                 (frame_types->front() == webrtc::kKeyFrame), |
+                 &encode_waiter, |
+                 &encode_retval)); |
+ |
+  // webrtc::VideoEncoder expects this call to be synchronous. |
+  encode_waiter.Wait(); |
+  DVLOG(3) << "Encode(): returning encode_retval=" << encode_retval; |
+  return encode_retval; |
+} |
+ |
+int32_t RTCVideoEncoder::RegisterEncodeCompleteCallback( |
+    webrtc::EncodedImageCallback* callback) { |
+  DVLOG(3) << "RegisterEncodeCompleteCallback()"; |
+  DCHECK(thread_checker_.CalledOnValidThread()); |
+  if (!impl_) { |
+    DVLOG(3) << "RegisterEncodeCompleteCallback(): returning " << impl_status_; |
+    return impl_status_; |
+  } |
+ |
+  // |callback| receives encoded frames in ReturnEncodedImage(). |
+  encoded_image_callback_ = callback; |
+  return WEBRTC_VIDEO_CODEC_OK; |
+} |
+ |
+int32_t RTCVideoEncoder::Release() { |
+  DVLOG(3) << "Release()"; |
+  DCHECK(thread_checker_.CalledOnValidThread()); |
+ |
+  // Reset the gpu_factory_, in case we reuse this encoder. |
+  gpu_factories_->Abort(); |
+  gpu_factories_ = gpu_factories_->Clone(); |
+  // Post VEA teardown to the accelerator thread; Impl stays alive (refcounted |
+  // by the posted task) long enough to run Destroy() there. |
+  if (impl_) { |
+    gpu_factories_->GetMessageLoop()->PostTask( |
+        FROM_HERE, base::Bind(&RTCVideoEncoder::Impl::Destroy, impl_)); |
+    impl_ = NULL; |
+    impl_status_ = WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
+  } |
+  return WEBRTC_VIDEO_CODEC_OK; |
+} |
+ |
+int32_t RTCVideoEncoder::SetChannelParameters(uint32_t packet_loss, int rtt) { |
+  DVLOG(3) << "SetChannelParameters(): packet_loss=" << packet_loss |
+           << ", rtt=" << rtt; |
+  DCHECK(thread_checker_.CalledOnValidThread()); |
+  // Ignored. |
+  return WEBRTC_VIDEO_CODEC_OK; |
+} |
+ |
+int32_t RTCVideoEncoder::SetRates(uint32_t new_bit_rate, uint32_t frame_rate) { |
+  DVLOG(3) << "SetRates(): new_bit_rate=" << new_bit_rate |
+           << ", frame_rate=" << frame_rate; |
+  DCHECK(thread_checker_.CalledOnValidThread()); |
+  if (!impl_) { |
+    DVLOG(3) << "SetRates(): returning " << impl_status_; |
+    return impl_status_; |
+  } |
+ |
+  // |new_bit_rate| is in kilobits/sec; WebRTC supplies only integral frame |
+  // rates, so the framerate denominator is fixed at 1 (see review thread |
+  // below). |
+  gpu_factories_->GetMessageLoop()->PostTask( |
+      FROM_HERE, |
+      base::Bind(&RTCVideoEncoder::Impl::RequestEncodingParametersChange, |
+                 impl_, |
+                 new_bit_rate, |
+                 frame_rate, |
+                 1)); |
Ami GONE FROM CHROMIUM
2013/08/08 23:08:19
nit: why support num/denom if denom is always 1?
hshi1
2013/08/09 00:01:10
I guess this is to permit common fractional frame
sheu
2013/08/09 00:15:24
Since the only current user (WebRTC) uses only int
 |
+  return WEBRTC_VIDEO_CODEC_OK; |
+} |
+ |
+void RTCVideoEncoder::ReturnEncodedImage(const scoped_refptr<Impl>& impl, |
+                                         scoped_ptr<webrtc::EncodedImage> image, |
+                                         int32 bitstream_buffer_id) { |
+  DCHECK(thread_checker_.CalledOnValidThread()); |
+ |
+  // Drop callbacks from a stale Impl (e.g. after Release() created a new one). |
+  if (impl != impl_) |
+    return; |
+ |
+  DVLOG(3) << "ReturnEncodedImage(): " |
+              "bitstream_buffer_id=" << bitstream_buffer_id; |
+ |
+  if (!encoded_image_callback_) |
+    return; |
+ |
+  webrtc::CodecSpecificInfo info; |
+  info.codecType = video_codec_type_; |
+ |
+  // Generate a header describing a single fragment. |
+  webrtc::RTPFragmentationHeader header; |
+  header.VerifyAndAllocateFragmentationHeader(1); |
+  header.fragmentationOffset[0] = 0; |
+  header.fragmentationLength[0] = image->_length; |
+  header.fragmentationPlType[0] = 0; |
+  header.fragmentationTimeDiff[0] = 0; |
+ |
+  // A negative return is logged but otherwise ignored; the buffer is recycled |
+  // either way. |
+  int32_t retval = encoded_image_callback_->Encoded(*image, &info, &header); |
+  if (retval < 0) { |
+    DVLOG(2) << "ReturnEncodedImage(): encoded_image_callback_ returned " |
+             << retval; |
+  } |
+ |
+  // The call through webrtc::EncodedImageCallback is synchronous, so we can |
+  // immediately recycle the output buffer back to the Impl. |
+  gpu_factories_->GetMessageLoop()->PostTask( |
+      FROM_HERE, |
+      base::Bind(&RTCVideoEncoder::Impl::UseOutputBitstreamBufferId, |
+                 impl_, |
+                 bitstream_buffer_id)); |
+} |
+ |
+void RTCVideoEncoder::NotifyError(const scoped_refptr<Impl>& impl, |
+                                  int32_t error) { |
+  DCHECK(thread_checker_.CalledOnValidThread()); |
+ |
+  // Drop errors from a stale Impl (e.g. after Release() created a new one). |
+  if (impl != impl_) |
+    return; |
+ |
+  DVLOG(1) << "NotifyError(): error=" << error; |
+ |
+  // Remember the error (returned from future Encode()/SetRates() calls) and |
+  // tear down the Impl's VEA on the accelerator thread. |
+  impl_status_ = error; |
+  gpu_factories_->GetMessageLoop()->PostTask( |
+      FROM_HERE, base::Bind(&RTCVideoEncoder::Impl::Destroy, impl_)); |
+  impl_ = NULL; |
+} |
+ |
+} // namespace content |