// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/renderer/media/rtc_video_encoder.h"

#include "base/bind.h"
#include "base/location.h"
#include "base/logging.h"
#include "base/memory/scoped_vector.h"
#include "base/message_loop/message_loop_proxy.h"
#include "base/synchronization/waitable_event.h"
#include "content/renderer/media/renderer_gpu_video_accelerator_factories.h"
#include "media/base/bitstream_buffer.h"
#include "media/base/video_frame.h"
#include "media/filters/gpu_video_accelerator_factories.h"
#include "media/video/video_encode_accelerator.h"

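// Logs the error and forwards it to Impl::NotifyError(), which shuts down the
// VEA and reports the failure back to RTCVideoEncoder.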
#define NOTIFY_ERROR(x)                            \
  do {                                             \
    DLOG(ERROR) << "calling NotifyError(): " << x; \
    NotifyError(x);                                \
  } while (0)

namespace content {

// This private class of RTCVideoEncoder does the actual work of communicating
// with a media::VideoEncodeAccelerator for handling video encoding. It can
// be created on any thread, but should subsequently be posted to (and Destroy()
// called on) a single thread. Callbacks to RTCVideoEncoder are posted to the
// thread on which the instance was constructed.
//
// This class separates state related to the thread that RTCVideoEncoder
// operates on (presently the libjingle worker thread) from the thread that
// |gpu_factories_| provides for accelerator operations (presently the media
// thread). The RTCVideoEncoder class can be deleted directly by WebRTC, while
// RTCVideoEncoder::Impl stays around long enough to properly shut down the VEA.
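//
// Calls that WebRTC expects to be synchronous (InitEncode(), Encode()) are
// implemented by posting work to Impl on the media thread and blocking on a
// base::WaitableEvent until Impl signals the result; see RegisterAsyncWaiter()
// and SignalAsyncWaiter() below.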
class RTCVideoEncoder::Impl
    : public media::VideoEncodeAccelerator::Client,
      public base::RefCountedThreadSafe<RTCVideoEncoder::Impl> {
 public:
  Impl(
      const base::WeakPtr<RTCVideoEncoder>& weak_encoder,
      const scoped_refptr<RendererGpuVideoAcceleratorFactories>& gpu_factories);

  // Create the VEA and call Initialize() on it. Called once per instantiation,
  // and then the instance is bound forevermore to whichever thread made the
  // call.
  // RTCVideoEncoder expects to be able to call this function synchronously from
  // its own thread, hence the |async_waiter| and |async_retval| arguments.
  void CreateAndInitializeVEA(const gfx::Size& input_visible_size,
                              int32 bitrate,
                              media::VideoCodecProfile profile,
                              base::WaitableEvent* async_waiter,
                              int32_t* async_retval);
  // Enqueue a frame from WebRTC for encoding.
  // RTCVideoEncoder expects to be able to call this function synchronously from
  // its own thread, hence the |async_waiter| and |async_retval| arguments.
  void Enqueue(const webrtc::I420VideoFrame* input_frame,
               bool force_keyframe,
               base::WaitableEvent* async_waiter,
               int32_t* async_retval);

  // RTCVideoEncoder passes the filled buffer to WebRTC through
  // RTCVideoEncoder::ReturnEncodedImage(); when that completes, the buffer is
  // returned to Impl by index with this function so the VEA can reuse it.
  void UseOutputBitstreamBufferId(int32 bitstream_buffer_id);

  // Request encoding parameter change for the underlying encoder.
  void RequestEncodingParametersChange(int32 bitrate, uint32 framerate);

  // Destroy this Impl's encoder. The Impl destructor is never called
  // explicitly; it runs when the last reference is dropped, since Impl is
  // base::RefCountedThreadSafe.
  void Destroy();

  // media::VideoEncodeAccelerator::Client implementation.
  virtual void NotifyInitializeDone() OVERRIDE;
  virtual void RequireBitstreamBuffers(int input_count,
                                       const gfx::Size& input_coded_size,
                                       size_t output_buffer_size) OVERRIDE;
  virtual void BitstreamBufferReady(int32 bitstream_buffer_id,
                                    size_t payload_size,
                                    bool key_frame) OVERRIDE;
  virtual void NotifyError(media::VideoEncodeAccelerator::Error error) OVERRIDE;

 private:
  friend class base::RefCountedThreadSafe<Impl>;

  enum {
    // The number of extra input buffers to allocate beyond what the VEA
    // requests via RequireBitstreamBuffers().
    kInputBufferExtraCount = 1,
    kOutputBufferCount = 3,
  };

  virtual ~Impl();

  // Perform encoding on an input frame from the input queue.
  void EncodeOneFrame();

  // Notification that an input frame has finished encoding; |index| is the
  // index of the completed frame in |input_buffers_|.
  void EncodeFrameFinished(int index);

  // Set up/signal |async_waiter_| and |async_retval_|; see declarations below.
  void RegisterAsyncWaiter(base::WaitableEvent* waiter, int32_t* retval);
  void SignalAsyncWaiter(int32_t retval);

  base::ThreadChecker thread_checker_;

  // Weak pointer to the parent RTCVideoEncoder, for posting back VEA::Client
  // notifications.
  const base::WeakPtr<RTCVideoEncoder> weak_encoder_;

  // The message loop on which to post callbacks to |weak_encoder_|.
  const scoped_refptr<base::MessageLoopProxy> encoder_message_loop_proxy_;

  // Factory for creating VEAs, shared memory buffers, etc.
  const scoped_refptr<RendererGpuVideoAcceleratorFactories> gpu_factories_;

  // webrtc::VideoEncoder expects InitEncode() and Encode() to be synchronous.
  // Achieve this by waiting on |async_waiter_| and returning the result in
  // |async_retval_| when initialization completes, encoding completes, or an
  // error occurs.
  base::WaitableEvent* async_waiter_;
  int32_t* async_retval_;

  // The underlying VEA to perform encoding on.
  scoped_ptr<media::VideoEncodeAccelerator> video_encoder_;

  // Next input frame. Since there is at most one next frame, a single-element
  // queue is sufficient.
  const webrtc::I420VideoFrame* input_next_frame_;

  // Whether to encode a keyframe next.
  bool input_next_frame_keyframe_;

  // Frame sizes: the coded size the VEA requires for input and the visible
  // size reported back to WebRTC in encoded images.
  gfx::Size input_frame_coded_size_;
  gfx::Size input_visible_size_;

  // Shared memory buffers for input/output with the VEA.
  ScopedVector<base::SharedMemory> input_buffers_;
  ScopedVector<base::SharedMemory> output_buffers_;

  // Indices of input buffers ready to be filled from Encode(). Used as a LIFO
  // since ordering doesn't matter.
  std::vector<int> input_buffers_free_;

  // Timestamp of the first frame returned from the encoder. Subsequent capture
  // times are calculated as deltas from this base.
  base::Time time_base_;

  DISALLOW_COPY_AND_ASSIGN(Impl);
};

RTCVideoEncoder::Impl::Impl(
    const base::WeakPtr<RTCVideoEncoder>& weak_encoder,
    const scoped_refptr<RendererGpuVideoAcceleratorFactories>& gpu_factories)
    : weak_encoder_(weak_encoder),
      encoder_message_loop_proxy_(base::MessageLoopProxy::current()),
      gpu_factories_(gpu_factories),
      async_waiter_(NULL),
      async_retval_(NULL),
      input_next_frame_(NULL),
      input_next_frame_keyframe_(false) {
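  // Impl is constructed on the RTCVideoEncoder thread, but all subsequent
  // calls arrive on the media thread; detach here so the first such call binds
  // |thread_checker_| to that thread.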
  thread_checker_.DetachFromThread();
}

void RTCVideoEncoder::Impl::CreateAndInitializeVEA(
    const gfx::Size& input_visible_size,
    int32 bitrate,
    media::VideoCodecProfile profile,
    base::WaitableEvent* async_waiter,
    int32_t* async_retval) {
  DVLOG(3) << "Impl::CreateAndInitializeVEA()";
  DCHECK(thread_checker_.CalledOnValidThread());

  RegisterAsyncWaiter(async_waiter, async_retval);

  // Check for overflow converting bitrate (kilobits/sec) to bits/sec.
  if (bitrate > kint32max / 1000) {
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kInvalidArgumentError);
    return;
  }

  video_encoder_ = gpu_factories_->CreateVideoEncodeAccelerator(this).Pass();
  if (!video_encoder_) {
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
    return;
  }
  input_visible_size_ = input_visible_size;
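  // Initialize() completes asynchronously; NotifyInitializeDone() (or
  // NotifyError()) signals the RTCVideoEncoder thread blocked in InitEncode().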
  video_encoder_->Initialize(
      media::VideoFrame::I420, input_visible_size_, profile, bitrate * 1000);
}

void RTCVideoEncoder::Impl::Enqueue(const webrtc::I420VideoFrame* input_frame,
                                    bool force_keyframe,
                                    base::WaitableEvent* async_waiter,
                                    int32_t* async_retval) {
  DVLOG(3) << "Impl::Enqueue()";
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(!input_next_frame_);

  RegisterAsyncWaiter(async_waiter, async_retval);
  input_next_frame_ = input_frame;
  input_next_frame_keyframe_ = force_keyframe;

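  // If no input buffer is free yet, the frame stays queued here and
  // EncodeFrameFinished() will encode it once a buffer is returned.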
  if (!input_buffers_free_.empty())
    EncodeOneFrame();
}

void RTCVideoEncoder::Impl::UseOutputBitstreamBufferId(
    int32 bitstream_buffer_id) {
  DVLOG(3) << "Impl::UseOutputBitstreamBufferId(): "
              "bitstream_buffer_id=" << bitstream_buffer_id;
  DCHECK(thread_checker_.CalledOnValidThread());
  if (video_encoder_) {
    video_encoder_->UseOutputBitstreamBuffer(media::BitstreamBuffer(
        bitstream_buffer_id,
        output_buffers_[bitstream_buffer_id]->handle(),
        output_buffers_[bitstream_buffer_id]->mapped_size()));
  }
}

void RTCVideoEncoder::Impl::RequestEncodingParametersChange(int32 bitrate,
                                                            uint32 framerate) {
  DVLOG(3) << "Impl::RequestEncodingParametersChange(): bitrate=" << bitrate
           << ", framerate=" << framerate;
  DCHECK(thread_checker_.CalledOnValidThread());

  // Check for overflow converting bitrate (kilobits/sec) to bits/sec.
  if (bitrate > kint32max / 1000) {
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kInvalidArgumentError);
    return;
  }

  if (video_encoder_)
    video_encoder_->RequestEncodingParametersChange(bitrate * 1000, framerate);
}

void RTCVideoEncoder::Impl::Destroy() {
  DVLOG(3) << "Impl::Destroy()";
  DCHECK(thread_checker_.CalledOnValidThread());
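  // The VEA is torn down via Destroy() rather than its destructor and deletes
  // itself, so release ownership from the scoped_ptr before the call.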
  if (video_encoder_)
    video_encoder_.release()->Destroy();
}

void RTCVideoEncoder::Impl::NotifyInitializeDone() {
  DVLOG(3) << "Impl::NotifyInitializeDone()";
  DCHECK(thread_checker_.CalledOnValidThread());
  SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK);
}

void RTCVideoEncoder::Impl::RequireBitstreamBuffers(
    int input_count,
    const gfx::Size& input_coded_size,
    size_t output_buffer_size) {
  DVLOG(3) << "Impl::RequireBitstreamBuffers(): input_count=" << input_count
           << ", input_coded_size=" << input_coded_size.ToString()
           << ", output_buffer_size=" << output_buffer_size;
  DCHECK(thread_checker_.CalledOnValidThread());

  if (!video_encoder_)
    return;

  input_frame_coded_size_ = input_coded_size;

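  // Allocate I420 input buffers: 3/2 bytes per pixel of coded size (a full
  // resolution Y plane plus quarter-resolution U and V planes).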
  for (int i = 0; i < input_count + kInputBufferExtraCount; ++i) {
    base::SharedMemory* shm =
        gpu_factories_->CreateSharedMemory(input_coded_size.GetArea() * 3 / 2);
    if (!shm) {
      DLOG(ERROR) << "Impl::RequireBitstreamBuffers(): "
                     "failed to create input buffer " << i;
      NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
      return;
    }
    input_buffers_.push_back(shm);
    input_buffers_free_.push_back(i);
  }

  for (int i = 0; i < kOutputBufferCount; ++i) {
    base::SharedMemory* shm =
        gpu_factories_->CreateSharedMemory(output_buffer_size);
    if (!shm) {
      DLOG(ERROR) << "Impl::RequireBitstreamBuffers(): "
                     "failed to create output buffer " << i;
      NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
      return;
    }
    output_buffers_.push_back(shm);
  }

  // Immediately provide all output buffers to the VEA.
  for (size_t i = 0; i < output_buffers_.size(); ++i) {
    video_encoder_->UseOutputBitstreamBuffer(media::BitstreamBuffer(
        i, output_buffers_[i]->handle(), output_buffers_[i]->mapped_size()));
  }
}


void RTCVideoEncoder::Impl::BitstreamBufferReady(int32 bitstream_buffer_id,
                                                 size_t payload_size,
                                                 bool key_frame) {
  DVLOG(3) << "Impl::BitstreamBufferReady(): "
              "bitstream_buffer_id=" << bitstream_buffer_id
           << ", payload_size=" << payload_size
           << ", key_frame=" << key_frame;
  DCHECK(thread_checker_.CalledOnValidThread());

  if (bitstream_buffer_id < 0 ||
      bitstream_buffer_id >= static_cast<int>(output_buffers_.size())) {
    DLOG(ERROR) << "Impl::BitstreamBufferReady(): invalid bitstream_buffer_id="
                << bitstream_buffer_id;
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
    return;
  }
  base::SharedMemory* output_buffer = output_buffers_[bitstream_buffer_id];
  if (payload_size > output_buffer->mapped_size()) {
    DLOG(ERROR) << "Impl::BitstreamBufferReady(): invalid payload_size="
                << payload_size;
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
    return;
  }

  const base::Time now = base::Time::Now();
  if (time_base_.is_null())
    time_base_ = now;
  const base::TimeDelta delta = now - time_base_;

  scoped_ptr<webrtc::EncodedImage> image(new webrtc::EncodedImage(
      reinterpret_cast<uint8_t*>(output_buffer->memory()),
      payload_size,
      output_buffer->mapped_size()));
  image->_encodedWidth = input_visible_size_.width();
  image->_encodedHeight = input_visible_size_.height();
  // Convert capture time to 90 kHz RTP timestamp.
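  // (delta * 90000).InSeconds() yields the elapsed time measured in 1/90000 s
  // ticks, the RTP clock rate for video.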
  image->_timeStamp = (delta * 90000).InSeconds();
  image->capture_time_ms_ = delta.InMilliseconds();
  image->_frameType = (key_frame ? webrtc::kKeyFrame : webrtc::kDeltaFrame);
  image->_completeFrame = true;

  encoder_message_loop_proxy_->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::ReturnEncodedImage,
                 weak_encoder_,
                 make_scoped_refptr(this),
                 base::Passed(&image),
                 bitstream_buffer_id));
}

void RTCVideoEncoder::Impl::NotifyError(
    media::VideoEncodeAccelerator::Error error) {
  DVLOG(3) << "Impl::NotifyError(): error=" << error;
  DCHECK(thread_checker_.CalledOnValidThread());
  int32_t retval;
  switch (error) {
    case media::VideoEncodeAccelerator::kInvalidArgumentError:
      retval = WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
      break;
    default:
      retval = WEBRTC_VIDEO_CODEC_ERROR;
  }

  if (video_encoder_)
    video_encoder_.release()->Destroy();

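  // If a caller is blocked in a synchronous InitEncode()/Encode(), unblock it
  // with the error; otherwise report the error to RTCVideoEncoder on its own
  // thread.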
  if (async_waiter_) {
    SignalAsyncWaiter(retval);
  } else {
    encoder_message_loop_proxy_->PostTask(
        FROM_HERE,
        base::Bind(&RTCVideoEncoder::NotifyError,
                   weak_encoder_,
                   make_scoped_refptr(this),
                   retval));
  }
}

RTCVideoEncoder::Impl::~Impl() { DCHECK(!video_encoder_); }

void RTCVideoEncoder::Impl::EncodeOneFrame() {
  DVLOG(3) << "Impl::EncodeOneFrame()";
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(input_next_frame_);
  DCHECK(!input_buffers_free_.empty());

  int32_t retval = WEBRTC_VIDEO_CODEC_ERROR;
  if (video_encoder_) {
    const int index = input_buffers_free_.back();
    base::SharedMemory* input_buffer = input_buffers_[index];

    // Do a strided copy of the input frame to match the input requirements for
    // the encoder.
    // TODO(sheu): support zero-copy from WebRTC. http://crbug.com/269312
    const uint8_t* src = input_next_frame_->buffer(webrtc::kYPlane);
    uint8* dst = reinterpret_cast<uint8*>(input_buffer->memory());
    uint8* const y_dst = dst;
    int width = input_frame_coded_size_.width();
    int stride = input_next_frame_->stride(webrtc::kYPlane);
    for (int i = 0; i < input_next_frame_->height(); ++i) {
      memcpy(dst, src, width);
      src += stride;
      dst += width;
    }
    src = input_next_frame_->buffer(webrtc::kUPlane);
    width = input_frame_coded_size_.width() / 2;
    stride = input_next_frame_->stride(webrtc::kUPlane);
    uint8* const u_dst = dst;
    for (int i = 0; i < input_next_frame_->height() / 2; ++i) {
      memcpy(dst, src, width);
      src += stride;
      dst += width;
    }
    src = input_next_frame_->buffer(webrtc::kVPlane);
    width = input_frame_coded_size_.width() / 2;
    stride = input_next_frame_->stride(webrtc::kVPlane);
    uint8* const v_dst = dst;
    for (int i = 0; i < input_next_frame_->height() / 2; ++i) {
      memcpy(dst, src, width);
      src += stride;
      dst += width;
    }

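    // Wrap the copied planes in a media::VideoFrame backed by |input_buffer|.
    // EncodeFrameFinished() runs once the frame is no longer needed, returning
    // the buffer index to |input_buffers_free_|.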
    scoped_refptr<media::VideoFrame> frame =
        media::VideoFrame::WrapExternalYuvData(
            media::VideoFrame::I420,
            input_frame_coded_size_,
            gfx::Rect(input_visible_size_),
            input_visible_size_,
            input_frame_coded_size_.width(),
            input_frame_coded_size_.width() / 2,
            input_frame_coded_size_.width() / 2,
            y_dst,
            u_dst,
            v_dst,
            base::TimeDelta(),
            input_buffer->handle(),
            base::Bind(
                &RTCVideoEncoder::Impl::EncodeFrameFinished, this, index));
    video_encoder_->Encode(frame, input_next_frame_keyframe_);
    input_buffers_free_.pop_back();
    retval = WEBRTC_VIDEO_CODEC_OK;
  }

  input_next_frame_ = NULL;
  input_next_frame_keyframe_ = false;
  SignalAsyncWaiter(retval);
}

void RTCVideoEncoder::Impl::EncodeFrameFinished(int index) {
  DVLOG(3) << "Impl::EncodeFrameFinished(): index=" << index;
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK_GE(index, 0);
  DCHECK_LT(index, static_cast<int>(input_buffers_.size()));
  input_buffers_free_.push_back(index);
  if (input_next_frame_)
    EncodeOneFrame();
}

void RTCVideoEncoder::Impl::RegisterAsyncWaiter(base::WaitableEvent* waiter,
                                                int32_t* retval) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(!async_waiter_);
  DCHECK(!async_retval_);
  async_waiter_ = waiter;
  async_retval_ = retval;
}

void RTCVideoEncoder::Impl::SignalAsyncWaiter(int32_t retval) {
  DCHECK(thread_checker_.CalledOnValidThread());
  *async_retval_ = retval;
  async_waiter_->Signal();
  async_retval_ = NULL;
  async_waiter_ = NULL;
}

#undef NOTIFY_ERROR

////////////////////////////////////////////////////////////////////////////////
//
// RTCVideoEncoder
//
////////////////////////////////////////////////////////////////////////////////

RTCVideoEncoder::RTCVideoEncoder(
    webrtc::VideoCodecType type,
    media::VideoCodecProfile profile,
    const scoped_refptr<RendererGpuVideoAcceleratorFactories>& gpu_factories)
    : video_codec_type_(type),
      video_codec_profile_(profile),
      gpu_factories_(gpu_factories),
      weak_this_factory_(this),
      weak_this_(weak_this_factory_.GetWeakPtr()),
      encoded_image_callback_(NULL),
      impl_status_(WEBRTC_VIDEO_CODEC_UNINITIALIZED) {
  DVLOG(1) << "RTCVideoEncoder(): profile=" << profile;
}

RTCVideoEncoder::~RTCVideoEncoder() {
  DCHECK(thread_checker_.CalledOnValidThread());
  Release();
  DCHECK(!impl_);
}

int32_t RTCVideoEncoder::InitEncode(const webrtc::VideoCodec* codec_settings,
                                    int32_t number_of_cores,
                                    uint32_t max_payload_size) {
  DVLOG(1) << "InitEncode(): codecType=" << codec_settings->codecType
           << ", width=" << codec_settings->width
           << ", height=" << codec_settings->height
           << ", startBitrate=" << codec_settings->startBitrate;
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(!impl_);

  impl_ = new Impl(weak_this_, gpu_factories_);
  base::WaitableEvent initialization_waiter(true, false);
  int32_t initialization_retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  gpu_factories_->GetMessageLoop()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::Impl::CreateAndInitializeVEA,
                 impl_,
                 gfx::Size(codec_settings->width, codec_settings->height),
                 codec_settings->startBitrate,
                 video_codec_profile_,
                 &initialization_waiter,
                 &initialization_retval));

  // webrtc::VideoEncoder expects this call to be synchronous.
  initialization_waiter.Wait();
  return initialization_retval;
}

int32_t RTCVideoEncoder::Encode(
    const webrtc::I420VideoFrame& input_image,
    const webrtc::CodecSpecificInfo* codec_specific_info,
    const std::vector<webrtc::VideoFrameType>* frame_types) {
  DVLOG(3) << "Encode()";
  // TODO(sheu): figure out why this check fails.
  // DCHECK(thread_checker_.CalledOnValidThread());
  if (!impl_) {
    DVLOG(3) << "Encode(): returning impl_status_=" << impl_status_;
    return impl_status_;
  }

  base::WaitableEvent encode_waiter(true, false);
  int32_t encode_retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  gpu_factories_->GetMessageLoop()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::Impl::Enqueue,
                 impl_,
                 &input_image,
                 (frame_types->front() == webrtc::kKeyFrame),
                 &encode_waiter,
                 &encode_retval));

  // webrtc::VideoEncoder expects this call to be synchronous.
  encode_waiter.Wait();
  DVLOG(3) << "Encode(): returning encode_retval=" << encode_retval;
  return encode_retval;
}

int32_t RTCVideoEncoder::RegisterEncodeCompleteCallback(
    webrtc::EncodedImageCallback* callback) {
  DVLOG(3) << "RegisterEncodeCompleteCallback()";
  DCHECK(thread_checker_.CalledOnValidThread());
  if (!impl_) {
    DVLOG(3) << "RegisterEncodeCompleteCallback(): returning " << impl_status_;
    return impl_status_;
  }

  encoded_image_callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t RTCVideoEncoder::Release() {
  DVLOG(3) << "Release()";
  DCHECK(thread_checker_.CalledOnValidThread());

  // Reset |gpu_factories_|, in case we reuse this encoder.
  gpu_factories_->Abort();
  gpu_factories_ = gpu_factories_->Clone();
  if (impl_) {
    gpu_factories_->GetMessageLoop()->PostTask(
        FROM_HERE, base::Bind(&RTCVideoEncoder::Impl::Destroy, impl_));
    impl_ = NULL;
    impl_status_ = WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t RTCVideoEncoder::SetChannelParameters(uint32_t packet_loss, int rtt) {
  DVLOG(3) << "SetChannelParameters(): packet_loss=" << packet_loss
           << ", rtt=" << rtt;
  DCHECK(thread_checker_.CalledOnValidThread());
  // Ignored.
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t RTCVideoEncoder::SetRates(uint32_t new_bit_rate, uint32_t frame_rate) {
  DVLOG(3) << "SetRates(): new_bit_rate=" << new_bit_rate
           << ", frame_rate=" << frame_rate;
  DCHECK(thread_checker_.CalledOnValidThread());
  if (!impl_) {
    DVLOG(3) << "SetRates(): returning " << impl_status_;
    return impl_status_;
  }

  gpu_factories_->GetMessageLoop()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::Impl::RequestEncodingParametersChange,
                 impl_,
                 new_bit_rate,
                 frame_rate));
  return WEBRTC_VIDEO_CODEC_OK;
}

void RTCVideoEncoder::ReturnEncodedImage(const scoped_refptr<Impl>& impl,
                                         scoped_ptr<webrtc::EncodedImage> image,
                                         int32 bitstream_buffer_id) {
  DCHECK(thread_checker_.CalledOnValidThread());

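  // Drop images from an Impl that has already been released or replaced.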
  if (impl != impl_)
    return;

  DVLOG(3) << "ReturnEncodedImage(): "
              "bitstream_buffer_id=" << bitstream_buffer_id;

  if (!encoded_image_callback_)
    return;

  webrtc::CodecSpecificInfo info;
  info.codecType = video_codec_type_;

  // Generate a header describing a single fragment.
  webrtc::RTPFragmentationHeader header;
  header.VerifyAndAllocateFragmentationHeader(1);
  header.fragmentationOffset[0] = 0;
  header.fragmentationLength[0] = image->_length;
  header.fragmentationPlType[0] = 0;
  header.fragmentationTimeDiff[0] = 0;

  int32_t retval = encoded_image_callback_->Encoded(*image, &info, &header);
  if (retval < 0) {
    DVLOG(2) << "ReturnEncodedImage(): encoded_image_callback_ returned "
             << retval;
  }

  // The call through webrtc::EncodedImageCallback is synchronous, so we can
  // immediately recycle the output buffer back to the Impl.
  gpu_factories_->GetMessageLoop()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::Impl::UseOutputBitstreamBufferId,
                 impl_,
                 bitstream_buffer_id));
}

void RTCVideoEncoder::NotifyError(const scoped_refptr<Impl>& impl,
                                  int32_t error) {
  DCHECK(thread_checker_.CalledOnValidThread());

  if (impl != impl_)
    return;

  DVLOG(1) << "NotifyError(): error=" << error;

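  // Remember the error so subsequent calls fail with it, then drop |impl_|; it
  // destroys its VEA on the media thread and goes away with the last
  // reference.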
  impl_status_ = error;
  gpu_factories_->GetMessageLoop()->PostTask(
      FROM_HERE, base::Bind(&RTCVideoEncoder::Impl::Destroy, impl_));
  impl_ = NULL;
}

}  // namespace content