// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/renderer/media/rtc_video_encoder.h"

#include "base/bind.h"
#include "base/location.h"
#include "base/logging.h"
#include "base/memory/scoped_vector.h"
#include "base/message_loop/message_loop_proxy.h"
#include "base/synchronization/waitable_event.h"
#include "content/renderer/media/renderer_gpu_video_accelerator_factories.h"
#include "media/base/bitstream_buffer.h"
#include "media/base/video_frame.h"
#include "media/filters/gpu_video_accelerator_factories.h"
#include "media/video/video_encode_accelerator.h"

#define NOTIFY_ERROR(x)                              \
  do {                                               \
    DLOG(ERROR) << "calling NotifyError(): " << x;   \
    NotifyError(x);                                  \
  } while (0)

namespace content {

// This private class of RTCVideoEncoder does the actual work of communicating
// with a media::VideoEncodeAccelerator for handling video encoding. It can
// be created on any thread, but should subsequently be posted to (and Destroy()
// called on) a single thread. Callbacks to RTCVideoEncoder are posted to the
// thread on which the instance was constructed.
//
// This class separates state related to the thread that RTCVideoEncoder
// operates on (presently the libjingle worker thread) from the thread that
// |gpu_factories_| provides for accelerator operations (presently the media
// thread). The RTCVideoEncoder class can be deleted directly by WebRTC, while
// RTCVideoEncoder::Impl stays around long enough to properly shut down the VEA.
class RTCVideoEncoder::Impl
    : public media::VideoEncodeAccelerator::Client,
      public base::RefCountedThreadSafe<RTCVideoEncoder::Impl> {
 public:
  Impl(
      const base::WeakPtr<RTCVideoEncoder>& weak_encoder,
      const scoped_refptr<RendererGpuVideoAcceleratorFactories>& gpu_factories);

  // Create the VEA and call Initialize() on it. Called once per instantiation,
  // and then the instance is bound forevermore to whichever thread made the
  // call.
  // RTCVideoEncoder expects to be able to call this function synchronously from
  // its own thread, hence the |async_waiter| and |async_retval| arguments.
  void CreateAndInitializeVEA(const gfx::Size& input_visible_size,
                              int32 bitrate,
                              media::VideoCodecProfile profile,
                              base::WaitableEvent* async_waiter,
                              int32_t* async_retval);
  // Enqueue a frame from WebRTC for encoding.
  // RTCVideoEncoder expects to be able to call this function synchronously from
  // its own thread, hence the |async_waiter| and |async_retval| arguments.
  void Enqueue(const webrtc::I420VideoFrame* input_frame,
               bool force_keyframe,
               base::WaitableEvent* async_waiter,
               int32_t* async_retval);

  // RTCVideoEncoder is given a buffer to be passed to WebRTC through the
  // RTCVideoEncoder::ReturnEncodedImage() function. When that is complete,
  // the buffer is returned to Impl by its index using this function.
  void UseOutputBitstreamBufferId(int32 bitstream_buffer_id);

  // Request encoding parameter change for the underlying encoder.
  void RequestEncodingParametersChange(int32 bitrate,
                                       uint32 framerate_num,
                                       uint32 framerate_denom);

  // Destroy this Impl's encoder. The destructor is not explicitly called, as
  // Impl is a base::RefCountedThreadSafe.
  void Destroy();

  // media::VideoEncodeAccelerator::Client implementation.
  virtual void NotifyInitializeDone() OVERRIDE;
  virtual void RequireBitstreamBuffers(int input_count,
                                       const gfx::Size& input_coded_size,
                                       size_t output_buffer_size) OVERRIDE;
  virtual void BitstreamBufferReady(int32 bitstream_buffer_id,
                                    size_t payload_size,
                                    bool key_frame) OVERRIDE;
  virtual void NotifyError(media::VideoEncodeAccelerator::Error error) OVERRIDE;

 private:
  friend class base::RefCountedThreadSafe<Impl>;

  enum {
    kInputBufferExtraCount = 1,  // The number of input buffers allocated, more
                                 // than what is requested by
                                 // VEA::RequireBitstreamBuffers().
    kOutputBufferCount = 3,
  };

  virtual ~Impl();

  // Perform encoding on an input frame from the input queue.
  void EncodeOneFrame();

  // Notify that an input frame is finished for encoding. |index| is the index
  // of the completed frame in |input_buffers_|.
  void EncodeFrameFinished(int index);

  // Set up/signal |async_waiter_| and |async_retval_|; see declarations below.
  void RegisterAsyncWaiter(base::WaitableEvent* waiter, int32_t* retval);
  void SignalAsyncWaiter(int32_t retval);

  base::ThreadChecker thread_checker_;

  // Weak pointer to the parent RTCVideoEncoder, for posting back VEA::Client
  // notifications.
  const base::WeakPtr<RTCVideoEncoder> weak_encoder_;

  // The message loop on which to post callbacks to |weak_encoder_|.
  const scoped_refptr<base::MessageLoopProxy> encoder_message_loop_proxy_;

  // Factory for creating VEAs, shared memory buffers, etc.
  const scoped_refptr<RendererGpuVideoAcceleratorFactories> gpu_factories_;

  // webrtc::VideoEncoder expects InitEncode() and Encode() to be synchronous.
  // Do this by waiting on the |async_waiter_| and returning the return value in
  // |async_retval_| when initialization completes, encoding completes, or
  // an error occurs.
  base::WaitableEvent* async_waiter_;
  int32_t* async_retval_;

  // The underlying VEA to perform encoding on.
  scoped_ptr<media::VideoEncodeAccelerator> video_encoder_;

  // Next input frame. Since there is at most one next frame, a single-element
  // queue is sufficient.
  const webrtc::I420VideoFrame* input_next_frame_;

  // Whether to encode a keyframe next.
  bool input_next_frame_keyframe_;

  // Frame sizes.
  gfx::Size input_frame_coded_size_;
  gfx::Size input_visible_size_;

  // Shared memory buffers for input/output with the VEA.
  ScopedVector<base::SharedMemory> input_buffers_;
  ScopedVector<base::SharedMemory> output_buffers_;

  // Input buffers ready to be filled with input from Encode(). As a LIFO since
  // we don't care about ordering.
  std::vector<int> input_buffers_free_;

  // Timestamp of first frame returned from encoder. We calculate subsequent
  // capture times as deltas from this base.
  base::Time time_base_;

  DISALLOW_COPY_AND_ASSIGN(Impl);
};

RTCVideoEncoder::Impl::Impl(
    const base::WeakPtr<RTCVideoEncoder>& weak_encoder,
    const scoped_refptr<RendererGpuVideoAcceleratorFactories>& gpu_factories)
    : weak_encoder_(weak_encoder),
      encoder_message_loop_proxy_(base::MessageLoopProxy::current()),
      gpu_factories_(gpu_factories),
      async_waiter_(NULL),
      async_retval_(NULL),
      input_next_frame_(NULL),
      input_next_frame_keyframe_(false) {
  thread_checker_.DetachFromThread();
}

void RTCVideoEncoder::Impl::CreateAndInitializeVEA(
    const gfx::Size& input_visible_size,
    int32 bitrate,
    media::VideoCodecProfile profile,
    base::WaitableEvent* async_waiter,
    int32_t* async_retval) {
  DVLOG(3) << "Impl::CreateAndInitializeVEA()";
  DCHECK(thread_checker_.CalledOnValidThread());

  RegisterAsyncWaiter(async_waiter, async_retval);

  // Check for overflow converting bitrate (kilobits/sec) to bits/sec.
  if (bitrate > kint32max / 1000) {
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kInvalidArgumentError);
    return;
  }

  video_encoder_ = gpu_factories_->CreateVideoEncodeAccelerator(this).Pass();
  if (!video_encoder_) {
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
    return;
  }
  input_visible_size_ = input_visible_size;
  video_encoder_->Initialize(
      media::VideoFrame::I420, input_visible_size_, profile, bitrate * 1000);
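  // The result is reported asynchronously: NotifyInitializeDone() or
  // NotifyError() will signal |async_waiter_| with the return value.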
}

void RTCVideoEncoder::Impl::Enqueue(const webrtc::I420VideoFrame* input_frame,
                                    bool force_keyframe,
                                    base::WaitableEvent* async_waiter,
                                    int32_t* async_retval) {
  DVLOG(3) << "Impl::Enqueue()";
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(!input_next_frame_);

  RegisterAsyncWaiter(async_waiter, async_retval);
  input_next_frame_ = input_frame;
  input_next_frame_keyframe_ = force_keyframe;

  if (!input_buffers_free_.empty())
    EncodeOneFrame();
}

void RTCVideoEncoder::Impl::UseOutputBitstreamBufferId(
    int32 bitstream_buffer_id) {
  DVLOG(3) << "Impl::UseOutputBitstreamBufferId(): "
              "bitstream_buffer_id=" << bitstream_buffer_id;
  DCHECK(thread_checker_.CalledOnValidThread());
  if (video_encoder_) {
    video_encoder_->UseOutputBitstreamBuffer(media::BitstreamBuffer(
        bitstream_buffer_id,
        output_buffers_[bitstream_buffer_id]->handle(),
        output_buffers_[bitstream_buffer_id]->mapped_size()));
  }
}

void RTCVideoEncoder::Impl::RequestEncodingParametersChange(
    int32 bitrate,
    uint32 framerate_num,
    uint32 framerate_denom) {
  DVLOG(3) << "Impl::RequestEncodingParametersChange(): bitrate=" << bitrate
           << ", frame_rate=" << framerate_num << "/" << framerate_denom;
  DCHECK(thread_checker_.CalledOnValidThread());

  // Check for overflow converting bitrate (kilobits/sec) to bits/sec.
  if (bitrate > kint32max / 1000) {
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kInvalidArgumentError);
    return;
  }

  if (video_encoder_)
    video_encoder_->RequestEncodingParametersChange(
        bitrate * 1000, framerate_num, framerate_denom);
}

void RTCVideoEncoder::Impl::Destroy() {
  DVLOG(3) << "Impl::Destroy()";
  DCHECK(thread_checker_.CalledOnValidThread());
  if (video_encoder_)
    video_encoder_.release()->Destroy();
}

void RTCVideoEncoder::Impl::NotifyInitializeDone() {
  DVLOG(3) << "Impl::NotifyInitializeDone()";
  DCHECK(thread_checker_.CalledOnValidThread());
  SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK);
}

void RTCVideoEncoder::Impl::RequireBitstreamBuffers(
    int input_count,
    const gfx::Size& input_coded_size,
    size_t output_buffer_size) {
  DVLOG(3) << "Impl::RequireBitstreamBuffers(): input_count=" << input_count
           << ", input_coded_size=" << input_coded_size.ToString()
           << ", output_buffer_size=" << output_buffer_size;
  DCHECK(thread_checker_.CalledOnValidThread());

  if (!video_encoder_)
    return;

  input_frame_coded_size_ = input_coded_size;

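  // An I420 frame occupies 3/2 bytes per pixel of the coded size (one full
  // luma plane plus two quarter-size chroma planes), hence the buffer size
  // used below.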
  for (int i = 0; i < input_count + kInputBufferExtraCount; ++i) {
    base::SharedMemory* shm =
        gpu_factories_->CreateSharedMemory(input_coded_size.GetArea() * 3 / 2);
    if (!shm) {
      DLOG(ERROR) << "Impl::RequireBitstreamBuffers(): "
                     "failed to create input buffer " << i;
      NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
      return;
    }
    input_buffers_.push_back(shm);
    input_buffers_free_.push_back(i);
  }

  for (int i = 0; i < kOutputBufferCount; ++i) {
    base::SharedMemory* shm =
        gpu_factories_->CreateSharedMemory(output_buffer_size);
    if (!shm) {
      DLOG(ERROR) << "Impl::RequireBitstreamBuffers(): "
                     "failed to create output buffer " << i;
      NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
      return;
    }
    output_buffers_.push_back(shm);
  }

  // Immediately provide all output buffers to the VEA.
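  // Each buffer is handed back to the VEA one at a time, via
  // UseOutputBitstreamBufferId(), after WebRTC has consumed the corresponding
  // encoded image.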
  for (size_t i = 0; i < output_buffers_.size(); ++i) {
    video_encoder_->UseOutputBitstreamBuffer(media::BitstreamBuffer(
        i, output_buffers_[i]->handle(), output_buffers_[i]->mapped_size()));
  }
}

void RTCVideoEncoder::Impl::BitstreamBufferReady(int32 bitstream_buffer_id,
                                                 size_t payload_size,
                                                 bool key_frame) {
  DVLOG(3) << "Impl::BitstreamBufferReady(): "
              "bitstream_buffer_id=" << bitstream_buffer_id
           << ", payload_size=" << payload_size
           << ", key_frame=" << key_frame;
  DCHECK(thread_checker_.CalledOnValidThread());

  if (bitstream_buffer_id < 0 ||
      bitstream_buffer_id >= static_cast<int>(output_buffers_.size())) {
    DLOG(ERROR) << "Impl::BitstreamBufferReady(): invalid bitstream_buffer_id="
                << bitstream_buffer_id;
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
    return;
  }
  base::SharedMemory* output_buffer = output_buffers_[bitstream_buffer_id];
  if (payload_size > output_buffer->mapped_size()) {
    DLOG(ERROR) << "Impl::BitstreamBufferReady(): invalid payload_size="
                << payload_size;
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
    return;
  }

  const base::Time now = base::Time::Now();
  if (time_base_.is_null())
    time_base_ = now;
  const base::TimeDelta delta = now - time_base_;

  scoped_ptr<webrtc::EncodedImage> image(new webrtc::EncodedImage(
      reinterpret_cast<uint8_t*>(output_buffer->memory()),
      payload_size,
      output_buffer->mapped_size()));
  image->_encodedWidth = input_visible_size_.width();
  image->_encodedHeight = input_visible_size_.height();
  // Convert capture time to 90 kHz RTP timestamp.
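  // Scaling the TimeDelta by 90000 and truncating to whole seconds yields the
  // tick count at 90 kHz (e.g. a 500 ms delta becomes 45000 ticks).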
  image->_timeStamp = (delta * 90000).InSeconds();
  image->capture_time_ms_ = delta.InMilliseconds();
  image->_frameType = (key_frame ? webrtc::kKeyFrame : webrtc::kDeltaFrame);
  image->_completeFrame = true;

  encoder_message_loop_proxy_->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::ReturnEncodedImage,
                 weak_encoder_,
                 make_scoped_refptr(this),
                 base::Passed(&image),
                 bitstream_buffer_id));
}

void RTCVideoEncoder::Impl::NotifyError(
    media::VideoEncodeAccelerator::Error error) {
  DVLOG(3) << "Impl::NotifyError(): error=" << error;
  DCHECK(thread_checker_.CalledOnValidThread());
  int32_t retval;
  switch (error) {
    case media::VideoEncodeAccelerator::kInvalidArgumentError:
      retval = WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
      break;
    default:
      retval = WEBRTC_VIDEO_CODEC_ERROR;
  }

  if (video_encoder_)
    video_encoder_.release()->Destroy();

  if (async_waiter_) {
    SignalAsyncWaiter(retval);
  } else {
    encoder_message_loop_proxy_->PostTask(
        FROM_HERE,
        base::Bind(&RTCVideoEncoder::NotifyError,
                   weak_encoder_,
                   make_scoped_refptr(this),
                   retval));
  }
}

RTCVideoEncoder::Impl::~Impl() { DCHECK(!video_encoder_); }

void RTCVideoEncoder::Impl::EncodeOneFrame() {
  DVLOG(3) << "Impl::EncodeOneFrame()";
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(input_next_frame_);
  DCHECK(!input_buffers_free_.empty());

  if (video_encoder_) {
    const int index = input_buffers_free_.back();
    base::SharedMemory* input_buffer = input_buffers_[index];

    // Do a strided copy of the input frame to match the input requirements for
    // the encoder.
    // TODO(sheu): support zero-copy from WebRTC. http://crbug.com/269312
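    // The destination planes are tightly packed, one after another, at the
    // coded frame width, which is why |dst| advances by |width| while |src|
    // advances by the source plane's stride.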
    const uint8_t* src = input_next_frame_->buffer(webrtc::kYPlane);
    uint8* dst = reinterpret_cast<uint8*>(input_buffer->memory());
    uint8* const y_dst = dst;
    int width = input_frame_coded_size_.width();
    int stride = input_next_frame_->stride(webrtc::kYPlane);
    for (int i = 0; i < input_next_frame_->height(); ++i) {
      memcpy(dst, src, width);
      src += stride;
      dst += width;
    }
    src = input_next_frame_->buffer(webrtc::kUPlane);
    width = input_frame_coded_size_.width() / 2;
    stride = input_next_frame_->stride(webrtc::kUPlane);
    uint8* const u_dst = dst;
    for (int i = 0; i < input_next_frame_->height() / 2; ++i) {
      memcpy(dst, src, width);
      src += stride;
      dst += width;
    }
    src = input_next_frame_->buffer(webrtc::kVPlane);
    width = input_frame_coded_size_.width() / 2;
    stride = input_next_frame_->stride(webrtc::kVPlane);
    uint8* const v_dst = dst;
    for (int i = 0; i < input_next_frame_->height() / 2; ++i) {
      memcpy(dst, src, width);
      src += stride;
      dst += width;
    }

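    // Wrap the filled shared memory in a media::VideoFrame; the bound
    // EncodeFrameFinished() callback returns |index| to |input_buffers_free_|
    // once the encoder is done with the frame.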
    scoped_refptr<media::VideoFrame> frame =
        media::VideoFrame::WrapExternalYuvData(
            media::VideoFrame::I420,
            input_frame_coded_size_,
            gfx::Rect(input_visible_size_),
            input_visible_size_,
            input_frame_coded_size_.width(),
            input_frame_coded_size_.width() / 2,
            input_frame_coded_size_.width() / 2,
            y_dst,
            u_dst,
            v_dst,
            base::TimeDelta(),
            input_buffer->handle(),
            base::Bind(
                &RTCVideoEncoder::Impl::EncodeFrameFinished, this, index));
    video_encoder_->Encode(frame, input_next_frame_keyframe_);
    input_buffers_free_.pop_back();
  }

  input_next_frame_ = NULL;
  input_next_frame_keyframe_ = false;

  SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK);
Ami GONE FROM CHROMIUM (2013/08/08 23:08:19): I'm vaguely worried that this will send the client
sheu (2013/08/09 00:15:24): Done.
}

void RTCVideoEncoder::Impl::EncodeFrameFinished(int index) {
  DVLOG(3) << "Impl::EncodeFrameFinished(): index=" << index;
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK_GE(index, 0);
  DCHECK_LT(index, static_cast<int>(input_buffers_.size()));
  input_buffers_free_.push_back(index);
  if (input_next_frame_)
    EncodeOneFrame();
}

void RTCVideoEncoder::Impl::RegisterAsyncWaiter(base::WaitableEvent* waiter,
                                                int32_t* retval) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(!async_waiter_);
  DCHECK(!async_retval_);
  async_waiter_ = waiter;
  async_retval_ = retval;
}

void RTCVideoEncoder::Impl::SignalAsyncWaiter(int32_t retval) {
  DCHECK(thread_checker_.CalledOnValidThread());
  *async_retval_ = retval;
  async_waiter_->Signal();
  async_retval_ = NULL;
  async_waiter_ = NULL;
}

#undef NOTIFY_ERROR

////////////////////////////////////////////////////////////////////////////////
//
// RTCVideoEncoder
//
////////////////////////////////////////////////////////////////////////////////

RTCVideoEncoder::RTCVideoEncoder(
    webrtc::VideoCodecType type,
    media::VideoCodecProfile profile,
    const scoped_refptr<RendererGpuVideoAcceleratorFactories>& gpu_factories)
    : video_codec_type_(type),
      video_codec_profile_(profile),
      gpu_factories_(gpu_factories),
      weak_this_factory_(this),
      weak_this_(weak_this_factory_.GetWeakPtr()),
      encoded_image_callback_(NULL),
      impl_status_(WEBRTC_VIDEO_CODEC_UNINITIALIZED) {
  DVLOG(1) << "RTCVideoEncoder(): profile=" << profile;
}

RTCVideoEncoder::~RTCVideoEncoder() {
  DCHECK(thread_checker_.CalledOnValidThread());
  Release();
  DCHECK(!impl_);
}

int32_t RTCVideoEncoder::InitEncode(const webrtc::VideoCodec* codec_settings,
                                    int32_t number_of_cores,
                                    uint32_t max_payload_size) {
  DVLOG(1) << "InitEncode(): codecType=" << codec_settings->codecType
           << ", width=" << codec_settings->width
           << ", height=" << codec_settings->height
           << ", startBitrate=" << codec_settings->startBitrate;
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(!impl_);

  impl_ = new Impl(weak_this_, gpu_factories_);
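  // The waiter is signaled by Impl from the media thread once
  // CreateAndInitializeVEA() has a result, which makes InitEncode() behave
  // synchronously as webrtc::VideoEncoder requires.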
  base::WaitableEvent initialization_waiter(true, false);
  int32_t initialization_retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  gpu_factories_->GetMessageLoop()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::Impl::CreateAndInitializeVEA,
                 impl_,
                 gfx::Size(codec_settings->width, codec_settings->height),
                 codec_settings->startBitrate,
                 video_codec_profile_,
                 &initialization_waiter,
                 &initialization_retval));

  // webrtc::VideoEncoder expects this call to be synchronous.
  initialization_waiter.Wait();
  return initialization_retval;
}

int32_t RTCVideoEncoder::Encode(
    const webrtc::I420VideoFrame& input_image,
    const webrtc::CodecSpecificInfo* codec_specific_info,
    const std::vector<webrtc::VideoFrameType>* frame_types) {
  DVLOG(3) << "Encode()";
  // TODO(sheu): figure out why this check fails.
  // DCHECK(thread_checker_.CalledOnValidThread());
  if (!impl_) {
    DVLOG(3) << "Encode(): returning impl_status_=" << impl_status_;
    return impl_status_;
  }

  base::WaitableEvent encode_waiter(true, false);
  int32_t encode_retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  gpu_factories_->GetMessageLoop()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::Impl::Enqueue,
                 impl_,
                 &input_image,
                 (frame_types->front() == webrtc::kKeyFrame),
                 &encode_waiter,
                 &encode_retval));

  // webrtc::VideoEncoder expects this call to be synchronous.
  encode_waiter.Wait();
  DVLOG(3) << "Encode(): returning encode_retval=" << encode_retval;
  return encode_retval;
}

int32_t RTCVideoEncoder::RegisterEncodeCompleteCallback(
    webrtc::EncodedImageCallback* callback) {
  DVLOG(3) << "RegisterEncodeCompleteCallback()";
  DCHECK(thread_checker_.CalledOnValidThread());
  if (!impl_) {
    DVLOG(3) << "RegisterEncodeCompleteCallback(): returning " << impl_status_;
    return impl_status_;
  }

  encoded_image_callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t RTCVideoEncoder::Release() {
  DVLOG(3) << "Release()";
  DCHECK(thread_checker_.CalledOnValidThread());

  // Reset |gpu_factories_|, in case we reuse this encoder.
  gpu_factories_->Abort();
  gpu_factories_ = gpu_factories_->Clone();
  if (impl_) {
    gpu_factories_->GetMessageLoop()->PostTask(
        FROM_HERE, base::Bind(&RTCVideoEncoder::Impl::Destroy, impl_));
    impl_ = NULL;
    impl_status_ = WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t RTCVideoEncoder::SetChannelParameters(uint32_t packet_loss, int rtt) {
  DVLOG(3) << "SetChannelParameters(): packet_loss=" << packet_loss
           << ", rtt=" << rtt;
  DCHECK(thread_checker_.CalledOnValidThread());
  // Ignored.
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t RTCVideoEncoder::SetRates(uint32_t new_bit_rate, uint32_t frame_rate) {
  DVLOG(3) << "SetRates(): new_bit_rate=" << new_bit_rate
           << ", frame_rate=" << frame_rate;
  DCHECK(thread_checker_.CalledOnValidThread());
  if (!impl_) {
    DVLOG(3) << "SetRates(): returning " << impl_status_;
    return impl_status_;
  }

  gpu_factories_->GetMessageLoop()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::Impl::RequestEncodingParametersChange,
                 impl_,
                 new_bit_rate,
                 frame_rate,
                 1));
Ami GONE FROM CHROMIUM (2013/08/08 23:08:19): nit: why support num/denom if denom is always 1?
hshi1 (2013/08/09 00:01:10): I guess this is to permit common fractional frame
sheu (2013/08/09 00:15:24): Since the only current user (WebRTC) uses only int
  return WEBRTC_VIDEO_CODEC_OK;
}

void RTCVideoEncoder::ReturnEncodedImage(const scoped_refptr<Impl>& impl,
                                         scoped_ptr<webrtc::EncodedImage> image,
                                         int32 bitstream_buffer_id) {
  DCHECK(thread_checker_.CalledOnValidThread());

  if (impl != impl_)
    return;

  DVLOG(3) << "ReturnEncodedImage(): "
              "bitstream_buffer_id=" << bitstream_buffer_id;

  if (!encoded_image_callback_)
    return;

  webrtc::CodecSpecificInfo info;
  info.codecType = video_codec_type_;

  // Generate a header describing a single fragment.
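  // The VEA delivers each encoded frame as one contiguous buffer, so a single
  // fragment spanning the whole payload is sufficient here.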
  webrtc::RTPFragmentationHeader header;
  header.VerifyAndAllocateFragmentationHeader(1);
  header.fragmentationOffset[0] = 0;
  header.fragmentationLength[0] = image->_length;
  header.fragmentationPlType[0] = 0;
  header.fragmentationTimeDiff[0] = 0;

  int32_t retval = encoded_image_callback_->Encoded(*image, &info, &header);
  if (retval < 0) {
    DVLOG(2) << "ReturnEncodedImage(): encoded_image_callback_ returned "
             << retval;
  }

  // The call through webrtc::EncodedImageCallback is synchronous, so we can
  // immediately recycle the output buffer back to the Impl.
  gpu_factories_->GetMessageLoop()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::Impl::UseOutputBitstreamBufferId,
                 impl_,
                 bitstream_buffer_id));
}

void RTCVideoEncoder::NotifyError(const scoped_refptr<Impl>& impl,
                                  int32_t error) {
  DCHECK(thread_checker_.CalledOnValidThread());

  if (impl != impl_)
    return;

  DVLOG(1) << "NotifyError(): error=" << error;

  impl_status_ = error;
  gpu_factories_->GetMessageLoop()->PostTask(
      FROM_HERE, base::Bind(&RTCVideoEncoder::Impl::Destroy, impl_));
  impl_ = NULL;
}

}  // namespace content