1 // Copyright (c) 2013 The Chromium Authors. All rights reserved. | |
2 // Use of this source code is governed by a BSD-style license that can be | |
3 // found in the LICENSE file. | |
4 | |
5 #include "content/common/gpu/media/android_video_decode_accelerator.h" | |
6 | |
7 #include <jni.h> | |
8 | |
9 #include "base/android/jni_android.h" | |
10 #include "base/android/scoped_java_ref.h" | |
11 #include "base/bind.h" | |
12 #include "base/logging.h" | |
13 #include "base/message_loop.h" | |
14 #include "content/common/android/surface_callback.h" | |
15 #include "content/common/gpu/gpu_channel.h" | |
16 #include "gpu/command_buffer/service/gles2_cmd_decoder.h" | |
17 #include "media/base/bitstream_buffer.h" | |
18 #include "media/base/limits.h" | |
19 #include "media/video/picture.h" | |
20 #include "ui/gl/gl_bindings.h" | |
21 | |
22 using base::android::MethodID; | |
23 using base::android::ScopedJavaLocalRef; | |
24 | |
25 namespace content { | |
26 | |
27 // Helper macro for dealing with failure. If |result| evaluates to false, | |
28 // emit |log| to ERROR, register |error| with the decoder, and return. | |
29 #define RETURN_ON_FAILURE(result, log, error) \ | |
30 do { \ | |
31 if (!(result)) { \ | |
32 DLOG(ERROR) << log; \ | |
33 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( \ | |
34 &AndroidVideoDecodeAccelerator::NotifyError, \ | |
35 base::AsWeakPtr(this), error)); \ | |
36 state_ = ERROR; \ | |
37 return; \ | |
38 } \ | |
39 } while (0) | |
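For reference, a sketch of what one use of this macro expands to, substituting the RETURN_ON_FAILURE(shm->Map(...), ...) call from QueueInput() below; the surrounding names (|shm|, |state_|, UNREADABLE_INPUT) are the ones used in this file, and the expansion is approximate:

  do {
    if (!(shm->Map(bitstream_buffer.size()))) {
      DLOG(ERROR) << "Failed to SharedMemory::Map()";
      MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
          &AndroidVideoDecodeAccelerator::NotifyError,
          base::AsWeakPtr(this), UNREADABLE_INPUT));
      state_ = ERROR;
      return;
    }
  } while (0);

The do { ... } while (0) wrapper makes the expansion behave as a single statement, so the macro remains safe as the body of an unbraced if/else.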
40 | |
41 // TODO(dwkang): We only need kMaxVideoFrames to pass the media stack's | |
42 // prerolling phase, but 1 is added due to crbug.com/176036. This should be | |
43 // tuned when we have an actual use case. | |
44 enum { kNumPictureBuffers = media::limits::kMaxVideoFrames + 1 }; | |
45 | |
46 // Max number of bitstreams notified to the client with | |
47 // NotifyEndOfBitstreamBuffer() before getting output from the bitstream. | |
48 enum { kMaxBitstreamsNotifiedInAdvance = 32 }; | |
49 | |
50 // static | |
51 const base::TimeDelta AndroidVideoDecodeAccelerator::kDecodePollDelay = | |
52 base::TimeDelta::FromMilliseconds(10); | |
53 | |
54 AndroidVideoDecodeAccelerator::AndroidVideoDecodeAccelerator( | |
55 media::VideoDecodeAccelerator::Client* client, | |
56 const base::WeakPtr<gpu::gles2::GLES2Decoder> decoder, | |
57 const base::Callback<bool(void)>& make_context_current) | |
58 : client_(client), | |
59 make_context_current_(make_context_current), | |
60 codec_(media::MediaCodecBridge::VIDEO_H264), | |
61 state_(NO_ERROR), | |
62 surface_texture_id_(0), | |
63 picturebuffers_requested_(false), | |
64 io_task_is_posted_(false), | |
65 decoder_met_eos_(false), | |
66 num_bytes_used_in_the_pending_buffer_(0), | |
67 gl_decoder_(decoder) { | |
68 } | |
69 | |
70 AndroidVideoDecodeAccelerator::~AndroidVideoDecodeAccelerator() { | |
71 DCHECK(thread_checker_.CalledOnValidThread()); | |
72 } | |
73 | |
74 bool AndroidVideoDecodeAccelerator::Initialize( | |
75 media::VideoCodecProfile profile) { | |
76 DCHECK(!media_codec_); | |
77 DCHECK(thread_checker_.CalledOnValidThread()); | |
78 | |
79 if (!media::MediaCodecBridge::IsAvailable()) | |
80 return false; | |
81 | |
82 if (profile == media::VP8PROFILE_MAIN) { | |
83 codec_ = media::MediaCodecBridge::VIDEO_VP8; | |
84 } else { | |
85 // TODO(dwkang): enable H264 once b/8125974 is fixed. | |
86 LOG(ERROR) << "Unsupported profile: " << profile; | |
87 return false; | |
88 } | |
89 | |
90 if (!make_context_current_.Run()) { | |
91 LOG(ERROR) << "Failed to make this decoder's GL context current."; | |
92 return false; | |
93 } | |
94 | |
95 if (!gl_decoder_.get()) { | |
96 LOG(ERROR) << "Failed to get gles2 decoder instance."; | |
97 return false; | |
98 } | |
99 glGenTextures(1, &surface_texture_id_); | |
100 glActiveTexture(GL_TEXTURE0); | |
101 glBindTexture(GL_TEXTURE_EXTERNAL_OES, surface_texture_id_); | |
102 | |
103 glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_NEAREST); | |
104 glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_NEAREST); | |
105 glTexParameteri(GL_TEXTURE_EXTERNAL_OES, | |
106 GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); | |
107 glTexParameteri(GL_TEXTURE_EXTERNAL_OES, | |
108 GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); | |
109 gl_decoder_->RestoreTextureUnitBindings(0); | |
110 gl_decoder_->RestoreActiveTexture(); | |
111 | |
112 surface_texture_ = new SurfaceTextureBridge(surface_texture_id_); | |
113 | |
114 ConfigureMediaCodec(); | |
115 | |
116 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( | |
117 &AndroidVideoDecodeAccelerator::NotifyInitializeDone, | |
118 base::AsWeakPtr(this))); | |
119 return true; | |
120 } | |
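A hedged sketch of the calling pattern this class expects from its client; |client|, |decoder|, |make_context_current| and the buffer variables are hypothetical placeholders, and only methods implemented in this file are used:

  AndroidVideoDecodeAccelerator* vda = new AndroidVideoDecodeAccelerator(
      client, decoder, make_context_current);
  if (!vda->Initialize(media::VP8PROFILE_MAIN))  // only VP8 is accepted here
    return;
  vda->Decode(bitstream_buffer);       // repeated as encoded data arrives
  // ...the client answers the ProvidePictureBuffers() request with exactly
  // kNumPictureBuffers buffers:
  vda->AssignPictureBuffers(buffers);
  // ...after consuming each PictureReady(), the client returns the buffer:
  vda->ReusePictureBuffer(picture_buffer_id);
  vda->Flush();                        // queues the id == -1 end-of-stream marker
  vda->Destroy();                      // stops the codec and deletes |vda|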
121 | |
122 void AndroidVideoDecodeAccelerator::DoIOTask() { | |
123 io_task_is_posted_ = false; | |
124 if (state_ == ERROR) { | |
125 return; | |
126 } | |
127 | |
128 DequeueOutput(); | |
129 QueueInput(); | |
130 | |
131 if (!pending_bitstream_buffers_.empty() || | |
132 !free_picture_ids_.empty()) { | |
133 io_task_is_posted_ = true; | |
134 // TODO(dwkang): PostDelayedTask() does not guarantee the task will run at | |
135 // exactly the requested time. We need a better polling mechanism. | |
136 MessageLoop::current()->PostDelayedTask( | |
137 FROM_HERE, | |
138 base::Bind( | |
139 &AndroidVideoDecodeAccelerator::DoIOTask, base::AsWeakPtr(this)), | |
140 kDecodePollDelay); | |
141 } | |
142 } | |
143 | |
144 void AndroidVideoDecodeAccelerator::QueueInput() { | |
145 if (bitstreams_notified_in_advance_.size() > kMaxBitstreamsNotifiedInAdvance) | |
146 return; | |
147 if (pending_bitstream_buffers_.empty()) | |
148 return; | |
149 | |
150 int input_buf_index = media_codec_->DequeueInputBuffer( | |
151 media::MediaCodecBridge::kTimeOutNoWait); | |
152 if (input_buf_index < 0) { | |
153 DCHECK_EQ(input_buf_index, media::MediaCodecBridge::INFO_TRY_AGAIN_LATER); | |
154 return; | |
155 } | |
156 media::BitstreamBuffer& bitstream_buffer = | |
157 pending_bitstream_buffers_.front(); | |
158 | |
159 if (bitstream_buffer.id() == -1) { | |
160 media_codec_->QueueEOS(input_buf_index); | |
161 pending_bitstream_buffers_.pop(); | |
162 return; | |
163 } | |
164 // Abuse the presentation time argument to propagate the bitstream | |
165 // buffer ID to the output, so we can report it back to the client in | |
166 // PictureReady(). | |
167 base::TimeDelta timestamp = | |
168 base::TimeDelta::FromMicroseconds(bitstream_buffer.id()); | |
169 | |
170 int bytes_written = 0; | |
171 scoped_ptr<base::SharedMemory> shm( | |
172 new base::SharedMemory(bitstream_buffer.handle(), true)); | |
173 | |
174 RETURN_ON_FAILURE(shm->Map(bitstream_buffer.size()), | |
175 "Failed to SharedMemory::Map()", | |
176 UNREADABLE_INPUT); | |
177 | |
178 const size_t offset = num_bytes_used_in_the_pending_buffer_; | |
179 bytes_written = media_codec_->QueueInputBuffer( | |
180 input_buf_index, | |
181 static_cast<const uint8*>(shm->memory()) + offset, | |
182 bitstream_buffer.size() - offset, timestamp); | |
183 num_bytes_used_in_the_pending_buffer_ += bytes_written; | |
184 CHECK_LE(num_bytes_used_in_the_pending_buffer_, bitstream_buffer.size()); | |
185 | |
186 if (num_bytes_used_in_the_pending_buffer_ == bitstream_buffer.size()) { | |
187 num_bytes_used_in_the_pending_buffer_ = 0; | |
188 pending_bitstream_buffers_.pop(); | |
189 | |
190 // We should call NotifyEndOfBitstreamBuffer() once no more decoded output | |
191 // will be returned from this bitstream buffer, but the MediaCodec API | |
192 // gives us no way to determine that. | |
193 // So we call NotifyEndOfBitstreamBuffer() in advance in order to keep | |
194 // getting more bitstreams from the client, and throttle them via | |
195 // |bitstreams_notified_in_advance_|. | |
196 // TODO(dwkang): check if there is a way to remove this workaround. | |
197 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( | |
198 &AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer, | |
199 base::AsWeakPtr(this), bitstream_buffer.id())); | |
200 bitstreams_notified_in_advance_.push_back(bitstream_buffer.id()); | |
201 } | |
202 } | |
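A small self-contained illustration of the ID round trip described in the comment inside QueueInput() above: the bitstream buffer ID is carried through MediaCodec's presentation-timestamp field and recovered in DequeueOutput(). The value 42 is hypothetical.

  int32 id = 42;  // e.g. bitstream_buffer.id()
  base::TimeDelta timestamp = base::TimeDelta::FromMicroseconds(id);
  // ...QueueInputBuffer() hands |timestamp| to the codec, which echoes it on
  // the matching output buffer...
  int32 recovered = static_cast<int32>(timestamp.InMicroseconds());
  DCHECK_EQ(id, recovered);  // PictureReady() reports |recovered| to the client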
203 | |
204 void AndroidVideoDecodeAccelerator::DequeueOutput() { | |
205 if (picturebuffers_requested_ && output_picture_buffers_.empty()) | |
206 return; | |
207 | |
208 if (!output_picture_buffers_.empty() && free_picture_ids_.empty()) { | |
209 // No free picture buffers to send output to; need to wait for more. | |
210 return; | |
211 } | |
212 | |
213 bool eos = false; | |
214 base::TimeDelta timestamp; | |
215 int32 buf_index = 0; | |
216 do { | |
217 int32 offset = 0; | |
218 int32 size = 0; | |
219 buf_index = media_codec_->DequeueOutputBuffer( | |
220 media::MediaCodecBridge::kTimeOutNoWait, | |
221 &offset, &size, ×tamp, &eos); | |
222 switch (buf_index) { | |
223 case media::MediaCodecBridge::INFO_TRY_AGAIN_LATER: | |
224 return; | |
225 | |
226 case media::MediaCodecBridge::INFO_OUTPUT_FORMAT_CHANGED: { | |
227 int32 width, height; | |
228 media_codec_->GetOutputFormat(&width, &height); | |
229 | |
230 if (!picturebuffers_requested_) { | |
231 picturebuffers_requested_ = true; | |
232 size_ = gfx::Size(width, height); | |
233 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( | |
234 &AndroidVideoDecodeAccelerator::RequestPictureBuffers, | |
235 base::AsWeakPtr(this))); | |
236 } else { | |
237 // TODO(dwkang): support dynamic resolution changes. | |
238 // Currently, we assume that there is no resolution change in the | |
239 // input stream. So, INFO_OUTPUT_FORMAT_CHANGED should not happen | |
240 // more than once. However, we allow it if the resolution is the same | |
241 // as the previous one because |media_codec_| can be reset in Reset(). | |
242 RETURN_ON_FAILURE(size_ == gfx::Size(width, height), | |
243 "Dynamic resolution change is not supported.", | |
244 PLATFORM_FAILURE); | |
245 } | |
246 return; | |
247 } | |
248 | |
249 case media::MediaCodecBridge::INFO_OUTPUT_BUFFERS_CHANGED: | |
250 media_codec_->GetOutputBuffers(); | |
251 break; | |
252 } | |
253 } while (buf_index < 0); | |
254 | |
255 media_codec_->ReleaseOutputBuffer(buf_index, true); | |
256 | |
257 if (eos) { | |
258 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( | |
259 &AndroidVideoDecodeAccelerator::NotifyFlushDone, | |
260 base::AsWeakPtr(this))); | |
261 decoder_met_eos_ = true; | |
262 } else { | |
263 int64 bitstream_buffer_id = timestamp.InMicroseconds(); | |
264 SendCurrentSurfaceToClient(static_cast<int32>(bitstream_buffer_id)); | |
265 | |
266 // Remove the IDs up to and including the one the decoder just returned. | |
267 // Note that |bitstreams_notified_in_advance_| does not exactly track the | |
268 // bitstream IDs still in the decoder because of frame reordering; we only | |
269 // maintain it roughly and use it for throttling. | |
270 std::list<int32>::iterator it; | |
271 for (it = bitstreams_notified_in_advance_.begin(); | |
272 it != bitstreams_notified_in_advance_.end(); | |
273 ++it) { | |
274 if (*it == bitstream_buffer_id) { | |
275 bitstreams_notified_in_advance_.erase( | |
276 bitstreams_notified_in_advance_.begin(), ++it); | |
277 break; | |
278 } | |
279 } | |
280 } | |
281 } | |
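A standalone worked example (with made-up IDs) of the erase step at the end of DequeueOutput() above: erasing the range [begin, ++it) removes every notified ID up to and including the one just recovered from the output timestamp.

  #include <list>

  void EraseUpToIdSketch() {
    std::list<int> notified;      // stands in for |bitstreams_notified_in_advance_|
    notified.push_back(3);
    notified.push_back(5);
    notified.push_back(7);
    notified.push_back(9);
    const int output_id = 7;      // hypothetical recovered bitstream ID
    for (std::list<int>::iterator it = notified.begin();
         it != notified.end(); ++it) {
      if (*it == output_id) {
        notified.erase(notified.begin(), ++it);  // erases 3, 5, 7; leaves {9}
        break;
      }
    }
  }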
282 | |
283 void AndroidVideoDecodeAccelerator::SendCurrentSurfaceToClient( | |
284 int32 bitstream_id) { | |
285 DCHECK(thread_checker_.CalledOnValidThread()); | |
286 DCHECK_NE(bitstream_id, -1); | |
287 DCHECK(!free_picture_ids_.empty()); | |
288 | |
289 RETURN_ON_FAILURE(make_context_current_.Run(), | |
290 "Failed to make this decoder's GL context current.", | |
291 PLATFORM_FAILURE); | |
292 | |
293 int32 picture_buffer_id = free_picture_ids_.front(); | |
294 free_picture_ids_.pop(); | |
295 | |
296 float transform_matrix[16]; | |
297 surface_texture_->UpdateTexImage(); | |
298 surface_texture_->GetTransformMatrix(transform_matrix); | |
299 | |
300 OutputBufferMap::const_iterator i = | |
301 output_picture_buffers_.find(picture_buffer_id); | |
302 RETURN_ON_FAILURE(i != output_picture_buffers_.end(), | |
303 "Can't find a PictureBuffer for " << picture_buffer_id, | |
304 PLATFORM_FAILURE); | |
305 uint32 picture_buffer_texture_id = i->second.texture_id(); | |
306 | |
307 RETURN_ON_FAILURE(gl_decoder_.get(), | |
308 "Failed to get gles2 decoder instance.", | |
309 ILLEGAL_STATE); | |
310 // Defer initializing the CopyTextureCHROMIUMResourceManager until it is | |
311 // needed because it takes tens of milliseconds to initialize. | |
312 if (!copier_.get()) { | |
313 copier_.reset(new gpu::CopyTextureCHROMIUMResourceManager()); | |
314 copier_->Initialize(); | |
315 gl_decoder_->RestoreFramebufferBindings(); | |
greggman, 2013/02/28 00:39:27: This store is not needed? I made Initialize take a
dwkang1, 2013/02/28 00:45:25: I just checked your patch set 2. ;-) I'll remove i
dwkang1, 2013/02/28 03:07:32: Done.
316 } | |
317 | |
318 // Here, we copy |surface_texture_id_| to the picture buffer instead of | |
319 // attaching a new texture to |surface_texture_| via attachToGLContext() | |
320 // because: | |
321 // 1. Once we call detachFromGLContext(), it deletes the previously | |
322 // attached texture. | |
323 // 2. SurfaceTexture requires us to apply a transform matrix when we show | |
324 // the texture. | |
325 copier_->DoCopyTexture(gl_decoder_.get(), GL_TEXTURE_EXTERNAL_OES, | |
326 GL_TEXTURE_2D, surface_texture_id_, | |
327 picture_buffer_texture_id, 0, size_.width(), | |
328 size_.height(), false, false, false); | |
329 | |
330 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( | |
331 &AndroidVideoDecodeAccelerator::NotifyPictureReady, | |
332 base::AsWeakPtr(this), media::Picture(picture_buffer_id, bitstream_id))); | |
333 } | |
334 | |
335 void AndroidVideoDecodeAccelerator::Decode( | |
336 const media::BitstreamBuffer& bitstream_buffer) { | |
337 DCHECK(thread_checker_.CalledOnValidThread()); | |
338 if (bitstream_buffer.id() != -1 && bitstream_buffer.size() == 0) { | |
339 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( | |
340 &AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer, | |
341 base::AsWeakPtr(this), bitstream_buffer.id())); | |
342 return; | |
343 } | |
344 | |
345 pending_bitstream_buffers_.push(bitstream_buffer); | |
346 | |
347 if (!io_task_is_posted_) | |
348 DoIOTask(); | |
349 } | |
350 | |
351 void AndroidVideoDecodeAccelerator::AssignPictureBuffers( | |
352 const std::vector<media::PictureBuffer>& buffers) { | |
353 DCHECK(thread_checker_.CalledOnValidThread()); | |
354 DCHECK(output_picture_buffers_.empty()); | |
355 | |
356 for (size_t i = 0; i < buffers.size(); ++i) { | |
357 output_picture_buffers_.insert(std::make_pair(buffers[i].id(), buffers[i])); | |
358 free_picture_ids_.push(buffers[i].id()); | |
359 } | |
360 | |
361 RETURN_ON_FAILURE(output_picture_buffers_.size() == kNumPictureBuffers, | |
362 "Invalid picture buffers were passed.", | |
363 INVALID_ARGUMENT); | |
364 | |
365 if (!io_task_is_posted_) | |
366 DoIOTask(); | |
367 } | |
368 | |
369 void AndroidVideoDecodeAccelerator::ReusePictureBuffer( | |
370 int32 picture_buffer_id) { | |
371 DCHECK(thread_checker_.CalledOnValidThread()); | |
372 free_picture_ids_.push(picture_buffer_id); | |
373 | |
374 if (!io_task_is_posted_) | |
375 DoIOTask(); | |
376 } | |
377 | |
378 void AndroidVideoDecodeAccelerator::Flush() { | |
379 DCHECK(thread_checker_.CalledOnValidThread()); | |
380 | |
381 Decode(media::BitstreamBuffer(-1, base::SharedMemoryHandle(), 0)); | |
382 } | |
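In outline, the end-of-stream path this Flush() kicks off, using only the methods in this file (the comments paraphrase the code above and below):

  Flush();                        // pushes a BitstreamBuffer with id() == -1
  // DoIOTask() -> QueueInput(): the id == -1 sentinel triggers
  //     media_codec_->QueueEOS(input_buf_index);
  // DoIOTask() -> DequeueOutput(): a later DequeueOutputBuffer() call sets
  //     |eos| to true, so NotifyFlushDone() is posted to |client_| and
  //     |decoder_met_eos_| is set for the next Reset().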
383 | |
384 void AndroidVideoDecodeAccelerator::ConfigureMediaCodec() { | |
385 DCHECK(surface_texture_.get()); | |
386 | |
387 media_codec_.reset(new media::MediaCodecBridge(codec_)); | |
388 | |
389 JNIEnv* env = base::android::AttachCurrentThread(); | |
390 CHECK(env); | |
391 ScopedJavaLocalRef<jclass> cls( | |
392 base::android::GetClass(env, "android/view/Surface")); | |
393 jmethodID constructor = MethodID::Get<MethodID::TYPE_INSTANCE>( | |
394 env, cls.obj(), "<init>", "(Landroid/graphics/SurfaceTexture;)V"); | |
395 ScopedJavaLocalRef<jobject> j_surface( | |
396 env, env->NewObject( | |
397 cls.obj(), constructor, | |
398 surface_texture_->j_surface_texture().obj())); | |
399 | |
400 // VDA does not pass the container-indicated resolution during | |
401 // initialization, so we default to 720p here. | |
402 // TODO(dwkang): find a way to remove the following hard-coded value. | |
403 media_codec_->StartVideo(codec_, gfx::Size(1280, 720), j_surface.obj()); | |
404 content::ReleaseSurface(j_surface.obj()); | |
405 media_codec_->GetOutputBuffers(); | |
406 } | |
407 | |
408 void AndroidVideoDecodeAccelerator::Reset() { | |
409 DCHECK(thread_checker_.CalledOnValidThread()); | |
410 | |
411 while (!pending_bitstream_buffers_.empty()) { | |
412 media::BitstreamBuffer& bitstream_buffer = | |
413 pending_bitstream_buffers_.front(); | |
414 pending_bitstream_buffers_.pop(); | |
415 | |
416 if (bitstream_buffer.id() != -1) { | |
417 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( | |
418 &AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer, | |
419 base::AsWeakPtr(this), bitstream_buffer.id())); | |
420 } | |
421 } | |
422 bitstreams_notified_in_advance_.clear(); | |
423 | |
424 if (!decoder_met_eos_) { | |
425 media_codec_->Reset(); | |
426 } else { | |
427 // MediaCodec should be usable after reaching EOS, but on some devices it | |
428 // is not (b/8125974). To avoid that case, we recreate a new one. | |
429 media_codec_->Stop(); | |
430 ConfigureMediaCodec(); | |
431 } | |
432 decoder_met_eos_ = false; | |
433 num_bytes_used_in_the_pending_buffer_ = 0; | |
434 state_ = NO_ERROR; | |
435 | |
436 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( | |
437 &AndroidVideoDecodeAccelerator::NotifyResetDone, base::AsWeakPtr(this))); | |
438 } | |
439 | |
440 void AndroidVideoDecodeAccelerator::Destroy() { | |
441 DCHECK(thread_checker_.CalledOnValidThread()); | |
442 | |
443 if (media_codec_) | |
444 media_codec_->Stop(); | |
445 if (surface_texture_id_) | |
446 glDeleteTextures(1, &surface_texture_id_); | |
447 if (copier_.get()) | |
448 copier_->Destroy(); | |
449 delete this; | |
450 } | |
451 | |
452 void AndroidVideoDecodeAccelerator::NotifyInitializeDone() { | |
453 client_->NotifyInitializeDone(); | |
454 } | |
455 | |
456 void AndroidVideoDecodeAccelerator::RequestPictureBuffers() { | |
457 client_->ProvidePictureBuffers(kNumPictureBuffers, size_, GL_TEXTURE_2D); | |
458 } | |
459 | |
460 void AndroidVideoDecodeAccelerator::NotifyPictureReady( | |
461 const media::Picture& picture) { | |
462 client_->PictureReady(picture); | |
463 } | |
464 | |
465 void AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer( | |
466 int input_buffer_id) { | |
467 client_->NotifyEndOfBitstreamBuffer(input_buffer_id); | |
468 } | |
469 | |
470 void AndroidVideoDecodeAccelerator::NotifyFlushDone() { | |
471 client_->NotifyFlushDone(); | |
472 } | |
473 | |
474 void AndroidVideoDecodeAccelerator::NotifyResetDone() { | |
475 client_->NotifyResetDone(); | |
476 } | |
477 | |
478 void AndroidVideoDecodeAccelerator::NotifyError( | |
479 media::VideoDecodeAccelerator::Error error) { | |
480 client_->NotifyError(error); | |
481 } | |
482 | |
483 } // namespace content | |