| 1 // Copyright (c) 2013 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. |
| 4 |
| 5 #include "content/common/gpu/media/android_video_decode_accelerator.h" |
| 6 |
| 7 #include <jni.h> |
| 8 |
| 9 #include "base/android/jni_android.h" |
| 10 #include "base/android/scoped_java_ref.h" |
| 11 #include "base/bind.h" |
| 12 #include "base/logging.h" |
| 13 #include "base/message_loop.h" |
| 14 #include "content/common/android/surface_callback.h" |
| 15 #include "content/common/gpu/gpu_channel.h" |
| 16 #include "gpu/command_buffer/service/gles2_cmd_decoder.h" |
| 17 #include "media/base/bitstream_buffer.h" |
| 18 #include "media/base/limits.h" |
| 19 #include "media/video/picture.h" |
| 20 #include "ui/gl/gl_bindings.h" |
| 21 |
| 22 using base::android::MethodID; |
| 23 using base::android::ScopedJavaLocalRef; |
| 24 |
| 25 namespace content { |
| 26 |
| 27 // Helper macro for dealing with failure. If |result| evaluates to false,
| 28 // emit |log| to ERROR, report |error| to the client, enter the error state, and return.
| 29 #define RETURN_ON_FAILURE(result, log, error) \ |
| 30 do { \ |
| 31 if (!(result)) { \ |
| 32 DLOG(ERROR) << log; \ |
| 33 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( \ |
| 34 &AndroidVideoDecodeAccelerator::NotifyError, \ |
| 35 base::AsWeakPtr(this), error)); \ |
| 36 state_ = ERROR; \ |
| 37 return; \ |
| 38 } \ |
| 39 } while (0) |
| 40 |
| 41 // TODO(dwkang): We only need kMaxVideoFrames to get through the media
| 42 // stack's prerolling phase, but one extra is added due to crbug.com/176036.
| 43 // This should be tuned once we have an actual use case.
| 44 enum { kNumPictureBuffers = media::limits::kMaxVideoFrames + 1 }; |
| 45 |
| 46 // Maximum number of bitstream buffers the client may be notified about via
| 47 // NotifyEndOfBitstreamBuffer() before any output is dequeued for them.
| 48 enum { kMaxBitstreamsNotifiedInAdvance = 32 }; |
| 49 |
| 50 // static |
| 51 const base::TimeDelta AndroidVideoDecodeAccelerator::kDecodePollDelay = |
| 52 base::TimeDelta::FromMilliseconds(10); |
| 53 |
| 54 AndroidVideoDecodeAccelerator::AndroidVideoDecodeAccelerator( |
| 55 media::VideoDecodeAccelerator::Client* client, |
| 56 const base::WeakPtr<gpu::gles2::GLES2Decoder> decoder, |
| 57 const base::Callback<bool(void)>& make_context_current) |
| 58 : client_(client), |
| 59 make_context_current_(make_context_current), |
| 60 codec_(media::MediaCodecBridge::VIDEO_H264), |
| 61 state_(NO_ERROR), |
| 62 surface_texture_id_(0), |
| 63 picturebuffers_requested_(false), |
| 64 io_task_is_posted_(false), |
| 65 decoder_met_eos_(false), |
| 66 num_bytes_used_in_the_pending_buffer_(0), |
| 67 gl_decoder_(decoder) { |
| 68 } |
| 69 |
| 70 AndroidVideoDecodeAccelerator::~AndroidVideoDecodeAccelerator() { |
| 71 DCHECK(thread_checker_.CalledOnValidThread()); |
| 72 } |
| 73 |
| 74 bool AndroidVideoDecodeAccelerator::Initialize( |
| 75 media::VideoCodecProfile profile) { |
| 76 DCHECK(!media_codec_); |
| 77 DCHECK(thread_checker_.CalledOnValidThread()); |
| 78 |
| 79 if (!media::MediaCodecBridge::IsAvailable()) |
| 80 return false; |
| 81 |
| 82 if (profile == media::VP8PROFILE_MAIN) { |
| 83 codec_ = media::MediaCodecBridge::VIDEO_VP8; |
| 84 } else { |
| 85 // TODO(dwkang): enable H264 once b/8125974 is fixed. |
| 86 LOG(ERROR) << "Unsupported profile: " << profile; |
| 87 return false; |
| 88 } |
| 89 |
| 90 if (!make_context_current_.Run()) { |
| 91 LOG(ERROR) << "Failed to make this decoder's GL context current."; |
| 92 return false; |
| 93 } |
| 94 |
| 95 if (!gl_decoder_.get()) { |
| 96 LOG(ERROR) << "Failed to get gles2 decoder instance."; |
| 97 return false; |
| 98 } |
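| // Create and configure the external texture that the SurfaceTexture created
| // below will render decoded frames into.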
| 99 glGenTextures(1, &surface_texture_id_); |
| 100 glActiveTexture(GL_TEXTURE0); |
| 101 glBindTexture(GL_TEXTURE_EXTERNAL_OES, surface_texture_id_); |
| 102 |
| 103 glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_NEAREST); |
| 104 glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_NEAREST); |
| 105 glTexParameteri(GL_TEXTURE_EXTERNAL_OES, |
| 106 GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); |
| 107 glTexParameteri(GL_TEXTURE_EXTERNAL_OES, |
| 108 GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); |
| 109 gl_decoder_->RestoreTextureUnitBindings(0); |
| 110 gl_decoder_->RestoreActiveTexture(); |
| 111 |
| 112 surface_texture_ = new SurfaceTextureBridge(surface_texture_id_); |
| 113 |
| 114 ConfigureMediaCodec(); |
| 115 |
| 116 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( |
| 117 &AndroidVideoDecodeAccelerator::NotifyInitializeDone, |
| 118 base::AsWeakPtr(this))); |
| 119 return true; |
| 120 } |
| 121 |
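| // Performs one round of decode work: drains any available output from the
| // codec, queues pending input, and re-posts itself while there is still
| // pending input or a free picture buffer.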
| 122 void AndroidVideoDecodeAccelerator::DoIOTask() { |
| 123 io_task_is_posted_ = false; |
| 124 if (state_ == ERROR) { |
| 125 return; |
| 126 } |
| 127 |
| 128 DequeueOutput(); |
| 129 QueueInput(); |
| 130 |
| 131 if (!pending_bitstream_buffers_.empty() || |
| 132 !free_picture_ids_.empty()) { |
| 133 io_task_is_posted_ = true; |
| 134 // TODO(dwkang): PostDelayedTask() does not guarantee that the task will
| 135 // run at the exact time. We need a better way to poll.
| 136 MessageLoop::current()->PostDelayedTask( |
| 137 FROM_HERE, |
| 138 base::Bind( |
| 139 &AndroidVideoDecodeAccelerator::DoIOTask, base::AsWeakPtr(this)), |
| 140 kDecodePollDelay); |
| 141 } |
| 142 } |
| 143 |
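| // Feeds data from the front pending bitstream buffer into a free MediaCodec
| // input buffer, notifying the client in advance once a buffer is fully
| // consumed.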
| 144 void AndroidVideoDecodeAccelerator::QueueInput() { |
| 145 if (bitstreams_notified_in_advance_.size() > kMaxBitstreamsNotifiedInAdvance) |
| 146 return; |
| 147 if (pending_bitstream_buffers_.empty()) |
| 148 return; |
| 149 |
| 150 int input_buf_index = media_codec_->DequeueInputBuffer( |
| 151 media::MediaCodecBridge::kTimeOutNoWait); |
| 152 if (input_buf_index < 0) { |
| 153 DCHECK_EQ(input_buf_index, media::MediaCodecBridge::INFO_TRY_AGAIN_LATER); |
| 154 return; |
| 155 } |
| 156 media::BitstreamBuffer& bitstream_buffer = |
| 157 pending_bitstream_buffers_.front(); |
| 158 |
| 159 if (bitstream_buffer.id() == -1) { |
| 160 media_codec_->QueueEOS(input_buf_index); |
| 161 pending_bitstream_buffers_.pop(); |
| 162 return; |
| 163 } |
| 164 // Abuse the presentation time argument to propagate the bitstream |
| 165 // buffer ID to the output, so we can report it back to the client in |
| 166 // PictureReady(). |
| 167 base::TimeDelta timestamp = |
| 168 base::TimeDelta::FromMicroseconds(bitstream_buffer.id()); |
| 169 |
| 170 int bytes_written = 0; |
| 171 scoped_ptr<base::SharedMemory> shm( |
| 172 new base::SharedMemory(bitstream_buffer.handle(), true)); |
| 173 |
| 174 RETURN_ON_FAILURE(shm->Map(bitstream_buffer.size()), |
| 175 "Failed to SharedMemory::Map()", |
| 176 UNREADABLE_INPUT); |
| 177 |
| 178 const size_t offset = num_bytes_used_in_the_pending_buffer_; |
| 179 bytes_written = media_codec_->QueueInputBuffer( |
| 180 input_buf_index, |
| 181 static_cast<const uint8*>(shm->memory()) + offset, |
| 182 bitstream_buffer.size() - offset, timestamp); |
| 183 num_bytes_used_in_the_pending_buffer_ += bytes_written; |
| 184 CHECK_LE(num_bytes_used_in_the_pending_buffer_, bitstream_buffer.size()); |
| 185 |
| 186 if (num_bytes_used_in_the_pending_buffer_ == bitstream_buffer.size()) { |
| 187 num_bytes_used_in_the_pending_buffer_ = 0; |
| 188 pending_bitstream_buffers_.pop(); |
| 189 |
| 190 // Ideally we would call NotifyEndOfBitstreamBuffer() only once no more
| 191 // decoded output will be produced from a bitstream buffer, but the
| 192 // MediaCodec API does not let us know when that is.
| 193 // So we call NotifyEndOfBitstreamBuffer() in advance here in order to keep
| 194 // receiving more bitstreams from the client, and throttle them using
| 195 // |bitstreams_notified_in_advance_|.
| 196 // TODO(dwkang): check if there is a way to remove this workaround.
| 197 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( |
| 198 &AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer, |
| 199 base::AsWeakPtr(this), bitstream_buffer.id())); |
| 200 bitstreams_notified_in_advance_.push_back(bitstream_buffer.id()); |
| 201 } |
| 202 } |
| 203 |
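| // Dequeues at most one decoded frame from MediaCodec, handling format and
| // output-buffer changes along the way, and hands it to the client (or
| // signals flush completion at EOS).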
| 204 void AndroidVideoDecodeAccelerator::DequeueOutput() { |
| 205 if (picturebuffers_requested_ && output_picture_buffers_.empty()) |
| 206 return; |
| 207 |
| 208 if (!output_picture_buffers_.empty() && free_picture_ids_.empty()) { |
| 209 // We don't have a free picture buffer to output into; wait for one.
| 210 return; |
| 211 } |
| 212 |
| 213 bool eos = false; |
| 214 base::TimeDelta timestamp; |
| 215 int32 buf_index = 0; |
| 216 do { |
| 217 int32 offset = 0; |
| 218 int32 size = 0; |
| 219 buf_index = media_codec_->DequeueOutputBuffer( |
| 220 media::MediaCodecBridge::kTimeOutNoWait, |
| 221 &offset, &size, ×tamp, &eos); |
| 222 switch (buf_index) { |
| 223 case media::MediaCodecBridge::INFO_TRY_AGAIN_LATER: |
| 224 return; |
| 225 |
| 226 case media::MediaCodecBridge::INFO_OUTPUT_FORMAT_CHANGED: { |
| 227 int32 width, height; |
| 228 media_codec_->GetOutputFormat(&width, &height); |
| 229 |
| 230 if (!picturebuffers_requested_) { |
| 231 picturebuffers_requested_ = true; |
| 232 size_ = gfx::Size(width, height); |
| 233 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( |
| 234 &AndroidVideoDecodeAccelerator::RequestPictureBuffers, |
| 235 base::AsWeakPtr(this))); |
| 236 } else { |
| 237 // TODO(dwkang): support dynamic resolution changes.
| 238 // Currently we assume that the input stream never changes resolution,
| 239 // so INFO_OUTPUT_FORMAT_CHANGED should not happen more than once.
| 240 // However, we allow it when the resolution matches the previous one,
| 241 // because |media_codec_| can be reset or recreated in Reset().
| 242 RETURN_ON_FAILURE(size_ == gfx::Size(width, height), |
| 243 "Dynamic resolution change is not supported.", |
| 244 PLATFORM_FAILURE); |
| 245 } |
| 246 return; |
| 247 } |
| 248 |
| 249 case media::MediaCodecBridge::INFO_OUTPUT_BUFFERS_CHANGED: |
| 250 media_codec_->GetOutputBuffers(); |
| 251 break; |
| 252 } |
| 253 } while (buf_index < 0); |
| 254 |
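| // Releasing the buffer with render == true pushes the decoded frame onto
| // the SurfaceTexture.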
| 255 media_codec_->ReleaseOutputBuffer(buf_index, true); |
| 256 |
| 257 if (eos) { |
| 258 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( |
| 259 &AndroidVideoDecodeAccelerator::NotifyFlushDone, |
| 260 base::AsWeakPtr(this))); |
| 261 decoder_met_eos_ = true; |
| 262 } else { |
| 263 int64 bitstream_buffer_id = timestamp.InMicroseconds(); |
| 264 SendCurrentSurfaceToClient(static_cast<int32>(bitstream_buffer_id)); |
| 265 |
| 266 // Remove the ids up to and including this one from the list. Note that
| 267 // |bitstreams_notified_in_advance_| does not exactly track the bitstream
| 268 // ids still inside the decoder because of frame reordering; we only
| 269 // maintain it approximately and use it for throttling.
| 270 std::list<int32>::iterator it; |
| 271 for (it = bitstreams_notified_in_advance_.begin(); |
| 272 it != bitstreams_notified_in_advance_.end(); |
| 273 ++it) { |
| 274 if (*it == bitstream_buffer_id) { |
| 275 bitstreams_notified_in_advance_.erase( |
| 276 bitstreams_notified_in_advance_.begin(), ++it); |
| 277 break; |
| 278 } |
| 279 } |
| 280 } |
| 281 } |
| 282 |
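| // Copies the frame currently held by |surface_texture_| into the texture of
| // the next free picture buffer and notifies the client that a picture is
| // ready.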
| 283 void AndroidVideoDecodeAccelerator::SendCurrentSurfaceToClient( |
| 284 int32 bitstream_id) { |
| 285 DCHECK(thread_checker_.CalledOnValidThread()); |
| 286 DCHECK_NE(bitstream_id, -1); |
| 287 DCHECK(!free_picture_ids_.empty()); |
| 288 |
| 289 RETURN_ON_FAILURE(make_context_current_.Run(), |
| 290 "Failed to make this decoder's GL context current.", |
| 291 PLATFORM_FAILURE); |
| 292 |
| 293 int32 picture_buffer_id = free_picture_ids_.front(); |
| 294 free_picture_ids_.pop(); |
| 295 |
| 296 float transform_matrix[16];
| 297 surface_texture_->UpdateTexImage();
| 298 surface_texture_->GetTransformMatrix(transform_matrix);
| 299 |
| 300 OutputBufferMap::const_iterator i = |
| 301 output_picture_buffers_.find(picture_buffer_id); |
| 302 RETURN_ON_FAILURE(i != output_picture_buffers_.end(), |
| 303 "Can't find a PictureBuffer for " << picture_buffer_id, |
| 304 PLATFORM_FAILURE); |
| 305 uint32 picture_buffer_texture_id = i->second.texture_id(); |
| 306 |
| 307 RETURN_ON_FAILURE(gl_decoder_.get(), |
| 308 "Failed to get gles2 decoder instance.", |
| 309 ILLEGAL_STATE); |
| 310 // Defer initializing the CopyTextureCHROMIUMResourceManager until it is
| 311 // needed, because it takes tens of milliseconds to initialize.
| 312 if (!copier_) { |
| 313 copier_.reset(new gpu::CopyTextureCHROMIUMResourceManager()); |
| 314 copier_->Initialize(gl_decoder_.get()); |
| 315 } |
| 316 |
| 317 // Here we copy |surface_texture_id_| into the picture buffer instead of
| 318 // attaching a new texture to |surface_texture_| via attachToGLContext(),
| 319 // because:
| 320 // 1. Once we call detachFromGLContext(), it deletes the previously
| 321 // attached texture.
| 322 // 2. SurfaceTexture requires us to apply a transform matrix when we show
| 323 // the texture.
| 324 copier_->DoCopyTexture(gl_decoder_.get(), GL_TEXTURE_EXTERNAL_OES, |
| 325 GL_TEXTURE_2D, surface_texture_id_, |
| 326 picture_buffer_texture_id, 0, size_.width(), |
| 327 size_.height(), false, false, false); |
| 328 |
| 329 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( |
| 330 &AndroidVideoDecodeAccelerator::NotifyPictureReady, |
| 331 base::AsWeakPtr(this), media::Picture(picture_buffer_id, bitstream_id))); |
| 332 } |
| 333 |
| 334 void AndroidVideoDecodeAccelerator::Decode( |
| 335 const media::BitstreamBuffer& bitstream_buffer) { |
| 336 DCHECK(thread_checker_.CalledOnValidThread()); |
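| // Zero-sized buffers (other than the flush marker) carry no data; just hand
| // them straight back to the client.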
| 337 if (bitstream_buffer.id() != -1 && bitstream_buffer.size() == 0) { |
| 338 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( |
| 339 &AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer, |
| 340 base::AsWeakPtr(this), bitstream_buffer.id())); |
| 341 return; |
| 342 } |
| 343 |
| 344 pending_bitstream_buffers_.push(bitstream_buffer); |
| 345 |
| 346 if (!io_task_is_posted_) |
| 347 DoIOTask(); |
| 348 } |
| 349 |
| 350 void AndroidVideoDecodeAccelerator::AssignPictureBuffers( |
| 351 const std::vector<media::PictureBuffer>& buffers) { |
| 352 DCHECK(thread_checker_.CalledOnValidThread()); |
| 353 DCHECK(output_picture_buffers_.empty()); |
| 354 |
| 355 for (size_t i = 0; i < buffers.size(); ++i) { |
| 356 output_picture_buffers_.insert(std::make_pair(buffers[i].id(), buffers[i])); |
| 357 free_picture_ids_.push(buffers[i].id()); |
| 358 } |
| 359 |
| 360 RETURN_ON_FAILURE(output_picture_buffers_.size() == kNumPictureBuffers, |
| 361 "Invalid picture buffers were passed.", |
| 362 INVALID_ARGUMENT); |
| 363 |
| 364 if (!io_task_is_posted_) |
| 365 DoIOTask(); |
| 366 } |
| 367 |
| 368 void AndroidVideoDecodeAccelerator::ReusePictureBuffer( |
| 369 int32 picture_buffer_id) { |
| 370 DCHECK(thread_checker_.CalledOnValidThread()); |
| 371 free_picture_ids_.push(picture_buffer_id); |
| 372 |
| 373 if (!io_task_is_posted_) |
| 374 DoIOTask(); |
| 375 } |
| 376 |
| 377 void AndroidVideoDecodeAccelerator::Flush() { |
| 378 DCHECK(thread_checker_.CalledOnValidThread()); |
| 379 |
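| // Queue a flush marker (bitstream id -1); NotifyFlushDone() is sent once the
| // codec reports EOS in DequeueOutput().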
| 380 Decode(media::BitstreamBuffer(-1, base::SharedMemoryHandle(), 0)); |
| 381 } |
| 382 |
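| // Creates the MediaCodec bridge, wraps |surface_texture_| in a Java Surface,
| // and starts the codec decoding into that surface.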
| 383 void AndroidVideoDecodeAccelerator::ConfigureMediaCodec() { |
| 384 DCHECK(surface_texture_.get()); |
| 385 |
| 386 media_codec_.reset(new media::MediaCodecBridge(codec_)); |
| 387 |
| 388 JNIEnv* env = base::android::AttachCurrentThread(); |
| 389 CHECK(env); |
| 390 ScopedJavaLocalRef<jclass> cls( |
| 391 base::android::GetClass(env, "android/view/Surface")); |
| 392 jmethodID constructor = MethodID::Get<MethodID::TYPE_INSTANCE>( |
| 393 env, cls.obj(), "<init>", "(Landroid/graphics/SurfaceTexture;)V"); |
| 394 ScopedJavaLocalRef<jobject> j_surface( |
| 395 env, env->NewObject( |
| 396 cls.obj(), constructor, |
| 397 surface_texture_->j_surface_texture().obj())); |
| 398 |
| 399 // The VDA is not given the container-indicated resolution during
| 400 // initialization, so we default to 720p here.
| 401 // TODO(dwkang): find a way to remove the following hard-coded value.
| 402 media_codec_->StartVideo(codec_, gfx::Size(1280, 720), j_surface.obj()); |
| 403 content::ReleaseSurface(j_surface.obj()); |
| 404 media_codec_->GetOutputBuffers(); |
| 405 } |
| 406 |
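| // Drops all pending input (returning each buffer to the client), resets or
| // recreates the codec depending on whether EOS was reached, and reports
| // completion asynchronously.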
| 407 void AndroidVideoDecodeAccelerator::Reset() { |
| 408 DCHECK(thread_checker_.CalledOnValidThread()); |
| 409 |
| 410 while (!pending_bitstream_buffers_.empty()) {
| 411 media::BitstreamBuffer& bitstream_buffer = |
| 412 pending_bitstream_buffers_.front(); |
| 413 pending_bitstream_buffers_.pop(); |
| 414 |
| 415 if (bitstream_buffer.id() != -1) { |
| 416 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( |
| 417 &AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer, |
| 418 base::AsWeakPtr(this), bitstream_buffer.id())); |
| 419 } |
| 420 } |
| 421 bitstreams_notified_in_advance_.clear(); |
| 422 |
| 423 if (!decoder_met_eos_) { |
| 424 media_codec_->Reset(); |
| 425 } else { |
| 426 // MediaCodec should remain usable after reaching EOS, but on some devices
| 427 // it is not (b/8125974). To avoid that case, we recreate the codec.
| 428 media_codec_->Stop(); |
| 429 ConfigureMediaCodec(); |
| 430 } |
| 431 decoder_met_eos_ = false; |
| 432 num_bytes_used_in_the_pending_buffer_ = 0; |
| 433 state_ = NO_ERROR; |
| 434 |
| 435 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( |
| 436 &AndroidVideoDecodeAccelerator::NotifyResetDone, base::AsWeakPtr(this))); |
| 437 } |
| 438 |
| 439 void AndroidVideoDecodeAccelerator::Destroy() { |
| 440 DCHECK(thread_checker_.CalledOnValidThread()); |
| 441 |
| 442 if (media_codec_) |
| 443 media_codec_->Stop(); |
| 444 if (surface_texture_id_) |
| 445 glDeleteTextures(1, &surface_texture_id_); |
| 446 if (copier_) |
| 447 copier_->Destroy(); |
| 448 delete this; |
| 449 } |
| 450 |
| 451 void AndroidVideoDecodeAccelerator::NotifyInitializeDone() { |
| 452 client_->NotifyInitializeDone(); |
| 453 } |
| 454 |
| 455 void AndroidVideoDecodeAccelerator::RequestPictureBuffers() { |
| 456 client_->ProvidePictureBuffers(kNumPictureBuffers, size_, GL_TEXTURE_2D); |
| 457 } |
| 458 |
| 459 void AndroidVideoDecodeAccelerator::NotifyPictureReady( |
| 460 const media::Picture& picture) { |
| 461 client_->PictureReady(picture); |
| 462 } |
| 463 |
| 464 void AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer( |
| 465 int input_buffer_id) { |
| 466 client_->NotifyEndOfBitstreamBuffer(input_buffer_id); |
| 467 } |
| 468 |
| 469 void AndroidVideoDecodeAccelerator::NotifyFlushDone() { |
| 470 client_->NotifyFlushDone(); |
| 471 } |
| 472 |
| 473 void AndroidVideoDecodeAccelerator::NotifyResetDone() { |
| 474 client_->NotifyResetDone(); |
| 475 } |
| 476 |
| 477 void AndroidVideoDecodeAccelerator::NotifyError( |
| 478 media::VideoDecodeAccelerator::Error error) { |
| 479 client_->NotifyError(error); |
| 480 } |
| 481 |
| 482 } // namespace content |