OLD | NEW |
1 // Copyright (c) 2013 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2013 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "media/gpu/android_video_decode_accelerator.h" | 5 #include "media/gpu/android_video_decode_accelerator.h" |
6 | 6 |
7 #include <stddef.h> | 7 #include <stddef.h> |
8 | 8 |
9 #include <memory> | 9 #include <memory> |
10 | 10 |
(...skipping 27 matching lines...) |
38 #include "media/gpu/shared_memory_region.h" | 38 #include "media/gpu/shared_memory_region.h" |
39 #include "media/video/picture.h" | 39 #include "media/video/picture.h" |
40 #include "ui/gl/android/scoped_java_surface.h" | 40 #include "ui/gl/android/scoped_java_surface.h" |
41 #include "ui/gl/android/surface_texture.h" | 41 #include "ui/gl/android/surface_texture.h" |
42 #include "ui/gl/gl_bindings.h" | 42 #include "ui/gl/gl_bindings.h" |
43 | 43 |
44 #if defined(ENABLE_MOJO_MEDIA_IN_GPU_PROCESS) | 44 #if defined(ENABLE_MOJO_MEDIA_IN_GPU_PROCESS) |
45 #include "media/mojo/services/mojo_cdm_service.h" | 45 #include "media/mojo/services/mojo_cdm_service.h" |
46 #endif | 46 #endif |
47 | 47 |
48 #define POST_ERROR(error_code, error_message) \ | 48 #define POST_ERROR(error_code, error_message) \ |
49 do { \ | 49 do { \ |
50 DLOG(ERROR) << error_message; \ | 50 DLOG(ERROR) << error_message; \ |
51 PostError(FROM_HERE, media::VideoDecodeAccelerator::error_code); \ | 51 PostError(FROM_HERE, VideoDecodeAccelerator::error_code); \ |
52 } while (0) | 52 } while (0) |
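
The do { } while (0) wrapper above is the standard way to make a multi-statement macro expand to a single statement. A minimal standalone illustration with a hypothetical REPORT macro (not the real POST_ERROR, which also posts a task to the decoder thread):

#include <cstdio>

// Hypothetical REPORT macro: the do { } while (0) wrapper turns the
// multi-statement expansion into one statement, so it composes safely
// inside a braceless if/else.
#define REPORT(msg)                            \
  do {                                         \
    std::fprintf(stderr, "error: %s\n", msg);  \
  } while (0)

int main() {
  bool failed = true;
  if (failed)
    REPORT("decode failed");  // one statement; the else below binds as expected
  else
    std::puts("ok");
  return 0;
}
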
53 | 53 |
54 namespace media { | 54 namespace media { |
55 | 55 |
56 enum { kNumPictureBuffers = media::limits::kMaxVideoFrames + 1 }; | 56 enum { kNumPictureBuffers = limits::kMaxVideoFrames + 1 }; |
57 | 57 |
58 // Max number of bitstreams notified to the client with | 58 // Max number of bitstreams notified to the client with |
59 // NotifyEndOfBitstreamBuffer() before getting output from the bitstream. | 59 // NotifyEndOfBitstreamBuffer() before getting output from the bitstream. |
60 enum { kMaxBitstreamsNotifiedInAdvance = 32 }; | 60 enum { kMaxBitstreamsNotifiedInAdvance = 32 }; |
61 | 61 |
62 // MediaCodec is only guaranteed to support baseline, but some devices may | 62 // MediaCodec is only guaranteed to support baseline, but some devices may |
63 // support others. Advertise support for all H264 profiles and let the | 63 // support others. Advertise support for all H264 profiles and let the |
64 // MediaCodec fail when decoding if it's not actually supported. It's assumed | 64 // MediaCodec fail when decoding if it's not actually supported. It's assumed |
65 // that consumers won't have software fallback for H264 on Android anyway. | 65 // that consumers won't have software fallback for H264 on Android anyway. |
66 static const media::VideoCodecProfile kSupportedH264Profiles[] = { | 66 static const VideoCodecProfile kSupportedH264Profiles[] = { |
67 media::H264PROFILE_BASELINE, | 67 H264PROFILE_BASELINE, |
68 media::H264PROFILE_MAIN, | 68 H264PROFILE_MAIN, |
69 media::H264PROFILE_EXTENDED, | 69 H264PROFILE_EXTENDED, |
70 media::H264PROFILE_HIGH, | 70 H264PROFILE_HIGH, |
71 media::H264PROFILE_HIGH10PROFILE, | 71 H264PROFILE_HIGH10PROFILE, |
72 media::H264PROFILE_HIGH422PROFILE, | 72 H264PROFILE_HIGH422PROFILE, |
73 media::H264PROFILE_HIGH444PREDICTIVEPROFILE, | 73 H264PROFILE_HIGH444PREDICTIVEPROFILE, |
74 media::H264PROFILE_SCALABLEBASELINE, | 74 H264PROFILE_SCALABLEBASELINE, |
75 media::H264PROFILE_SCALABLEHIGH, | 75 H264PROFILE_SCALABLEHIGH, |
76 media::H264PROFILE_STEREOHIGH, | 76 H264PROFILE_STEREOHIGH, |
77 media::H264PROFILE_MULTIVIEWHIGH}; | 77 H264PROFILE_MULTIVIEWHIGH}; |
78 | 78 |
79 // Because MediaCodec is thread-hostile (must be poked on a single thread) and | 79 // Because MediaCodec is thread-hostile (must be poked on a single thread) and |
80 // has no callback mechanism (b/11990118), we must drive it by polling for | 80 // has no callback mechanism (b/11990118), we must drive it by polling for |
81 // complete frames (and available input buffers, when the codec is fully | 81 // complete frames (and available input buffers, when the codec is fully |
82 // saturated). This function defines the polling delay. The value used is an | 82 // saturated). This function defines the polling delay. The value used is an |
83 // arbitrary choice that trades off CPU utilization (spinning) against latency. | 83 // arbitrary choice that trades off CPU utilization (spinning) against latency. |
84 // Mirrors android_video_encode_accelerator.cc:EncodePollDelay(). | 84 // Mirrors android_video_encode_accelerator.cc:EncodePollDelay(). |
85 static inline const base::TimeDelta DecodePollDelay() { | 85 static inline const base::TimeDelta DecodePollDelay() { |
86 // An alternative to this polling scheme could be to dedicate a new thread | 86 // An alternative to this polling scheme could be to dedicate a new thread |
87 // (instead of using the ChildThread) to run the MediaCodec, and make that | 87 // (instead of using the ChildThread) to run the MediaCodec, and make that |
(...skipping 274 matching lines...) |
362 }; | 362 }; |
363 | 363 |
364 static base::LazyInstance<AVDATimerManager>::Leaky g_avda_timer = | 364 static base::LazyInstance<AVDATimerManager>::Leaky g_avda_timer = |
365 LAZY_INSTANCE_INITIALIZER; | 365 LAZY_INSTANCE_INITIALIZER; |
366 | 366 |
367 AndroidVideoDecodeAccelerator::CodecConfig::CodecConfig() {} | 367 AndroidVideoDecodeAccelerator::CodecConfig::CodecConfig() {} |
368 | 368 |
369 AndroidVideoDecodeAccelerator::CodecConfig::~CodecConfig() {} | 369 AndroidVideoDecodeAccelerator::CodecConfig::~CodecConfig() {} |
370 | 370 |
371 AndroidVideoDecodeAccelerator::BitstreamRecord::BitstreamRecord( | 371 AndroidVideoDecodeAccelerator::BitstreamRecord::BitstreamRecord( |
372 const media::BitstreamBuffer& bitstream_buffer) | 372 const BitstreamBuffer& bitstream_buffer) |
373 : buffer(bitstream_buffer) { | 373 : buffer(bitstream_buffer) { |
374 if (buffer.id() != -1) | 374 if (buffer.id() != -1) |
375 memory.reset(new SharedMemoryRegion(buffer, true)); | 375 memory.reset(new SharedMemoryRegion(buffer, true)); |
376 } | 376 } |
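
The id == -1 case above is the end-of-stream marker, which deliberately carries no shared memory. A standalone sketch of that convention, with illustrative types standing in for BitstreamBuffer and SharedMemoryRegion:

#include <cstddef>
#include <cstdint>
#include <memory>
#include <vector>

struct Record {
  int32_t id;
  std::unique_ptr<std::vector<uint8_t>> payload;  // null for the id == -1 EOS marker
};

Record MakeRecord(int32_t id, size_t size) {
  Record r{id, nullptr};
  if (id != -1)
    r.payload = std::make_unique<std::vector<uint8_t>>(size);
  return r;
}
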
377 | 377 |
378 AndroidVideoDecodeAccelerator::BitstreamRecord::BitstreamRecord( | 378 AndroidVideoDecodeAccelerator::BitstreamRecord::BitstreamRecord( |
379 BitstreamRecord&& other) | 379 BitstreamRecord&& other) |
380 : buffer(std::move(other.buffer)), memory(std::move(other.memory)) {} | 380 : buffer(std::move(other.buffer)), memory(std::move(other.memory)) {} |
381 | 381 |
382 AndroidVideoDecodeAccelerator::BitstreamRecord::~BitstreamRecord() {} | 382 AndroidVideoDecodeAccelerator::BitstreamRecord::~BitstreamRecord() {} |
(...skipping 57 matching lines...) |
440 | 440 |
441 // We signalled that we support deferred initialization, so see if the client | 441 // We signalled that we support deferred initialization, so see if the client |
442 // does also. | 442 // does also. |
443 deferred_initialization_pending_ = config.is_deferred_initialization_allowed; | 443 deferred_initialization_pending_ = config.is_deferred_initialization_allowed; |
444 | 444 |
445 if (config_.is_encrypted && !deferred_initialization_pending_) { | 445 if (config_.is_encrypted && !deferred_initialization_pending_) { |
446 DLOG(ERROR) << "Deferred initialization must be used for encrypted streams"; | 446 DLOG(ERROR) << "Deferred initialization must be used for encrypted streams"; |
447 return false; | 447 return false; |
448 } | 448 } |
449 | 449 |
450 if (codec_config_->codec_ != media::kCodecVP8 && | 450 if (codec_config_->codec_ != kCodecVP8 && |
451 codec_config_->codec_ != media::kCodecVP9 && | 451 codec_config_->codec_ != kCodecVP9 && |
452 codec_config_->codec_ != media::kCodecH264) { | 452 codec_config_->codec_ != kCodecH264) { |
453 LOG(ERROR) << "Unsupported profile: " << config.profile; | 453 LOG(ERROR) << "Unsupported profile: " << config.profile; |
454 return false; | 454 return false; |
455 } | 455 } |
456 | 456 |
457 // Only use MediaCodec for VP8/9 if it's likely backed by hardware | 457 // Only use MediaCodec for VP8/9 if it's likely backed by hardware |
458 // or if the stream is encrypted. | 458 // or if the stream is encrypted. |
459 if ((codec_config_->codec_ == media::kCodecVP8 || | 459 if ((codec_config_->codec_ == kCodecVP8 || |
460 codec_config_->codec_ == media::kCodecVP9) && | 460 codec_config_->codec_ == kCodecVP9) && |
461 !config_.is_encrypted && | 461 !config_.is_encrypted && |
462 media::VideoCodecBridge::IsKnownUnaccelerated( | 462 VideoCodecBridge::IsKnownUnaccelerated(codec_config_->codec_, |
463 codec_config_->codec_, media::MEDIA_CODEC_DECODER)) { | 463 MEDIA_CODEC_DECODER)) { |
464 DVLOG(1) << "Initialization failed: " | 464 DVLOG(1) << "Initialization failed: " |
465 << (codec_config_->codec_ == media::kCodecVP8 ? "vp8" : "vp9") | 465 << (codec_config_->codec_ == kCodecVP8 ? "vp8" : "vp9") |
466 << " is not hardware accelerated"; | 466 << " is not hardware accelerated"; |
467 return false; | 467 return false; |
468 } | 468 } |
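
Condensed, the gate above rejects MediaCodec only for clear-stream VP8/VP9 that is known to be software-decoded. A standalone restatement with illustrative names (the real check goes through VideoCodecBridge::IsKnownUnaccelerated):

enum class Codec { kH264, kVP8, kVP9 };

// True if the stream should fall back to the renderer's software decoder:
// VP8/VP9, not encrypted, and MediaCodec is known to be software-only here.
bool PreferSoftwareVpxDecoder(Codec codec, bool is_encrypted,
                              bool known_unaccelerated) {
  const bool is_vpx = codec == Codec::kVP8 || codec == Codec::kVP9;
  return is_vpx && !is_encrypted && known_unaccelerated;
}
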
469 | 469 |
470 auto gles_decoder = get_gles2_decoder_cb_.Run(); | 470 auto gles_decoder = get_gles2_decoder_cb_.Run(); |
471 if (!gles_decoder) { | 471 if (!gles_decoder) { |
472 LOG(ERROR) << "Failed to get gles2 decoder instance."; | 472 LOG(ERROR) << "Failed to get gles2 decoder instance."; |
473 return false; | 473 return false; |
474 } | 474 } |
475 | 475 |
(...skipping 116 matching lines...) |
592 return false; | 592 return false; |
593 if (pending_bitstream_records_.empty()) | 593 if (pending_bitstream_records_.empty()) |
594 return false; | 594 return false; |
595 | 595 |
596 int input_buf_index = pending_input_buf_index_; | 596 int input_buf_index = pending_input_buf_index_; |
597 | 597 |
598 // Do not dequeue a new input buffer if we failed with MEDIA_CODEC_NO_KEY. | 598 // Do not dequeue a new input buffer if we failed with MEDIA_CODEC_NO_KEY. |
599 // That status does not return this buffer back to the pool of | 599 // That status does not return this buffer back to the pool of |
600 // available input buffers. We have to reuse it in QueueSecureInputBuffer(). | 600 // available input buffers. We have to reuse it in QueueSecureInputBuffer(). |
601 if (input_buf_index == -1) { | 601 if (input_buf_index == -1) { |
602 media::MediaCodecStatus status = | 602 MediaCodecStatus status = |
603 media_codec_->DequeueInputBuffer(NoWaitTimeOut(), &input_buf_index); | 603 media_codec_->DequeueInputBuffer(NoWaitTimeOut(), &input_buf_index); |
604 switch (status) { | 604 switch (status) { |
605 case media::MEDIA_CODEC_DEQUEUE_INPUT_AGAIN_LATER: | 605 case MEDIA_CODEC_DEQUEUE_INPUT_AGAIN_LATER: |
606 return false; | 606 return false; |
607 case media::MEDIA_CODEC_ERROR: | 607 case MEDIA_CODEC_ERROR: |
608 POST_ERROR(PLATFORM_FAILURE, "Failed to DequeueInputBuffer"); | 608 POST_ERROR(PLATFORM_FAILURE, "Failed to DequeueInputBuffer"); |
609 return false; | 609 return false; |
610 case media::MEDIA_CODEC_OK: | 610 case MEDIA_CODEC_OK: |
611 break; | 611 break; |
612 default: | 612 default: |
613 NOTREACHED() << "Unknown DequeueInputBuffer status " << status; | 613 NOTREACHED() << "Unknown DequeueInputBuffer status " << status; |
614 return false; | 614 return false; |
615 } | 615 } |
616 } | 616 } |
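
The pending_input_buf_index_ handling above is a small piece of state: after MEDIA_CODEC_NO_KEY the dequeued buffer stays with us and must be re-queued rather than dequeued again. A self-contained sketch of that bookkeeping (names are illustrative, not the MediaCodecBridge API):

#include <optional>

struct InputIndexState {
  std::optional<int> pending;  // set when a queue attempt failed with NO_KEY

  // True if the caller should skip DequeueInputBuffer() and reuse the index.
  bool HasPendingIndex() const { return pending.has_value(); }
  int PendingIndex() const { return *pending; }

  void OnNoKey(int index) { pending = index; }  // we still own the filled buffer
  void OnQueued() { pending.reset(); }          // buffer handed to the codec
};
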
617 | 617 |
618 DCHECK_NE(input_buf_index, -1); | 618 DCHECK_NE(input_buf_index, -1); |
619 | 619 |
620 media::BitstreamBuffer bitstream_buffer = | 620 BitstreamBuffer bitstream_buffer = pending_bitstream_records_.front().buffer; |
621 pending_bitstream_records_.front().buffer; | |
622 | 621 |
623 if (bitstream_buffer.id() == -1) { | 622 if (bitstream_buffer.id() == -1) { |
624 pending_bitstream_records_.pop(); | 623 pending_bitstream_records_.pop(); |
625 TRACE_COUNTER1("media", "AVDA::PendingBitstreamBufferCount", | 624 TRACE_COUNTER1("media", "AVDA::PendingBitstreamBufferCount", |
626 pending_bitstream_records_.size()); | 625 pending_bitstream_records_.size()); |
627 | 626 |
628 media_codec_->QueueEOS(input_buf_index); | 627 media_codec_->QueueEOS(input_buf_index); |
629 return true; | 628 return true; |
630 } | 629 } |
631 | 630 |
632 std::unique_ptr<SharedMemoryRegion> shm; | 631 std::unique_ptr<SharedMemoryRegion> shm; |
633 | 632 |
634 if (pending_input_buf_index_ == -1) { | 633 if (pending_input_buf_index_ == -1) { |
635 // When |pending_input_buf_index_| is not -1, the buffer is already dequeued | 634 // When |pending_input_buf_index_| is not -1, the buffer is already dequeued |
636 // from MediaCodec, filled with data and bitstream_buffer.handle() is | 635 // from MediaCodec, filled with data and bitstream_buffer.handle() is |
637 // closed. | 636 // closed. |
638 shm = std::move(pending_bitstream_records_.front().memory); | 637 shm = std::move(pending_bitstream_records_.front().memory); |
639 | 638 |
640 if (!shm->Map()) { | 639 if (!shm->Map()) { |
641 POST_ERROR(UNREADABLE_INPUT, "Failed to SharedMemoryRegion::Map()"); | 640 POST_ERROR(UNREADABLE_INPUT, "Failed to SharedMemoryRegion::Map()"); |
642 return false; | 641 return false; |
643 } | 642 } |
644 } | 643 } |
645 | 644 |
646 const base::TimeDelta presentation_timestamp = | 645 const base::TimeDelta presentation_timestamp = |
647 bitstream_buffer.presentation_timestamp(); | 646 bitstream_buffer.presentation_timestamp(); |
648 DCHECK(presentation_timestamp != media::kNoTimestamp()) | 647 DCHECK(presentation_timestamp != kNoTimestamp()) |
649 << "Bitstream buffers must have valid presentation timestamps"; | 648 << "Bitstream buffers must have valid presentation timestamps"; |
650 | 649 |
651 // There may already be a bitstream buffer with this timestamp, e.g., VP9 alt | 650 // There may already be a bitstream buffer with this timestamp, e.g., VP9 alt |
652 // ref frames, but it's OK to overwrite it because we only expect a single | 651 // ref frames, but it's OK to overwrite it because we only expect a single |
653 // output frame to have that timestamp. AVDA clients only use the bitstream | 652 // output frame to have that timestamp. AVDA clients only use the bitstream |
654 // buffer id in the returned Pictures to map a bitstream buffer back to a | 653 // buffer id in the returned Pictures to map a bitstream buffer back to a |
655 // timestamp on their side, so either one of the bitstream buffer ids will | 654 // timestamp on their side, so either one of the bitstream buffer ids will |
656 // result in them finding the right timestamp. | 655 // result in them finding the right timestamp. |
657 bitstream_buffers_in_decoder_[presentation_timestamp] = bitstream_buffer.id(); | 656 bitstream_buffers_in_decoder_[presentation_timestamp] = bitstream_buffer.id(); |
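
The bookkeeping described in the comment is just a map keyed by timestamp, where overwriting on a duplicate key is harmless. A tiny standalone illustration:

#include <cstdint>
#include <iostream>
#include <map>

int main() {
  std::map<int64_t, int32_t> in_decoder;  // presentation timestamp (us) -> bitstream id
  in_decoder[40000] = 7;
  in_decoder[40000] = 8;  // e.g. a VP9 alt-ref sharing the timestamp; overwrite is fine
  std::cout << in_decoder[40000] << "\n";  // prints 8; either id maps back to the same pts
  return 0;
}
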
658 | 657 |
659 // Notice that |memory| will be null if we repeatedly enqueue the same buffer; | 658 // Notice that |memory| will be null if we repeatedly enqueue the same buffer; |
660 // this happens after MEDIA_CODEC_NO_KEY. | 659 // this happens after MEDIA_CODEC_NO_KEY. |
661 const uint8_t* memory = | 660 const uint8_t* memory = |
662 shm ? static_cast<const uint8_t*>(shm->memory()) : nullptr; | 661 shm ? static_cast<const uint8_t*>(shm->memory()) : nullptr; |
663 const std::string& key_id = bitstream_buffer.key_id(); | 662 const std::string& key_id = bitstream_buffer.key_id(); |
664 const std::string& iv = bitstream_buffer.iv(); | 663 const std::string& iv = bitstream_buffer.iv(); |
665 const std::vector<media::SubsampleEntry>& subsamples = | 664 const std::vector<SubsampleEntry>& subsamples = bitstream_buffer.subsamples(); |
666 bitstream_buffer.subsamples(); | |
667 | 665 |
668 media::MediaCodecStatus status; | 666 MediaCodecStatus status; |
669 if (key_id.empty() || iv.empty()) { | 667 if (key_id.empty() || iv.empty()) { |
670 status = media_codec_->QueueInputBuffer(input_buf_index, memory, | 668 status = media_codec_->QueueInputBuffer(input_buf_index, memory, |
671 bitstream_buffer.size(), | 669 bitstream_buffer.size(), |
672 presentation_timestamp); | 670 presentation_timestamp); |
673 } else { | 671 } else { |
674 status = media_codec_->QueueSecureInputBuffer( | 672 status = media_codec_->QueueSecureInputBuffer( |
675 input_buf_index, memory, bitstream_buffer.size(), key_id, iv, | 673 input_buf_index, memory, bitstream_buffer.size(), key_id, iv, |
676 subsamples, presentation_timestamp); | 674 subsamples, presentation_timestamp); |
677 } | 675 } |
678 | 676 |
679 DVLOG(2) << __FUNCTION__ | 677 DVLOG(2) << __FUNCTION__ |
680 << ": Queue(Secure)InputBuffer: pts:" << presentation_timestamp | 678 << ": Queue(Secure)InputBuffer: pts:" << presentation_timestamp |
681 << " status:" << status; | 679 << " status:" << status; |
682 | 680 |
683 if (status == media::MEDIA_CODEC_NO_KEY) { | 681 if (status == MEDIA_CODEC_NO_KEY) { |
684 // Keep trying to enqueue the same input buffer. | 682 // Keep trying to enqueue the same input buffer. |
685 // The buffer is owned by us (not the MediaCodec) and is filled with data. | 683 // The buffer is owned by us (not the MediaCodec) and is filled with data. |
686 DVLOG(1) << "QueueSecureInputBuffer failed: NO_KEY"; | 684 DVLOG(1) << "QueueSecureInputBuffer failed: NO_KEY"; |
687 pending_input_buf_index_ = input_buf_index; | 685 pending_input_buf_index_ = input_buf_index; |
688 state_ = WAITING_FOR_KEY; | 686 state_ = WAITING_FOR_KEY; |
689 return false; | 687 return false; |
690 } | 688 } |
691 | 689 |
692 pending_input_buf_index_ = -1; | 690 pending_input_buf_index_ = -1; |
693 pending_bitstream_records_.pop(); | 691 pending_bitstream_records_.pop(); |
694 TRACE_COUNTER1("media", "AVDA::PendingBitstreamBufferCount", | 692 TRACE_COUNTER1("media", "AVDA::PendingBitstreamBufferCount", |
695 pending_bitstream_records_.size()); | 693 pending_bitstream_records_.size()); |
696 // We should call NotifyEndOfBitstreamBuffer(), when no more decoded output | 694 // We should call NotifyEndOfBitstreamBuffer(), when no more decoded output |
697 // will be returned from the bitstream buffer. However, the MediaCodec API is | 695 // will be returned from the bitstream buffer. However, the MediaCodec API is |
698 // not sufficient to guarantee this. | 696 // not sufficient to guarantee this. |
699 // So, here, we call NotifyEndOfBitstreamBuffer() in advance in order to | 697 // So, here, we call NotifyEndOfBitstreamBuffer() in advance in order to |
700 // keep getting more bitstreams from the client, and throttle them by using | 698 // keep getting more bitstreams from the client, and throttle them by using |
701 // |bitstreams_notified_in_advance_|. | 699 // |bitstreams_notified_in_advance_|. |
702 // TODO(dwkang): check if there is a way to remove this workaround. | 700 // TODO(dwkang): check if there is a way to remove this workaround. |
703 base::ThreadTaskRunnerHandle::Get()->PostTask( | 701 base::ThreadTaskRunnerHandle::Get()->PostTask( |
704 FROM_HERE, | 702 FROM_HERE, |
705 base::Bind(&AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer, | 703 base::Bind(&AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer, |
706 weak_this_factory_.GetWeakPtr(), bitstream_buffer.id())); | 704 weak_this_factory_.GetWeakPtr(), bitstream_buffer.id())); |
707 bitstreams_notified_in_advance_.push_back(bitstream_buffer.id()); | 705 bitstreams_notified_in_advance_.push_back(bitstream_buffer.id()); |
708 | 706 |
709 if (status != media::MEDIA_CODEC_OK) { | 707 if (status != MEDIA_CODEC_OK) { |
710 POST_ERROR(PLATFORM_FAILURE, "Failed to QueueInputBuffer: " << status); | 708 POST_ERROR(PLATFORM_FAILURE, "Failed to QueueInputBuffer: " << status); |
711 return false; | 709 return false; |
712 } | 710 } |
713 | 711 |
714 return true; | 712 return true; |
715 } | 713 } |
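
The workaround in the last comment works because the caller throttles itself on the number of early acknowledgements. A standalone sketch of that throttle; the 32 mirrors kMaxBitstreamsNotifiedInAdvance, the rest of the names are illustrative:

#include <cstddef>
#include <cstdint>
#include <list>

constexpr size_t kMaxInAdvance = 32;

struct AdvanceNotifyThrottle {
  std::list<int32_t> notified_in_advance;  // ids acknowledged before output arrived

  // Accept more input only while the backlog of early acknowledgements is
  // below the cap; otherwise wait for real outputs to drain the list.
  bool CanAcceptMoreInput() const {
    return notified_in_advance.size() < kMaxInAdvance;
  }

  void OnNotifiedInAdvance(int32_t id) { notified_in_advance.push_back(id); }

  void OnOutputReturned(int32_t id) { notified_in_advance.remove(id); }
};
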
716 | 714 |
717 bool AndroidVideoDecodeAccelerator::DequeueOutput() { | 715 bool AndroidVideoDecodeAccelerator::DequeueOutput() { |
718 DCHECK(thread_checker_.CalledOnValidThread()); | 716 DCHECK(thread_checker_.CalledOnValidThread()); |
719 TRACE_EVENT0("media", "AVDA::DequeueOutput"); | 717 TRACE_EVENT0("media", "AVDA::DequeueOutput"); |
720 base::AutoReset<bool> auto_reset(&defer_errors_, true); | 718 base::AutoReset<bool> auto_reset(&defer_errors_, true); |
721 if (state_ == ERROR || state_ == WAITING_FOR_CODEC) | 719 if (state_ == ERROR || state_ == WAITING_FOR_CODEC) |
722 return false; | 720 return false; |
723 if (picturebuffers_requested_ && output_picture_buffers_.empty()) | 721 if (picturebuffers_requested_ && output_picture_buffers_.empty()) |
724 return false; | 722 return false; |
725 if (!output_picture_buffers_.empty() && free_picture_ids_.empty()) { | 723 if (!output_picture_buffers_.empty() && free_picture_ids_.empty()) { |
726 // Don't have any picture buffer to send. Need to wait. | 724 // Don't have any picture buffer to send. Need to wait. |
727 return false; | 725 return false; |
728 } | 726 } |
729 | 727 |
730 bool eos = false; | 728 bool eos = false; |
731 base::TimeDelta presentation_timestamp; | 729 base::TimeDelta presentation_timestamp; |
732 int32_t buf_index = 0; | 730 int32_t buf_index = 0; |
733 do { | 731 do { |
734 size_t offset = 0; | 732 size_t offset = 0; |
735 size_t size = 0; | 733 size_t size = 0; |
736 | 734 |
737 TRACE_EVENT_BEGIN0("media", "AVDA::DequeueOutput"); | 735 TRACE_EVENT_BEGIN0("media", "AVDA::DequeueOutput"); |
738 media::MediaCodecStatus status = media_codec_->DequeueOutputBuffer( | 736 MediaCodecStatus status = media_codec_->DequeueOutputBuffer( |
739 NoWaitTimeOut(), &buf_index, &offset, &size, &presentation_timestamp, | 737 NoWaitTimeOut(), &buf_index, &offset, &size, &presentation_timestamp, |
740 &eos, NULL); | 738 &eos, NULL); |
741 TRACE_EVENT_END2("media", "AVDA::DequeueOutput", "status", status, | 739 TRACE_EVENT_END2("media", "AVDA::DequeueOutput", "status", status, |
742 "presentation_timestamp (ms)", | 740 "presentation_timestamp (ms)", |
743 presentation_timestamp.InMilliseconds()); | 741 presentation_timestamp.InMilliseconds()); |
744 | 742 |
745 switch (status) { | 743 switch (status) { |
746 case media::MEDIA_CODEC_ERROR: | 744 case MEDIA_CODEC_ERROR: |
747 // Do not post an error if we are draining for reset or destroy. | 745 // Do not post an error if we are draining for reset or destroy. |
748 // Instead, run the drain completion task. | 746 // Instead, run the drain completion task. |
749 if (IsDrainingForResetOrDestroy()) { | 747 if (IsDrainingForResetOrDestroy()) { |
750 DVLOG(1) << __FUNCTION__ << ": error while codec draining"; | 748 DVLOG(1) << __FUNCTION__ << ": error while codec draining"; |
751 state_ = ERROR; | 749 state_ = ERROR; |
752 OnDrainCompleted(); | 750 OnDrainCompleted(); |
753 } else { | 751 } else { |
754 POST_ERROR(PLATFORM_FAILURE, "DequeueOutputBuffer failed."); | 752 POST_ERROR(PLATFORM_FAILURE, "DequeueOutputBuffer failed."); |
755 } | 753 } |
756 return false; | 754 return false; |
757 | 755 |
758 case media::MEDIA_CODEC_DEQUEUE_OUTPUT_AGAIN_LATER: | 756 case MEDIA_CODEC_DEQUEUE_OUTPUT_AGAIN_LATER: |
759 return false; | 757 return false; |
760 | 758 |
761 case media::MEDIA_CODEC_OUTPUT_FORMAT_CHANGED: { | 759 case MEDIA_CODEC_OUTPUT_FORMAT_CHANGED: { |
762 // An OUTPUT_FORMAT_CHANGED is not reported after flush() if the frame | 760 // An OUTPUT_FORMAT_CHANGED is not reported after flush() if the frame |
763 // size does not change. Therefore we have to keep track of the format | 761 // size does not change. Therefore we have to keep track of the format |
764 // even if draining, unless we are draining for destroy. | 762 // even if draining, unless we are draining for destroy. |
765 if (drain_type_ == DRAIN_FOR_DESTROY) | 763 if (drain_type_ == DRAIN_FOR_DESTROY) |
766 return true; // ignore | 764 return true; // ignore |
767 | 765 |
768 if (media_codec_->GetOutputSize(&size_) != media::MEDIA_CODEC_OK) { | 766 if (media_codec_->GetOutputSize(&size_) != MEDIA_CODEC_OK) { |
769 POST_ERROR(PLATFORM_FAILURE, "GetOutputSize failed."); | 767 POST_ERROR(PLATFORM_FAILURE, "GetOutputSize failed."); |
770 return false; | 768 return false; |
771 } | 769 } |
772 | 770 |
773 DVLOG(3) << __FUNCTION__ | 771 DVLOG(3) << __FUNCTION__ |
774 << " OUTPUT_FORMAT_CHANGED, new size: " << size_.ToString(); | 772 << " OUTPUT_FORMAT_CHANGED, new size: " << size_.ToString(); |
775 | 773 |
776 // Don't request picture buffers if we already have some. This avoids | 774 // Don't request picture buffers if we already have some. This avoids |
777 // having to dismiss the existing buffers which may actively reference | 775 // having to dismiss the existing buffers which may actively reference |
778 // decoded images. Breaking their connection to the decoded image will | 776 // decoded images. Breaking their connection to the decoded image will |
779 // cause rendering of black frames. Instead, we let the existing | 777 // cause rendering of black frames. Instead, we let the existing |
780 // PictureBuffers live on and we simply update their size the next time | 778 // PictureBuffers live on and we simply update their size the next time |
781 // they're attached to an image of the new resolution. See the | 779 // they're attached to an image of the new resolution. See the |
782 // size update in |SendDecodedFrameToClient| and https://crbug/587994. | 780 // size update in |SendDecodedFrameToClient| and https://crbug/587994. |
783 if (output_picture_buffers_.empty() && !picturebuffers_requested_) { | 781 if (output_picture_buffers_.empty() && !picturebuffers_requested_) { |
784 picturebuffers_requested_ = true; | 782 picturebuffers_requested_ = true; |
785 base::ThreadTaskRunnerHandle::Get()->PostTask( | 783 base::ThreadTaskRunnerHandle::Get()->PostTask( |
786 FROM_HERE, | 784 FROM_HERE, |
787 base::Bind(&AndroidVideoDecodeAccelerator::RequestPictureBuffers, | 785 base::Bind(&AndroidVideoDecodeAccelerator::RequestPictureBuffers, |
788 weak_this_factory_.GetWeakPtr())); | 786 weak_this_factory_.GetWeakPtr())); |
789 return false; | 787 return false; |
790 } | 788 } |
791 | 789 |
792 return true; | 790 return true; |
793 } | 791 } |
794 | 792 |
795 case media::MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED: | 793 case MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED: |
796 break; | 794 break; |
797 | 795 |
798 case media::MEDIA_CODEC_OK: | 796 case MEDIA_CODEC_OK: |
799 DCHECK_GE(buf_index, 0); | 797 DCHECK_GE(buf_index, 0); |
800 DVLOG(3) << __FUNCTION__ << ": pts:" << presentation_timestamp | 798 DVLOG(3) << __FUNCTION__ << ": pts:" << presentation_timestamp |
801 << " buf_index:" << buf_index << " offset:" << offset | 799 << " buf_index:" << buf_index << " offset:" << offset |
802 << " size:" << size << " eos:" << eos; | 800 << " size:" << size << " eos:" << eos; |
803 break; | 801 break; |
804 | 802 |
805 default: | 803 default: |
806 NOTREACHED(); | 804 NOTREACHED(); |
807 break; | 805 break; |
808 } | 806 } |
(...skipping 83 matching lines...) |
892 | 890 |
893 bool size_changed = false; | 891 bool size_changed = false; |
894 if (i->second.size() != size_) { | 892 if (i->second.size() != size_) { |
895 // Size may have changed due to resolution change since the last time this | 893 // Size may have changed due to resolution change since the last time this |
896 // PictureBuffer was used. | 894 // PictureBuffer was used. |
897 strategy_->UpdatePictureBufferSize(&i->second, size_); | 895 strategy_->UpdatePictureBufferSize(&i->second, size_); |
898 size_changed = true; | 896 size_changed = true; |
899 } | 897 } |
900 | 898 |
901 const bool allow_overlay = strategy_->ArePicturesOverlayable(); | 899 const bool allow_overlay = strategy_->ArePicturesOverlayable(); |
902 media::Picture picture(picture_buffer_id, bitstream_id, gfx::Rect(size_), | 900 Picture picture(picture_buffer_id, bitstream_id, gfx::Rect(size_), |
903 allow_overlay); | 901 allow_overlay); |
904 picture.set_size_changed(size_changed); | 902 picture.set_size_changed(size_changed); |
905 | 903 |
906 // Notify picture ready before calling UseCodecBufferForPictureBuffer() since | 904 // Notify picture ready before calling UseCodecBufferForPictureBuffer() since |
907 // that process may be slow and shouldn't delay delivery of the frame to the | 905 // that process may be slow and shouldn't delay delivery of the frame to the |
908 // renderer. The picture is only used on the same thread as this method is | 906 // renderer. The picture is only used on the same thread as this method is |
909 // called, so it is safe to do this. | 907 // called, so it is safe to do this. |
910 NotifyPictureReady(picture); | 908 NotifyPictureReady(picture); |
911 | 909 |
912 // Connect the PictureBuffer to the decoded frame, via whatever mechanism the | 910 // Connect the PictureBuffer to the decoded frame, via whatever mechanism the |
913 // strategy likes. | 911 // strategy likes. |
914 strategy_->UseCodecBufferForPictureBuffer(codec_buffer_index, i->second); | 912 strategy_->UseCodecBufferForPictureBuffer(codec_buffer_index, i->second); |
915 } | 913 } |
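
Taken together, the format-change handling and the size update above amount to: request buffers once, then keep them and let each buffer pick up the new size when it is next reused. A standalone sketch of that policy (illustrative types, not the strategy interface):

#include <utility>

struct PictureBufferPolicy {
  std::pair<int, int> coded_size{0, 0};
  bool buffers_requested = false;

  // Returns true only on the first format change, when an initial set of
  // buffers must be requested; later changes just record the size, which
  // existing buffers adopt when next attached to a decoded image.
  bool OnFormatChanged(int width, int height) {
    coded_size = {width, height};
    if (buffers_requested)
      return false;  // keep existing buffers; resize lazily on reuse
    buffers_requested = true;
    return true;
  }
};
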
916 | 914 |
917 void AndroidVideoDecodeAccelerator::Decode( | 915 void AndroidVideoDecodeAccelerator::Decode( |
918 const media::BitstreamBuffer& bitstream_buffer) { | 916 const BitstreamBuffer& bitstream_buffer) { |
919 DCHECK(thread_checker_.CalledOnValidThread()); | 917 DCHECK(thread_checker_.CalledOnValidThread()); |
920 | 918 |
921 // If we previously deferred a codec restart, take care of it now. This can | 919 // If we previously deferred a codec restart, take care of it now. This can |
922 // happen on older devices where configuration changes require a codec reset. | 920 // happen on older devices where configuration changes require a codec reset. |
923 if (codec_needs_reset_) { | 921 if (codec_needs_reset_) { |
924 DCHECK_EQ(drain_type_, DRAIN_TYPE_NONE); | 922 DCHECK_EQ(drain_type_, DRAIN_TYPE_NONE); |
925 ResetCodecState(); | 923 ResetCodecState(); |
926 } | 924 } |
927 | 925 |
928 if (bitstream_buffer.id() >= 0 && bitstream_buffer.size() > 0) { | 926 if (bitstream_buffer.id() >= 0 && bitstream_buffer.size() > 0) { |
929 DecodeBuffer(bitstream_buffer); | 927 DecodeBuffer(bitstream_buffer); |
930 return; | 928 return; |
931 } | 929 } |
932 | 930 |
933 if (base::SharedMemory::IsHandleValid(bitstream_buffer.handle())) | 931 if (base::SharedMemory::IsHandleValid(bitstream_buffer.handle())) |
934 base::SharedMemory::CloseHandle(bitstream_buffer.handle()); | 932 base::SharedMemory::CloseHandle(bitstream_buffer.handle()); |
935 | 933 |
936 if (bitstream_buffer.id() < 0) { | 934 if (bitstream_buffer.id() < 0) { |
937 POST_ERROR(INVALID_ARGUMENT, | 935 POST_ERROR(INVALID_ARGUMENT, |
938 "Invalid bistream_buffer, id: " << bitstream_buffer.id()); | 936 "Invalid bistream_buffer, id: " << bitstream_buffer.id()); |
939 } else { | 937 } else { |
940 base::ThreadTaskRunnerHandle::Get()->PostTask( | 938 base::ThreadTaskRunnerHandle::Get()->PostTask( |
941 FROM_HERE, | 939 FROM_HERE, |
942 base::Bind(&AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer, | 940 base::Bind(&AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer, |
943 weak_this_factory_.GetWeakPtr(), bitstream_buffer.id())); | 941 weak_this_factory_.GetWeakPtr(), bitstream_buffer.id())); |
944 } | 942 } |
945 } | 943 } |
946 | 944 |
947 void AndroidVideoDecodeAccelerator::DecodeBuffer( | 945 void AndroidVideoDecodeAccelerator::DecodeBuffer( |
948 const media::BitstreamBuffer& bitstream_buffer) { | 946 const BitstreamBuffer& bitstream_buffer) { |
949 pending_bitstream_records_.push(BitstreamRecord(bitstream_buffer)); | 947 pending_bitstream_records_.push(BitstreamRecord(bitstream_buffer)); |
950 TRACE_COUNTER1("media", "AVDA::PendingBitstreamBufferCount", | 948 TRACE_COUNTER1("media", "AVDA::PendingBitstreamBufferCount", |
951 pending_bitstream_records_.size()); | 949 pending_bitstream_records_.size()); |
952 | 950 |
953 DoIOTask(true); | 951 DoIOTask(true); |
954 } | 952 } |
955 | 953 |
956 void AndroidVideoDecodeAccelerator::RequestPictureBuffers() { | 954 void AndroidVideoDecodeAccelerator::RequestPictureBuffers() { |
957 if (client_) { | 955 if (client_) { |
958 client_->ProvidePictureBuffers(kNumPictureBuffers, PIXEL_FORMAT_UNKNOWN, 1, | 956 client_->ProvidePictureBuffers(kNumPictureBuffers, PIXEL_FORMAT_UNKNOWN, 1, |
959 strategy_->GetPictureBufferSize(), | 957 strategy_->GetPictureBufferSize(), |
960 strategy_->GetTextureTarget()); | 958 strategy_->GetTextureTarget()); |
961 } | 959 } |
962 } | 960 } |
963 | 961 |
964 void AndroidVideoDecodeAccelerator::AssignPictureBuffers( | 962 void AndroidVideoDecodeAccelerator::AssignPictureBuffers( |
965 const std::vector<media::PictureBuffer>& buffers) { | 963 const std::vector<PictureBuffer>& buffers) { |
966 DCHECK(thread_checker_.CalledOnValidThread()); | 964 DCHECK(thread_checker_.CalledOnValidThread()); |
967 DCHECK(output_picture_buffers_.empty()); | 965 DCHECK(output_picture_buffers_.empty()); |
968 DCHECK(free_picture_ids_.empty()); | 966 DCHECK(free_picture_ids_.empty()); |
969 | 967 |
970 if (buffers.size() < kNumPictureBuffers) { | 968 if (buffers.size() < kNumPictureBuffers) { |
971 POST_ERROR(INVALID_ARGUMENT, "Not enough picture buffers assigned."); | 969 POST_ERROR(INVALID_ARGUMENT, "Not enough picture buffers assigned."); |
972 return; | 970 return; |
973 } | 971 } |
974 | 972 |
975 const bool have_context = make_context_current_cb_.Run(); | 973 const bool have_context = make_context_current_cb_.Run(); |
(...skipping 75 matching lines...) |
1051 base::PostTaskAndReplyWithResult( | 1049 base::PostTaskAndReplyWithResult( |
1052 task_runner.get(), FROM_HERE, | 1050 task_runner.get(), FROM_HERE, |
1053 base::Bind(&AndroidVideoDecodeAccelerator::ConfigureMediaCodecOnAnyThread, | 1051 base::Bind(&AndroidVideoDecodeAccelerator::ConfigureMediaCodecOnAnyThread, |
1054 codec_config_), | 1052 codec_config_), |
1055 base::Bind(&AndroidVideoDecodeAccelerator::OnCodecConfigured, | 1053 base::Bind(&AndroidVideoDecodeAccelerator::OnCodecConfigured, |
1056 weak_this_factory_.GetWeakPtr())); | 1054 weak_this_factory_.GetWeakPtr())); |
1057 } | 1055 } |
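
The call above follows the usual "run the blocking construction off-thread, deliver the result back to the owner" shape. A standalone sketch of the same shape using std::async in place of Chromium's PostTaskAndReplyWithResult (FakeCodec and the function names are illustrative):

#include <future>
#include <memory>
#include <string>

struct FakeCodec {
  std::string name;
};

// Blocking codec construction runs on another thread; the owner collects the
// result later and finishes initialization on its own thread.
std::future<std::unique_ptr<FakeCodec>> ConfigureCodecAsync() {
  return std::async(std::launch::async, [] {
    return std::make_unique<FakeCodec>(FakeCodec{"decoder"});
  });
}

// Usage, on the owner thread:
//   auto pending = ConfigureCodecAsync();
//   std::unique_ptr<FakeCodec> codec = pending.get();  // blocks only if not ready
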
1058 | 1056 |
1059 bool AndroidVideoDecodeAccelerator::ConfigureMediaCodecSynchronously() { | 1057 bool AndroidVideoDecodeAccelerator::ConfigureMediaCodecSynchronously() { |
1060 state_ = WAITING_FOR_CODEC; | 1058 state_ = WAITING_FOR_CODEC; |
1061 std::unique_ptr<media::VideoCodecBridge> media_codec = | 1059 std::unique_ptr<VideoCodecBridge> media_codec = |
1062 ConfigureMediaCodecOnAnyThread(codec_config_); | 1060 ConfigureMediaCodecOnAnyThread(codec_config_); |
1063 OnCodecConfigured(std::move(media_codec)); | 1061 OnCodecConfigured(std::move(media_codec)); |
1064 return !!media_codec_; | 1062 return !!media_codec_; |
1065 } | 1063 } |
1066 | 1064 |
1067 std::unique_ptr<media::VideoCodecBridge> | 1065 std::unique_ptr<VideoCodecBridge> |
1068 AndroidVideoDecodeAccelerator::ConfigureMediaCodecOnAnyThread( | 1066 AndroidVideoDecodeAccelerator::ConfigureMediaCodecOnAnyThread( |
1069 scoped_refptr<CodecConfig> codec_config) { | 1067 scoped_refptr<CodecConfig> codec_config) { |
1070 TRACE_EVENT0("media", "AVDA::ConfigureMediaCodec"); | 1068 TRACE_EVENT0("media", "AVDA::ConfigureMediaCodec"); |
1071 | 1069 |
1072 jobject media_crypto = codec_config->media_crypto_ | 1070 jobject media_crypto = codec_config->media_crypto_ |
1073 ? codec_config->media_crypto_->obj() | 1071 ? codec_config->media_crypto_->obj() |
1074 : nullptr; | 1072 : nullptr; |
1075 | 1073 |
1076 // |needs_protected_surface_| implies encrypted stream. | 1074 // |needs_protected_surface_| implies encrypted stream. |
1077 DCHECK(!codec_config->needs_protected_surface_ || media_crypto); | 1075 DCHECK(!codec_config->needs_protected_surface_ || media_crypto); |
1078 | 1076 |
1079 return std::unique_ptr<media::VideoCodecBridge>( | 1077 return std::unique_ptr<VideoCodecBridge>(VideoCodecBridge::CreateDecoder( |
1080 media::VideoCodecBridge::CreateDecoder( | 1078 codec_config->codec_, codec_config->needs_protected_surface_, |
1081 codec_config->codec_, codec_config->needs_protected_surface_, | 1079 codec_config->initial_expected_coded_size_, |
1082 codec_config->initial_expected_coded_size_, | 1080 codec_config->surface_.j_surface().obj(), media_crypto, true)); |
1083 codec_config->surface_.j_surface().obj(), media_crypto, true)); | |
1084 } | 1081 } |
1085 | 1082 |
1086 void AndroidVideoDecodeAccelerator::OnCodecConfigured( | 1083 void AndroidVideoDecodeAccelerator::OnCodecConfigured( |
1087 std::unique_ptr<media::VideoCodecBridge> media_codec) { | 1084 std::unique_ptr<VideoCodecBridge> media_codec) { |
1088 DCHECK(thread_checker_.CalledOnValidThread()); | 1085 DCHECK(thread_checker_.CalledOnValidThread()); |
1089 DCHECK(state_ == WAITING_FOR_CODEC || state_ == SURFACE_DESTROYED); | 1086 DCHECK(state_ == WAITING_FOR_CODEC || state_ == SURFACE_DESTROYED); |
1090 | 1087 |
1091 // Record one instance of the codec being initialized. | 1088 // Record one instance of the codec being initialized. |
1092 RecordFormatChangedMetric(FormatChangedValue::CodecInitialized); | 1089 RecordFormatChangedMetric(FormatChangedValue::CodecInitialized); |
1093 | 1090 |
1094 // If we are supposed to notify that initialization is complete, then do so | 1091 // If we are supposed to notify that initialization is complete, then do so |
1095 // now. Otherwise, this is a reconfiguration. | 1092 // now. Otherwise, this is a reconfiguration. |
1096 if (deferred_initialization_pending_) { | 1093 if (deferred_initialization_pending_) { |
1097 // Losing the output surface is not considered an error state, so notify | 1094 // Losing the output surface is not considered an error state, so notify |
(...skipping 28 matching lines...) |
1126 // another drain request is present, but DRAIN_FOR_DESTROY can. | 1123 // another drain request is present, but DRAIN_FOR_DESTROY can. |
1127 DCHECK_NE(drain_type, DRAIN_TYPE_NONE); | 1124 DCHECK_NE(drain_type, DRAIN_TYPE_NONE); |
1128 DCHECK(drain_type_ == DRAIN_TYPE_NONE || drain_type == DRAIN_FOR_DESTROY) | 1125 DCHECK(drain_type_ == DRAIN_TYPE_NONE || drain_type == DRAIN_FOR_DESTROY) |
1129 << "Unexpected StartCodecDrain() with drain type " << drain_type | 1126 << "Unexpected StartCodecDrain() with drain type " << drain_type |
1130 << " while already draining with drain type " << drain_type_; | 1127 << " while already draining with drain type " << drain_type_; |
1131 | 1128 |
1132 const bool enqueue_eos = drain_type_ == DRAIN_TYPE_NONE; | 1129 const bool enqueue_eos = drain_type_ == DRAIN_TYPE_NONE; |
1133 drain_type_ = drain_type; | 1130 drain_type_ = drain_type; |
1134 | 1131 |
1135 if (enqueue_eos) | 1132 if (enqueue_eos) |
1136 DecodeBuffer(media::BitstreamBuffer(-1, base::SharedMemoryHandle(), 0)); | 1133 DecodeBuffer(BitstreamBuffer(-1, base::SharedMemoryHandle(), 0)); |
1137 } | 1134 } |
1138 | 1135 |
1139 bool AndroidVideoDecodeAccelerator::IsDrainingForResetOrDestroy() const { | 1136 bool AndroidVideoDecodeAccelerator::IsDrainingForResetOrDestroy() const { |
1140 return drain_type_ == DRAIN_FOR_RESET || drain_type_ == DRAIN_FOR_DESTROY; | 1137 return drain_type_ == DRAIN_FOR_RESET || drain_type_ == DRAIN_FOR_DESTROY; |
1141 } | 1138 } |
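
The drain bookkeeping in StartCodecDrain() reduces to two small rules: enqueue the EOS only when no drain is in flight, and let a destroy drain supersede a pending reset drain. A standalone sketch; the enum lists only the drain types visible in this diff:

enum DrainType { DRAIN_TYPE_NONE, DRAIN_FOR_RESET, DRAIN_FOR_DESTROY };

// Only the first drain request enqueues the EOS buffer; a later
// DRAIN_FOR_DESTROY may override a pending reset drain without a second EOS.
bool ShouldEnqueueEos(DrainType current) {
  return current == DRAIN_TYPE_NONE;
}

bool IsDrainingForResetOrDestroy(DrainType current) {
  return current == DRAIN_FOR_RESET || current == DRAIN_FOR_DESTROY;
}
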
1142 | 1139 |
1143 void AndroidVideoDecodeAccelerator::OnDrainCompleted() { | 1140 void AndroidVideoDecodeAccelerator::OnDrainCompleted() { |
1144 DVLOG(2) << __FUNCTION__; | 1141 DVLOG(2) << __FUNCTION__; |
1145 DCHECK(thread_checker_.CalledOnValidThread()); | 1142 DCHECK(thread_checker_.CalledOnValidThread()); |
1146 | 1143 |
(...skipping 112 matching lines...) |
1259 bitstreams_notified_in_advance_.clear(); | 1256 bitstreams_notified_in_advance_.clear(); |
1260 | 1257 |
1261 // Any error that is waiting to post can be ignored. | 1258 // Any error that is waiting to post can be ignored. |
1262 error_sequence_token_++; | 1259 error_sequence_token_++; |
1263 | 1260 |
1264 DCHECK(strategy_); | 1261 DCHECK(strategy_); |
1265 strategy_->ReleaseCodecBuffers(output_picture_buffers_); | 1262 strategy_->ReleaseCodecBuffers(output_picture_buffers_); |
1266 | 1263 |
1267 // Some VP8 files require complete MediaCodec drain before we can call | 1264 // Some VP8 files require complete MediaCodec drain before we can call |
1268 // MediaCodec.flush() or MediaCodec.reset(). http://crbug.com/598963. | 1265 // MediaCodec.flush() or MediaCodec.reset(). http://crbug.com/598963. |
1269 if (media_codec_ && codec_config_->codec_ == media::kCodecVP8) { | 1266 if (media_codec_ && codec_config_->codec_ == kCodecVP8) { |
1270 // Postpone ResetCodecState() after the drain. | 1267 // Postpone ResetCodecState() after the drain. |
1271 StartCodecDrain(DRAIN_FOR_RESET); | 1268 StartCodecDrain(DRAIN_FOR_RESET); |
1272 } else { | 1269 } else { |
1273 ResetCodecState(); | 1270 ResetCodecState(); |
1274 base::ThreadTaskRunnerHandle::Get()->PostTask( | 1271 base::ThreadTaskRunnerHandle::Get()->PostTask( |
1275 FROM_HERE, base::Bind(&AndroidVideoDecodeAccelerator::NotifyResetDone, | 1272 FROM_HERE, base::Bind(&AndroidVideoDecodeAccelerator::NotifyResetDone, |
1276 weak_this_factory_.GetWeakPtr())); | 1273 weak_this_factory_.GetWeakPtr())); |
1277 } | 1274 } |
1278 } | 1275 } |
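
The VP8 special case above postpones the actual reset until the drain finishes. A minimal sketch of that control flow with plain std::function callbacks instead of the task runner (Codec and the function names are illustrative):

#include <functional>

enum class Codec { kH264, kVP8, kVP9 };

// VP8 requires a complete MediaCodec drain before flush()/reset(); other
// codecs can reset immediately.
void ResetOrDrainFirst(bool have_codec, Codec codec,
                       const std::function<void()>& start_drain_for_reset,
                       const std::function<void()>& reset_now) {
  if (have_codec && codec == Codec::kVP8)
    start_drain_for_reset();  // the reset runs later, from the drain-completed path
  else
    reset_now();
}
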
1279 | 1276 |
(...skipping 11 matching lines...) |
1291 // If we have an OnFrameAvailable handler, tell it that we're going away. | 1288 // If we have an OnFrameAvailable handler, tell it that we're going away. |
1292 if (on_frame_available_handler_) { | 1289 if (on_frame_available_handler_) { |
1293 on_frame_available_handler_->ClearOwner(); | 1290 on_frame_available_handler_->ClearOwner(); |
1294 on_frame_available_handler_ = nullptr; | 1291 on_frame_available_handler_ = nullptr; |
1295 } | 1292 } |
1296 | 1293 |
1297 client_ = nullptr; | 1294 client_ = nullptr; |
1298 | 1295 |
1299 // Some VP8 files require complete MediaCodec drain before we can call | 1296 // Some VP8 files require complete MediaCodec drain before we can call |
1300 // MediaCodec.flush() or MediaCodec.reset(). http://crbug.com/598963. | 1297 // MediaCodec.flush() or MediaCodec.reset(). http://crbug.com/598963. |
1301 if (media_codec_ && codec_config_->codec_ == media::kCodecVP8) { | 1298 if (media_codec_ && codec_config_->codec_ == kCodecVP8) { |
1302 // Clear pending_bitstream_records_. | 1299 // Clear pending_bitstream_records_. |
1303 while (!pending_bitstream_records_.empty()) | 1300 while (!pending_bitstream_records_.empty()) |
1304 pending_bitstream_records_.pop(); | 1301 pending_bitstream_records_.pop(); |
1305 | 1302 |
1306 // Postpone ActualDestroy after the drain. | 1303 // Postpone ActualDestroy after the drain. |
1307 StartCodecDrain(DRAIN_FOR_DESTROY); | 1304 StartCodecDrain(DRAIN_FOR_DESTROY); |
1308 } else { | 1305 } else { |
1309 ActualDestroy(); | 1306 ActualDestroy(); |
1310 } | 1307 } |
1311 } | 1308 } |
(...skipping 36 matching lines...) |
1348 const { | 1345 const { |
1349 return thread_checker_; | 1346 return thread_checker_; |
1350 } | 1347 } |
1351 | 1348 |
1352 base::WeakPtr<gpu::gles2::GLES2Decoder> | 1349 base::WeakPtr<gpu::gles2::GLES2Decoder> |
1353 AndroidVideoDecodeAccelerator::GetGlDecoder() const { | 1350 AndroidVideoDecodeAccelerator::GetGlDecoder() const { |
1354 return get_gles2_decoder_cb_.Run(); | 1351 return get_gles2_decoder_cb_.Run(); |
1355 } | 1352 } |
1356 | 1353 |
1357 gpu::gles2::TextureRef* AndroidVideoDecodeAccelerator::GetTextureForPicture( | 1354 gpu::gles2::TextureRef* AndroidVideoDecodeAccelerator::GetTextureForPicture( |
1358 const media::PictureBuffer& picture_buffer) { | 1355 const PictureBuffer& picture_buffer) { |
1359 auto gles_decoder = GetGlDecoder(); | 1356 auto gles_decoder = GetGlDecoder(); |
1360 RETURN_ON_FAILURE(this, gles_decoder, "Failed to get GL decoder", | 1357 RETURN_ON_FAILURE(this, gles_decoder, "Failed to get GL decoder", |
1361 ILLEGAL_STATE, nullptr); | 1358 ILLEGAL_STATE, nullptr); |
1362 RETURN_ON_FAILURE(this, gles_decoder->GetContextGroup(), | 1359 RETURN_ON_FAILURE(this, gles_decoder->GetContextGroup(), |
1363 "Null gles_decoder->GetContextGroup()", ILLEGAL_STATE, | 1360 "Null gles_decoder->GetContextGroup()", ILLEGAL_STATE, |
1364 nullptr); | 1361 nullptr); |
1365 gpu::gles2::TextureManager* texture_manager = | 1362 gpu::gles2::TextureManager* texture_manager = |
1366 gles_decoder->GetContextGroup()->texture_manager(); | 1363 gles_decoder->GetContextGroup()->texture_manager(); |
1367 RETURN_ON_FAILURE(this, texture_manager, "Null texture_manager", | 1364 RETURN_ON_FAILURE(this, texture_manager, "Null texture_manager", |
1368 ILLEGAL_STATE, nullptr); | 1365 ILLEGAL_STATE, nullptr); |
(...skipping 58 matching lines...) |
1427 } | 1424 } |
1428 | 1425 |
1429 void AndroidVideoDecodeAccelerator::OnFrameAvailable() { | 1426 void AndroidVideoDecodeAccelerator::OnFrameAvailable() { |
1430 // Remember: this may be on any thread. | 1427 // Remember: this may be on any thread. |
1431 DCHECK(strategy_); | 1428 DCHECK(strategy_); |
1432 strategy_->OnFrameAvailable(); | 1429 strategy_->OnFrameAvailable(); |
1433 } | 1430 } |
1434 | 1431 |
1435 void AndroidVideoDecodeAccelerator::PostError( | 1432 void AndroidVideoDecodeAccelerator::PostError( |
1436 const ::tracked_objects::Location& from_here, | 1433 const ::tracked_objects::Location& from_here, |
1437 media::VideoDecodeAccelerator::Error error) { | 1434 VideoDecodeAccelerator::Error error) { |
1438 base::ThreadTaskRunnerHandle::Get()->PostDelayedTask( | 1435 base::ThreadTaskRunnerHandle::Get()->PostDelayedTask( |
1439 from_here, | 1436 from_here, |
1440 base::Bind(&AndroidVideoDecodeAccelerator::NotifyError, | 1437 base::Bind(&AndroidVideoDecodeAccelerator::NotifyError, |
1441 weak_this_factory_.GetWeakPtr(), error, error_sequence_token_), | 1438 weak_this_factory_.GetWeakPtr(), error, error_sequence_token_), |
1442 (defer_errors_ ? ErrorPostingDelay() : base::TimeDelta())); | 1439 (defer_errors_ ? ErrorPostingDelay() : base::TimeDelta())); |
1443 state_ = ERROR; | 1440 state_ = ERROR; |
1444 } | 1441 } |
1445 | 1442 |
1446 void AndroidVideoDecodeAccelerator::InitializeCdm() { | 1443 void AndroidVideoDecodeAccelerator::InitializeCdm() { |
1447 DVLOG(2) << __FUNCTION__ << ": " << config_.cdm_id; | 1444 DVLOG(2) << __FUNCTION__ << ": " << config_.cdm_id; |
1448 | 1445 |
1449 #if !defined(ENABLE_MOJO_MEDIA_IN_GPU_PROCESS) | 1446 #if !defined(ENABLE_MOJO_MEDIA_IN_GPU_PROCESS) |
1450 NOTIMPLEMENTED(); | 1447 NOTIMPLEMENTED(); |
1451 NotifyInitializationComplete(false); | 1448 NotifyInitializationComplete(false); |
1452 #else | 1449 #else |
1453 // Store the CDM to hold a reference to it. | 1450 // Store the CDM to hold a reference to it. |
1454 cdm_for_reference_holding_only_ = | 1451 cdm_for_reference_holding_only_ = |
1455 media::MojoCdmService::LegacyGetCdm(config_.cdm_id); | 1452 MojoCdmService::LegacyGetCdm(config_.cdm_id); |
1456 DCHECK(cdm_for_reference_holding_only_); | 1453 DCHECK(cdm_for_reference_holding_only_); |
1457 | 1454 |
1458 // On the Android platform the CdmContext must be a MediaDrmBridgeCdmContext. | 1455 // On the Android platform the CdmContext must be a MediaDrmBridgeCdmContext. |
1459 media_drm_bridge_cdm_context_ = static_cast<media::MediaDrmBridgeCdmContext*>( | 1456 media_drm_bridge_cdm_context_ = static_cast<MediaDrmBridgeCdmContext*>( |
1460 cdm_for_reference_holding_only_->GetCdmContext()); | 1457 cdm_for_reference_holding_only_->GetCdmContext()); |
1461 DCHECK(media_drm_bridge_cdm_context_); | 1458 DCHECK(media_drm_bridge_cdm_context_); |
1462 | 1459 |
1463 // Register CDM callbacks. The callbacks registered will be posted back to | 1460 // Register CDM callbacks. The callbacks registered will be posted back to |
1464 // this thread via BindToCurrentLoop. | 1461 // this thread via BindToCurrentLoop. |
1465 | 1462 |
1466 // Since |this| holds a reference to the |cdm_|, by the time the CDM is | 1463 // Since |this| holds a reference to the |cdm_|, by the time the CDM is |
1467 // destructed, UnregisterPlayer() must have been called and |this| has been | 1464 // destructed, UnregisterPlayer() must have been called and |this| has been |
1468 // destructed as well. So the |cdm_unset_cb| will never have a chance to be | 1465 // destructed as well. So the |cdm_unset_cb| will never have a chance to be |
1469 // called. | 1466 // called. |
1470 // TODO(xhwang): Remove |cdm_unset_cb| after it's not used on all platforms. | 1467 // TODO(xhwang): Remove |cdm_unset_cb| after it's not used on all platforms. |
1471 cdm_registration_id_ = media_drm_bridge_cdm_context_->RegisterPlayer( | 1468 cdm_registration_id_ = media_drm_bridge_cdm_context_->RegisterPlayer( |
1472 media::BindToCurrentLoop( | 1469 BindToCurrentLoop(base::Bind(&AndroidVideoDecodeAccelerator::OnKeyAdded, |
1473 base::Bind(&AndroidVideoDecodeAccelerator::OnKeyAdded, | 1470 weak_this_factory_.GetWeakPtr())), |
1474 weak_this_factory_.GetWeakPtr())), | |
1475 base::Bind(&base::DoNothing)); | 1471 base::Bind(&base::DoNothing)); |
1476 | 1472 |
1477 // Deferred initialization will continue in OnMediaCryptoReady(). | 1473 // Deferred initialization will continue in OnMediaCryptoReady(). |
1478 media_drm_bridge_cdm_context_->SetMediaCryptoReadyCB(media::BindToCurrentLoop( | 1474 media_drm_bridge_cdm_context_->SetMediaCryptoReadyCB(BindToCurrentLoop( |
1479 base::Bind(&AndroidVideoDecodeAccelerator::OnMediaCryptoReady, | 1475 base::Bind(&AndroidVideoDecodeAccelerator::OnMediaCryptoReady, |
1480 weak_this_factory_.GetWeakPtr()))); | 1476 weak_this_factory_.GetWeakPtr()))); |
1481 #endif // !defined(ENABLE_MOJO_MEDIA_IN_GPU_PROCESS) | 1477 #endif // !defined(ENABLE_MOJO_MEDIA_IN_GPU_PROCESS) |
1482 } | 1478 } |
1483 | 1479 |
1484 void AndroidVideoDecodeAccelerator::OnMediaCryptoReady( | 1480 void AndroidVideoDecodeAccelerator::OnMediaCryptoReady( |
1485 media::MediaDrmBridgeCdmContext::JavaObjectPtr media_crypto, | 1481 MediaDrmBridgeCdmContext::JavaObjectPtr media_crypto, |
1486 bool needs_protected_surface) { | 1482 bool needs_protected_surface) { |
1487 DVLOG(1) << __FUNCTION__; | 1483 DVLOG(1) << __FUNCTION__; |
1488 | 1484 |
1489 if (!media_crypto) { | 1485 if (!media_crypto) { |
1490 LOG(ERROR) << "MediaCrypto is not available, can't play encrypted stream."; | 1486 LOG(ERROR) << "MediaCrypto is not available, can't play encrypted stream."; |
1491 cdm_for_reference_holding_only_ = nullptr; | 1487 cdm_for_reference_holding_only_ = nullptr; |
1492 media_drm_bridge_cdm_context_ = nullptr; | 1488 media_drm_bridge_cdm_context_ = nullptr; |
1493 NotifyInitializationComplete(false); | 1489 NotifyInitializationComplete(false); |
1494 return; | 1490 return; |
1495 } | 1491 } |
(...skipping 18 matching lines...) |
1514 state_ = NO_ERROR; | 1510 state_ = NO_ERROR; |
1515 | 1511 |
1516 DoIOTask(true); | 1512 DoIOTask(true); |
1517 } | 1513 } |
1518 | 1514 |
1519 void AndroidVideoDecodeAccelerator::NotifyInitializationComplete(bool success) { | 1515 void AndroidVideoDecodeAccelerator::NotifyInitializationComplete(bool success) { |
1520 if (client_) | 1516 if (client_) |
1521 client_->NotifyInitializationComplete(success); | 1517 client_->NotifyInitializationComplete(success); |
1522 } | 1518 } |
1523 | 1519 |
1524 void AndroidVideoDecodeAccelerator::NotifyPictureReady( | 1520 void AndroidVideoDecodeAccelerator::NotifyPictureReady(const Picture& picture) { |
1525 const media::Picture& picture) { | |
1526 if (client_) | 1521 if (client_) |
1527 client_->PictureReady(picture); | 1522 client_->PictureReady(picture); |
1528 } | 1523 } |
1529 | 1524 |
1530 void AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer( | 1525 void AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer( |
1531 int input_buffer_id) { | 1526 int input_buffer_id) { |
1532 if (client_) | 1527 if (client_) |
1533 client_->NotifyEndOfBitstreamBuffer(input_buffer_id); | 1528 client_->NotifyEndOfBitstreamBuffer(input_buffer_id); |
1534 } | 1529 } |
1535 | 1530 |
1536 void AndroidVideoDecodeAccelerator::NotifyFlushDone() { | 1531 void AndroidVideoDecodeAccelerator::NotifyFlushDone() { |
1537 if (client_) | 1532 if (client_) |
1538 client_->NotifyFlushDone(); | 1533 client_->NotifyFlushDone(); |
1539 } | 1534 } |
1540 | 1535 |
1541 void AndroidVideoDecodeAccelerator::NotifyResetDone() { | 1536 void AndroidVideoDecodeAccelerator::NotifyResetDone() { |
1542 if (client_) | 1537 if (client_) |
1543 client_->NotifyResetDone(); | 1538 client_->NotifyResetDone(); |
1544 } | 1539 } |
1545 | 1540 |
1546 void AndroidVideoDecodeAccelerator::NotifyError( | 1541 void AndroidVideoDecodeAccelerator::NotifyError( |
1547 media::VideoDecodeAccelerator::Error error, | 1542 VideoDecodeAccelerator::Error error, |
1548 int token) { | 1543 int token) { |
1549 DVLOG(1) << __FUNCTION__ << ": error: " << error << " token: " << token | 1544 DVLOG(1) << __FUNCTION__ << ": error: " << error << " token: " << token |
1550 << " current: " << error_sequence_token_; | 1545 << " current: " << error_sequence_token_; |
1551 if (token != error_sequence_token_) | 1546 if (token != error_sequence_token_) |
1552 return; | 1547 return; |
1553 | 1548 |
1554 if (client_) | 1549 if (client_) |
1555 client_->NotifyError(error); | 1550 client_->NotifyError(error); |
1556 } | 1551 } |
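
NotifyError() drops errors whose token is stale, which is how a reset cancels errors that were already queued (and possibly delayed) for delivery. A standalone sketch of the token idea (illustrative names):

#include <cstdio>

struct ErrorGate {
  int current_token = 0;

  // Called when resetting: anything posted with an older token becomes a no-op.
  void InvalidatePending() { ++current_token; }

  // |token_at_post| is captured when the error is posted, possibly with a delay.
  void Deliver(int error, int token_at_post) {
    if (token_at_post != current_token)
      return;  // stale error from before the last reset; ignore it
    std::fprintf(stderr, "decoder error %d\n", error);
  }
};
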
1557 | 1552 |
(...skipping 19 matching lines...) |
1577 | 1572 |
1578 // static | 1573 // static |
1579 bool AndroidVideoDecodeAccelerator::UseDeferredRenderingStrategy( | 1574 bool AndroidVideoDecodeAccelerator::UseDeferredRenderingStrategy( |
1580 const gpu::GpuPreferences& gpu_preferences) { | 1575 const gpu::GpuPreferences& gpu_preferences) { |
1581 // TODO(liberato, watk): Figure out what we want to do about zero copy for | 1576 // TODO(liberato, watk): Figure out what we want to do about zero copy for |
1582 // fullscreen external SurfaceView in WebView. http://crbug.com/582170. | 1577 // fullscreen external SurfaceView in WebView. http://crbug.com/582170. |
1583 return !gpu_preferences.enable_threaded_texture_mailboxes; | 1578 return !gpu_preferences.enable_threaded_texture_mailboxes; |
1584 } | 1579 } |
1585 | 1580 |
1586 // static | 1581 // static |
1587 media::VideoDecodeAccelerator::Capabilities | 1582 VideoDecodeAccelerator::Capabilities |
1588 AndroidVideoDecodeAccelerator::GetCapabilities( | 1583 AndroidVideoDecodeAccelerator::GetCapabilities( |
1589 const gpu::GpuPreferences& gpu_preferences) { | 1584 const gpu::GpuPreferences& gpu_preferences) { |
1590 Capabilities capabilities; | 1585 Capabilities capabilities; |
1591 SupportedProfiles& profiles = capabilities.supported_profiles; | 1586 SupportedProfiles& profiles = capabilities.supported_profiles; |
1592 | 1587 |
1593 // Only support VP8 on Android versions where we don't have to synchronously | 1588 // Only support VP8 on Android versions where we don't have to synchronously |
1594 // tear down the MediaCodec on surface destruction because VP8 requires | 1589 // tear down the MediaCodec on surface destruction because VP8 requires |
1595 // us to completely drain the decoder before releasing it, which is difficult | 1590 // us to completely drain the decoder before releasing it, which is difficult |
1596 // and time consuming to do while the surface is being destroyed. | 1591 // and time consuming to do while the surface is being destroyed. |
1597 if (base::android::BuildInfo::GetInstance()->sdk_int() >= 18 && | 1592 if (base::android::BuildInfo::GetInstance()->sdk_int() >= 18 && |
1598 media::MediaCodecUtil::IsVp8DecoderAvailable()) { | 1593 MediaCodecUtil::IsVp8DecoderAvailable()) { |
1599 SupportedProfile profile; | 1594 SupportedProfile profile; |
1600 profile.profile = media::VP8PROFILE_ANY; | 1595 profile.profile = VP8PROFILE_ANY; |
1601 profile.min_resolution.SetSize(0, 0); | 1596 profile.min_resolution.SetSize(0, 0); |
1602 profile.max_resolution.SetSize(3840, 2160); | 1597 profile.max_resolution.SetSize(3840, 2160); |
1603 // If we know MediaCodec will just create a software codec, prefer our | 1598 // If we know MediaCodec will just create a software codec, prefer our |
1604 // internal software decoder instead. It's more up to date and secured | 1599 // internal software decoder instead. It's more up to date and secured |
1605 // within the renderer sandbox. However if the content is encrypted, we | 1600 // within the renderer sandbox. However if the content is encrypted, we |
1606 // must use MediaCodec anyway since MediaDrm offers no way to decrypt | 1601 // must use MediaCodec anyway since MediaDrm offers no way to decrypt |
1607 // the buffers and let us use our internal software decoders. | 1602 // the buffers and let us use our internal software decoders. |
1608 profile.encrypted_only = media::VideoCodecBridge::IsKnownUnaccelerated( | 1603 profile.encrypted_only = |
1609 media::kCodecVP8, media::MEDIA_CODEC_DECODER); | 1604 VideoCodecBridge::IsKnownUnaccelerated(kCodecVP8, MEDIA_CODEC_DECODER); |
1610 profiles.push_back(profile); | 1605 profiles.push_back(profile); |
1611 } | 1606 } |
1612 | 1607 |
1613 if (media::MediaCodecUtil::IsVp9DecoderAvailable()) { | 1608 if (MediaCodecUtil::IsVp9DecoderAvailable()) { |
1614 SupportedProfile profile; | 1609 SupportedProfile profile; |
1615 profile.min_resolution.SetSize(0, 0); | 1610 profile.min_resolution.SetSize(0, 0); |
1616 profile.max_resolution.SetSize(3840, 2160); | 1611 profile.max_resolution.SetSize(3840, 2160); |
1617 // If we know MediaCodec will just create a software codec, prefer our | 1612 // If we know MediaCodec will just create a software codec, prefer our |
1618 // internal software decoder instead. It's more up to date and secured | 1613 // internal software decoder instead. It's more up to date and secured |
1619 // within the renderer sandbox. However if the content is encrypted, we | 1614 // within the renderer sandbox. However if the content is encrypted, we |
1620 // must use MediaCodec anyway since MediaDrm offers no way to decrypt | 1615 // must use MediaCodec anyway since MediaDrm offers no way to decrypt |
1621 // the buffers and let us use our internal software decoders. | 1616 // the buffers and let us use our internal software decoders. |
1622 profile.encrypted_only = media::VideoCodecBridge::IsKnownUnaccelerated( | 1617 profile.encrypted_only = |
1623 media::kCodecVP9, media::MEDIA_CODEC_DECODER); | 1618 VideoCodecBridge::IsKnownUnaccelerated(kCodecVP9, MEDIA_CODEC_DECODER); |
1624 profile.profile = media::VP9PROFILE_PROFILE0; | 1619 profile.profile = VP9PROFILE_PROFILE0; |
1625 profiles.push_back(profile); | 1620 profiles.push_back(profile); |
1626 profile.profile = media::VP9PROFILE_PROFILE1; | 1621 profile.profile = VP9PROFILE_PROFILE1; |
1627 profiles.push_back(profile); | 1622 profiles.push_back(profile); |
1628 profile.profile = media::VP9PROFILE_PROFILE2; | 1623 profile.profile = VP9PROFILE_PROFILE2; |
1629 profiles.push_back(profile); | 1624 profiles.push_back(profile); |
1630 profile.profile = media::VP9PROFILE_PROFILE3; | 1625 profile.profile = VP9PROFILE_PROFILE3; |
1631 profiles.push_back(profile); | 1626 profiles.push_back(profile); |
1632 } | 1627 } |
1633 | 1628 |
1634 for (const auto& supported_profile : kSupportedH264Profiles) { | 1629 for (const auto& supported_profile : kSupportedH264Profiles) { |
1635 SupportedProfile profile; | 1630 SupportedProfile profile; |
1636 profile.profile = supported_profile; | 1631 profile.profile = supported_profile; |
1637 profile.min_resolution.SetSize(0, 0); | 1632 profile.min_resolution.SetSize(0, 0); |
1638 // Advertise support for 4k and let the MediaCodec fail when decoding if it | 1633 // Advertise support for 4k and let the MediaCodec fail when decoding if it |
1639 // doesn't support the resolution. It's assumed that consumers won't have | 1634 // doesn't support the resolution. It's assumed that consumers won't have |
1640 // software fallback for H264 on Android anyway. | 1635 // software fallback for H264 on Android anyway. |
1641 profile.max_resolution.SetSize(3840, 2160); | 1636 profile.max_resolution.SetSize(3840, 2160); |
1642 profiles.push_back(profile); | 1637 profiles.push_back(profile); |
1643 } | 1638 } |
1644 | 1639 |
1645 capabilities.flags = media::VideoDecodeAccelerator::Capabilities:: | 1640 capabilities.flags = |
1646 SUPPORTS_DEFERRED_INITIALIZATION; | 1641 VideoDecodeAccelerator::Capabilities::SUPPORTS_DEFERRED_INITIALIZATION; |
1647 if (UseDeferredRenderingStrategy(gpu_preferences)) { | 1642 if (UseDeferredRenderingStrategy(gpu_preferences)) { |
1648 capabilities.flags |= media::VideoDecodeAccelerator::Capabilities:: | 1643 capabilities.flags |= VideoDecodeAccelerator::Capabilities:: |
1649 NEEDS_ALL_PICTURE_BUFFERS_TO_DECODE; | 1644 NEEDS_ALL_PICTURE_BUFFERS_TO_DECODE; |
1650 if (media::MediaCodecUtil::IsSurfaceViewOutputSupported()) { | 1645 if (MediaCodecUtil::IsSurfaceViewOutputSupported()) { |
1651 capabilities.flags |= media::VideoDecodeAccelerator::Capabilities:: | 1646 capabilities.flags |= VideoDecodeAccelerator::Capabilities:: |
1652 SUPPORTS_EXTERNAL_OUTPUT_SURFACE; | 1647 SUPPORTS_EXTERNAL_OUTPUT_SURFACE; |
1653 } | 1648 } |
1654 } | 1649 } |
1655 | 1650 |
1656 return capabilities; | 1651 return capabilities; |
1657 } | 1652 } |
1658 | 1653 |
1659 } // namespace media | 1654 } // namespace media |