OLD | NEW |
1 // Copyright 2014 The Chromium Authors. All rights reserved. | 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "media/gpu/v4l2_video_decode_accelerator.h" | 5 #include "media/gpu/v4l2_video_decode_accelerator.h" |
6 | 6 |
7 #include <dlfcn.h> | 7 #include <dlfcn.h> |
8 #include <errno.h> | 8 #include <errno.h> |
9 #include <fcntl.h> | 9 #include <fcntl.h> |
10 #include <linux/videodev2.h> | 10 #include <linux/videodev2.h> |
(...skipping 67 matching lines...)
78 }; | 78 }; |
79 | 79 |
80 struct V4L2VideoDecodeAccelerator::EGLSyncKHRRef { | 80 struct V4L2VideoDecodeAccelerator::EGLSyncKHRRef { |
81 EGLSyncKHRRef(EGLDisplay egl_display, EGLSyncKHR egl_sync); | 81 EGLSyncKHRRef(EGLDisplay egl_display, EGLSyncKHR egl_sync); |
82 ~EGLSyncKHRRef(); | 82 ~EGLSyncKHRRef(); |
83 EGLDisplay const egl_display; | 83 EGLDisplay const egl_display; |
84 EGLSyncKHR egl_sync; | 84 EGLSyncKHR egl_sync; |
85 }; | 85 }; |
86 | 86 |
87 struct V4L2VideoDecodeAccelerator::PictureRecord { | 87 struct V4L2VideoDecodeAccelerator::PictureRecord { |
88 PictureRecord(bool cleared, const media::Picture& picture); | 88 PictureRecord(bool cleared, const Picture& picture); |
89 ~PictureRecord(); | 89 ~PictureRecord(); |
90 bool cleared; // Whether the texture is cleared and safe to render from. | 90 bool cleared; // Whether the texture is cleared and safe to render from. |
91 media::Picture picture; // The decoded picture. | 91 Picture picture; // The decoded picture. |
92 }; | 92 }; |
93 | 93 |
94 V4L2VideoDecodeAccelerator::BitstreamBufferRef::BitstreamBufferRef( | 94 V4L2VideoDecodeAccelerator::BitstreamBufferRef::BitstreamBufferRef( |
95 base::WeakPtr<Client>& client, | 95 base::WeakPtr<Client>& client, |
96 scoped_refptr<base::SingleThreadTaskRunner>& client_task_runner, | 96 scoped_refptr<base::SingleThreadTaskRunner>& client_task_runner, |
97 std::unique_ptr<SharedMemoryRegion> shm, | 97 std::unique_ptr<SharedMemoryRegion> shm, |
98 int32_t input_id) | 98 int32_t input_id) |
99 : client(client), | 99 : client(client), |
100 client_task_runner(client_task_runner), | 100 client_task_runner(client_task_runner), |
101 shm(std::move(shm)), | 101 shm(std::move(shm)), |
(...skipping 27 matching lines...)
129 | 129 |
130 V4L2VideoDecodeAccelerator::OutputRecord::OutputRecord() | 130 V4L2VideoDecodeAccelerator::OutputRecord::OutputRecord() |
131 : state(kFree), | 131 : state(kFree), |
132 egl_image(EGL_NO_IMAGE_KHR), | 132 egl_image(EGL_NO_IMAGE_KHR), |
133 egl_sync(EGL_NO_SYNC_KHR), | 133 egl_sync(EGL_NO_SYNC_KHR), |
134 picture_id(-1), | 134 picture_id(-1), |
135 cleared(false) {} | 135 cleared(false) {} |
136 | 136 |
137 V4L2VideoDecodeAccelerator::OutputRecord::~OutputRecord() {} | 137 V4L2VideoDecodeAccelerator::OutputRecord::~OutputRecord() {} |
138 | 138 |
139 V4L2VideoDecodeAccelerator::PictureRecord::PictureRecord( | 139 V4L2VideoDecodeAccelerator::PictureRecord::PictureRecord(bool cleared, |
140 bool cleared, | 140 const Picture& picture) |
141 const media::Picture& picture) | |
142 : cleared(cleared), picture(picture) {} | 141 : cleared(cleared), picture(picture) {} |
143 | 142 |
144 V4L2VideoDecodeAccelerator::PictureRecord::~PictureRecord() {} | 143 V4L2VideoDecodeAccelerator::PictureRecord::~PictureRecord() {} |
145 | 144 |
146 V4L2VideoDecodeAccelerator::V4L2VideoDecodeAccelerator( | 145 V4L2VideoDecodeAccelerator::V4L2VideoDecodeAccelerator( |
147 EGLDisplay egl_display, | 146 EGLDisplay egl_display, |
148 const GetGLContextCallback& get_gl_context_cb, | 147 const GetGLContextCallback& get_gl_context_cb, |
149 const MakeGLContextCurrentCallback& make_context_current_cb, | 148 const MakeGLContextCurrentCallback& make_context_current_cb, |
150 const scoped_refptr<V4L2Device>& device) | 149 const scoped_refptr<V4L2Device>& device) |
151 : child_task_runner_(base::ThreadTaskRunnerHandle::Get()), | 150 : child_task_runner_(base::ThreadTaskRunnerHandle::Get()), |
(...skipping 13 matching lines...)
165 output_buffer_queued_count_(0), | 164 output_buffer_queued_count_(0), |
166 output_dpb_size_(0), | 165 output_dpb_size_(0), |
167 output_planes_count_(0), | 166 output_planes_count_(0), |
168 picture_clearing_count_(0), | 167 picture_clearing_count_(0), |
169 pictures_assigned_(base::WaitableEvent::ResetPolicy::AUTOMATIC, | 168 pictures_assigned_(base::WaitableEvent::ResetPolicy::AUTOMATIC, |
170 base::WaitableEvent::InitialState::NOT_SIGNALED), | 169 base::WaitableEvent::InitialState::NOT_SIGNALED), |
171 device_poll_thread_("V4L2DevicePollThread"), | 170 device_poll_thread_("V4L2DevicePollThread"), |
172 egl_display_(egl_display), | 171 egl_display_(egl_display), |
173 get_gl_context_cb_(get_gl_context_cb), | 172 get_gl_context_cb_(get_gl_context_cb), |
174 make_context_current_cb_(make_context_current_cb), | 173 make_context_current_cb_(make_context_current_cb), |
175 video_profile_(media::VIDEO_CODEC_PROFILE_UNKNOWN), | 174 video_profile_(VIDEO_CODEC_PROFILE_UNKNOWN), |
176 output_format_fourcc_(0), | 175 output_format_fourcc_(0), |
177 egl_image_format_fourcc_(0), | 176 egl_image_format_fourcc_(0), |
178 egl_image_planes_count_(0), | 177 egl_image_planes_count_(0), |
179 weak_this_factory_(this) { | 178 weak_this_factory_(this) { |
180 weak_this_ = weak_this_factory_.GetWeakPtr(); | 179 weak_this_ = weak_this_factory_.GetWeakPtr(); |
181 } | 180 } |
182 | 181 |
183 V4L2VideoDecodeAccelerator::~V4L2VideoDecodeAccelerator() { | 182 V4L2VideoDecodeAccelerator::~V4L2VideoDecodeAccelerator() { |
184 DCHECK(!decoder_thread_.IsRunning()); | 183 DCHECK(!decoder_thread_.IsRunning()); |
185 DCHECK(!device_poll_thread_.IsRunning()); | 184 DCHECK(!device_poll_thread_.IsRunning()); |
(...skipping 79 matching lines...)
265 | 264 |
266 if (!SetupFormats()) | 265 if (!SetupFormats()) |
267 return false; | 266 return false; |
268 | 267 |
269 // Subscribe to the resolution change event. | 268 // Subscribe to the resolution change event. |
270 struct v4l2_event_subscription sub; | 269 struct v4l2_event_subscription sub; |
271 memset(&sub, 0, sizeof(sub)); | 270 memset(&sub, 0, sizeof(sub)); |
272 sub.type = V4L2_EVENT_SOURCE_CHANGE; | 271 sub.type = V4L2_EVENT_SOURCE_CHANGE; |
273 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_SUBSCRIBE_EVENT, &sub); | 272 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_SUBSCRIBE_EVENT, &sub); |
274 | 273 |
275 if (video_profile_ >= media::H264PROFILE_MIN && | 274 if (video_profile_ >= H264PROFILE_MIN && video_profile_ <= H264PROFILE_MAX) { |
276 video_profile_ <= media::H264PROFILE_MAX) { | 275 decoder_h264_parser_.reset(new H264Parser()); |
277 decoder_h264_parser_.reset(new media::H264Parser()); | |
278 } | 276 } |
279 | 277 |
280 if (!CreateInputBuffers()) | 278 if (!CreateInputBuffers()) |
281 return false; | 279 return false; |
282 | 280 |
283 if (!decoder_thread_.Start()) { | 281 if (!decoder_thread_.Start()) { |
284 LOG(ERROR) << "Initialize(): decoder thread failed to start"; | 282 LOG(ERROR) << "Initialize(): decoder thread failed to start"; |
285 return false; | 283 return false; |
286 } | 284 } |
287 | 285 |
288 decoder_state_ = kInitialized; | 286 decoder_state_ = kInitialized; |
289 | 287 |
290 // StartDevicePoll will NOTIFY_ERROR on failure, so IgnoreResult is fine here. | 288 // StartDevicePoll will NOTIFY_ERROR on failure, so IgnoreResult is fine here. |
291 decoder_thread_.message_loop()->PostTask( | 289 decoder_thread_.message_loop()->PostTask( |
292 FROM_HERE, base::Bind(base::IgnoreResult( | 290 FROM_HERE, base::Bind(base::IgnoreResult( |
293 &V4L2VideoDecodeAccelerator::StartDevicePoll), | 291 &V4L2VideoDecodeAccelerator::StartDevicePoll), |
294 base::Unretained(this))); | 292 base::Unretained(this))); |
295 | 293 |
296 return true; | 294 return true; |
297 } | 295 } |
298 | 296 |
299 void V4L2VideoDecodeAccelerator::Decode( | 297 void V4L2VideoDecodeAccelerator::Decode( |
300 const media::BitstreamBuffer& bitstream_buffer) { | 298 const BitstreamBuffer& bitstream_buffer) { |
301 DVLOG(1) << "Decode(): input_id=" << bitstream_buffer.id() | 299 DVLOG(1) << "Decode(): input_id=" << bitstream_buffer.id() |
302 << ", size=" << bitstream_buffer.size(); | 300 << ", size=" << bitstream_buffer.size(); |
303 DCHECK(decode_task_runner_->BelongsToCurrentThread()); | 301 DCHECK(decode_task_runner_->BelongsToCurrentThread()); |
304 | 302 |
305 if (bitstream_buffer.id() < 0) { | 303 if (bitstream_buffer.id() < 0) { |
306 LOG(ERROR) << "Invalid bitstream_buffer, id: " << bitstream_buffer.id(); | 304 LOG(ERROR) << "Invalid bitstream_buffer, id: " << bitstream_buffer.id(); |
307 if (base::SharedMemory::IsHandleValid(bitstream_buffer.handle())) | 305 if (base::SharedMemory::IsHandleValid(bitstream_buffer.handle())) |
308 base::SharedMemory::CloseHandle(bitstream_buffer.handle()); | 306 base::SharedMemory::CloseHandle(bitstream_buffer.handle()); |
309 NOTIFY_ERROR(INVALID_ARGUMENT); | 307 NOTIFY_ERROR(INVALID_ARGUMENT); |
310 return; | 308 return; |
311 } | 309 } |
312 | 310 |
313 // DecodeTask() will take care of running a DecodeBufferTask(). | 311 // DecodeTask() will take care of running a DecodeBufferTask(). |
314 decoder_thread_.message_loop()->PostTask( | 312 decoder_thread_.message_loop()->PostTask( |
315 FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::DecodeTask, | 313 FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::DecodeTask, |
316 base::Unretained(this), bitstream_buffer)); | 314 base::Unretained(this), bitstream_buffer)); |
317 } | 315 } |
318 | 316 |
319 void V4L2VideoDecodeAccelerator::AssignPictureBuffers( | 317 void V4L2VideoDecodeAccelerator::AssignPictureBuffers( |
320 const std::vector<media::PictureBuffer>& buffers) { | 318 const std::vector<PictureBuffer>& buffers) { |
321 DVLOG(3) << "AssignPictureBuffers(): buffer_count=" << buffers.size(); | 319 DVLOG(3) << "AssignPictureBuffers(): buffer_count=" << buffers.size(); |
322 DCHECK(child_task_runner_->BelongsToCurrentThread()); | 320 DCHECK(child_task_runner_->BelongsToCurrentThread()); |
323 | 321 |
324 const uint32_t req_buffer_count = | 322 const uint32_t req_buffer_count = |
325 output_dpb_size_ + kDpbOutputBufferExtraCount; | 323 output_dpb_size_ + kDpbOutputBufferExtraCount; |
326 | 324 |
327 if (buffers.size() < req_buffer_count) { | 325 if (buffers.size() < req_buffer_count) { |
328 LOG(ERROR) << "AssignPictureBuffers(): Failed to provide requested picture" | 326 LOG(ERROR) << "AssignPictureBuffers(): Failed to provide requested picture" |
329 << " buffers. (Got " << buffers.size() | 327 << " buffers. (Got " << buffers.size() |
330 << ", requested " << req_buffer_count << ")"; | 328 << ", requested " << req_buffer_count << ")"; |
(...skipping 183 matching lines...)
514 | 512 |
515 bool V4L2VideoDecodeAccelerator::TryToSetupDecodeOnSeparateThread( | 513 bool V4L2VideoDecodeAccelerator::TryToSetupDecodeOnSeparateThread( |
516 const base::WeakPtr<Client>& decode_client, | 514 const base::WeakPtr<Client>& decode_client, |
517 const scoped_refptr<base::SingleThreadTaskRunner>& decode_task_runner) { | 515 const scoped_refptr<base::SingleThreadTaskRunner>& decode_task_runner) { |
518 decode_client_ = decode_client; | 516 decode_client_ = decode_client; |
519 decode_task_runner_ = decode_task_runner; | 517 decode_task_runner_ = decode_task_runner; |
520 return true; | 518 return true; |
521 } | 519 } |
522 | 520 |
523 // static | 521 // static |
524 media::VideoDecodeAccelerator::SupportedProfiles | 522 VideoDecodeAccelerator::SupportedProfiles |
525 V4L2VideoDecodeAccelerator::GetSupportedProfiles() { | 523 V4L2VideoDecodeAccelerator::GetSupportedProfiles() { |
526 scoped_refptr<V4L2Device> device = V4L2Device::Create(V4L2Device::kDecoder); | 524 scoped_refptr<V4L2Device> device = V4L2Device::Create(V4L2Device::kDecoder); |
527 if (!device) | 525 if (!device) |
528 return SupportedProfiles(); | 526 return SupportedProfiles(); |
529 | 527 |
530 return device->GetSupportedDecodeProfiles(arraysize(supported_input_fourccs_), | 528 return device->GetSupportedDecodeProfiles(arraysize(supported_input_fourccs_), |
531 supported_input_fourccs_); | 529 supported_input_fourccs_); |
532 } | 530 } |
533 | 531 |
534 void V4L2VideoDecodeAccelerator::DecodeTask( | 532 void V4L2VideoDecodeAccelerator::DecodeTask( |
535 const media::BitstreamBuffer& bitstream_buffer) { | 533 const BitstreamBuffer& bitstream_buffer) { |
536 DVLOG(3) << "DecodeTask(): input_id=" << bitstream_buffer.id(); | 534 DVLOG(3) << "DecodeTask(): input_id=" << bitstream_buffer.id(); |
537 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current()); | 535 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current()); |
538 DCHECK_NE(decoder_state_, kUninitialized); | 536 DCHECK_NE(decoder_state_, kUninitialized); |
539 TRACE_EVENT1("Video Decoder", "V4L2VDA::DecodeTask", "input_id", | 537 TRACE_EVENT1("Video Decoder", "V4L2VDA::DecodeTask", "input_id", |
540 bitstream_buffer.id()); | 538 bitstream_buffer.id()); |
541 | 539 |
542 std::unique_ptr<BitstreamBufferRef> bitstream_record(new BitstreamBufferRef( | 540 std::unique_ptr<BitstreamBufferRef> bitstream_record(new BitstreamBufferRef( |
543 decode_client_, decode_task_runner_, | 541 decode_client_, decode_task_runner_, |
544 std::unique_ptr<SharedMemoryRegion>( | 542 std::unique_ptr<SharedMemoryRegion>( |
545 new SharedMemoryRegion(bitstream_buffer, true)), | 543 new SharedMemoryRegion(bitstream_buffer, true)), |
(...skipping 143 matching lines...)
689 // BitstreamBufferRef destructor calls NotifyEndOfBitstreamBuffer(). | 687 // BitstreamBufferRef destructor calls NotifyEndOfBitstreamBuffer(). |
690 decoder_current_bitstream_buffer_.reset(); | 688 decoder_current_bitstream_buffer_.reset(); |
691 } | 689 } |
692 ScheduleDecodeBufferTaskIfNeeded(); | 690 ScheduleDecodeBufferTaskIfNeeded(); |
693 } | 691 } |
694 } | 692 } |
695 | 693 |
696 bool V4L2VideoDecodeAccelerator::AdvanceFrameFragment(const uint8_t* data, | 694 bool V4L2VideoDecodeAccelerator::AdvanceFrameFragment(const uint8_t* data, |
697 size_t size, | 695 size_t size, |
698 size_t* endpos) { | 696 size_t* endpos) { |
699 if (video_profile_ >= media::H264PROFILE_MIN && | 697 if (video_profile_ >= H264PROFILE_MIN && video_profile_ <= H264PROFILE_MAX) { |
700 video_profile_ <= media::H264PROFILE_MAX) { | |
701 // For H264, we need to feed HW one frame at a time. This is going to take | 698 // For H264, we need to feed HW one frame at a time. This is going to take |
702 // some parsing of our input stream. | 699 // some parsing of our input stream. |
703 decoder_h264_parser_->SetStream(data, size); | 700 decoder_h264_parser_->SetStream(data, size); |
704 media::H264NALU nalu; | 701 H264NALU nalu; |
705 media::H264Parser::Result result; | 702 H264Parser::Result result; |
706 *endpos = 0; | 703 *endpos = 0; |
707 | 704 |
708 // Keep on peeking the next NALs while they don't indicate a frame | 705 // Keep on peeking the next NALs while they don't indicate a frame |
709 // boundary. | 706 // boundary. |
710 for (;;) { | 707 for (;;) { |
711 bool end_of_frame = false; | 708 bool end_of_frame = false; |
712 result = decoder_h264_parser_->AdvanceToNextNALU(&nalu); | 709 result = decoder_h264_parser_->AdvanceToNextNALU(&nalu); |
713 if (result == media::H264Parser::kInvalidStream || | 710 if (result == H264Parser::kInvalidStream || |
714 result == media::H264Parser::kUnsupportedStream) | 711 result == H264Parser::kUnsupportedStream) |
715 return false; | 712 return false; |
716 if (result == media::H264Parser::kEOStream) { | 713 if (result == H264Parser::kEOStream) { |
717 // We've reached the end of the buffer before finding a frame boundary. | 714 // We've reached the end of the buffer before finding a frame boundary. |
718 decoder_partial_frame_pending_ = true; | 715 decoder_partial_frame_pending_ = true; |
719 return true; | 716 return true; |
720 } | 717 } |
721 switch (nalu.nal_unit_type) { | 718 switch (nalu.nal_unit_type) { |
722 case media::H264NALU::kNonIDRSlice: | 719 case H264NALU::kNonIDRSlice: |
723 case media::H264NALU::kIDRSlice: | 720 case H264NALU::kIDRSlice: |
724 if (nalu.size < 1) | 721 if (nalu.size < 1) |
725 return false; | 722 return false; |
726 // For these two, if the "first_mb_in_slice" field is zero, start a | 723 // For these two, if the "first_mb_in_slice" field is zero, start a |
727 // new frame and return. This field is Exp-Golomb coded starting on | 724 // new frame and return. This field is Exp-Golomb coded starting on |
728 // the eighth data bit of the NAL; a zero value is encoded with a | 725 // the eighth data bit of the NAL; a zero value is encoded with a |
729 // leading '1' bit in the byte, which we can detect as the byte being | 726 // leading '1' bit in the byte, which we can detect as the byte being |
730 // (unsigned) greater than or equal to 0x80. | 727 // (unsigned) greater than or equal to 0x80. |
731 if (nalu.data[1] >= 0x80) { | 728 if (nalu.data[1] >= 0x80) { |
732 end_of_frame = true; | 729 end_of_frame = true; |
733 break; | 730 break; |
734 } | 731 } |
735 break; | 732 break; |
736 case media::H264NALU::kSEIMessage: | 733 case H264NALU::kSEIMessage: |
737 case media::H264NALU::kSPS: | 734 case H264NALU::kSPS: |
738 case media::H264NALU::kPPS: | 735 case H264NALU::kPPS: |
739 case media::H264NALU::kAUD: | 736 case H264NALU::kAUD: |
740 case media::H264NALU::kEOSeq: | 737 case H264NALU::kEOSeq: |
741 case media::H264NALU::kEOStream: | 738 case H264NALU::kEOStream: |
742 case media::H264NALU::kReserved14: | 739 case H264NALU::kReserved14: |
743 case media::H264NALU::kReserved15: | 740 case H264NALU::kReserved15: |
744 case media::H264NALU::kReserved16: | 741 case H264NALU::kReserved16: |
745 case media::H264NALU::kReserved17: | 742 case H264NALU::kReserved17: |
746 case media::H264NALU::kReserved18: | 743 case H264NALU::kReserved18: |
747 // These unconditionally signal a frame boundary. | 744 // These unconditionally signal a frame boundary. |
748 end_of_frame = true; | 745 end_of_frame = true; |
749 break; | 746 break; |
750 default: | 747 default: |
751 // For all others, keep going. | 748 // For all others, keep going. |
752 break; | 749 break; |
753 } | 750 } |
754 if (end_of_frame) { | 751 if (end_of_frame) { |
755 if (!decoder_partial_frame_pending_ && *endpos == 0) { | 752 if (!decoder_partial_frame_pending_ && *endpos == 0) { |
756 // The frame was previously restarted, and we haven't filled the | 753 // The frame was previously restarted, and we haven't filled the |
757 // current frame with any contents yet. Start the new frame here and | 754 // current frame with any contents yet. Start the new frame here and |
758 // continue parsing NALs. | 755 // continue parsing NALs. |
759 } else { | 756 } else { |
760 // The frame wasn't previously restarted and/or we have contents for | 757 // The frame wasn't previously restarted and/or we have contents for |
761 // the current frame; signal the start of a new frame here: we don't | 758 // the current frame; signal the start of a new frame here: we don't |
762 // have a partial frame anymore. | 759 // have a partial frame anymore. |
763 decoder_partial_frame_pending_ = false; | 760 decoder_partial_frame_pending_ = false; |
764 return true; | 761 return true; |
765 } | 762 } |
766 } | 763 } |
767 *endpos = (nalu.data + nalu.size) - data; | 764 *endpos = (nalu.data + nalu.size) - data; |
768 } | 765 } |
769 NOTREACHED(); | 766 NOTREACHED(); |
770 return false; | 767 return false; |
771 } else { | 768 } else { |
772 DCHECK_GE(video_profile_, media::VP8PROFILE_MIN); | 769 DCHECK_GE(video_profile_, VP8PROFILE_MIN); |
773 DCHECK_LE(video_profile_, media::VP9PROFILE_MAX); | 770 DCHECK_LE(video_profile_, VP9PROFILE_MAX); |
774 // For VP8/9, we can just dump the entire buffer. No fragmentation needed, | 771 // For VP8/9, we can just dump the entire buffer. No fragmentation needed, |
775 // and we never return a partial frame. | 772 // and we never return a partial frame. |
776 *endpos = size; | 773 *endpos = size; |
777 decoder_partial_frame_pending_ = false; | 774 decoder_partial_frame_pending_ = false; |
778 return true; | 775 return true; |
779 } | 776 } |
780 } | 777 } |
781 | 778 |
782 void V4L2VideoDecodeAccelerator::ScheduleDecodeBufferTaskIfNeeded() { | 779 void V4L2VideoDecodeAccelerator::ScheduleDecodeBufferTaskIfNeeded() { |
783 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current()); | 780 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current()); |
(...skipping 385 matching lines...)
1169 DCHECK_GE(bitstream_buffer_id, 0); | 1166 DCHECK_GE(bitstream_buffer_id, 0); |
1170 DVLOG(3) << "Dequeue output buffer: dqbuf index=" << dqbuf.index | 1167 DVLOG(3) << "Dequeue output buffer: dqbuf index=" << dqbuf.index |
1171 << " bitstream input_id=" << bitstream_buffer_id; | 1168 << " bitstream input_id=" << bitstream_buffer_id; |
1172 if (image_processor_device_) { | 1169 if (image_processor_device_) { |
1173 output_record.state = kAtProcessor; | 1170 output_record.state = kAtProcessor; |
1174 image_processor_bitstream_buffer_ids_.push(bitstream_buffer_id); | 1171 image_processor_bitstream_buffer_ids_.push(bitstream_buffer_id); |
1175 std::vector<int> fds; | 1172 std::vector<int> fds; |
1176 for (auto& fd : output_record.fds) { | 1173 for (auto& fd : output_record.fds) { |
1177 fds.push_back(fd.get()); | 1174 fds.push_back(fd.get()); |
1178 } | 1175 } |
1179 scoped_refptr<media::VideoFrame> frame = | 1176 scoped_refptr<VideoFrame> frame = VideoFrame::WrapExternalDmabufs( |
1180 media::VideoFrame::WrapExternalDmabufs( | 1177 V4L2Device::V4L2PixFmtToVideoPixelFormat(output_format_fourcc_), |
1181 V4L2Device::V4L2PixFmtToVideoPixelFormat(output_format_fourcc_), | 1178 coded_size_, gfx::Rect(visible_size_), visible_size_, fds, |
1182 coded_size_, gfx::Rect(visible_size_), visible_size_, fds, | 1179 base::TimeDelta()); |
1183 base::TimeDelta()); | |
1184 // Unretained is safe because |this| owns image processor and there will | 1180 // Unretained is safe because |this| owns image processor and there will |
1185 // be no callbacks after processor destroys. Also, this class ensures it | 1181 // be no callbacks after processor destroys. Also, this class ensures it |
1186 // is safe to post a task from child thread to decoder thread using | 1182 // is safe to post a task from child thread to decoder thread using |
1187 // Unretained. | 1183 // Unretained. |
1188 image_processor_->Process( | 1184 image_processor_->Process( |
1189 frame, dqbuf.index, | 1185 frame, dqbuf.index, |
1190 media::BindToCurrentLoop( | 1186 BindToCurrentLoop( |
1191 base::Bind(&V4L2VideoDecodeAccelerator::FrameProcessed, | 1187 base::Bind(&V4L2VideoDecodeAccelerator::FrameProcessed, |
1192 base::Unretained(this), bitstream_buffer_id))); | 1188 base::Unretained(this), bitstream_buffer_id))); |
1193 } else { | 1189 } else { |
1194 output_record.state = kAtClient; | 1190 output_record.state = kAtClient; |
1195 decoder_frames_at_client_++; | 1191 decoder_frames_at_client_++; |
1196 const media::Picture picture(output_record.picture_id, | 1192 const Picture picture(output_record.picture_id, bitstream_buffer_id, |
1197 bitstream_buffer_id, | 1193 gfx::Rect(visible_size_), false); |
1198 gfx::Rect(visible_size_), false); | |
1199 pending_picture_ready_.push( | 1194 pending_picture_ready_.push( |
1200 PictureRecord(output_record.cleared, picture)); | 1195 PictureRecord(output_record.cleared, picture)); |
1201 SendPictureReady(); | 1196 SendPictureReady(); |
1202 output_record.cleared = true; | 1197 output_record.cleared = true; |
1203 } | 1198 } |
1204 } | 1199 } |
1205 } | 1200 } |
1206 | 1201 |
1207 NotifyFlushDoneIfNeeded(); | 1202 NotifyFlushDoneIfNeeded(); |
1208 } | 1203 } |
(...skipping 270 matching lines...)
1479 | 1474 |
1480 if (decoder_state_ == kError) { | 1475 if (decoder_state_ == kError) { |
1481 DVLOG(2) << "ResetDoneTask(): early out: kError state"; | 1476 DVLOG(2) << "ResetDoneTask(): early out: kError state"; |
1482 return; | 1477 return; |
1483 } | 1478 } |
1484 | 1479 |
1485 if (!StartDevicePoll()) | 1480 if (!StartDevicePoll()) |
1486 return; | 1481 return; |
1487 | 1482 |
1488 // Reset format-specific bits. | 1483 // Reset format-specific bits. |
1489 if (video_profile_ >= media::H264PROFILE_MIN && | 1484 if (video_profile_ >= H264PROFILE_MIN && video_profile_ <= H264PROFILE_MAX) { |
1490 video_profile_ <= media::H264PROFILE_MAX) { | 1485 decoder_h264_parser_.reset(new H264Parser()); |
1491 decoder_h264_parser_.reset(new media::H264Parser()); | |
1492 } | 1486 } |
1493 | 1487 |
1494 // Jobs drained, we're finished resetting. | 1488 // Jobs drained, we're finished resetting. |
1495 DCHECK_EQ(decoder_state_, kResetting); | 1489 DCHECK_EQ(decoder_state_, kResetting); |
1496 if (output_buffer_map_.empty()) { | 1490 if (output_buffer_map_.empty()) { |
1497 // We must have gotten Reset() before we had a chance to request buffers | 1491 // We must have gotten Reset() before we had a chance to request buffers |
1498 // from the client. | 1492 // from the client. |
1499 decoder_state_ = kInitialized; | 1493 decoder_state_ = kInitialized; |
1500 } else { | 1494 } else { |
1501 decoder_state_ = kAfterReset; | 1495 decoder_state_ = kAfterReset; |
(...skipping 650 matching lines...)
2152 base::Unretained(this))); | 2146 base::Unretained(this))); |
2153 } | 2147 } |
2154 | 2148 |
2155 void V4L2VideoDecodeAccelerator::SendPictureReady() { | 2149 void V4L2VideoDecodeAccelerator::SendPictureReady() { |
2156 DVLOG(3) << "SendPictureReady()"; | 2150 DVLOG(3) << "SendPictureReady()"; |
2157 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current()); | 2151 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current()); |
2158 bool resetting_or_flushing = | 2152 bool resetting_or_flushing = |
2159 (decoder_state_ == kResetting || decoder_flushing_); | 2153 (decoder_state_ == kResetting || decoder_flushing_); |
2160 while (pending_picture_ready_.size() > 0) { | 2154 while (pending_picture_ready_.size() > 0) { |
2161 bool cleared = pending_picture_ready_.front().cleared; | 2155 bool cleared = pending_picture_ready_.front().cleared; |
2162 const media::Picture& picture = pending_picture_ready_.front().picture; | 2156 const Picture& picture = pending_picture_ready_.front().picture; |
2163 if (cleared && picture_clearing_count_ == 0) { | 2157 if (cleared && picture_clearing_count_ == 0) { |
2164 // This picture is cleared. It can be posted to a thread different than | 2158 // This picture is cleared. It can be posted to a thread different than |
2165 // the main GPU thread to reduce latency. This should be the case after | 2159 // the main GPU thread to reduce latency. This should be the case after |
2166 // all pictures are cleared at the beginning. | 2160 // all pictures are cleared at the beginning. |
2167 decode_task_runner_->PostTask( | 2161 decode_task_runner_->PostTask( |
2168 FROM_HERE, | 2162 FROM_HERE, |
2169 base::Bind(&Client::PictureReady, decode_client_, picture)); | 2163 base::Bind(&Client::PictureReady, decode_client_, picture)); |
2170 pending_picture_ready_.pop(); | 2164 pending_picture_ready_.pop(); |
2171 } else if (!cleared || resetting_or_flushing) { | 2165 } else if (!cleared || resetting_or_flushing) { |
2172 DVLOG(3) << "SendPictureReady()" | 2166 DVLOG(3) << "SendPictureReady()" |
(...skipping 42 matching lines...)
2215 DCHECK_EQ(output_record.state, kAtProcessor); | 2209 DCHECK_EQ(output_record.state, kAtProcessor); |
2216 if (!image_processor_bitstream_buffer_ids_.empty() && | 2210 if (!image_processor_bitstream_buffer_ids_.empty() && |
2217 image_processor_bitstream_buffer_ids_.front() == bitstream_buffer_id) { | 2211 image_processor_bitstream_buffer_ids_.front() == bitstream_buffer_id) { |
2218 DVLOG(3) << __func__ << ": picture_id=" << output_record.picture_id; | 2212 DVLOG(3) << __func__ << ": picture_id=" << output_record.picture_id; |
2219 DCHECK_NE(output_record.egl_image, EGL_NO_IMAGE_KHR); | 2213 DCHECK_NE(output_record.egl_image, EGL_NO_IMAGE_KHR); |
2220 DCHECK_NE(output_record.picture_id, -1); | 2214 DCHECK_NE(output_record.picture_id, -1); |
2221 // Send the processed frame to render. | 2215 // Send the processed frame to render. |
2222 output_record.state = kAtClient; | 2216 output_record.state = kAtClient; |
2223 decoder_frames_at_client_++; | 2217 decoder_frames_at_client_++; |
2224 image_processor_bitstream_buffer_ids_.pop(); | 2218 image_processor_bitstream_buffer_ids_.pop(); |
2225 const media::Picture picture(output_record.picture_id, bitstream_buffer_id, | 2219 const Picture picture(output_record.picture_id, bitstream_buffer_id, |
2226 gfx::Rect(visible_size_), false); | 2220 gfx::Rect(visible_size_), false); |
2227 pending_picture_ready_.push(PictureRecord(output_record.cleared, picture)); | 2221 pending_picture_ready_.push(PictureRecord(output_record.cleared, picture)); |
2228 SendPictureReady(); | 2222 SendPictureReady(); |
2229 output_record.cleared = true; | 2223 output_record.cleared = true; |
2230 // Flush or resolution change may be waiting image processor to finish. | 2224 // Flush or resolution change may be waiting image processor to finish. |
2231 if (image_processor_bitstream_buffer_ids_.empty()) { | 2225 if (image_processor_bitstream_buffer_ids_.empty()) { |
2232 NotifyFlushDoneIfNeeded(); | 2226 NotifyFlushDoneIfNeeded(); |
2233 if (decoder_state_ == kChangingResolution) | 2227 if (decoder_state_ == kChangingResolution) |
2234 StartResolutionChange(); | 2228 StartResolutionChange(); |
2235 } | 2229 } |
2236 } else { | 2230 } else { |
2237 DVLOG(2) << "Bitstream buffer id " << bitstream_buffer_id << " not found " | 2231 DVLOG(2) << "Bitstream buffer id " << bitstream_buffer_id << " not found " |
2238 << "because of Reset. Drop the buffer"; | 2232 << "because of Reset. Drop the buffer"; |
2239 output_record.state = kFree; | 2233 output_record.state = kFree; |
2240 free_output_buffers_.push(output_buffer_index); | 2234 free_output_buffers_.push(output_buffer_index); |
2241 Enqueue(); | 2235 Enqueue(); |
2242 } | 2236 } |
2243 } | 2237 } |
2244 | 2238 |
2245 void V4L2VideoDecodeAccelerator::ImageProcessorError() { | 2239 void V4L2VideoDecodeAccelerator::ImageProcessorError() { |
2246 LOG(ERROR) << "Image processor error"; | 2240 LOG(ERROR) << "Image processor error"; |
2247 NOTIFY_ERROR(PLATFORM_FAILURE); | 2241 NOTIFY_ERROR(PLATFORM_FAILURE); |
2248 } | 2242 } |
2249 | 2243 |
2250 } // namespace media | 2244 } // namespace media |
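
Reviewer note (not part of the change above): the frame-boundary check in AdvanceFrameFragment() relies on ue(v) Exp-Golomb encoding zero as a single '1' bit, so first_mb_in_slice == 0 can be detected by testing the most significant bit of the byte that follows the one-byte NAL header — the nalu.data[1] >= 0x80 check in the diff. A minimal standalone sketch of that test follows; the helper name and sample bytes are hypothetical, chosen only for illustration.

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    // Hypothetical helper mirroring the nalu.data[1] >= 0x80 check above.
    // Returns true when the slice NAL's first_mb_in_slice field is zero,
    // i.e. the slice begins a new frame.
    bool SliceStartsNewFrame(const uint8_t* nal_data, std::size_t nal_size) {
      if (nal_size < 2)
        return false;  // Need the NAL header byte plus one RBSP byte.
      // Byte 0 is the NAL header; the MSB of byte 1 is the first RBSP bit.
      // ue(v) encodes 0 as the single bit '1', so MSB set => value 0.
      return nal_data[1] >= 0x80;
    }

    int main() {
      // Illustrative bytes: 0x65 = IDR slice NAL header; 0x88 has its MSB
      // set, so first_mb_in_slice == 0 (start of a new frame).
      const uint8_t new_frame[] = {0x65, 0x88, 0x84, 0x00};
      // 0x41 = non-IDR slice NAL header; 0x3A has its MSB clear, so
      // first_mb_in_slice > 0 (continuation of the current frame).
      const uint8_t continuation[] = {0x41, 0x3A, 0x10, 0x00};
      std::printf("%d %d\n",
                  SliceStartsNewFrame(new_frame, sizeof(new_frame)),
                  SliceStartsNewFrame(continuation, sizeof(continuation)));
      // Prints: 1 0
      return 0;
    }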