OLD | NEW |
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 // | 4 // |
5 // The bulk of this file is support code; sorry about that. Here's an overview | 5 // The bulk of this file is support code; sorry about that. Here's an overview |
6 // to hopefully help readers of this code: | 6 // to hopefully help readers of this code: |
7 // - RenderingHelper is charged with interacting with X11/{EGL/GLES2,GLX/GL} or | 7 // - RenderingHelper is charged with interacting with X11/{EGL/GLES2,GLX/GL} or |
8 // Win/EGL. | 8 // Win/EGL. |
9 // - ClientState is an enum for the state of the decode client used by the test. | 9 // - ClientState is an enum for the state of the decode client used by the test. |
10 // - ClientStateNotification is a barrier abstraction that allows the test code | 10 // - ClientStateNotification is a barrier abstraction that allows the test code |
(...skipping 67 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
78 #error The VideoAccelerator tests are not supported on this platform. | 78 #error The VideoAccelerator tests are not supported on this platform. |
79 #endif // OS_WIN | 79 #endif // OS_WIN |
80 | 80 |
81 #if defined(USE_OZONE) | 81 #if defined(USE_OZONE) |
82 #include "ui/ozone/public/native_pixmap.h" | 82 #include "ui/ozone/public/native_pixmap.h" |
83 #include "ui/ozone/public/ozone_gpu_test_helper.h" | 83 #include "ui/ozone/public/ozone_gpu_test_helper.h" |
84 #include "ui/ozone/public/ozone_platform.h" | 84 #include "ui/ozone/public/ozone_platform.h" |
85 #include "ui/ozone/public/surface_factory_ozone.h" | 85 #include "ui/ozone/public/surface_factory_ozone.h" |
86 #endif // defined(USE_OZONE) | 86 #endif // defined(USE_OZONE) |
87 | 87 |
88 using media::VideoDecodeAccelerator; | 88 namespace media { |
89 | 89 |
90 namespace media { | |
91 namespace { | 90 namespace { |
92 | 91 |
93 // Values optionally filled in from flags; see main() below. | 92 // Values optionally filled in from flags; see main() below. |
94 // The syntax of multiple test videos is: | 93 // The syntax of multiple test videos is: |
95 // test-video1;test-video2;test-video3 | 94 // test-video1;test-video2;test-video3 |
96 // where only the first video is required and other optional videos would be | 95 // where only the first video is required and other optional videos would be |
97 // decoded by concurrent decoders. | 96 // decoded by concurrent decoders. |
98 // The syntax of each test-video is: | 97 // The syntax of each test-video is: |
99 // filename:width:height:numframes:numfragments:minFPSwithRender:minFPSnoRender | 98 // filename:width:height:numframes:numfragments:minFPSwithRender:minFPSnoRender |
100 // where only the first field is required. Value details: | 99 // where only the first field is required. Value details: |
101 // - |filename| must be an h264 Annex B (NAL) stream or an IVF VP8/9 stream. | 100 // - |filename| must be an h264 Annex B (NAL) stream or an IVF VP8/9 stream. |
102 // - |width| and |height| are in pixels. | 101 // - |width| and |height| are in pixels. |
103 // - |numframes| is the number of picture frames in the file. | 102 // - |numframes| is the number of picture frames in the file. |
104 // - |numfragments| NALU (h264) or frame (VP8/9) count in the stream. | 103 // - |numfragments| NALU (h264) or frame (VP8/9) count in the stream. |
105 // - |minFPSwithRender| and |minFPSnoRender| are minimum frames/second speeds | 104 // - |minFPSwithRender| and |minFPSnoRender| are minimum frames/second speeds |
106 // expected to be achieved with and without rendering to the screen, resp. | 105 // expected to be achieved with and without rendering to the screen, resp. |
107 // (the latter tests just decode speed). | 106 // (the latter tests just decode speed). |
108 // - |profile| is the media::VideoCodecProfile set during Initialization. | 107 // - |profile| is the VideoCodecProfile set during Initialization. |
109 // An empty value for a numeric field means "ignore". | 108 // An empty value for a numeric field means "ignore". |
110 const base::FilePath::CharType* g_test_video_data = | 109 const base::FilePath::CharType* g_test_video_data = |
111 // FILE_PATH_LITERAL("test-25fps.vp8:320:240:250:250:50:175:11"); | 110 // FILE_PATH_LITERAL("test-25fps.vp8:320:240:250:250:50:175:11"); |
112 FILE_PATH_LITERAL("test-25fps.h264:320:240:250:258:50:175:1"); | 111 FILE_PATH_LITERAL("test-25fps.h264:320:240:250:258:50:175:1"); |
113 | 112 |
114 // The file path of the test output log. This is used to communicate the test | 113 // The file path of the test output log. This is used to communicate the test |
115 // results to CrOS autotests. We can enable the log and specify the filename by | 114 // results to CrOS autotests. We can enable the log and specify the filename by |
116 // the "--output_log" switch. | 115 // the "--output_log" switch. |
117 const base::FilePath::CharType* g_output_log = NULL; | 116 const base::FilePath::CharType* g_output_log = NULL; |
118 | 117 |
(...skipping 40 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
159 | 158 |
160 struct TestVideoFile { | 159 struct TestVideoFile { |
161 explicit TestVideoFile(base::FilePath::StringType file_name) | 160 explicit TestVideoFile(base::FilePath::StringType file_name) |
162 : file_name(file_name), | 161 : file_name(file_name), |
163 width(-1), | 162 width(-1), |
164 height(-1), | 163 height(-1), |
165 num_frames(-1), | 164 num_frames(-1), |
166 num_fragments(-1), | 165 num_fragments(-1), |
167 min_fps_render(-1), | 166 min_fps_render(-1), |
168 min_fps_no_render(-1), | 167 min_fps_no_render(-1), |
169 profile(media::VIDEO_CODEC_PROFILE_UNKNOWN), | 168 profile(VIDEO_CODEC_PROFILE_UNKNOWN), |
170 reset_after_frame_num(END_OF_STREAM_RESET) {} | 169 reset_after_frame_num(END_OF_STREAM_RESET) {} |
171 | 170 |
172 base::FilePath::StringType file_name; | 171 base::FilePath::StringType file_name; |
173 int width; | 172 int width; |
174 int height; | 173 int height; |
175 int num_frames; | 174 int num_frames; |
176 int num_fragments; | 175 int num_fragments; |
177 int min_fps_render; | 176 int min_fps_render; |
178 int min_fps_no_render; | 177 int min_fps_no_render; |
179 media::VideoCodecProfile profile; | 178 VideoCodecProfile profile; |
180 int reset_after_frame_num; | 179 int reset_after_frame_num; |
181 std::string data_str; | 180 std::string data_str; |
182 }; | 181 }; |
183 | 182 |
184 const gfx::Size kThumbnailsPageSize(1600, 1200); | 183 const gfx::Size kThumbnailsPageSize(1600, 1200); |
185 const gfx::Size kThumbnailSize(160, 120); | 184 const gfx::Size kThumbnailSize(160, 120); |
186 const int kMD5StringLength = 32; | 185 const int kMD5StringLength = 32; |
187 | 186 |
188 // Read in golden MD5s for the thumbnailed rendering of this video | 187 // Read in golden MD5s for the thumbnailed rendering of this video |
189 void ReadGoldenThumbnailMD5s(const TestVideoFile* video_file, | 188 void ReadGoldenThumbnailMD5s(const TestVideoFile* video_file, |
(...skipping 91 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
281 // either a buffer allocated by the VDA, or by a preallocated pixmap. | 280 // either a buffer allocated by the VDA, or by a preallocated pixmap. |
282 class TextureRef : public base::RefCounted<TextureRef> { | 281 class TextureRef : public base::RefCounted<TextureRef> { |
283 public: | 282 public: |
284 static scoped_refptr<TextureRef> Create( | 283 static scoped_refptr<TextureRef> Create( |
285 uint32_t texture_id, | 284 uint32_t texture_id, |
286 const base::Closure& no_longer_needed_cb); | 285 const base::Closure& no_longer_needed_cb); |
287 | 286 |
288 static scoped_refptr<TextureRef> CreatePreallocated( | 287 static scoped_refptr<TextureRef> CreatePreallocated( |
289 uint32_t texture_id, | 288 uint32_t texture_id, |
290 const base::Closure& no_longer_needed_cb, | 289 const base::Closure& no_longer_needed_cb, |
291 media::VideoPixelFormat pixel_format, | 290 VideoPixelFormat pixel_format, |
292 const gfx::Size& size); | 291 const gfx::Size& size); |
293 | 292 |
294 gfx::GpuMemoryBufferHandle ExportGpuMemoryBufferHandle() const; | 293 gfx::GpuMemoryBufferHandle ExportGpuMemoryBufferHandle() const; |
295 | 294 |
296 int32_t texture_id() const { return texture_id_; } | 295 int32_t texture_id() const { return texture_id_; } |
297 | 296 |
298 private: | 297 private: |
299 friend class base::RefCounted<TextureRef>; | 298 friend class base::RefCounted<TextureRef>; |
300 | 299 |
301 TextureRef(uint32_t texture_id, const base::Closure& no_longer_needed_cb) | 300 TextureRef(uint32_t texture_id, const base::Closure& no_longer_needed_cb) |
(...skipping 14 matching lines...) Expand all Loading... |
316 | 315 |
317 // static | 316 // static |
318 scoped_refptr<TextureRef> TextureRef::Create( | 317 scoped_refptr<TextureRef> TextureRef::Create( |
319 uint32_t texture_id, | 318 uint32_t texture_id, |
320 const base::Closure& no_longer_needed_cb) { | 319 const base::Closure& no_longer_needed_cb) { |
321 return make_scoped_refptr(new TextureRef(texture_id, no_longer_needed_cb)); | 320 return make_scoped_refptr(new TextureRef(texture_id, no_longer_needed_cb)); |
322 } | 321 } |
323 | 322 |
324 #if defined(USE_OZONE) | 323 #if defined(USE_OZONE) |
325 gfx::BufferFormat VideoPixelFormatToGfxBufferFormat( | 324 gfx::BufferFormat VideoPixelFormatToGfxBufferFormat( |
326 media::VideoPixelFormat pixel_format) { | 325 VideoPixelFormat pixel_format) { |
327 switch (pixel_format) { | 326 switch (pixel_format) { |
328 case media::VideoPixelFormat::PIXEL_FORMAT_ARGB: | 327 case VideoPixelFormat::PIXEL_FORMAT_ARGB: |
329 return gfx::BufferFormat::BGRA_8888; | 328 return gfx::BufferFormat::BGRA_8888; |
330 case media::VideoPixelFormat::PIXEL_FORMAT_XRGB: | 329 case VideoPixelFormat::PIXEL_FORMAT_XRGB: |
331 return gfx::BufferFormat::BGRX_8888; | 330 return gfx::BufferFormat::BGRX_8888; |
332 case media::VideoPixelFormat::PIXEL_FORMAT_NV12: | 331 case VideoPixelFormat::PIXEL_FORMAT_NV12: |
333 return gfx::BufferFormat::YUV_420_BIPLANAR; | 332 return gfx::BufferFormat::YUV_420_BIPLANAR; |
334 default: | 333 default: |
335 LOG_ASSERT(false) << "Unknown VideoPixelFormat"; | 334 LOG_ASSERT(false) << "Unknown VideoPixelFormat"; |
336 return gfx::BufferFormat::BGRX_8888; | 335 return gfx::BufferFormat::BGRX_8888; |
337 } | 336 } |
338 } | 337 } |
339 #endif | 338 #endif |
340 | 339 |
341 // static | 340 // static |
342 scoped_refptr<TextureRef> TextureRef::CreatePreallocated( | 341 scoped_refptr<TextureRef> TextureRef::CreatePreallocated( |
343 uint32_t texture_id, | 342 uint32_t texture_id, |
344 const base::Closure& no_longer_needed_cb, | 343 const base::Closure& no_longer_needed_cb, |
345 media::VideoPixelFormat pixel_format, | 344 VideoPixelFormat pixel_format, |
346 const gfx::Size& size) { | 345 const gfx::Size& size) { |
347 scoped_refptr<TextureRef> texture_ref; | 346 scoped_refptr<TextureRef> texture_ref; |
348 #if defined(USE_OZONE) | 347 #if defined(USE_OZONE) |
349 texture_ref = TextureRef::Create(texture_id, no_longer_needed_cb); | 348 texture_ref = TextureRef::Create(texture_id, no_longer_needed_cb); |
350 LOG_ASSERT(texture_ref); | 349 LOG_ASSERT(texture_ref); |
351 | 350 |
352 ui::OzonePlatform* platform = ui::OzonePlatform::GetInstance(); | 351 ui::OzonePlatform* platform = ui::OzonePlatform::GetInstance(); |
353 ui::SurfaceFactoryOzone* factory = platform->GetSurfaceFactoryOzone(); | 352 ui::SurfaceFactoryOzone* factory = platform->GetSurfaceFactoryOzone(); |
354 gfx::BufferFormat buffer_format = | 353 gfx::BufferFormat buffer_format = |
355 VideoPixelFormatToGfxBufferFormat(pixel_format); | 354 VideoPixelFormatToGfxBufferFormat(pixel_format); |
(...skipping 48 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
404 GLRenderingVDAClient(size_t window_id, | 403 GLRenderingVDAClient(size_t window_id, |
405 RenderingHelper* rendering_helper, | 404 RenderingHelper* rendering_helper, |
406 ClientStateNotification<ClientState>* note, | 405 ClientStateNotification<ClientState>* note, |
407 const std::string& encoded_data, | 406 const std::string& encoded_data, |
408 int num_in_flight_decodes, | 407 int num_in_flight_decodes, |
409 int num_play_throughs, | 408 int num_play_throughs, |
410 int reset_after_frame_num, | 409 int reset_after_frame_num, |
411 int delete_decoder_state, | 410 int delete_decoder_state, |
412 int frame_width, | 411 int frame_width, |
413 int frame_height, | 412 int frame_height, |
414 media::VideoCodecProfile profile, | 413 VideoCodecProfile profile, |
415 int fake_decoder, | 414 int fake_decoder, |
416 bool suppress_rendering, | 415 bool suppress_rendering, |
417 int delay_reuse_after_frame_num, | 416 int delay_reuse_after_frame_num, |
418 int decode_calls_per_second, | 417 int decode_calls_per_second, |
419 bool render_as_thumbnails); | 418 bool render_as_thumbnails); |
420 ~GLRenderingVDAClient() override; | 419 ~GLRenderingVDAClient() override; |
421 void CreateAndStartDecoder(); | 420 void CreateAndStartDecoder(); |
422 | 421 |
423 // VideoDecodeAccelerator::Client implementation. | 422 // VideoDecodeAccelerator::Client implementation. |
424 // The heart of the Client. | 423 // The heart of the Client. |
425 void ProvidePictureBuffers(uint32_t requested_num_of_buffers, | 424 void ProvidePictureBuffers(uint32_t requested_num_of_buffers, |
426 VideoPixelFormat format, | 425 VideoPixelFormat format, |
427 uint32_t textures_per_buffer, | 426 uint32_t textures_per_buffer, |
428 const gfx::Size& dimensions, | 427 const gfx::Size& dimensions, |
429 uint32_t texture_target) override; | 428 uint32_t texture_target) override; |
430 void DismissPictureBuffer(int32_t picture_buffer_id) override; | 429 void DismissPictureBuffer(int32_t picture_buffer_id) override; |
431 void PictureReady(const media::Picture& picture) override; | 430 void PictureReady(const Picture& picture) override; |
432 // Simple state changes. | 431 // Simple state changes. |
433 void NotifyEndOfBitstreamBuffer(int32_t bitstream_buffer_id) override; | 432 void NotifyEndOfBitstreamBuffer(int32_t bitstream_buffer_id) override; |
434 void NotifyFlushDone() override; | 433 void NotifyFlushDone() override; |
435 void NotifyResetDone() override; | 434 void NotifyResetDone() override; |
436 void NotifyError(VideoDecodeAccelerator::Error error) override; | 435 void NotifyError(VideoDecodeAccelerator::Error error) override; |
437 | 436 |
438 void OutputFrameDeliveryTimes(base::File* output); | 437 void OutputFrameDeliveryTimes(base::File* output); |
439 | 438 |
440 // Simple getters for inspecting the state of the Client. | 439 // Simple getters for inspecting the state of the Client. |
441 int num_done_bitstream_buffers() { return num_done_bitstream_buffers_; } | 440 int num_done_bitstream_buffers() { return num_done_bitstream_buffers_; } |
(...skipping 46 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
488 std::unique_ptr<GpuVideoDecodeAcceleratorFactoryImpl> vda_factory_; | 487 std::unique_ptr<GpuVideoDecodeAcceleratorFactoryImpl> vda_factory_; |
489 int remaining_play_throughs_; | 488 int remaining_play_throughs_; |
490 int reset_after_frame_num_; | 489 int reset_after_frame_num_; |
491 int delete_decoder_state_; | 490 int delete_decoder_state_; |
492 ClientState state_; | 491 ClientState state_; |
493 int num_skipped_fragments_; | 492 int num_skipped_fragments_; |
494 int num_queued_fragments_; | 493 int num_queued_fragments_; |
495 int num_decoded_frames_; | 494 int num_decoded_frames_; |
496 int num_done_bitstream_buffers_; | 495 int num_done_bitstream_buffers_; |
497 base::TimeTicks initialize_done_ticks_; | 496 base::TimeTicks initialize_done_ticks_; |
498 media::VideoCodecProfile profile_; | 497 VideoCodecProfile profile_; |
499 int fake_decoder_; | 498 int fake_decoder_; |
500 GLenum texture_target_; | 499 GLenum texture_target_; |
501 VideoPixelFormat pixel_format_; | 500 VideoPixelFormat pixel_format_; |
502 bool suppress_rendering_; | 501 bool suppress_rendering_; |
503 std::vector<base::TimeTicks> frame_delivery_times_; | 502 std::vector<base::TimeTicks> frame_delivery_times_; |
504 int delay_reuse_after_frame_num_; | 503 int delay_reuse_after_frame_num_; |
505 // A map from bitstream buffer id to the decode start time of the buffer. | 504 // A map from bitstream buffer id to the decode start time of the buffer. |
506 std::map<int, base::TimeTicks> decode_start_time_; | 505 std::map<int, base::TimeTicks> decode_start_time_; |
507 // The decode time of all decoded frames. | 506 // The decode time of all decoded frames. |
508 std::vector<base::TimeDelta> decode_time_; | 507 std::vector<base::TimeDelta> decode_time_; |
(...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
545 size_t window_id, | 544 size_t window_id, |
546 RenderingHelper* rendering_helper, | 545 RenderingHelper* rendering_helper, |
547 ClientStateNotification<ClientState>* note, | 546 ClientStateNotification<ClientState>* note, |
548 const std::string& encoded_data, | 547 const std::string& encoded_data, |
549 int num_in_flight_decodes, | 548 int num_in_flight_decodes, |
550 int num_play_throughs, | 549 int num_play_throughs, |
551 int reset_after_frame_num, | 550 int reset_after_frame_num, |
552 int delete_decoder_state, | 551 int delete_decoder_state, |
553 int frame_width, | 552 int frame_width, |
554 int frame_height, | 553 int frame_height, |
555 media::VideoCodecProfile profile, | 554 VideoCodecProfile profile, |
556 int fake_decoder, | 555 int fake_decoder, |
557 bool suppress_rendering, | 556 bool suppress_rendering, |
558 int delay_reuse_after_frame_num, | 557 int delay_reuse_after_frame_num, |
559 int decode_calls_per_second, | 558 int decode_calls_per_second, |
560 bool render_as_thumbnails) | 559 bool render_as_thumbnails) |
561 : window_id_(window_id), | 560 : window_id_(window_id), |
562 rendering_helper_(rendering_helper), | 561 rendering_helper_(rendering_helper), |
563 frame_size_(frame_width, frame_height), | 562 frame_size_(frame_width, frame_height), |
564 encoded_data_(encoded_data), | 563 encoded_data_(encoded_data), |
565 num_in_flight_decodes_(num_in_flight_decodes), | 564 num_in_flight_decodes_(num_in_flight_decodes), |
(...skipping 18 matching lines...) Expand all Loading... |
584 render_as_thumbnails_(render_as_thumbnails), | 583 render_as_thumbnails_(render_as_thumbnails), |
585 next_picture_buffer_id_(1), | 584 next_picture_buffer_id_(1), |
586 weak_this_factory_(this) { | 585 weak_this_factory_(this) { |
587 LOG_ASSERT(num_in_flight_decodes > 0); | 586 LOG_ASSERT(num_in_flight_decodes > 0); |
588 LOG_ASSERT(num_play_throughs > 0); | 587 LOG_ASSERT(num_play_throughs > 0); |
589 // |num_in_flight_decodes_| is unsupported if |decode_calls_per_second_| > 0. | 588 // |num_in_flight_decodes_| is unsupported if |decode_calls_per_second_| > 0. |
590 if (decode_calls_per_second_ > 0) | 589 if (decode_calls_per_second_ > 0) |
591 LOG_ASSERT(1 == num_in_flight_decodes_); | 590 LOG_ASSERT(1 == num_in_flight_decodes_); |
592 | 591 |
593 // Default to H264 baseline if no profile provided. | 592 // Default to H264 baseline if no profile provided. |
594 profile_ = (profile != media::VIDEO_CODEC_PROFILE_UNKNOWN | 593 profile_ = |
595 ? profile | 594 (profile != VIDEO_CODEC_PROFILE_UNKNOWN ? profile : H264PROFILE_BASELINE); |
596 : media::H264PROFILE_BASELINE); | |
597 | 595 |
598 weak_this_ = weak_this_factory_.GetWeakPtr(); | 596 weak_this_ = weak_this_factory_.GetWeakPtr(); |
599 } | 597 } |
600 | 598 |
601 GLRenderingVDAClient::~GLRenderingVDAClient() { | 599 GLRenderingVDAClient::~GLRenderingVDAClient() { |
602 DeleteDecoder(); // Clean up in case of expected error. | 600 DeleteDecoder(); // Clean up in case of expected error. |
603 LOG_ASSERT(decoder_deleted()); | 601 LOG_ASSERT(decoder_deleted()); |
604 SetState(CS_DESTROYED); | 602 SetState(CS_DESTROYED); |
605 } | 603 } |
606 | 604 |
607 void GLRenderingVDAClient::CreateAndStartDecoder() { | 605 void GLRenderingVDAClient::CreateAndStartDecoder() { |
608 LOG_ASSERT(decoder_deleted()); | 606 LOG_ASSERT(decoder_deleted()); |
609 LOG_ASSERT(!decoder_.get()); | 607 LOG_ASSERT(!decoder_.get()); |
610 | 608 |
611 if (fake_decoder_) { | 609 if (fake_decoder_) { |
612 decoder_.reset(new FakeVideoDecodeAccelerator( | 610 decoder_.reset(new FakeVideoDecodeAccelerator( |
613 frame_size_, base::Bind(&DoNothingReturnTrue))); | 611 frame_size_, base::Bind(&DoNothingReturnTrue))); |
614 LOG_ASSERT(decoder_->Initialize(profile_, this)); | 612 LOG_ASSERT(decoder_->Initialize(profile_, this)); |
615 } else { | 613 } else { |
616 if (!vda_factory_) { | 614 if (!vda_factory_) { |
617 vda_factory_ = GpuVideoDecodeAcceleratorFactoryImpl::Create( | 615 vda_factory_ = GpuVideoDecodeAcceleratorFactoryImpl::Create( |
618 base::Bind(&RenderingHelper::GetGLContext, | 616 base::Bind(&RenderingHelper::GetGLContext, |
619 base::Unretained(rendering_helper_)), | 617 base::Unretained(rendering_helper_)), |
620 base::Bind(&DoNothingReturnTrue), base::Bind(&DummyBindImage)); | 618 base::Bind(&DoNothingReturnTrue), base::Bind(&DummyBindImage)); |
621 LOG_ASSERT(vda_factory_); | 619 LOG_ASSERT(vda_factory_); |
622 } | 620 } |
623 | 621 |
624 VideoDecodeAccelerator::Config config(profile_); | 622 VideoDecodeAccelerator::Config config(profile_); |
625 if (g_test_import) { | 623 if (g_test_import) { |
626 config.output_mode = | 624 config.output_mode = VideoDecodeAccelerator::Config::OutputMode::IMPORT; |
627 media::VideoDecodeAccelerator::Config::OutputMode::IMPORT; | |
628 } | 625 } |
629 gpu::GpuPreferences gpu_preferences; | 626 gpu::GpuPreferences gpu_preferences; |
630 decoder_ = vda_factory_->CreateVDA(this, config, gpu_preferences); | 627 decoder_ = vda_factory_->CreateVDA(this, config, gpu_preferences); |
631 } | 628 } |
632 | 629 |
633 LOG_ASSERT(decoder_) << "Failed creating a VDA"; | 630 LOG_ASSERT(decoder_) << "Failed creating a VDA"; |
634 | 631 |
635 decoder_->TryToSetupDecodeOnSeparateThread( | 632 decoder_->TryToSetupDecodeOnSeparateThread( |
636 weak_this_, base::ThreadTaskRunnerHandle::Get()); | 633 weak_this_, base::ThreadTaskRunnerHandle::Get()); |
637 | 634 |
638 weak_vda_ptr_factory_.reset( | 635 weak_vda_ptr_factory_.reset( |
639 new base::WeakPtrFactory<VideoDecodeAccelerator>(decoder_.get())); | 636 new base::WeakPtrFactory<VideoDecodeAccelerator>(decoder_.get())); |
640 weak_vda_ = weak_vda_ptr_factory_->GetWeakPtr(); | 637 weak_vda_ = weak_vda_ptr_factory_->GetWeakPtr(); |
641 | 638 |
642 SetState(CS_DECODER_SET); | 639 SetState(CS_DECODER_SET); |
643 FinishInitialization(); | 640 FinishInitialization(); |
644 } | 641 } |
645 | 642 |
646 void GLRenderingVDAClient::ProvidePictureBuffers( | 643 void GLRenderingVDAClient::ProvidePictureBuffers( |
647 uint32_t requested_num_of_buffers, | 644 uint32_t requested_num_of_buffers, |
648 VideoPixelFormat pixel_format, | 645 VideoPixelFormat pixel_format, |
649 uint32_t textures_per_buffer, | 646 uint32_t textures_per_buffer, |
650 const gfx::Size& dimensions, | 647 const gfx::Size& dimensions, |
651 uint32_t texture_target) { | 648 uint32_t texture_target) { |
652 if (decoder_deleted()) | 649 if (decoder_deleted()) |
653 return; | 650 return; |
654 LOG_ASSERT(textures_per_buffer == 1u); | 651 LOG_ASSERT(textures_per_buffer == 1u); |
655 std::vector<media::PictureBuffer> buffers; | 652 std::vector<PictureBuffer> buffers; |
656 | 653 |
657 requested_num_of_buffers += kExtraPictureBuffers; | 654 requested_num_of_buffers += kExtraPictureBuffers; |
658 if (pixel_format == media::PIXEL_FORMAT_UNKNOWN) | 655 if (pixel_format == PIXEL_FORMAT_UNKNOWN) |
659 pixel_format = media::PIXEL_FORMAT_ARGB; | 656 pixel_format = PIXEL_FORMAT_ARGB; |
660 | 657 |
661 LOG_ASSERT((pixel_format_ == PIXEL_FORMAT_UNKNOWN) || | 658 LOG_ASSERT((pixel_format_ == PIXEL_FORMAT_UNKNOWN) || |
662 (pixel_format_ == pixel_format)); | 659 (pixel_format_ == pixel_format)); |
663 pixel_format_ = pixel_format; | 660 pixel_format_ = pixel_format; |
664 | 661 |
665 texture_target_ = texture_target; | 662 texture_target_ = texture_target; |
666 for (uint32_t i = 0; i < requested_num_of_buffers; ++i) { | 663 for (uint32_t i = 0; i < requested_num_of_buffers; ++i) { |
667 uint32_t texture_id; | 664 uint32_t texture_id; |
668 base::WaitableEvent done(base::WaitableEvent::ResetPolicy::AUTOMATIC, | 665 base::WaitableEvent done(base::WaitableEvent::ResetPolicy::AUTOMATIC, |
669 base::WaitableEvent::InitialState::NOT_SIGNALED); | 666 base::WaitableEvent::InitialState::NOT_SIGNALED); |
(...skipping 13 matching lines...) Expand all Loading... |
683 texture_ref = TextureRef::Create(texture_id, delete_texture_cb); | 680 texture_ref = TextureRef::Create(texture_id, delete_texture_cb); |
684 } | 681 } |
685 | 682 |
686 LOG_ASSERT(texture_ref); | 683 LOG_ASSERT(texture_ref); |
687 | 684 |
688 int32_t picture_buffer_id = next_picture_buffer_id_++; | 685 int32_t picture_buffer_id = next_picture_buffer_id_++; |
689 LOG_ASSERT( | 686 LOG_ASSERT( |
690 active_textures_.insert(std::make_pair(picture_buffer_id, texture_ref)) | 687 active_textures_.insert(std::make_pair(picture_buffer_id, texture_ref)) |
691 .second); | 688 .second); |
692 | 689 |
693 media::PictureBuffer::TextureIds ids; | 690 PictureBuffer::TextureIds ids; |
694 ids.push_back(texture_id); | 691 ids.push_back(texture_id); |
695 buffers.push_back(media::PictureBuffer(picture_buffer_id, dimensions, ids)); | 692 buffers.push_back(PictureBuffer(picture_buffer_id, dimensions, ids)); |
696 } | 693 } |
697 decoder_->AssignPictureBuffers(buffers); | 694 decoder_->AssignPictureBuffers(buffers); |
698 | 695 |
699 if (g_test_import) { | 696 if (g_test_import) { |
700 for (const auto& buffer : buffers) { | 697 for (const auto& buffer : buffers) { |
701 TextureRefMap::iterator texture_it = active_textures_.find(buffer.id()); | 698 TextureRefMap::iterator texture_it = active_textures_.find(buffer.id()); |
702 ASSERT_NE(active_textures_.end(), texture_it); | 699 ASSERT_NE(active_textures_.end(), texture_it); |
703 | 700 |
704 const gfx::GpuMemoryBufferHandle& handle = | 701 const gfx::GpuMemoryBufferHandle& handle = |
705 texture_it->second->ExportGpuMemoryBufferHandle(); | 702 texture_it->second->ExportGpuMemoryBufferHandle(); |
706 LOG_ASSERT(!handle.is_null()) << "Failed producing GMB handle"; | 703 LOG_ASSERT(!handle.is_null()) << "Failed producing GMB handle"; |
707 decoder_->ImportBufferForPicture(buffer.id(), handle); | 704 decoder_->ImportBufferForPicture(buffer.id(), handle); |
708 } | 705 } |
709 } | 706 } |
710 } | 707 } |
711 | 708 |
712 void GLRenderingVDAClient::DismissPictureBuffer(int32_t picture_buffer_id) { | 709 void GLRenderingVDAClient::DismissPictureBuffer(int32_t picture_buffer_id) { |
713 LOG_ASSERT(1U == active_textures_.erase(picture_buffer_id)); | 710 LOG_ASSERT(1U == active_textures_.erase(picture_buffer_id)); |
714 } | 711 } |
715 | 712 |
716 void GLRenderingVDAClient::PictureReady(const media::Picture& picture) { | 713 void GLRenderingVDAClient::PictureReady(const Picture& picture) { |
717 // We shouldn't be getting pictures delivered after Reset has completed. | 714 // We shouldn't be getting pictures delivered after Reset has completed. |
718 LOG_ASSERT(state_ < CS_RESET); | 715 LOG_ASSERT(state_ < CS_RESET); |
719 | 716 |
720 if (decoder_deleted()) | 717 if (decoder_deleted()) |
721 return; | 718 return; |
722 | 719 |
723 base::TimeTicks now = base::TimeTicks::Now(); | 720 base::TimeTicks now = base::TimeTicks::Now(); |
724 | 721 |
725 frame_delivery_times_.push_back(now); | 722 frame_delivery_times_.push_back(now); |
726 | 723 |
(...skipping 176 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
903 STLClearObject(&encoded_data_); | 900 STLClearObject(&encoded_data_); |
904 active_textures_.clear(); | 901 active_textures_.clear(); |
905 | 902 |
906 // Cascade through the rest of the states to simplify test code below. | 903 // Cascade through the rest of the states to simplify test code below. |
907 for (int i = state_ + 1; i < CS_MAX; ++i) | 904 for (int i = state_ + 1; i < CS_MAX; ++i) |
908 SetState(static_cast<ClientState>(i)); | 905 SetState(static_cast<ClientState>(i)); |
909 } | 906 } |
910 | 907 |
911 std::string GLRenderingVDAClient::GetBytesForFirstFragment(size_t start_pos, | 908 std::string GLRenderingVDAClient::GetBytesForFirstFragment(size_t start_pos, |
912 size_t* end_pos) { | 909 size_t* end_pos) { |
913 if (profile_ < media::H264PROFILE_MAX) { | 910 if (profile_ < H264PROFILE_MAX) { |
914 *end_pos = start_pos; | 911 *end_pos = start_pos; |
915 while (*end_pos + 4 < encoded_data_.size()) { | 912 while (*end_pos + 4 < encoded_data_.size()) { |
916 if ((encoded_data_[*end_pos + 4] & 0x1f) == 0x7) // SPS start frame | 913 if ((encoded_data_[*end_pos + 4] & 0x1f) == 0x7) // SPS start frame |
917 return GetBytesForNextFragment(*end_pos, end_pos); | 914 return GetBytesForNextFragment(*end_pos, end_pos); |
918 GetBytesForNextNALU(*end_pos, end_pos); | 915 GetBytesForNextNALU(*end_pos, end_pos); |
919 num_skipped_fragments_++; | 916 num_skipped_fragments_++; |
920 } | 917 } |
921 *end_pos = start_pos; | 918 *end_pos = start_pos; |
922 return std::string(); | 919 return std::string(); |
923 } | 920 } |
924 DCHECK_LE(profile_, media::VP9PROFILE_MAX); | 921 DCHECK_LE(profile_, VP9PROFILE_MAX); |
925 return GetBytesForNextFragment(start_pos, end_pos); | 922 return GetBytesForNextFragment(start_pos, end_pos); |
926 } | 923 } |
927 | 924 |
928 std::string GLRenderingVDAClient::GetBytesForNextFragment(size_t start_pos, | 925 std::string GLRenderingVDAClient::GetBytesForNextFragment(size_t start_pos, |
929 size_t* end_pos) { | 926 size_t* end_pos) { |
930 if (profile_ < media::H264PROFILE_MAX) { | 927 if (profile_ < H264PROFILE_MAX) { |
931 *end_pos = start_pos; | 928 *end_pos = start_pos; |
932 GetBytesForNextNALU(*end_pos, end_pos); | 929 GetBytesForNextNALU(*end_pos, end_pos); |
933 if (start_pos != *end_pos) { | 930 if (start_pos != *end_pos) { |
934 num_queued_fragments_++; | 931 num_queued_fragments_++; |
935 } | 932 } |
936 return encoded_data_.substr(start_pos, *end_pos - start_pos); | 933 return encoded_data_.substr(start_pos, *end_pos - start_pos); |
937 } | 934 } |
938 DCHECK_LE(profile_, media::VP9PROFILE_MAX); | 935 DCHECK_LE(profile_, VP9PROFILE_MAX); |
939 return GetBytesForNextFrame(start_pos, end_pos); | 936 return GetBytesForNextFrame(start_pos, end_pos); |
940 } | 937 } |
941 | 938 |
942 void GLRenderingVDAClient::GetBytesForNextNALU(size_t start_pos, | 939 void GLRenderingVDAClient::GetBytesForNextNALU(size_t start_pos, |
943 size_t* end_pos) { | 940 size_t* end_pos) { |
944 *end_pos = start_pos; | 941 *end_pos = start_pos; |
945 if (*end_pos + 4 > encoded_data_.size()) | 942 if (*end_pos + 4 > encoded_data_.size()) |
946 return; | 943 return; |
947 LOG_ASSERT(LookingAtNAL(encoded_data_, start_pos)); | 944 LOG_ASSERT(LookingAtNAL(encoded_data_, start_pos)); |
948 *end_pos += 4; | 945 *end_pos += 4; |
(...skipping 15 matching lines...) Expand all Loading... |
964 uint32_t frame_size = *reinterpret_cast<uint32_t*>(&encoded_data_[*end_pos]); | 961 uint32_t frame_size = *reinterpret_cast<uint32_t*>(&encoded_data_[*end_pos]); |
965 *end_pos += 12; // Skip frame header. | 962 *end_pos += 12; // Skip frame header. |
966 bytes.append(encoded_data_.substr(*end_pos, frame_size)); | 963 bytes.append(encoded_data_.substr(*end_pos, frame_size)); |
967 *end_pos += frame_size; | 964 *end_pos += frame_size; |
968 num_queued_fragments_++; | 965 num_queued_fragments_++; |
969 return bytes; | 966 return bytes; |
970 } | 967 } |
971 | 968 |
// Returns true iff the fragment |data| (of |size| bytes) carries stream
// configuration info for |profile|: for H.264 that means the first NAL unit
// is an SPS; for VP8/VP9 it means the fragment is a keyframe (low bit of the
// first byte clear). Fragments of any other profile are a fatal error.
static bool FragmentHasConfigInfo(const uint8_t* data,
                                  size_t size,
                                  VideoCodecProfile profile) {
  if (profile >= H264PROFILE_MIN && profile <= H264PROFILE_MAX) {
    H264Parser parser;
    parser.SetStream(data, size);
    H264NALU nalu;
    H264Parser::Result result = parser.AdvanceToNextNALU(&nalu);
    if (result != H264Parser::kOk) {
      // Let the VDA figure out there's something wrong with the stream.
      return false;
    }

    return nalu.nal_unit_type == H264NALU::kSPS;
  } else if (profile >= VP8PROFILE_MIN && profile <= VP9PROFILE_MAX) {
    // VP8/VP9 frame header: bit 0 of the first byte is 0 for a keyframe.
    return (size > 0 && !(data[0] & 0x01));
  }
  // Shouldn't happen at this point.
  LOG(FATAL) << "Invalid profile: " << profile;
  return false;
}
994 | 990 |
995 void GLRenderingVDAClient::DecodeNextFragment() { | 991 void GLRenderingVDAClient::DecodeNextFragment() { |
996 if (decoder_deleted()) | 992 if (decoder_deleted()) |
997 return; | 993 return; |
(...skipping 23 matching lines...) Expand all Loading... |
1021 | 1017 |
1022 // Populate the shared memory buffer w/ the fragment, duplicate its handle, | 1018 // Populate the shared memory buffer w/ the fragment, duplicate its handle, |
1023 // and hand it off to the decoder. | 1019 // and hand it off to the decoder. |
1024 base::SharedMemory shm; | 1020 base::SharedMemory shm; |
1025 LOG_ASSERT(shm.CreateAndMapAnonymous(next_fragment_size)); | 1021 LOG_ASSERT(shm.CreateAndMapAnonymous(next_fragment_size)); |
1026 memcpy(shm.memory(), next_fragment_bytes.data(), next_fragment_size); | 1022 memcpy(shm.memory(), next_fragment_bytes.data(), next_fragment_size); |
1027 base::SharedMemoryHandle dup_handle; | 1023 base::SharedMemoryHandle dup_handle; |
1028 bool result = | 1024 bool result = |
1029 shm.ShareToProcess(base::GetCurrentProcessHandle(), &dup_handle); | 1025 shm.ShareToProcess(base::GetCurrentProcessHandle(), &dup_handle); |
1030 LOG_ASSERT(result); | 1026 LOG_ASSERT(result); |
1031 media::BitstreamBuffer bitstream_buffer(next_bitstream_buffer_id_, dup_handle, | 1027 BitstreamBuffer bitstream_buffer(next_bitstream_buffer_id_, dup_handle, |
1032 next_fragment_size); | 1028 next_fragment_size); |
1033 decode_start_time_[next_bitstream_buffer_id_] = base::TimeTicks::Now(); | 1029 decode_start_time_[next_bitstream_buffer_id_] = base::TimeTicks::Now(); |
1034 // Mask against 30 bits, to avoid (undefined) wraparound on signed integer. | 1030 // Mask against 30 bits, to avoid (undefined) wraparound on signed integer. |
1035 next_bitstream_buffer_id_ = (next_bitstream_buffer_id_ + 1) & 0x3FFFFFFF; | 1031 next_bitstream_buffer_id_ = (next_bitstream_buffer_id_ + 1) & 0x3FFFFFFF; |
1036 decoder_->Decode(bitstream_buffer); | 1032 decoder_->Decode(bitstream_buffer); |
1037 ++outstanding_decodes_; | 1033 ++outstanding_decodes_; |
1038 if (!remaining_play_throughs_ && | 1034 if (!remaining_play_throughs_ && |
1039 -delete_decoder_state_ == next_bitstream_buffer_id_) { | 1035 -delete_decoder_state_ == next_bitstream_buffer_id_) { |
1040 DeleteDecoder(); | 1036 DeleteDecoder(); |
1041 } | 1037 } |
1042 | 1038 |
(...skipping 110 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1153 LOG_ASSERT(base::StringToInt(fields[3], &video_file->num_frames)); | 1149 LOG_ASSERT(base::StringToInt(fields[3], &video_file->num_frames)); |
1154 if (!fields[4].empty()) | 1150 if (!fields[4].empty()) |
1155 LOG_ASSERT(base::StringToInt(fields[4], &video_file->num_fragments)); | 1151 LOG_ASSERT(base::StringToInt(fields[4], &video_file->num_fragments)); |
1156 if (!fields[5].empty()) | 1152 if (!fields[5].empty()) |
1157 LOG_ASSERT(base::StringToInt(fields[5], &video_file->min_fps_render)); | 1153 LOG_ASSERT(base::StringToInt(fields[5], &video_file->min_fps_render)); |
1158 if (!fields[6].empty()) | 1154 if (!fields[6].empty()) |
1159 LOG_ASSERT(base::StringToInt(fields[6], &video_file->min_fps_no_render)); | 1155 LOG_ASSERT(base::StringToInt(fields[6], &video_file->min_fps_no_render)); |
1160 int profile = -1; | 1156 int profile = -1; |
1161 if (!fields[7].empty()) | 1157 if (!fields[7].empty()) |
1162 LOG_ASSERT(base::StringToInt(fields[7], &profile)); | 1158 LOG_ASSERT(base::StringToInt(fields[7], &profile)); |
1163 video_file->profile = static_cast<media::VideoCodecProfile>(profile); | 1159 video_file->profile = static_cast<VideoCodecProfile>(profile); |
1164 | 1160 |
1165 // Read in the video data. | 1161 // Read in the video data. |
1166 base::FilePath filepath(video_file->file_name); | 1162 base::FilePath filepath(video_file->file_name); |
1167 LOG_ASSERT(base::ReadFileToString(filepath, &video_file->data_str)) | 1163 LOG_ASSERT(base::ReadFileToString(filepath, &video_file->data_str)) |
1168 << "test_video_file: " << filepath.MaybeAsASCII(); | 1164 << "test_video_file: " << filepath.MaybeAsASCII(); |
1169 | 1165 |
1170 test_video_files->push_back(video_file); | 1166 test_video_files->push_back(video_file); |
1171 } | 1167 } |
1172 } | 1168 } |
1173 | 1169 |
(...skipping 609 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1783 media::VaapiWrapper::PreSandboxInitialization(); | 1779 media::VaapiWrapper::PreSandboxInitialization(); |
1784 #endif | 1780 #endif |
1785 | 1781 |
1786 media::g_env = | 1782 media::g_env = |
1787 reinterpret_cast<media::VideoDecodeAcceleratorTestEnvironment*>( | 1783 reinterpret_cast<media::VideoDecodeAcceleratorTestEnvironment*>( |
1788 testing::AddGlobalTestEnvironment( | 1784 testing::AddGlobalTestEnvironment( |
1789 new media::VideoDecodeAcceleratorTestEnvironment())); | 1785 new media::VideoDecodeAcceleratorTestEnvironment())); |
1790 | 1786 |
1791 return RUN_ALL_TESTS(); | 1787 return RUN_ALL_TESTS(); |
1792 } | 1788 } |
OLD | NEW |