OLD | NEW |
1 // Copyright 2013 The Chromium Authors. All rights reserved. | 1 // Copyright 2013 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include <inttypes.h> | 5 #include <inttypes.h> |
6 #include <stddef.h> | 6 #include <stddef.h> |
7 #include <stdint.h> | 7 #include <stdint.h> |
8 | 8 |
9 #include <algorithm> | 9 #include <algorithm> |
10 #include <memory> | 10 #include <memory> |
(...skipping 45 matching lines...)
56 #include "media/gpu/vaapi_wrapper.h" | 56 #include "media/gpu/vaapi_wrapper.h" |
57 // Status has been defined as int in Xlib.h. | 57 // Status has been defined as int in Xlib.h. |
58 #undef Status | 58 #undef Status |
59 #endif // defined(ARCH_CPU_X86_FAMILY) | 59 #endif // defined(ARCH_CPU_X86_FAMILY) |
60 #elif defined(OS_MACOSX) | 60 #elif defined(OS_MACOSX) |
61 #include "media/gpu/vt_video_encode_accelerator_mac.h" | 61 #include "media/gpu/vt_video_encode_accelerator_mac.h" |
62 #else | 62 #else |
63 #error The VideoEncodeAcceleratorUnittest is not supported on this platform. | 63 #error The VideoEncodeAcceleratorUnittest is not supported on this platform. |
64 #endif | 64 #endif |
65 | 65 |
66 using media::VideoEncodeAccelerator; | |
67 | |
68 namespace media { | 66 namespace media { |
69 namespace { | 67 namespace { |
70 | 68 |
71 const media::VideoPixelFormat kInputFormat = media::PIXEL_FORMAT_I420; | 69 const VideoPixelFormat kInputFormat = PIXEL_FORMAT_I420; |
72 | 70 |
73 // The absolute differences between original frame and decoded frame usually | 71 // The absolute differences between original frame and decoded frame usually |
74 // range around 1 ~ 7. So we pick 10 as an extreme value to detect abnormal | 72 // range around 1 ~ 7. So we pick 10 as an extreme value to detect abnormal |
75 // decoded frames. | 73 // decoded frames. |
76 const double kDecodeSimilarityThreshold = 10.0; | 74 const double kDecodeSimilarityThreshold = 10.0; |
77 | 75 |
78 // Arbitrarily chosen to add some depth to the pipeline. | 76 // Arbitrarily chosen to add some depth to the pipeline. |
79 const unsigned int kNumOutputBuffers = 4; | 77 const unsigned int kNumOutputBuffers = 4; |
80 const unsigned int kNumExtraInputFrames = 4; | 78 const unsigned int kNumExtraInputFrames = 4; |
81 // Maximum delay between requesting a keyframe and receiving one, in frames. | 79 // Maximum delay between requesting a keyframe and receiving one, in frames. |
(...skipping 24 matching lines...)
106 | 104 |
107 // The syntax of multiple test streams is: | 105 // The syntax of multiple test streams is: |
108 // test-stream1;test-stream2;test-stream3 | 106 // test-stream1;test-stream2;test-stream3 |
109 // The syntax of each test stream is: | 107 // The syntax of each test stream is: |
110 // "in_filename:width:height:profile:out_filename:requested_bitrate | 108 // "in_filename:width:height:profile:out_filename:requested_bitrate |
111 // :requested_framerate:requested_subsequent_bitrate | 109 // :requested_framerate:requested_subsequent_bitrate |
112 // :requested_subsequent_framerate" | 110 // :requested_subsequent_framerate" |
113 // - |in_filename| must be an I420 (YUV planar) raw stream | 111 // - |in_filename| must be an I420 (YUV planar) raw stream |
114 // (see http://www.fourcc.org/yuv.php#IYUV). | 112 // (see http://www.fourcc.org/yuv.php#IYUV). |
115 // - |width| and |height| are in pixels. | 113 // - |width| and |height| are in pixels. |
116 // - |profile| to encode into (values of media::VideoCodecProfile). | 114 // - |profile| to encode into (values of VideoCodecProfile). |
117 // - |out_filename| filename to save the encoded stream to (optional). The | 115 // - |out_filename| filename to save the encoded stream to (optional). The |
118 // format for H264 is Annex-B byte stream. The format for VP8 is IVF. Output | 116 // format for H264 is Annex-B byte stream. The format for VP8 is IVF. Output |
119 // stream is saved for the simple encode test only. H264 raw stream and IVF | 117 // stream is saved for the simple encode test only. H264 raw stream and IVF |
120 // can be used as input of VDA unittest. H264 raw stream can be played by | 118 // can be used as input of VDA unittest. H264 raw stream can be played by |
121 // "mplayer -fps 25 out.h264" and IVF can be played by mplayer directly. | 119 // "mplayer -fps 25 out.h264" and IVF can be played by mplayer directly. |
122 // Helpful description: http://wiki.multimedia.cx/index.php?title=IVF | 120 // Helpful description: http://wiki.multimedia.cx/index.php?title=IVF |
123 // Further parameters are optional (need to provide preceding positional | 121 // Further parameters are optional (need to provide preceding positional |
124 // parameters if a specific subsequent parameter is required): | 122 // parameters if a specific subsequent parameter is required): |
125 // - |requested_bitrate| requested bitrate in bits per second. | 123 // - |requested_bitrate| requested bitrate in bits per second. |
126 // - |requested_framerate| requested initial framerate. | 124 // - |requested_framerate| requested initial framerate. |
(...skipping 43 matching lines...)
170 // makes sure starting addresses of YUV planes are 64-byte aligned. | 168 // makes sure starting addresses of YUV planes are 64-byte aligned. |
171 std::vector<char> aligned_in_file_data; | 169 std::vector<char> aligned_in_file_data; |
172 | 170 |
173 // Byte size of a frame of |aligned_in_file_data|. | 171 // Byte size of a frame of |aligned_in_file_data|. |
174 size_t aligned_buffer_size; | 172 size_t aligned_buffer_size; |
175 | 173 |
176 // Byte size for each aligned plane of a frame. | 174 // Byte size for each aligned plane of a frame. |
177 std::vector<size_t> aligned_plane_size; | 175 std::vector<size_t> aligned_plane_size; |
178 | 176 |
179 std::string out_filename; | 177 std::string out_filename; |
180 media::VideoCodecProfile requested_profile; | 178 VideoCodecProfile requested_profile; |
181 unsigned int requested_bitrate; | 179 unsigned int requested_bitrate; |
182 unsigned int requested_framerate; | 180 unsigned int requested_framerate; |
183 unsigned int requested_subsequent_bitrate; | 181 unsigned int requested_subsequent_bitrate; |
184 unsigned int requested_subsequent_framerate; | 182 unsigned int requested_subsequent_framerate; |
185 }; | 183 }; |
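For reference, a hypothetical spec in the syntax documented above, showing how the positional fields map onto the TestStream members; the file name, dimensions and profile values below are illustrative assumptions, not taken from the checked-in test data:

    // Two streams separated by ';': the first encodes to H.264 (an assumed
    // VideoCodecProfile value of 1) and saves the output, the second encodes
    // the same input to VP8 (an assumed profile value of 11) at an explicit
    // bitrate.
    const char kExampleStreamSpec[] =
        "bear_320x192.yuv:320:192:1:out.h264:200000:30;"
        "bear_320x192.yuv:320:192:11:out.ivf:200000";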
186 | 184 |
187 inline static size_t Align64Bytes(size_t value) { | 185 inline static size_t Align64Bytes(size_t value) { |
188 return (value + 63) & ~63; | 186 return (value + 63) & ~63; |
189 } | 187 } |
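A quick illustration of the rounding behaviour (worked values only, not part of the test): (value + 63) & ~63 rounds value up to the next multiple of 64, the cache line size discussed further below.

    //   Align64Bytes(1)  == 64
    //   Align64Bytes(64) == 64
    //   Align64Bytes(65) == 128
    static_assert(((65 + 63) & ~63) == 128,
                  "rounds up to the next 64-byte multiple");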
190 | 188 |
191 // Return the |percentile| from a sorted vector. | 189 // Return the |percentile| from a sorted vector. |
192 static base::TimeDelta Percentile( | 190 static base::TimeDelta Percentile( |
193 const std::vector<base::TimeDelta>& sorted_values, | 191 const std::vector<base::TimeDelta>& sorted_values, |
194 unsigned int percentile) { | 192 unsigned int percentile) { |
195 size_t size = sorted_values.size(); | 193 size_t size = sorted_values.size(); |
196 LOG_ASSERT(size > 0UL); | 194 LOG_ASSERT(size > 0UL); |
197 LOG_ASSERT(percentile <= 100UL); | 195 LOG_ASSERT(percentile <= 100UL); |
198 // Use Nearest Rank method in http://en.wikipedia.org/wiki/Percentile. | 196 // Use Nearest Rank method in http://en.wikipedia.org/wiki/Percentile. |
199 int index = | 197 int index = |
200 std::max(static_cast<int>(ceil(0.01f * percentile * size)) - 1, 0); | 198 std::max(static_cast<int>(ceil(0.01f * percentile * size)) - 1, 0); |
201 return sorted_values[index]; | 199 return sorted_values[index]; |
202 } | 200 } |
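A worked example of the Nearest Rank formula above (numbers chosen for illustration): for 10 sorted samples, the 95th percentile gives ceil(0.01 * 95 * 10) - 1 = 9, i.e. the largest sample, while the 50th percentile gives ceil(5) - 1 = 4, the fifth-smallest sample.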
203 | 201 |
204 static bool IsH264(media::VideoCodecProfile profile) { | 202 static bool IsH264(VideoCodecProfile profile) { |
205 return profile >= media::H264PROFILE_MIN && profile <= media::H264PROFILE_MAX; | 203 return profile >= H264PROFILE_MIN && profile <= H264PROFILE_MAX; |
206 } | 204 } |
207 | 205 |
208 static bool IsVP8(media::VideoCodecProfile profile) { | 206 static bool IsVP8(VideoCodecProfile profile) { |
209 return profile >= media::VP8PROFILE_MIN && profile <= media::VP8PROFILE_MAX; | 207 return profile >= VP8PROFILE_MIN && profile <= VP8PROFILE_MAX; |
210 } | 208 } |
211 | 209 |
212 // ARM performs CPU cache management with CPU cache line granularity. We thus | 210 // ARM performs CPU cache management with CPU cache line granularity. We thus |
213 // need to ensure our buffers are CPU cache line-aligned (64 byte-aligned). | 211 // need to ensure our buffers are CPU cache line-aligned (64 byte-aligned). |
214 // Otherwise newer kernels will refuse to accept them, and on older kernels | 212 // Otherwise newer kernels will refuse to accept them, and on older kernels |
215 // we'll be treating ourselves to random corruption. | 213 // we'll be treating ourselves to random corruption. |
216 // Since we are just mapping and passing chunks of the input file directly to | 214 // Since we are just mapping and passing chunks of the input file directly to |
217 // the VEA as input frames to avoid copying large chunks of raw data on each | 215 // the VEA as input frames to avoid copying large chunks of raw data on each |
218 // frame and thus affecting performance measurements, we have to prepare a | 216 // frame and thus affecting performance measurements, we have to prepare a |
219 // temporary file with all planes aligned to 64-byte boundaries beforehand. | 217 // temporary file with all planes aligned to 64-byte boundaries beforehand. |
220 static void CreateAlignedInputStreamFile(const gfx::Size& coded_size, | 218 static void CreateAlignedInputStreamFile(const gfx::Size& coded_size, |
221 TestStream* test_stream) { | 219 TestStream* test_stream) { |
222 // A test case may have many encoders and memory should be prepared only once. | 220 // A test case may have many encoders and memory should be prepared only once. |
223 if (test_stream->coded_size == coded_size && | 221 if (test_stream->coded_size == coded_size && |
224 !test_stream->aligned_in_file_data.empty()) | 222 !test_stream->aligned_in_file_data.empty()) |
225 return; | 223 return; |
226 | 224 |
227 // All encoders in a multiple-encoder test reuse the same test_stream; make | 225 // All encoders in a multiple-encoder test reuse the same test_stream; make |
228 // sure they all requested the same coded_size. | 226 // sure they all requested the same coded_size. |
229 ASSERT_TRUE(test_stream->aligned_in_file_data.empty() || | 227 ASSERT_TRUE(test_stream->aligned_in_file_data.empty() || |
230 coded_size == test_stream->coded_size); | 228 coded_size == test_stream->coded_size); |
231 test_stream->coded_size = coded_size; | 229 test_stream->coded_size = coded_size; |
232 | 230 |
233 size_t num_planes = media::VideoFrame::NumPlanes(kInputFormat); | 231 size_t num_planes = VideoFrame::NumPlanes(kInputFormat); |
234 std::vector<size_t> padding_sizes(num_planes); | 232 std::vector<size_t> padding_sizes(num_planes); |
235 std::vector<size_t> coded_bpl(num_planes); | 233 std::vector<size_t> coded_bpl(num_planes); |
236 std::vector<size_t> visible_bpl(num_planes); | 234 std::vector<size_t> visible_bpl(num_planes); |
237 std::vector<size_t> visible_plane_rows(num_planes); | 235 std::vector<size_t> visible_plane_rows(num_planes); |
238 | 236 |
239 // Calculate padding in bytes to be added after each plane required to keep | 237 // Calculate padding in bytes to be added after each plane required to keep |
240 // starting addresses of all planes at a 64-byte boundary. This padding will | 238 // starting addresses of all planes at a 64-byte boundary. This padding will |
241 // be added after each plane when copying to the temporary file. | 239 // be added after each plane when copying to the temporary file. |
242 // At the same time we also need to take into account coded_size requested by | 240 // At the same time we also need to take into account coded_size requested by |
243 // the VEA; each row of visible_bpl bytes in the original file needs to be | 241 // the VEA; each row of visible_bpl bytes in the original file needs to be |
244 // copied into a row of coded_bpl bytes in the aligned file. | 242 // copied into a row of coded_bpl bytes in the aligned file. |
245 for (size_t i = 0; i < num_planes; i++) { | 243 for (size_t i = 0; i < num_planes; i++) { |
246 const size_t size = | 244 const size_t size = |
247 media::VideoFrame::PlaneSize(kInputFormat, i, coded_size).GetArea(); | 245 VideoFrame::PlaneSize(kInputFormat, i, coded_size).GetArea(); |
248 test_stream->aligned_plane_size.push_back(Align64Bytes(size)); | 246 test_stream->aligned_plane_size.push_back(Align64Bytes(size)); |
249 test_stream->aligned_buffer_size += test_stream->aligned_plane_size.back(); | 247 test_stream->aligned_buffer_size += test_stream->aligned_plane_size.back(); |
250 | 248 |
251 coded_bpl[i] = | 249 coded_bpl[i] = VideoFrame::RowBytes(i, kInputFormat, coded_size.width()); |
252 media::VideoFrame::RowBytes(i, kInputFormat, coded_size.width()); | 250 visible_bpl[i] = VideoFrame::RowBytes(i, kInputFormat, |
253 visible_bpl[i] = media::VideoFrame::RowBytes( | 251 test_stream->visible_size.width()); |
254 i, kInputFormat, test_stream->visible_size.width()); | 252 visible_plane_rows[i] = |
255 visible_plane_rows[i] = media::VideoFrame::Rows( | 253 VideoFrame::Rows(i, kInputFormat, test_stream->visible_size.height()); |
256 i, kInputFormat, test_stream->visible_size.height()); | |
257 const size_t padding_rows = | 254 const size_t padding_rows = |
258 media::VideoFrame::Rows(i, kInputFormat, coded_size.height()) - | 255 VideoFrame::Rows(i, kInputFormat, coded_size.height()) - |
259 visible_plane_rows[i]; | 256 visible_plane_rows[i]; |
260 padding_sizes[i] = padding_rows * coded_bpl[i] + Align64Bytes(size) - size; | 257 padding_sizes[i] = padding_rows * coded_bpl[i] + Align64Bytes(size) - size; |
261 } | 258 } |
262 | 259 |
263 base::FilePath src_file(test_stream->in_filename); | 260 base::FilePath src_file(test_stream->in_filename); |
264 int64_t src_file_size = 0; | 261 int64_t src_file_size = 0; |
265 LOG_ASSERT(base::GetFileSize(src_file, &src_file_size)); | 262 LOG_ASSERT(base::GetFileSize(src_file, &src_file_size)); |
266 | 263 |
267 size_t visible_buffer_size = media::VideoFrame::AllocationSize( | 264 size_t visible_buffer_size = |
268 kInputFormat, test_stream->visible_size); | 265 VideoFrame::AllocationSize(kInputFormat, test_stream->visible_size); |
269 LOG_ASSERT(src_file_size % visible_buffer_size == 0U) | 266 LOG_ASSERT(src_file_size % visible_buffer_size == 0U) |
270 << "Stream byte size is not a product of calculated frame byte size"; | 267 << "Stream byte size is not a product of calculated frame byte size"; |
271 | 268 |
272 test_stream->num_frames = src_file_size / visible_buffer_size; | 269 test_stream->num_frames = src_file_size / visible_buffer_size; |
273 | 270 |
274 LOG_ASSERT(test_stream->aligned_buffer_size > 0UL); | 271 LOG_ASSERT(test_stream->aligned_buffer_size > 0UL); |
275 test_stream->aligned_in_file_data.resize(test_stream->aligned_buffer_size * | 272 test_stream->aligned_in_file_data.resize(test_stream->aligned_buffer_size * |
276 test_stream->num_frames); | 273 test_stream->num_frames); |
277 | 274 |
278 base::File src(src_file, base::File::FLAG_OPEN | base::File::FLAG_READ); | 275 base::File src(src_file, base::File::FLAG_OPEN | base::File::FLAG_READ); |
(...skipping 51 matching lines...)
330 int width, height; | 327 int width, height; |
331 bool result = base::StringToInt(fields[1], &width); | 328 bool result = base::StringToInt(fields[1], &width); |
332 LOG_ASSERT(result); | 329 LOG_ASSERT(result); |
333 result = base::StringToInt(fields[2], &height); | 330 result = base::StringToInt(fields[2], &height); |
334 LOG_ASSERT(result); | 331 LOG_ASSERT(result); |
335 test_stream->visible_size = gfx::Size(width, height); | 332 test_stream->visible_size = gfx::Size(width, height); |
336 LOG_ASSERT(!test_stream->visible_size.IsEmpty()); | 333 LOG_ASSERT(!test_stream->visible_size.IsEmpty()); |
337 int profile; | 334 int profile; |
338 result = base::StringToInt(fields[3], &profile); | 335 result = base::StringToInt(fields[3], &profile); |
339 LOG_ASSERT(result); | 336 LOG_ASSERT(result); |
340 LOG_ASSERT(profile > media::VIDEO_CODEC_PROFILE_UNKNOWN); | 337 LOG_ASSERT(profile > VIDEO_CODEC_PROFILE_UNKNOWN); |
341 LOG_ASSERT(profile <= media::VIDEO_CODEC_PROFILE_MAX); | 338 LOG_ASSERT(profile <= VIDEO_CODEC_PROFILE_MAX); |
342 test_stream->requested_profile = | 339 test_stream->requested_profile = static_cast<VideoCodecProfile>(profile); |
343 static_cast<media::VideoCodecProfile>(profile); | |
344 | 340 |
345 if (fields.size() >= 5 && !fields[4].empty()) | 341 if (fields.size() >= 5 && !fields[4].empty()) |
346 test_stream->out_filename = fields[4]; | 342 test_stream->out_filename = fields[4]; |
347 | 343 |
348 if (fields.size() >= 6 && !fields[5].empty()) | 344 if (fields.size() >= 6 && !fields[5].empty()) |
349 LOG_ASSERT( | 345 LOG_ASSERT( |
350 base::StringToUint(fields[5], &test_stream->requested_bitrate)); | 346 base::StringToUint(fields[5], &test_stream->requested_bitrate)); |
351 | 347 |
352 if (fields.size() >= 7 && !fields[6].empty()) | 348 if (fields.size() >= 7 && !fields[6].empty()) |
353 LOG_ASSERT( | 349 LOG_ASSERT( |
(...skipping 102 matching lines...)
456 public: | 452 public: |
457 // To be called when a complete frame is found while processing a stream | 453 // To be called when a complete frame is found while processing a stream |
458 // buffer, passing true if the frame is a keyframe. Returns false if we | 454 // buffer, passing true if the frame is a keyframe. Returns false if we |
459 // are not interested in more frames and further processing should be aborted. | 455 // are not interested in more frames and further processing should be aborted. |
460 typedef base::Callback<bool(bool)> FrameFoundCallback; | 456 typedef base::Callback<bool(bool)> FrameFoundCallback; |
461 | 457 |
462 virtual ~StreamValidator() {} | 458 virtual ~StreamValidator() {} |
463 | 459 |
464 // Provide a StreamValidator instance for the given |profile|. | 460 // Provide a StreamValidator instance for the given |profile|. |
465 static std::unique_ptr<StreamValidator> Create( | 461 static std::unique_ptr<StreamValidator> Create( |
466 media::VideoCodecProfile profile, | 462 VideoCodecProfile profile, |
467 const FrameFoundCallback& frame_cb); | 463 const FrameFoundCallback& frame_cb); |
468 | 464 |
469 // Process and verify contents of a bitstream buffer. | 465 // Process and verify contents of a bitstream buffer. |
470 virtual void ProcessStreamBuffer(const uint8_t* stream, size_t size) = 0; | 466 virtual void ProcessStreamBuffer(const uint8_t* stream, size_t size) = 0; |
471 | 467 |
472 protected: | 468 protected: |
473 explicit StreamValidator(const FrameFoundCallback& frame_cb) | 469 explicit StreamValidator(const FrameFoundCallback& frame_cb) |
474 : frame_cb_(frame_cb) {} | 470 : frame_cb_(frame_cb) {} |
475 | 471 |
476 FrameFoundCallback frame_cb_; | 472 FrameFoundCallback frame_cb_; |
477 }; | 473 }; |
478 | 474 |
479 class H264Validator : public StreamValidator { | 475 class H264Validator : public StreamValidator { |
480 public: | 476 public: |
481 explicit H264Validator(const FrameFoundCallback& frame_cb) | 477 explicit H264Validator(const FrameFoundCallback& frame_cb) |
482 : StreamValidator(frame_cb), | 478 : StreamValidator(frame_cb), |
483 seen_sps_(false), | 479 seen_sps_(false), |
484 seen_pps_(false), | 480 seen_pps_(false), |
485 seen_idr_(false) {} | 481 seen_idr_(false) {} |
486 | 482 |
487 void ProcessStreamBuffer(const uint8_t* stream, size_t size) override; | 483 void ProcessStreamBuffer(const uint8_t* stream, size_t size) override; |
488 | 484 |
489 private: | 485 private: |
490 // Set to true when encoder provides us with the corresponding NALU type. | 486 // Set to true when encoder provides us with the corresponding NALU type. |
491 bool seen_sps_; | 487 bool seen_sps_; |
492 bool seen_pps_; | 488 bool seen_pps_; |
493 bool seen_idr_; | 489 bool seen_idr_; |
494 | 490 |
495 media::H264Parser h264_parser_; | 491 H264Parser h264_parser_; |
496 }; | 492 }; |
497 | 493 |
498 void H264Validator::ProcessStreamBuffer(const uint8_t* stream, size_t size) { | 494 void H264Validator::ProcessStreamBuffer(const uint8_t* stream, size_t size) { |
499 h264_parser_.SetStream(stream, size); | 495 h264_parser_.SetStream(stream, size); |
500 | 496 |
501 while (1) { | 497 while (1) { |
502 media::H264NALU nalu; | 498 H264NALU nalu; |
503 media::H264Parser::Result result; | 499 H264Parser::Result result; |
504 | 500 |
505 result = h264_parser_.AdvanceToNextNALU(&nalu); | 501 result = h264_parser_.AdvanceToNextNALU(&nalu); |
506 if (result == media::H264Parser::kEOStream) | 502 if (result == H264Parser::kEOStream) |
507 break; | 503 break; |
508 | 504 |
509 ASSERT_EQ(media::H264Parser::kOk, result); | 505 ASSERT_EQ(H264Parser::kOk, result); |
510 | 506 |
511 bool keyframe = false; | 507 bool keyframe = false; |
512 | 508 |
513 switch (nalu.nal_unit_type) { | 509 switch (nalu.nal_unit_type) { |
514 case media::H264NALU::kIDRSlice: | 510 case H264NALU::kIDRSlice: |
515 ASSERT_TRUE(seen_sps_); | 511 ASSERT_TRUE(seen_sps_); |
516 ASSERT_TRUE(seen_pps_); | 512 ASSERT_TRUE(seen_pps_); |
517 seen_idr_ = true; | 513 seen_idr_ = true; |
518 keyframe = true; | 514 keyframe = true; |
519 // fallthrough | 515 // fallthrough |
520 case media::H264NALU::kNonIDRSlice: { | 516 case H264NALU::kNonIDRSlice: { |
521 ASSERT_TRUE(seen_idr_); | 517 ASSERT_TRUE(seen_idr_); |
522 if (!frame_cb_.Run(keyframe)) | 518 if (!frame_cb_.Run(keyframe)) |
523 return; | 519 return; |
524 break; | 520 break; |
525 } | 521 } |
526 | 522 |
527 case media::H264NALU::kSPS: { | 523 case H264NALU::kSPS: { |
528 int sps_id; | 524 int sps_id; |
529 ASSERT_EQ(media::H264Parser::kOk, h264_parser_.ParseSPS(&sps_id)); | 525 ASSERT_EQ(H264Parser::kOk, h264_parser_.ParseSPS(&sps_id)); |
530 seen_sps_ = true; | 526 seen_sps_ = true; |
531 break; | 527 break; |
532 } | 528 } |
533 | 529 |
534 case media::H264NALU::kPPS: { | 530 case H264NALU::kPPS: { |
535 ASSERT_TRUE(seen_sps_); | 531 ASSERT_TRUE(seen_sps_); |
536 int pps_id; | 532 int pps_id; |
537 ASSERT_EQ(media::H264Parser::kOk, h264_parser_.ParsePPS(&pps_id)); | 533 ASSERT_EQ(H264Parser::kOk, h264_parser_.ParsePPS(&pps_id)); |
538 seen_pps_ = true; | 534 seen_pps_ = true; |
539 break; | 535 break; |
540 } | 536 } |
541 | 537 |
542 default: | 538 default: |
543 break; | 539 break; |
544 } | 540 } |
545 } | 541 } |
546 } | 542 } |
547 | 543 |
(...skipping 17 matching lines...)
565 EXPECT_TRUE(seen_keyframe_); | 561 EXPECT_TRUE(seen_keyframe_); |
566 | 562 |
567 frame_cb_.Run(keyframe); | 563 frame_cb_.Run(keyframe); |
568 // TODO(posciak): We could be getting more frames in the buffer, but there is | 564 // TODO(posciak): We could be getting more frames in the buffer, but there is |
569 // no simple way to detect this. We'd need to parse the frames and go through | 565 // no simple way to detect this. We'd need to parse the frames and go through |
570 // partition numbers/sizes. For now assume one frame per buffer. | 566 // partition numbers/sizes. For now assume one frame per buffer. |
571 } | 567 } |
572 | 568 |
573 // static | 569 // static |
574 std::unique_ptr<StreamValidator> StreamValidator::Create( | 570 std::unique_ptr<StreamValidator> StreamValidator::Create( |
575 media::VideoCodecProfile profile, | 571 VideoCodecProfile profile, |
576 const FrameFoundCallback& frame_cb) { | 572 const FrameFoundCallback& frame_cb) { |
577 std::unique_ptr<StreamValidator> validator; | 573 std::unique_ptr<StreamValidator> validator; |
578 | 574 |
579 if (IsH264(profile)) { | 575 if (IsH264(profile)) { |
580 validator.reset(new H264Validator(frame_cb)); | 576 validator.reset(new H264Validator(frame_cb)); |
581 } else if (IsVP8(profile)) { | 577 } else if (IsVP8(profile)) { |
582 validator.reset(new VP8Validator(frame_cb)); | 578 validator.reset(new VP8Validator(frame_cb)); |
583 } else { | 579 } else { |
584 LOG(FATAL) << "Unsupported profile: " << profile; | 580 LOG(FATAL) << "Unsupported profile: " << profile; |
585 } | 581 } |
586 | 582 |
587 return validator; | 583 return validator; |
588 } | 584 } |
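A hypothetical usage sketch (the helper function and variables below are invented for illustration and are not part of the test), assuming the Create() and FrameFoundCallback signatures above:

    // Counts complete frames found in a bitstream buffer; returning false
    // would tell the validator to stop scanning the rest of the buffer.
    bool CountFrame(int* frame_count, bool keyframe) {
      ++*frame_count;
      return true;
    }

    int frame_count = 0;
    std::unique_ptr<StreamValidator> validator = StreamValidator::Create(
        H264PROFILE_MAIN, base::Bind(&CountFrame, &frame_count));
    validator->ProcessStreamBuffer(stream_ptr, payload_size);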
589 | 585 |
590 class VideoFrameQualityValidator { | 586 class VideoFrameQualityValidator { |
591 public: | 587 public: |
592 VideoFrameQualityValidator(const media::VideoCodecProfile profile, | 588 VideoFrameQualityValidator(const VideoCodecProfile profile, |
593 const base::Closure& flush_complete_cb, | 589 const base::Closure& flush_complete_cb, |
594 const base::Closure& decode_error_cb); | 590 const base::Closure& decode_error_cb); |
595 void Initialize(const gfx::Size& coded_size, const gfx::Rect& visible_size); | 591 void Initialize(const gfx::Size& coded_size, const gfx::Rect& visible_size); |
596 // Save original YUV frame to compare it with the decoded frame later. | 592 // Save original YUV frame to compare it with the decoded frame later. |
597 void AddOriginalFrame(scoped_refptr<media::VideoFrame> frame); | 593 void AddOriginalFrame(scoped_refptr<VideoFrame> frame); |
598 void AddDecodeBuffer(const scoped_refptr<media::DecoderBuffer>& buffer); | 594 void AddDecodeBuffer(const scoped_refptr<DecoderBuffer>& buffer); |
599 // Flush the decoder. | 595 // Flush the decoder. |
600 void Flush(); | 596 void Flush(); |
601 | 597 |
602 private: | 598 private: |
603 void InitializeCB(bool success); | 599 void InitializeCB(bool success); |
604 void DecodeDone(media::DecodeStatus status); | 600 void DecodeDone(DecodeStatus status); |
605 void FlushDone(media::DecodeStatus status); | 601 void FlushDone(DecodeStatus status); |
606 void VerifyOutputFrame(const scoped_refptr<media::VideoFrame>& output_frame); | 602 void VerifyOutputFrame(const scoped_refptr<VideoFrame>& output_frame); |
607 void Decode(); | 603 void Decode(); |
608 | 604 |
609 enum State { UNINITIALIZED, INITIALIZED, DECODING, ERROR }; | 605 enum State { UNINITIALIZED, INITIALIZED, DECODING, ERROR }; |
610 | 606 |
611 const media::VideoCodecProfile profile_; | 607 const VideoCodecProfile profile_; |
612 std::unique_ptr<media::FFmpegVideoDecoder> decoder_; | 608 std::unique_ptr<FFmpegVideoDecoder> decoder_; |
613 media::VideoDecoder::DecodeCB decode_cb_; | 609 VideoDecoder::DecodeCB decode_cb_; |
614 // Decode callback of an EOS buffer. | 610 // Decode callback of an EOS buffer. |
615 media::VideoDecoder::DecodeCB eos_decode_cb_; | 611 VideoDecoder::DecodeCB eos_decode_cb_; |
616 // Callback of Flush(). Called after all frames are decoded. | 612 // Callback of Flush(). Called after all frames are decoded. |
617 const base::Closure flush_complete_cb_; | 613 const base::Closure flush_complete_cb_; |
618 const base::Closure decode_error_cb_; | 614 const base::Closure decode_error_cb_; |
619 State decoder_state_; | 615 State decoder_state_; |
620 std::queue<scoped_refptr<media::VideoFrame>> original_frames_; | 616 std::queue<scoped_refptr<VideoFrame>> original_frames_; |
621 std::queue<scoped_refptr<media::DecoderBuffer>> decode_buffers_; | 617 std::queue<scoped_refptr<DecoderBuffer>> decode_buffers_; |
622 }; | 618 }; |
623 | 619 |
624 VideoFrameQualityValidator::VideoFrameQualityValidator( | 620 VideoFrameQualityValidator::VideoFrameQualityValidator( |
625 const media::VideoCodecProfile profile, | 621 const VideoCodecProfile profile, |
626 const base::Closure& flush_complete_cb, | 622 const base::Closure& flush_complete_cb, |
627 const base::Closure& decode_error_cb) | 623 const base::Closure& decode_error_cb) |
628 : profile_(profile), | 624 : profile_(profile), |
629 decoder_(new media::FFmpegVideoDecoder()), | 625 decoder_(new FFmpegVideoDecoder()), |
630 decode_cb_(base::Bind(&VideoFrameQualityValidator::DecodeDone, | 626 decode_cb_(base::Bind(&VideoFrameQualityValidator::DecodeDone, |
631 base::Unretained(this))), | 627 base::Unretained(this))), |
632 eos_decode_cb_(base::Bind(&VideoFrameQualityValidator::FlushDone, | 628 eos_decode_cb_(base::Bind(&VideoFrameQualityValidator::FlushDone, |
633 base::Unretained(this))), | 629 base::Unretained(this))), |
634 flush_complete_cb_(flush_complete_cb), | 630 flush_complete_cb_(flush_complete_cb), |
635 decode_error_cb_(decode_error_cb), | 631 decode_error_cb_(decode_error_cb), |
636 decoder_state_(UNINITIALIZED) { | 632 decoder_state_(UNINITIALIZED) { |
637 // Allow decoding of individual NALUs. Entire frames are required by default. | 633 // Allow decoding of individual NALUs. Entire frames are required by default. |
638 decoder_->set_decode_nalus(true); | 634 decoder_->set_decode_nalus(true); |
639 } | 635 } |
640 | 636 |
641 void VideoFrameQualityValidator::Initialize(const gfx::Size& coded_size, | 637 void VideoFrameQualityValidator::Initialize(const gfx::Size& coded_size, |
642 const gfx::Rect& visible_size) { | 638 const gfx::Rect& visible_size) { |
643 media::FFmpegGlue::InitializeFFmpeg(); | 639 FFmpegGlue::InitializeFFmpeg(); |
644 | 640 |
645 gfx::Size natural_size(visible_size.size()); | 641 gfx::Size natural_size(visible_size.size()); |
646 // The default output format of ffmpeg video decoder is YV12. | 642 // The default output format of ffmpeg video decoder is YV12. |
647 media::VideoDecoderConfig config; | 643 VideoDecoderConfig config; |
648 if (IsVP8(profile_)) | 644 if (IsVP8(profile_)) |
649 config.Initialize(media::kCodecVP8, media::VP8PROFILE_ANY, kInputFormat, | 645 config.Initialize(kCodecVP8, VP8PROFILE_ANY, kInputFormat, |
650 media::COLOR_SPACE_UNSPECIFIED, coded_size, visible_size, | 646 COLOR_SPACE_UNSPECIFIED, coded_size, visible_size, |
651 natural_size, media::EmptyExtraData(), | 647 natural_size, EmptyExtraData(), Unencrypted()); |
652 media::Unencrypted()); | |
653 else if (IsH264(profile_)) | 648 else if (IsH264(profile_)) |
654 config.Initialize(media::kCodecH264, media::H264PROFILE_MAIN, kInputFormat, | 649 config.Initialize(kCodecH264, H264PROFILE_MAIN, kInputFormat, |
655 media::COLOR_SPACE_UNSPECIFIED, coded_size, visible_size, | 650 COLOR_SPACE_UNSPECIFIED, coded_size, visible_size, |
656 natural_size, media::EmptyExtraData(), | 651 natural_size, EmptyExtraData(), Unencrypted()); |
657 media::Unencrypted()); | |
658 else | 652 else |
659 LOG_ASSERT(0) << "Invalid profile " << profile_; | 653 LOG_ASSERT(0) << "Invalid profile " << profile_; |
660 | 654 |
661 decoder_->Initialize( | 655 decoder_->Initialize( |
662 config, false, nullptr, | 656 config, false, nullptr, |
663 base::Bind(&VideoFrameQualityValidator::InitializeCB, | 657 base::Bind(&VideoFrameQualityValidator::InitializeCB, |
664 base::Unretained(this)), | 658 base::Unretained(this)), |
665 base::Bind(&VideoFrameQualityValidator::VerifyOutputFrame, | 659 base::Bind(&VideoFrameQualityValidator::VerifyOutputFrame, |
666 base::Unretained(this))); | 660 base::Unretained(this))); |
667 } | 661 } |
668 | 662 |
669 void VideoFrameQualityValidator::InitializeCB(bool success) { | 663 void VideoFrameQualityValidator::InitializeCB(bool success) { |
670 if (success) { | 664 if (success) { |
671 decoder_state_ = INITIALIZED; | 665 decoder_state_ = INITIALIZED; |
672 Decode(); | 666 Decode(); |
673 } else { | 667 } else { |
674 decoder_state_ = ERROR; | 668 decoder_state_ = ERROR; |
675 if (IsH264(profile_)) | 669 if (IsH264(profile_)) |
676 LOG(ERROR) << "Chromium does not support H264 decode. Try Chrome."; | 670 LOG(ERROR) << "Chromium does not support H264 decode. Try Chrome."; |
677 FAIL() << "Decoder initialization error"; | 671 FAIL() << "Decoder initialization error"; |
678 decode_error_cb_.Run(); | 672 decode_error_cb_.Run(); |
679 } | 673 } |
680 } | 674 } |
681 | 675 |
682 void VideoFrameQualityValidator::AddOriginalFrame( | 676 void VideoFrameQualityValidator::AddOriginalFrame( |
683 scoped_refptr<media::VideoFrame> frame) { | 677 scoped_refptr<VideoFrame> frame) { |
684 original_frames_.push(frame); | 678 original_frames_.push(frame); |
685 } | 679 } |
686 | 680 |
687 void VideoFrameQualityValidator::DecodeDone(media::DecodeStatus status) { | 681 void VideoFrameQualityValidator::DecodeDone(DecodeStatus status) { |
688 if (status == media::DecodeStatus::OK) { | 682 if (status == DecodeStatus::OK) { |
689 decoder_state_ = INITIALIZED; | 683 decoder_state_ = INITIALIZED; |
690 Decode(); | 684 Decode(); |
691 } else { | 685 } else { |
692 decoder_state_ = ERROR; | 686 decoder_state_ = ERROR; |
693 FAIL() << "Unexpected decode status = " << status << ". Stop decoding."; | 687 FAIL() << "Unexpected decode status = " << status << ". Stop decoding."; |
694 decode_error_cb_.Run(); | 688 decode_error_cb_.Run(); |
695 } | 689 } |
696 } | 690 } |
697 | 691 |
698 void VideoFrameQualityValidator::FlushDone(media::DecodeStatus status) { | 692 void VideoFrameQualityValidator::FlushDone(DecodeStatus status) { |
699 flush_complete_cb_.Run(); | 693 flush_complete_cb_.Run(); |
700 } | 694 } |
701 | 695 |
702 void VideoFrameQualityValidator::Flush() { | 696 void VideoFrameQualityValidator::Flush() { |
703 if (decoder_state_ != ERROR) { | 697 if (decoder_state_ != ERROR) { |
704 decode_buffers_.push(media::DecoderBuffer::CreateEOSBuffer()); | 698 decode_buffers_.push(DecoderBuffer::CreateEOSBuffer()); |
705 Decode(); | 699 Decode(); |
706 } | 700 } |
707 } | 701 } |
708 | 702 |
709 void VideoFrameQualityValidator::AddDecodeBuffer( | 703 void VideoFrameQualityValidator::AddDecodeBuffer( |
710 const scoped_refptr<media::DecoderBuffer>& buffer) { | 704 const scoped_refptr<DecoderBuffer>& buffer) { |
711 if (decoder_state_ != ERROR) { | 705 if (decoder_state_ != ERROR) { |
712 decode_buffers_.push(buffer); | 706 decode_buffers_.push(buffer); |
713 Decode(); | 707 Decode(); |
714 } | 708 } |
715 } | 709 } |
716 | 710 |
717 void VideoFrameQualityValidator::Decode() { | 711 void VideoFrameQualityValidator::Decode() { |
718 if (decoder_state_ == INITIALIZED && !decode_buffers_.empty()) { | 712 if (decoder_state_ == INITIALIZED && !decode_buffers_.empty()) { |
719 scoped_refptr<media::DecoderBuffer> next_buffer = decode_buffers_.front(); | 713 scoped_refptr<DecoderBuffer> next_buffer = decode_buffers_.front(); |
720 decode_buffers_.pop(); | 714 decode_buffers_.pop(); |
721 decoder_state_ = DECODING; | 715 decoder_state_ = DECODING; |
722 if (next_buffer->end_of_stream()) | 716 if (next_buffer->end_of_stream()) |
723 decoder_->Decode(next_buffer, eos_decode_cb_); | 717 decoder_->Decode(next_buffer, eos_decode_cb_); |
724 else | 718 else |
725 decoder_->Decode(next_buffer, decode_cb_); | 719 decoder_->Decode(next_buffer, decode_cb_); |
726 } | 720 } |
727 } | 721 } |
728 | 722 |
729 void VideoFrameQualityValidator::VerifyOutputFrame( | 723 void VideoFrameQualityValidator::VerifyOutputFrame( |
730 const scoped_refptr<media::VideoFrame>& output_frame) { | 724 const scoped_refptr<VideoFrame>& output_frame) { |
731 scoped_refptr<media::VideoFrame> original_frame = original_frames_.front(); | 725 scoped_refptr<VideoFrame> original_frame = original_frames_.front(); |
732 original_frames_.pop(); | 726 original_frames_.pop(); |
733 gfx::Size visible_size = original_frame->visible_rect().size(); | 727 gfx::Size visible_size = original_frame->visible_rect().size(); |
734 | 728 |
735 int planes[] = {media::VideoFrame::kYPlane, media::VideoFrame::kUPlane, | 729 int planes[] = {VideoFrame::kYPlane, VideoFrame::kUPlane, |
736 media::VideoFrame::kVPlane}; | 730 VideoFrame::kVPlane}; |
737 double difference = 0; | 731 double difference = 0; |
738 for (int plane : planes) { | 732 for (int plane : planes) { |
739 uint8_t* original_plane = original_frame->data(plane); | 733 uint8_t* original_plane = original_frame->data(plane); |
740 uint8_t* output_plane = output_frame->data(plane); | 734 uint8_t* output_plane = output_frame->data(plane); |
741 | 735 |
742 size_t rows = | 736 size_t rows = VideoFrame::Rows(plane, kInputFormat, visible_size.height()); |
743 media::VideoFrame::Rows(plane, kInputFormat, visible_size.height()); | |
744 size_t columns = | 737 size_t columns = |
745 media::VideoFrame::Columns(plane, kInputFormat, visible_size.width()); | 738 VideoFrame::Columns(plane, kInputFormat, visible_size.width()); |
746 size_t stride = original_frame->stride(plane); | 739 size_t stride = original_frame->stride(plane); |
747 | 740 |
748 for (size_t i = 0; i < rows; i++) | 741 for (size_t i = 0; i < rows; i++) |
749 for (size_t j = 0; j < columns; j++) | 742 for (size_t j = 0; j < columns; j++) |
750 difference += std::abs(original_plane[stride * i + j] - | 743 difference += std::abs(original_plane[stride * i + j] - |
751 output_plane[stride * i + j]); | 744 output_plane[stride * i + j]); |
752 } | 745 } |
753 // Divide the difference by the size of the frame. | 746 // Divide the difference by the size of the frame. |
754 difference /= media::VideoFrame::AllocationSize(kInputFormat, visible_size); | 747 difference /= VideoFrame::AllocationSize(kInputFormat, visible_size); |
755 EXPECT_TRUE(difference <= kDecodeSimilarityThreshold) | 748 EXPECT_TRUE(difference <= kDecodeSimilarityThreshold) |
756 << "differrence = " << difference << " > decode similarity threshold"; | 749 << "differrence = " << difference << " > decode similarity threshold"; |
757 } | 750 } |
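To make the threshold concrete (numbers below are a worked example only): the loop sums the absolute per-sample difference over the visible Y, U and V planes and divides by the full I420 allocation size, so a 320x192 frame has 92160 samples, and a total absolute difference of 460800 yields difference = 5.0, comfortably under kDecodeSimilarityThreshold.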
758 | 751 |
759 class VEAClient : public VideoEncodeAccelerator::Client { | 752 class VEAClient : public VideoEncodeAccelerator::Client { |
760 public: | 753 public: |
761 VEAClient(TestStream* test_stream, | 754 VEAClient(TestStream* test_stream, |
762 ClientStateNotification<ClientState>* note, | 755 ClientStateNotification<ClientState>* note, |
763 bool save_to_file, | 756 bool save_to_file, |
764 unsigned int keyframe_period, | 757 unsigned int keyframe_period, |
(...skipping 15 matching lines...)
780 bool key_frame, | 773 bool key_frame, |
781 base::TimeDelta timestamp) override; | 774 base::TimeDelta timestamp) override; |
782 void NotifyError(VideoEncodeAccelerator::Error error) override; | 775 void NotifyError(VideoEncodeAccelerator::Error error) override; |
783 | 776 |
784 private: | 777 private: |
785 bool has_encoder() { return encoder_.get(); } | 778 bool has_encoder() { return encoder_.get(); } |
786 | 779 |
787 // Return the number of encoded frames per second. | 780 // Return the number of encoded frames per second. |
788 double frames_per_second(); | 781 double frames_per_second(); |
789 | 782 |
790 std::unique_ptr<media::VideoEncodeAccelerator> CreateFakeVEA(); | 783 std::unique_ptr<VideoEncodeAccelerator> CreateFakeVEA(); |
791 std::unique_ptr<media::VideoEncodeAccelerator> CreateV4L2VEA(); | 784 std::unique_ptr<VideoEncodeAccelerator> CreateV4L2VEA(); |
792 std::unique_ptr<media::VideoEncodeAccelerator> CreateVaapiVEA(); | 785 std::unique_ptr<VideoEncodeAccelerator> CreateVaapiVEA(); |
793 std::unique_ptr<media::VideoEncodeAccelerator> CreateVTVEA(); | 786 std::unique_ptr<VideoEncodeAccelerator> CreateVTVEA(); |
794 | 787 |
795 void SetState(ClientState new_state); | 788 void SetState(ClientState new_state); |
796 | 789 |
797 // Set current stream parameters to given |bitrate| at |framerate|. | 790 // Set current stream parameters to given |bitrate| at |framerate|. |
798 void SetStreamParameters(unsigned int bitrate, unsigned int framerate); | 791 void SetStreamParameters(unsigned int bitrate, unsigned int framerate); |
799 | 792 |
800 // Called when encoder is done with a VideoFrame. | 793 // Called when encoder is done with a VideoFrame. |
801 void InputNoLongerNeededCallback(int32_t input_id); | 794 void InputNoLongerNeededCallback(int32_t input_id); |
802 | 795 |
803 // Feed the encoder with one input frame. | 796 // Feed the encoder with one input frame. |
(...skipping 20 matching lines...)
824 void LogPerf(); | 817 void LogPerf(); |
825 | 818 |
826 // Write IVF file header to test_stream_->out_filename. | 819 // Write IVF file header to test_stream_->out_filename. |
827 void WriteIvfFileHeader(); | 820 void WriteIvfFileHeader(); |
828 | 821 |
829 // Write an IVF frame header to test_stream_->out_filename. | 822 // Write an IVF frame header to test_stream_->out_filename. |
830 void WriteIvfFrameHeader(int frame_index, size_t frame_size); | 823 void WriteIvfFrameHeader(int frame_index, size_t frame_size); |
831 | 824 |
832 // Create and return a VideoFrame wrapping the data at |position| bytes in the | 825 // Create and return a VideoFrame wrapping the data at |position| bytes in the |
833 // input stream. | 826 // input stream. |
834 scoped_refptr<media::VideoFrame> CreateFrame(off_t position); | 827 scoped_refptr<VideoFrame> CreateFrame(off_t position); |
835 | 828 |
836 // Prepare and return a frame wrapping the data at |position| bytes in the | 829 // Prepare and return a frame wrapping the data at |position| bytes in the |
837 // input stream, ready to be sent to encoder. | 830 // input stream, ready to be sent to encoder. |
838 // The input frame id is returned in |input_id|. | 831 // The input frame id is returned in |input_id|. |
839 scoped_refptr<media::VideoFrame> PrepareInputFrame(off_t position, | 832 scoped_refptr<VideoFrame> PrepareInputFrame(off_t position, |
840 int32_t* input_id); | 833 int32_t* input_id); |
841 | 834 |
842 // Update the parameters according to |mid_stream_bitrate_switch| and | 835 // Update the parameters according to |mid_stream_bitrate_switch| and |
843 // |mid_stream_framerate_switch|. | 836 // |mid_stream_framerate_switch|. |
844 void UpdateTestStreamData(bool mid_stream_bitrate_switch, | 837 void UpdateTestStreamData(bool mid_stream_bitrate_switch, |
845 bool mid_stream_framerate_switch); | 838 bool mid_stream_framerate_switch); |
846 | 839 |
847 // Callback function of the |input_timer_|. | 840 // Callback function of the |input_timer_|. |
848 void OnInputTimer(); | 841 void OnInputTimer(); |
849 | 842 |
850 // Called when the quality validator has decoded all the frames. | 843 // Called when the quality validator has decoded all the frames. |
(...skipping 167 matching lines...)
1018 // Initialize the parameters of the test streams. | 1011 // Initialize the parameters of the test streams. |
1019 UpdateTestStreamData(mid_stream_bitrate_switch, mid_stream_framerate_switch); | 1012 UpdateTestStreamData(mid_stream_bitrate_switch, mid_stream_framerate_switch); |
1020 | 1013 |
1021 thread_checker_.DetachFromThread(); | 1014 thread_checker_.DetachFromThread(); |
1022 } | 1015 } |
1023 | 1016 |
1024 VEAClient::~VEAClient() { | 1017 VEAClient::~VEAClient() { |
1025 LOG_ASSERT(!has_encoder()); | 1018 LOG_ASSERT(!has_encoder()); |
1026 } | 1019 } |
1027 | 1020 |
1028 std::unique_ptr<media::VideoEncodeAccelerator> VEAClient::CreateFakeVEA() { | 1021 std::unique_ptr<VideoEncodeAccelerator> VEAClient::CreateFakeVEA() { |
1029 std::unique_ptr<media::VideoEncodeAccelerator> encoder; | 1022 std::unique_ptr<VideoEncodeAccelerator> encoder; |
1030 if (g_fake_encoder) { | 1023 if (g_fake_encoder) { |
1031 encoder.reset(new media::FakeVideoEncodeAccelerator( | 1024 encoder.reset(new FakeVideoEncodeAccelerator( |
1032 scoped_refptr<base::SingleThreadTaskRunner>( | 1025 scoped_refptr<base::SingleThreadTaskRunner>( |
1033 base::ThreadTaskRunnerHandle::Get()))); | 1026 base::ThreadTaskRunnerHandle::Get()))); |
1034 } | 1027 } |
1035 return encoder; | 1028 return encoder; |
1036 } | 1029 } |
1037 | 1030 |
1038 std::unique_ptr<media::VideoEncodeAccelerator> VEAClient::CreateV4L2VEA() { | 1031 std::unique_ptr<VideoEncodeAccelerator> VEAClient::CreateV4L2VEA() { |
1039 std::unique_ptr<media::VideoEncodeAccelerator> encoder; | 1032 std::unique_ptr<VideoEncodeAccelerator> encoder; |
1040 #if defined(OS_CHROMEOS) && (defined(ARCH_CPU_ARMEL) || \ | 1033 #if defined(OS_CHROMEOS) && (defined(ARCH_CPU_ARMEL) || \ |
1041 (defined(USE_OZONE) && defined(USE_V4L2_CODEC))) | 1034 (defined(USE_OZONE) && defined(USE_V4L2_CODEC))) |
1042 scoped_refptr<V4L2Device> device = V4L2Device::Create(V4L2Device::kEncoder); | 1035 scoped_refptr<V4L2Device> device = V4L2Device::Create(V4L2Device::kEncoder); |
1043 if (device) | 1036 if (device) |
1044 encoder.reset(new V4L2VideoEncodeAccelerator(device)); | 1037 encoder.reset(new V4L2VideoEncodeAccelerator(device)); |
1045 #endif | 1038 #endif |
1046 return encoder; | 1039 return encoder; |
1047 } | 1040 } |
1048 | 1041 |
1049 std::unique_ptr<media::VideoEncodeAccelerator> VEAClient::CreateVaapiVEA() { | 1042 std::unique_ptr<VideoEncodeAccelerator> VEAClient::CreateVaapiVEA() { |
1050 std::unique_ptr<media::VideoEncodeAccelerator> encoder; | 1043 std::unique_ptr<VideoEncodeAccelerator> encoder; |
1051 #if defined(OS_CHROMEOS) && defined(ARCH_CPU_X86_FAMILY) | 1044 #if defined(OS_CHROMEOS) && defined(ARCH_CPU_X86_FAMILY) |
1052 encoder.reset(new VaapiVideoEncodeAccelerator()); | 1045 encoder.reset(new VaapiVideoEncodeAccelerator()); |
1053 #endif | 1046 #endif |
1054 return encoder; | 1047 return encoder; |
1055 } | 1048 } |
1056 | 1049 |
1057 std::unique_ptr<media::VideoEncodeAccelerator> VEAClient::CreateVTVEA() { | 1050 std::unique_ptr<VideoEncodeAccelerator> VEAClient::CreateVTVEA() { |
1058 std::unique_ptr<media::VideoEncodeAccelerator> encoder; | 1051 std::unique_ptr<VideoEncodeAccelerator> encoder; |
1059 #if defined(OS_MACOSX) | 1052 #if defined(OS_MACOSX) |
1060 encoder.reset(new VTVideoEncodeAccelerator()); | 1053 encoder.reset(new VTVideoEncodeAccelerator()); |
1061 #endif | 1054 #endif |
1062 return encoder; | 1055 return encoder; |
1063 } | 1056 } |
1064 | 1057 |
1065 void VEAClient::CreateEncoder() { | 1058 void VEAClient::CreateEncoder() { |
1066 DCHECK(thread_checker_.CalledOnValidThread()); | 1059 DCHECK(thread_checker_.CalledOnValidThread()); |
1067 LOG_ASSERT(!has_encoder()); | 1060 LOG_ASSERT(!has_encoder()); |
1068 | 1061 |
1069 std::unique_ptr<media::VideoEncodeAccelerator> encoders[] = { | 1062 std::unique_ptr<VideoEncodeAccelerator> encoders[] = { |
1070 CreateFakeVEA(), CreateV4L2VEA(), CreateVaapiVEA(), CreateVTVEA()}; | 1063 CreateFakeVEA(), CreateV4L2VEA(), CreateVaapiVEA(), CreateVTVEA()}; |
1071 | 1064 |
1072 DVLOG(1) << "Profile: " << test_stream_->requested_profile | 1065 DVLOG(1) << "Profile: " << test_stream_->requested_profile |
1073 << ", initial bitrate: " << requested_bitrate_; | 1066 << ", initial bitrate: " << requested_bitrate_; |
1074 | 1067 |
1075 for (size_t i = 0; i < arraysize(encoders); ++i) { | 1068 for (size_t i = 0; i < arraysize(encoders); ++i) { |
1076 if (!encoders[i]) | 1069 if (!encoders[i]) |
1077 continue; | 1070 continue; |
1078 encoder_ = std::move(encoders[i]); | 1071 encoder_ = std::move(encoders[i]); |
1079 SetState(CS_ENCODER_SET); | 1072 SetState(CS_ENCODER_SET); |
(...skipping 157 matching lines...)
1237 | 1230 |
1238 const uint8_t* stream_ptr = static_cast<const uint8_t*>(shm->memory()); | 1231 const uint8_t* stream_ptr = static_cast<const uint8_t*>(shm->memory()); |
1239 if (payload_size > 0) { | 1232 if (payload_size > 0) { |
1240 if (stream_validator_) { | 1233 if (stream_validator_) { |
1241 stream_validator_->ProcessStreamBuffer(stream_ptr, payload_size); | 1234 stream_validator_->ProcessStreamBuffer(stream_ptr, payload_size); |
1242 } else { | 1235 } else { |
1243 HandleEncodedFrame(key_frame); | 1236 HandleEncodedFrame(key_frame); |
1244 } | 1237 } |
1245 | 1238 |
1246 if (quality_validator_) { | 1239 if (quality_validator_) { |
1247 scoped_refptr<media::DecoderBuffer> buffer(media::DecoderBuffer::CopyFrom( | 1240 scoped_refptr<DecoderBuffer> buffer(DecoderBuffer::CopyFrom( |
1248 reinterpret_cast<const uint8_t*>(shm->memory()), | 1241 reinterpret_cast<const uint8_t*>(shm->memory()), |
1249 static_cast<int>(payload_size))); | 1242 static_cast<int>(payload_size))); |
1250 quality_validator_->AddDecodeBuffer(buffer); | 1243 quality_validator_->AddDecodeBuffer(buffer); |
1251 // Insert EOS buffer to flush the decoder. | 1244 // Insert EOS buffer to flush the decoder. |
1252 if (num_encoded_frames_ == num_frames_to_encode_) | 1245 if (num_encoded_frames_ == num_frames_to_encode_) |
1253 quality_validator_->Flush(); | 1246 quality_validator_->Flush(); |
1254 } | 1247 } |
1255 | 1248 |
1256 if (save_to_file_) { | 1249 if (save_to_file_) { |
1257 if (IsVP8(test_stream_->requested_profile)) | 1250 if (IsVP8(test_stream_->requested_profile)) |
(...skipping 36 matching lines...)
1294 } | 1287 } |
1295 | 1288 |
1296 void VEAClient::InputNoLongerNeededCallback(int32_t input_id) { | 1289 void VEAClient::InputNoLongerNeededCallback(int32_t input_id) { |
1297 std::set<int32_t>::iterator it = inputs_at_client_.find(input_id); | 1290 std::set<int32_t>::iterator it = inputs_at_client_.find(input_id); |
1298 ASSERT_NE(it, inputs_at_client_.end()); | 1291 ASSERT_NE(it, inputs_at_client_.end()); |
1299 inputs_at_client_.erase(it); | 1292 inputs_at_client_.erase(it); |
1300 if (!g_env->run_at_fps()) | 1293 if (!g_env->run_at_fps()) |
1301 FeedEncoderWithOneInput(); | 1294 FeedEncoderWithOneInput(); |
1302 } | 1295 } |
1303 | 1296 |
1304 scoped_refptr<media::VideoFrame> VEAClient::CreateFrame(off_t position) { | 1297 scoped_refptr<VideoFrame> VEAClient::CreateFrame(off_t position) { |
1305 uint8_t* frame_data_y = | 1298 uint8_t* frame_data_y = |
1306 reinterpret_cast<uint8_t*>(&test_stream_->aligned_in_file_data[0]) + | 1299 reinterpret_cast<uint8_t*>(&test_stream_->aligned_in_file_data[0]) + |
1307 position; | 1300 position; |
1308 uint8_t* frame_data_u = frame_data_y + test_stream_->aligned_plane_size[0]; | 1301 uint8_t* frame_data_u = frame_data_y + test_stream_->aligned_plane_size[0]; |
1309 uint8_t* frame_data_v = frame_data_u + test_stream_->aligned_plane_size[1]; | 1302 uint8_t* frame_data_v = frame_data_u + test_stream_->aligned_plane_size[1]; |
1310 CHECK_GT(current_framerate_, 0U); | 1303 CHECK_GT(current_framerate_, 0U); |
1311 | 1304 |
1312 scoped_refptr<media::VideoFrame> video_frame = | 1305 scoped_refptr<VideoFrame> video_frame = VideoFrame::WrapExternalYuvData( |
1313 media::VideoFrame::WrapExternalYuvData( | 1306 kInputFormat, input_coded_size_, gfx::Rect(test_stream_->visible_size), |
1314 kInputFormat, input_coded_size_, | 1307 test_stream_->visible_size, input_coded_size_.width(), |
1315 gfx::Rect(test_stream_->visible_size), test_stream_->visible_size, | 1308 input_coded_size_.width() / 2, input_coded_size_.width() / 2, |
1316 input_coded_size_.width(), input_coded_size_.width() / 2, | 1309 frame_data_y, frame_data_u, frame_data_v, |
1317 input_coded_size_.width() / 2, frame_data_y, frame_data_u, | 1310 base::TimeDelta().FromMilliseconds(next_input_id_ * |
1318 frame_data_v, | 1311 base::Time::kMillisecondsPerSecond / |
1319 base::TimeDelta().FromMilliseconds( | 1312 current_framerate_)); |
1320 next_input_id_ * base::Time::kMillisecondsPerSecond / | |
1321 current_framerate_)); | |
1322 EXPECT_NE(nullptr, video_frame.get()); | 1313 EXPECT_NE(nullptr, video_frame.get()); |
1323 return video_frame; | 1314 return video_frame; |
1324 } | 1315 } |
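As a worked example of the stride arithmetic above (frame size assumed for illustration): for I420 the U and V planes are subsampled by two in each dimension, which is why the coded width is passed as the Y stride and coded width / 2 as the U and V strides. A 320x192 coded frame would have a 320 * 192 = 61440-byte Y plane and 160 * 96 = 15360-byte U and V planes, 92160 bytes in total (1.5 bytes per pixel) before the 64-byte alignment padding added by CreateAlignedInputStreamFile().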
1325 | 1316 |
1326 scoped_refptr<media::VideoFrame> VEAClient::PrepareInputFrame( | 1317 scoped_refptr<VideoFrame> VEAClient::PrepareInputFrame(off_t position, |
1327 off_t position, | 1318 int32_t* input_id) { |
1328 int32_t* input_id) { | |
1329 CHECK_LE(position + test_stream_->aligned_buffer_size, | 1319 CHECK_LE(position + test_stream_->aligned_buffer_size, |
1330 test_stream_->aligned_in_file_data.size()); | 1320 test_stream_->aligned_in_file_data.size()); |
1331 | 1321 |
1332 scoped_refptr<media::VideoFrame> frame = CreateFrame(position); | 1322 scoped_refptr<VideoFrame> frame = CreateFrame(position); |
1333 EXPECT_TRUE(frame); | 1323 EXPECT_TRUE(frame); |
1334 frame->AddDestructionObserver(media::BindToCurrentLoop( | 1324 frame->AddDestructionObserver( |
1335 base::Bind(&VEAClient::InputNoLongerNeededCallback, | 1325 BindToCurrentLoop(base::Bind(&VEAClient::InputNoLongerNeededCallback, |
1336 base::Unretained(this), next_input_id_))); | 1326 base::Unretained(this), next_input_id_))); |
1337 | 1327 |
1338 LOG_ASSERT(inputs_at_client_.insert(next_input_id_).second); | 1328 LOG_ASSERT(inputs_at_client_.insert(next_input_id_).second); |
1339 | 1329 |
1340 *input_id = next_input_id_++; | 1330 *input_id = next_input_id_++; |
1341 return frame; | 1331 return frame; |
1342 } | 1332 } |
1343 | 1333 |
1344 void VEAClient::OnInputTimer() { | 1334 void VEAClient::OnInputTimer() { |
1345 if (!has_encoder() || state_ != CS_ENCODING) | 1335 if (!has_encoder() || state_ != CS_ENCODING) |
1346 input_timer_.reset(); | 1336 input_timer_.reset(); |
(...skipping 16 matching lines...)
1363 // This is to flush the encoder with additional frames from the beginning | 1353 // This is to flush the encoder with additional frames from the beginning |
1364 // of the stream, or if the stream is shorter than the number of frames | 1354 // of the stream, or if the stream is shorter than the number of frames |
1365 // we require for bitrate tests. | 1355 // we require for bitrate tests. |
1366 pos_in_input_stream_ = 0; | 1356 pos_in_input_stream_ = 0; |
1367 } | 1357 } |
1368 | 1358 |
1369 if (quality_validator_) | 1359 if (quality_validator_) |
1370 quality_validator_->AddOriginalFrame(CreateFrame(pos_in_input_stream_)); | 1360 quality_validator_->AddOriginalFrame(CreateFrame(pos_in_input_stream_)); |
1371 | 1361 |
1372 int32_t input_id; | 1362 int32_t input_id; |
1373 scoped_refptr<media::VideoFrame> video_frame = | 1363 scoped_refptr<VideoFrame> video_frame = |
1374 PrepareInputFrame(pos_in_input_stream_, &input_id); | 1364 PrepareInputFrame(pos_in_input_stream_, &input_id); |
1375 pos_in_input_stream_ += test_stream_->aligned_buffer_size; | 1365 pos_in_input_stream_ += test_stream_->aligned_buffer_size; |
1376 | 1366 |
1377 bool force_keyframe = false; | 1367 bool force_keyframe = false; |
1378 if (keyframe_period_ && input_id % keyframe_period_ == 0) { | 1368 if (keyframe_period_ && input_id % keyframe_period_ == 0) { |
1379 force_keyframe = true; | 1369 force_keyframe = true; |
1380 ++num_keyframes_requested_; | 1370 ++num_keyframes_requested_; |
1381 } | 1371 } |
1382 | 1372 |
1383 if (input_id == 0) { | 1373 if (input_id == 0) { |
(...skipping 10 matching lines...) Expand all Loading... |
1394 void VEAClient::FeedEncoderWithOutput(base::SharedMemory* shm) { | 1384 void VEAClient::FeedEncoderWithOutput(base::SharedMemory* shm) { |
1395 if (!has_encoder()) | 1385 if (!has_encoder()) |
1396 return; | 1386 return; |
1397 | 1387 |
1398 if (state_ != CS_ENCODING) | 1388 if (state_ != CS_ENCODING) |
1399 return; | 1389 return; |
1400 | 1390 |
1401 base::SharedMemoryHandle dup_handle; | 1391 base::SharedMemoryHandle dup_handle; |
1402 LOG_ASSERT(shm->ShareToProcess(base::GetCurrentProcessHandle(), &dup_handle)); | 1392 LOG_ASSERT(shm->ShareToProcess(base::GetCurrentProcessHandle(), &dup_handle)); |
1403 | 1393 |
1404 media::BitstreamBuffer bitstream_buffer(next_output_buffer_id_++, dup_handle, | 1394 BitstreamBuffer bitstream_buffer(next_output_buffer_id_++, dup_handle, |
1405 output_buffer_size_); | 1395 output_buffer_size_); |
1406 LOG_ASSERT(output_buffers_at_client_ | 1396 LOG_ASSERT(output_buffers_at_client_ |
1407 .insert(std::make_pair(bitstream_buffer.id(), shm)) | 1397 .insert(std::make_pair(bitstream_buffer.id(), shm)) |
1408 .second); | 1398 .second); |
1409 encoder_->UseOutputBitstreamBuffer(bitstream_buffer); | 1399 encoder_->UseOutputBitstreamBuffer(bitstream_buffer); |
1410 } | 1400 } |
1411 | 1401 |
1412 bool VEAClient::HandleEncodedFrame(bool keyframe) { | 1402 bool VEAClient::HandleEncodedFrame(bool keyframe) { |
1413 // This would be a bug in the test, which should not ignore a false | 1403 // This would be a bug in the test, which should not ignore a false |
1414 // return value from this method. | 1404 // return value from this method. |
1415 LOG_ASSERT(num_encoded_frames_ <= num_frames_to_encode_); | 1405 LOG_ASSERT(num_encoded_frames_ <= num_frames_to_encode_); |
(...skipping 98 matching lines...)
1514 // All requested keyframes should've been provided. Allow the last requested | 1504 // All requested keyframes should've been provided. Allow the last requested |
1515 // frame to remain undelivered if we haven't reached the maximum frame number | 1505 // frame to remain undelivered if we haven't reached the maximum frame number |
1516 // by which it should have arrived. | 1506 // by which it should have arrived. |
1517 if (num_encoded_frames_ < next_keyframe_at_ + kMaxKeyframeDelay) | 1507 if (num_encoded_frames_ < next_keyframe_at_ + kMaxKeyframeDelay) |
1518 EXPECT_LE(num_keyframes_requested_, 1UL); | 1508 EXPECT_LE(num_keyframes_requested_, 1UL); |
1519 else | 1509 else |
1520 EXPECT_EQ(num_keyframes_requested_, 0UL); | 1510 EXPECT_EQ(num_keyframes_requested_, 0UL); |
1521 } | 1511 } |
1522 | 1512 |
1523 void VEAClient::WriteIvfFileHeader() { | 1513 void VEAClient::WriteIvfFileHeader() { |
1524 media::IvfFileHeader header = {}; | 1514 IvfFileHeader header = {}; |
1525 | 1515 |
1526 memcpy(header.signature, media::kIvfHeaderSignature, | 1516 memcpy(header.signature, kIvfHeaderSignature, sizeof(header.signature)); |
1527 sizeof(header.signature)); | |
1528 header.version = 0; | 1517 header.version = 0; |
1529 header.header_size = sizeof(header); | 1518 header.header_size = sizeof(header); |
1530 header.fourcc = 0x30385056; // VP80 | 1519 header.fourcc = 0x30385056; // VP80 |
1531 header.width = | 1520 header.width = |
1532 base::checked_cast<uint16_t>(test_stream_->visible_size.width()); | 1521 base::checked_cast<uint16_t>(test_stream_->visible_size.width()); |
1533 header.height = | 1522 header.height = |
1534 base::checked_cast<uint16_t>(test_stream_->visible_size.height()); | 1523 base::checked_cast<uint16_t>(test_stream_->visible_size.height()); |
1535 header.timebase_denum = requested_framerate_; | 1524 header.timebase_denum = requested_framerate_; |
1536 header.timebase_num = 1; | 1525 header.timebase_num = 1; |
1537 header.num_frames = num_frames_to_encode_; | 1526 header.num_frames = num_frames_to_encode_; |
1538 header.ByteSwap(); | 1527 header.ByteSwap(); |
1539 | 1528 |
1540 EXPECT_TRUE(base::AppendToFile( | 1529 EXPECT_TRUE(base::AppendToFile( |
1541 base::FilePath::FromUTF8Unsafe(test_stream_->out_filename), | 1530 base::FilePath::FromUTF8Unsafe(test_stream_->out_filename), |
1542 reinterpret_cast<char*>(&header), sizeof(header))); | 1531 reinterpret_cast<char*>(&header), sizeof(header))); |
1543 } | 1532 } |
1544 | 1533 |
1545 void VEAClient::WriteIvfFrameHeader(int frame_index, size_t frame_size) { | 1534 void VEAClient::WriteIvfFrameHeader(int frame_index, size_t frame_size) { |
1546 media::IvfFrameHeader header = {}; | 1535 IvfFrameHeader header = {}; |
1547 | 1536 |
1548 header.frame_size = frame_size; | 1537 header.frame_size = frame_size; |
1549 header.timestamp = frame_index; | 1538 header.timestamp = frame_index; |
1550 header.ByteSwap(); | 1539 header.ByteSwap(); |
1551 EXPECT_TRUE(base::AppendToFile( | 1540 EXPECT_TRUE(base::AppendToFile( |
1552 base::FilePath::FromUTF8Unsafe(test_stream_->out_filename), | 1541 base::FilePath::FromUTF8Unsafe(test_stream_->out_filename), |
1553 reinterpret_cast<char*>(&header), sizeof(header))); | 1542 reinterpret_cast<char*>(&header), sizeof(header))); |
1554 } | 1543 } |
1555 | 1544 |
1556 // Test parameters: | 1545 // Test parameters: |
(...skipping 231 matching lines...)
1788 | 1777 |
1789 media::g_env = | 1778 media::g_env = |
1790 reinterpret_cast<media::VideoEncodeAcceleratorTestEnvironment*>( | 1779 reinterpret_cast<media::VideoEncodeAcceleratorTestEnvironment*>( |
1791 testing::AddGlobalTestEnvironment( | 1780 testing::AddGlobalTestEnvironment( |
1792 new media::VideoEncodeAcceleratorTestEnvironment( | 1781 new media::VideoEncodeAcceleratorTestEnvironment( |
1793 std::move(test_stream_data), log_path, run_at_fps, | 1782 std::move(test_stream_data), log_path, run_at_fps, |
1794 needs_encode_latency, verify_all_output))); | 1783 needs_encode_latency, verify_all_output))); |
1795 | 1784 |
1796 return RUN_ALL_TESTS(); | 1785 return RUN_ALL_TESTS(); |
1797 } | 1786 } |