Chromium Code Reviews

Side by Side Diff: media/capture/video/fake_video_capture_device.cc

Issue 2721633006: Reland Add MJPEG support to FakeVideoCaptureDevice (Closed)
Patch Set: Created 3 years, 9 months ago
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "media/capture/video/fake_video_capture_device.h" 5 #include "media/capture/video/fake_video_capture_device.h"
6 6
7 #include <stddef.h> 7 #include <stddef.h>
8 #include <algorithm> 8 #include <algorithm>
9 #include <utility> 9 #include <utility>
10 10
11 #include "base/bind.h" 11 #include "base/bind.h"
12 #include "base/location.h" 12 #include "base/location.h"
13 #include "base/macros.h" 13 #include "base/macros.h"
14 #include "base/memory/weak_ptr.h" 14 #include "base/memory/weak_ptr.h"
15 #include "base/single_thread_task_runner.h" 15 #include "base/single_thread_task_runner.h"
16 #include "base/strings/stringprintf.h" 16 #include "base/strings/stringprintf.h"
17 #include "base/threading/thread_checker.h" 17 #include "base/threading/thread_checker.h"
18 #include "base/threading/thread_task_runner_handle.h" 18 #include "base/threading/thread_task_runner_handle.h"
19 #include "base/time/time.h" 19 #include "base/time/time.h"
20 #include "media/audio/fake_audio_input_stream.h" 20 #include "media/audio/fake_audio_input_stream.h"
21 #include "media/base/video_frame.h" 21 #include "media/base/video_frame.h"
22 #include "third_party/skia/include/core/SkBitmap.h" 22 #include "third_party/skia/include/core/SkBitmap.h"
23 #include "third_party/skia/include/core/SkCanvas.h" 23 #include "third_party/skia/include/core/SkCanvas.h"
24 #include "third_party/skia/include/core/SkMatrix.h" 24 #include "third_party/skia/include/core/SkMatrix.h"
25 #include "third_party/skia/include/core/SkPaint.h" 25 #include "third_party/skia/include/core/SkPaint.h"
26 #include "ui/gfx/codec/jpeg_codec.h"
26 #include "ui/gfx/codec/png_codec.h" 27 #include "ui/gfx/codec/png_codec.h"
27 28
28 namespace media { 29 namespace media {
29 30
30 namespace { 31 namespace {
31 // Sweep at 600 deg/sec. 32 // Sweep at 600 deg/sec.
32 static const float kPacmanAngularVelocity = 600; 33 static const float kPacmanAngularVelocity = 600;
33 // Beep every 500 ms. 34 // Beep every 500 ms.
34 static const int kBeepInterval = 500; 35 static const int kBeepInterval = 500;
35 // Gradient travels from bottom to top in 5 seconds. 36 // Gradient travels from bottom to top in 5 seconds.
36 static const float kGradientFrequency = 1.f / 5; 37 static const float kGradientFrequency = 1.f / 5;
37 38
38 static const double kMinZoom = 100.0; 39 static const double kMinZoom = 100.0;
39 static const double kMaxZoom = 400.0; 40 static const double kMaxZoom = 400.0;
40 static const double kZoomStep = 1.0; 41 static const double kZoomStep = 1.0;
41 static const double kInitialZoom = 100.0; 42 static const double kInitialZoom = 100.0;
42 43
43 static const gfx::Size kSupportedSizesOrderedByIncreasingWidth[] = { 44 static const gfx::Size kSupportedSizesOrderedByIncreasingWidth[] = {
44 gfx::Size(96, 96), gfx::Size(320, 240), gfx::Size(640, 480), 45 gfx::Size(96, 96), gfx::Size(320, 240), gfx::Size(640, 480),
45 gfx::Size(1280, 720), gfx::Size(1920, 1080)}; 46 gfx::Size(1280, 720), gfx::Size(1920, 1080)};
46 static const int kSupportedSizesCount = 47 static const int kSupportedSizesCount =
47 arraysize(kSupportedSizesOrderedByIncreasingWidth); 48 arraysize(kSupportedSizesOrderedByIncreasingWidth);
48 49
49 static const VideoPixelFormat kSupportedPixelFormats[] = {
50 PIXEL_FORMAT_I420, PIXEL_FORMAT_Y16, PIXEL_FORMAT_ARGB};
51
52 static gfx::Size SnapToSupportedSize(const gfx::Size& requested_size) { 50 static gfx::Size SnapToSupportedSize(const gfx::Size& requested_size) {
53 for (const gfx::Size& supported_size : 51 for (const gfx::Size& supported_size :
54 kSupportedSizesOrderedByIncreasingWidth) { 52 kSupportedSizesOrderedByIncreasingWidth) {
55 if (requested_size.width() <= supported_size.width()) 53 if (requested_size.width() <= supported_size.width())
56 return supported_size; 54 return supported_size;
57 } 55 }
58 return kSupportedSizesOrderedByIncreasingWidth[kSupportedSizesCount - 1]; 56 return kSupportedSizesOrderedByIncreasingWidth[kSupportedSizesCount - 1];
59 } 57 }
60 58
61 // Represents the current state of a FakeVideoCaptureDevice. 59 // Represents the current state of a FakeVideoCaptureDevice.
62 // This is a separate struct because read-access to it is shared with several 60 // This is a separate struct because read-access to it is shared with several
63 // collaborating classes. 61 // collaborating classes.
64 struct FakeDeviceState { 62 struct FakeDeviceState {
65 FakeDeviceState(float zoom, float frame_rate, VideoPixelFormat pixel_format) 63 FakeDeviceState(float zoom, float frame_rate, VideoPixelFormat pixel_format)
66 : zoom(zoom), 64 : zoom(zoom),
67 format(gfx::Size(), frame_rate, pixel_format, PIXEL_STORAGE_CPU) {} 65 format(gfx::Size(), frame_rate, pixel_format, PIXEL_STORAGE_CPU) {}
68 66
69 uint32_t zoom; 67 uint32_t zoom;
70 VideoCaptureFormat format; 68 VideoCaptureFormat format;
71 }; 69 };
72 70
73 // Paints a "pacman-like" animated circle including textual information such 71 // Paints a "pacman-like" animated circle including textual information such
74 // as a frame count and timer. 72 // as a frame count and timer.
75 class PacmanFramePainter { 73 class PacmanFramePainter {
76 public: 74 public:
77 // Currently, only the following values are supported for |pixel_format|: 75 enum class Format { I420, SK_N32, Y16 };
78 // PIXEL_FORMAT_I420 76 PacmanFramePainter(Format pixel_format,
79 // PIXEL_FORMAT_Y16
80 // PIXEL_FORMAT_ARGB
81 PacmanFramePainter(VideoPixelFormat pixel_format,
82 const FakeDeviceState* fake_device_state); 77 const FakeDeviceState* fake_device_state);
83 78
84 void PaintFrame(base::TimeDelta elapsed_time, uint8_t* target_buffer); 79 void PaintFrame(base::TimeDelta elapsed_time, uint8_t* target_buffer);
85 80
86 private: 81 private:
87 void DrawGradientSquares(base::TimeDelta elapsed_time, 82 void DrawGradientSquares(base::TimeDelta elapsed_time,
88 uint8_t* target_buffer); 83 uint8_t* target_buffer);
89 84
90 void DrawPacman(base::TimeDelta elapsed_time, uint8_t* target_buffer); 85 void DrawPacman(base::TimeDelta elapsed_time, uint8_t* target_buffer);
91 86
92 const VideoPixelFormat pixel_format_; 87 const Format pixel_format_;
93 const FakeDeviceState* fake_device_state_ = nullptr; 88 const FakeDeviceState* fake_device_state_ = nullptr;
94 }; 89 };
95 90
96 // Paints and delivers frames to a client, which is set via Initialize(). 91 // Paints and delivers frames to a client, which is set via Initialize().
97 class FrameDeliverer { 92 class FrameDeliverer {
98 public: 93 public:
99 FrameDeliverer(std::unique_ptr<PacmanFramePainter> frame_painter) 94 FrameDeliverer(std::unique_ptr<PacmanFramePainter> frame_painter)
100 : frame_painter_(std::move(frame_painter)) {} 95 : frame_painter_(std::move(frame_painter)) {}
101 virtual ~FrameDeliverer() {} 96 virtual ~FrameDeliverer() {}
102 virtual void Initialize(VideoPixelFormat pixel_format, 97 virtual void Initialize(VideoPixelFormat pixel_format,
103 std::unique_ptr<VideoCaptureDevice::Client> client, 98 std::unique_ptr<VideoCaptureDevice::Client> client,
104 const FakeDeviceState* device_state) = 0; 99 const FakeDeviceState* device_state) {
105 virtual void Uninitialize() = 0; 100 client_ = std::move(client);
101 device_state_ = device_state;
102 client_->OnStarted();
103 }
104 virtual void Uninitialize() {
105 client_.reset();
106 device_state_ = nullptr;
107 }
106 virtual void PaintAndDeliverNextFrame(base::TimeDelta timestamp_to_paint) = 0; 108 virtual void PaintAndDeliverNextFrame(base::TimeDelta timestamp_to_paint) = 0;
107 109
108 protected: 110 protected:
109 base::TimeDelta CalculateTimeSinceFirstInvocation(base::TimeTicks now) { 111 base::TimeDelta CalculateTimeSinceFirstInvocation(base::TimeTicks now) {
110 if (first_ref_time_.is_null()) 112 if (first_ref_time_.is_null())
111 first_ref_time_ = now; 113 first_ref_time_ = now;
112 return now - first_ref_time_; 114 return now - first_ref_time_;
113 } 115 }
114 116
117 PacmanFramePainter* frame_painter() { return frame_painter_.get(); }
118 const FakeDeviceState* device_state() { return device_state_; }
119 VideoCaptureDevice::Client* client() { return client_.get(); }
120
121 private:
115 const std::unique_ptr<PacmanFramePainter> frame_painter_; 122 const std::unique_ptr<PacmanFramePainter> frame_painter_;
116 const FakeDeviceState* device_state_ = nullptr; 123 const FakeDeviceState* device_state_ = nullptr;
117 std::unique_ptr<VideoCaptureDevice::Client> client_; 124 std::unique_ptr<VideoCaptureDevice::Client> client_;
118
119 private:
120 base::TimeTicks first_ref_time_; 125 base::TimeTicks first_ref_time_;
121 }; 126 };
122 127
123 // Delivers frames using its own buffers via OnIncomingCapturedData(). 128 // Delivers frames using its own buffers via OnIncomingCapturedData().
124 class OwnBufferFrameDeliverer : public FrameDeliverer { 129 class OwnBufferFrameDeliverer : public FrameDeliverer {
125 public: 130 public:
126 OwnBufferFrameDeliverer(std::unique_ptr<PacmanFramePainter> frame_painter); 131 OwnBufferFrameDeliverer(std::unique_ptr<PacmanFramePainter> frame_painter);
127 ~OwnBufferFrameDeliverer() override; 132 ~OwnBufferFrameDeliverer() override;
128 133
129 // Implementation of FrameDeliverer 134 // Implementation of FrameDeliverer
130 void Initialize(VideoPixelFormat pixel_format, 135 void Initialize(VideoPixelFormat pixel_format,
131 std::unique_ptr<VideoCaptureDevice::Client> client, 136 std::unique_ptr<VideoCaptureDevice::Client> client,
132 const FakeDeviceState* device_state) override; 137 const FakeDeviceState* device_state) override;
133 void Uninitialize() override; 138 void Uninitialize() override;
134 void PaintAndDeliverNextFrame(base::TimeDelta timestamp_to_paint) override; 139 void PaintAndDeliverNextFrame(base::TimeDelta timestamp_to_paint) override;
135 140
136 private: 141 private:
137 std::unique_ptr<uint8_t[]> buffer_; 142 std::unique_ptr<uint8_t[]> buffer_;
138 }; 143 };
139 144
140 // Delivers frames using buffers provided by the client via 145 // Delivers frames using buffers provided by the client via
141 // OnIncomingCapturedBuffer(). 146 // OnIncomingCapturedBuffer().
142 class ClientBufferFrameDeliverer : public FrameDeliverer { 147 class ClientBufferFrameDeliverer : public FrameDeliverer {
143 public: 148 public:
144 ClientBufferFrameDeliverer(std::unique_ptr<PacmanFramePainter> frame_painter); 149 ClientBufferFrameDeliverer(std::unique_ptr<PacmanFramePainter> frame_painter);
145 ~ClientBufferFrameDeliverer() override; 150 ~ClientBufferFrameDeliverer() override;
146 151
147 // Implementation of FrameDeliverer 152 // Implementation of FrameDeliverer
148 void Initialize(VideoPixelFormat pixel_format, 153 void PaintAndDeliverNextFrame(base::TimeDelta timestamp_to_paint) override;
149 std::unique_ptr<VideoCaptureDevice::Client> client, 154 };
150 const FakeDeviceState* device_state) override; 155
156 class JpegEncodingFrameDeliverer : public FrameDeliverer {
157 public:
158 JpegEncodingFrameDeliverer(std::unique_ptr<PacmanFramePainter> frame_painter);
159 ~JpegEncodingFrameDeliverer() override;
160
161 // Implementation of FrameDeliveryStrategy
151 void Uninitialize() override; 162 void Uninitialize() override;
152 void PaintAndDeliverNextFrame(base::TimeDelta timestamp_to_paint) override; 163 void PaintAndDeliverNextFrame(base::TimeDelta timestamp_to_paint) override;
164
165 private:
166 std::vector<uint8_t> sk_n32_buffer_;
167 std::vector<unsigned char> jpeg_buffer_;
153 }; 168 };
154 169
155 // Implements the photo functionality of a VideoCaptureDevice 170 // Implements the photo functionality of a VideoCaptureDevice
156 class FakePhotoDevice { 171 class FakePhotoDevice {
157 public: 172 public:
158 FakePhotoDevice(std::unique_ptr<PacmanFramePainter> argb_painter, 173 FakePhotoDevice(std::unique_ptr<PacmanFramePainter> painter,
159 const FakeDeviceState* fake_device_state); 174 const FakeDeviceState* fake_device_state);
160 ~FakePhotoDevice(); 175 ~FakePhotoDevice();
161 176
162 void GetPhotoCapabilities( 177 void GetPhotoCapabilities(
163 VideoCaptureDevice::GetPhotoCapabilitiesCallback callback); 178 VideoCaptureDevice::GetPhotoCapabilitiesCallback callback);
164 void TakePhoto(VideoCaptureDevice::TakePhotoCallback callback, 179 void TakePhoto(VideoCaptureDevice::TakePhotoCallback callback,
165 base::TimeDelta elapsed_time); 180 base::TimeDelta elapsed_time);
166 181
167 private: 182 private:
168 const std::unique_ptr<PacmanFramePainter> argb_painter_; 183 const std::unique_ptr<PacmanFramePainter> painter_;
169 const FakeDeviceState* const fake_device_state_; 184 const FakeDeviceState* const fake_device_state_;
170 }; 185 };
171 186
172 // Implementation of VideoCaptureDevice that generates test frames. This is 187 // Implementation of VideoCaptureDevice that generates test frames. This is
173 // useful for testing the video capture components without having to use real 188 // useful for testing the video capture components without having to use real
174 // devices. The implementation schedules delayed tasks to itself to generate and 189 // devices. The implementation schedules delayed tasks to itself to generate and
175 // deliver frames at the requested rate. 190 // deliver frames at the requested rate.
176 class FakeVideoCaptureDevice : public VideoCaptureDevice { 191 class FakeVideoCaptureDevice : public VideoCaptureDevice {
177 public: 192 public:
178 FakeVideoCaptureDevice( 193 FakeVideoCaptureDevice(
(...skipping 38 matching lines...)
217 232
218 // static 233 // static
219 void FakeVideoCaptureDeviceMaker::GetSupportedSizes( 234 void FakeVideoCaptureDeviceMaker::GetSupportedSizes(
220 std::vector<gfx::Size>* supported_sizes) { 235 std::vector<gfx::Size>* supported_sizes) {
221 for (int i = 0; i < kSupportedSizesCount; i++) 236 for (int i = 0; i < kSupportedSizesCount; i++)
222 supported_sizes->push_back(kSupportedSizesOrderedByIncreasingWidth[i]); 237 supported_sizes->push_back(kSupportedSizesOrderedByIncreasingWidth[i]);
223 } 238 }
224 239
225 // static 240 // static
226 std::unique_ptr<VideoCaptureDevice> FakeVideoCaptureDeviceMaker::MakeInstance( 241 std::unique_ptr<VideoCaptureDevice> FakeVideoCaptureDeviceMaker::MakeInstance(
227 VideoPixelFormat pixel_format, 242 PixelFormat pixel_format,
228 DeliveryMode delivery_mode, 243 DeliveryMode delivery_mode,
229 float frame_rate) { 244 float frame_rate) {
230 bool pixel_format_supported = false; 245 auto device_state = base::MakeUnique<FakeDeviceState>(
231 for (const auto& supported_pixel_format : kSupportedPixelFormats) { 246 kInitialZoom, frame_rate,
232 if (pixel_format == supported_pixel_format) { 247 static_cast<media::VideoPixelFormat>(pixel_format));
233 pixel_format_supported = true; 248 PacmanFramePainter::Format painter_format;
249 switch (pixel_format) {
250 case PixelFormat::I420:
251 painter_format = PacmanFramePainter::Format::I420;
234 break; 252 break;
235 } 253 case PixelFormat::Y16:
254 painter_format = PacmanFramePainter::Format::Y16;
255 break;
256 case PixelFormat::MJPEG:
257 painter_format = PacmanFramePainter::Format::SK_N32;
258 break;
236 } 259 }
237 if (!pixel_format_supported) { 260 auto video_frame_painter =
238 DLOG(ERROR) << "Requested an unsupported pixel format " 261 base::MakeUnique<PacmanFramePainter>(painter_format, device_state.get());
239 << VideoPixelFormatToString(pixel_format);
240 return nullptr;
241 }
242 262
243 auto device_state =
244 base::MakeUnique<FakeDeviceState>(kInitialZoom, frame_rate, pixel_format);
245 auto video_frame_painter =
246 base::MakeUnique<PacmanFramePainter>(pixel_format, device_state.get());
247 std::unique_ptr<FrameDeliverer> frame_delivery_strategy; 263 std::unique_ptr<FrameDeliverer> frame_delivery_strategy;
248 switch (delivery_mode) { 264 switch (delivery_mode) {
249 case DeliveryMode::USE_DEVICE_INTERNAL_BUFFERS: 265 case DeliveryMode::USE_DEVICE_INTERNAL_BUFFERS:
250 frame_delivery_strategy = base::MakeUnique<OwnBufferFrameDeliverer>( 266 if (pixel_format == PixelFormat::MJPEG) {
251 std::move(video_frame_painter)); 267 frame_delivery_strategy = base::MakeUnique<JpegEncodingFrameDeliverer>(
268 std::move(video_frame_painter));
269 } else {
270 frame_delivery_strategy = base::MakeUnique<OwnBufferFrameDeliverer>(
271 std::move(video_frame_painter));
272 }
252 break; 273 break;
253 case DeliveryMode::USE_CLIENT_PROVIDED_BUFFERS: 274 case DeliveryMode::USE_CLIENT_PROVIDED_BUFFERS:
275 if (pixel_format == PixelFormat::MJPEG) {
276 DLOG(ERROR) << "PixelFormat::MJPEG cannot be used in combination with "
277 << "USE_CLIENT_PROVIDED_BUFFERS.";
278 return nullptr;
279 }
254 frame_delivery_strategy = base::MakeUnique<ClientBufferFrameDeliverer>( 280 frame_delivery_strategy = base::MakeUnique<ClientBufferFrameDeliverer>(
255 std::move(video_frame_painter)); 281 std::move(video_frame_painter));
256 break; 282 break;
257 } 283 }
258 284
259 auto photo_frame_painter = base::MakeUnique<PacmanFramePainter>( 285 auto photo_frame_painter = base::MakeUnique<PacmanFramePainter>(
260 PIXEL_FORMAT_ARGB, device_state.get()); 286 PacmanFramePainter::Format::SK_N32, device_state.get());
261 auto photo_device = base::MakeUnique<FakePhotoDevice>( 287 auto photo_device = base::MakeUnique<FakePhotoDevice>(
262 std::move(photo_frame_painter), device_state.get()); 288 std::move(photo_frame_painter), device_state.get());
263 289
264 return base::MakeUnique<FakeVideoCaptureDevice>( 290 return base::MakeUnique<FakeVideoCaptureDevice>(
265 std::move(frame_delivery_strategy), std::move(photo_device), 291 std::move(frame_delivery_strategy), std::move(photo_device),
266 std::move(device_state)); 292 std::move(device_state));
267 } 293 }
268 294
269 PacmanFramePainter::PacmanFramePainter(VideoPixelFormat pixel_format, 295 PacmanFramePainter::PacmanFramePainter(Format pixel_format,
270 const FakeDeviceState* fake_device_state) 296 const FakeDeviceState* fake_device_state)
271 : pixel_format_(pixel_format), fake_device_state_(fake_device_state) { 297 : pixel_format_(pixel_format), fake_device_state_(fake_device_state) {}
272 DCHECK(pixel_format == PIXEL_FORMAT_I420 ||
273 pixel_format == PIXEL_FORMAT_Y16 || pixel_format == PIXEL_FORMAT_ARGB);
274 }
275 298
276 void PacmanFramePainter::PaintFrame(base::TimeDelta elapsed_time, 299 void PacmanFramePainter::PaintFrame(base::TimeDelta elapsed_time,
277 uint8_t* target_buffer) { 300 uint8_t* target_buffer) {
278 DrawPacman(elapsed_time, target_buffer); 301 DrawPacman(elapsed_time, target_buffer);
279 DrawGradientSquares(elapsed_time, target_buffer); 302 DrawGradientSquares(elapsed_time, target_buffer);
280 } 303 }
281 304
282 // Starting from top left, -45 deg gradient. Value at point (row, column) is 305 // Starting from top left, -45 deg gradient. Value at point (row, column) is
283 // calculated as (top_left_value + (row + column) * step) % MAX_VALUE, where 306 // calculated as (top_left_value + (row + column) * step) % MAX_VALUE, where
284 // step is MAX_VALUE / (width + height). MAX_VALUE is 255 (for 8 bit per 307 // step is MAX_VALUE / (width + height). MAX_VALUE is 255 (for 8 bit per
(...skipping 13 matching lines...)
298 const float start = 321 const float start =
299 fmod(65536 * elapsed_time.InSecondsF() * kGradientFrequency, 65536); 322 fmod(65536 * elapsed_time.InSecondsF() * kGradientFrequency, 65536);
300 const float color_step = 65535 / static_cast<float>(width + height); 323 const float color_step = 65535 / static_cast<float>(width + height);
301 for (const auto& corner : squares) { 324 for (const auto& corner : squares) {
302 for (int y = corner.y(); y < corner.y() + side; ++y) { 325 for (int y = corner.y(); y < corner.y() + side; ++y) {
303 for (int x = corner.x(); x < corner.x() + side; ++x) { 326 for (int x = corner.x(); x < corner.x() + side; ++x) {
304 const unsigned int value = 327 const unsigned int value =
305 static_cast<unsigned int>(start + (x + y) * color_step) & 0xFFFF; 328 static_cast<unsigned int>(start + (x + y) * color_step) & 0xFFFF;
306 size_t offset = (y * width) + x; 329 size_t offset = (y * width) + x;
307 switch (pixel_format_) { 330 switch (pixel_format_) {
308 case PIXEL_FORMAT_Y16: 331 case Format::Y16:
309 target_buffer[offset * sizeof(uint16_t)] = value & 0xFF; 332 target_buffer[offset * sizeof(uint16_t)] = value & 0xFF;
310 target_buffer[offset * sizeof(uint16_t) + 1] = value >> 8; 333 target_buffer[offset * sizeof(uint16_t) + 1] = value >> 8;
311 break; 334 break;
312 case PIXEL_FORMAT_ARGB: 335 case Format::SK_N32:
313 target_buffer[offset * sizeof(uint32_t) + 1] = value >> 8; 336 target_buffer[offset * sizeof(uint32_t) + 1] = value >> 8;
314 target_buffer[offset * sizeof(uint32_t) + 2] = value >> 8; 337 target_buffer[offset * sizeof(uint32_t) + 2] = value >> 8;
315 target_buffer[offset * sizeof(uint32_t) + 3] = value >> 8; 338 target_buffer[offset * sizeof(uint32_t) + 3] = value >> 8;
316 break; 339 break;
317 default: 340 case Format::I420:
318 target_buffer[offset] = value >> 8; 341 target_buffer[offset] = value >> 8;
319 break; 342 break;
320 } 343 }
321 } 344 }
322 } 345 }
323 } 346 }
324 } 347 }
325 348
326 void PacmanFramePainter::DrawPacman(base::TimeDelta elapsed_time, 349 void PacmanFramePainter::DrawPacman(base::TimeDelta elapsed_time,
327 uint8_t* target_buffer) { 350 uint8_t* target_buffer) {
328 const int width = fake_device_state_->format.frame_size.width(); 351 const int width = fake_device_state_->format.frame_size.width();
329 const int height = fake_device_state_->format.frame_size.height(); 352 const int height = fake_device_state_->format.frame_size.height();
330 353
331 // |kN32_SkColorType| stands for the appropriate RGBA/BGRA format. 354 SkColorType colorspace = kAlpha_8_SkColorType;
332 const SkColorType colorspace = (pixel_format_ == PIXEL_FORMAT_ARGB) 355 switch (pixel_format_) {
333 ? kN32_SkColorType 356 case Format::I420:
334 : kAlpha_8_SkColorType; 357 // Skia doesn't support painting in I420. Instead, paint an 8bpp
335 // Skia doesn't support 16 bit alpha rendering, so we 8 bit alpha and then use 358 // monochrome image to the beginning of |target_buffer|. This section of
336 // this as high byte values in 16 bit pixels. 359 // |target_buffer| corresponds to the Y-plane of the YUV image. Do not
360 // touch the U or V planes of |target_buffer|. Assuming they have been
361 // initialized to 0, which corresponds to a green color tone, the result
362 // will be an green-ish monochrome frame.
363 colorspace = kAlpha_8_SkColorType;
364 break;
365 case Format::SK_N32:
366 // SkColorType is RGBA on some platforms and BGRA on others.
367 colorspace = kN32_SkColorType;
368 break;
369 case Format::Y16:
370 // Skia doesn't support painting in Y16. Instead, paint an 8bpp monochrome
371 // image to the beginning of |target_buffer|. Later, move the 8bit pixel
372 // values to a position corresponding to the high byte values of 16bit
373 // pixel values (assuming the byte order is little-endian).
374 colorspace = kAlpha_8_SkColorType;
375 break;
376 }
377
337 const SkImageInfo info = 378 const SkImageInfo info =
338 SkImageInfo::Make(width, height, colorspace, kOpaque_SkAlphaType); 379 SkImageInfo::Make(width, height, colorspace, kOpaque_SkAlphaType);
339 SkBitmap bitmap; 380 SkBitmap bitmap;
340 bitmap.setInfo(info); 381 bitmap.setInfo(info);
341 bitmap.setPixels(target_buffer); 382 bitmap.setPixels(target_buffer);
342 SkPaint paint; 383 SkPaint paint;
343 paint.setStyle(SkPaint::kFill_Style); 384 paint.setStyle(SkPaint::kFill_Style);
344 SkCanvas canvas(bitmap); 385 SkCanvas canvas(bitmap);
345 386
346 const SkScalar unscaled_zoom = fake_device_state_->zoom / 100.f; 387 const SkScalar unscaled_zoom = fake_device_state_->zoom / 100.f;
347 SkMatrix matrix; 388 SkMatrix matrix;
348 matrix.setScale(unscaled_zoom, unscaled_zoom, width / 2, height / 2); 389 matrix.setScale(unscaled_zoom, unscaled_zoom, width / 2, height / 2);
349 canvas.setMatrix(matrix); 390 canvas.setMatrix(matrix);
350 391
351 // Equalize Alpha_8 that has light green background while RGBA has white. 392 // For the SK_N32 case, match the green color tone produced by the
352 if (pixel_format_ == PIXEL_FORMAT_ARGB) { 393 // I420 case.
394 if (pixel_format_ == Format::SK_N32) {
353 const SkRect full_frame = SkRect::MakeWH(width, height); 395 const SkRect full_frame = SkRect::MakeWH(width, height);
354 paint.setARGB(255, 0, 127, 0); 396 paint.setARGB(255, 0, 127, 0);
355 canvas.drawRect(full_frame, paint); 397 canvas.drawRect(full_frame, paint);
398 paint.setColor(SK_ColorGREEN);
356 } 399 }
357 paint.setColor(SK_ColorGREEN);
358 400
359 // Draw a sweeping circle to show an animation. 401 // Draw a sweeping circle to show an animation.
360 const float end_angle = 402 const float end_angle =
361 fmod(kPacmanAngularVelocity * elapsed_time.InSecondsF(), 361); 403 fmod(kPacmanAngularVelocity * elapsed_time.InSecondsF(), 361);
362 const int radius = std::min(width, height) / 4; 404 const int radius = std::min(width, height) / 4;
363 const SkRect rect = SkRect::MakeXYWH(width / 2 - radius, height / 2 - radius, 405 const SkRect rect = SkRect::MakeXYWH(width / 2 - radius, height / 2 - radius,
364 2 * radius, 2 * radius); 406 2 * radius, 2 * radius);
365 canvas.drawArc(rect, 0, end_angle, true, paint); 407 canvas.drawArc(rect, 0, end_angle, true, paint);
366 408
367 // Draw current time. 409 // Draw current time.
368 const int milliseconds = elapsed_time.InMilliseconds() % 1000; 410 const int milliseconds = elapsed_time.InMilliseconds() % 1000;
369 const int seconds = elapsed_time.InSeconds() % 60; 411 const int seconds = elapsed_time.InSeconds() % 60;
370 const int minutes = elapsed_time.InMinutes() % 60; 412 const int minutes = elapsed_time.InMinutes() % 60;
371 const int hours = elapsed_time.InHours(); 413 const int hours = elapsed_time.InHours();
372 const int frame_count = elapsed_time.InMilliseconds() * 414 const int frame_count = elapsed_time.InMilliseconds() *
373 fake_device_state_->format.frame_rate / 1000; 415 fake_device_state_->format.frame_rate / 1000;
374 416
375 const std::string time_string = 417 const std::string time_string =
376 base::StringPrintf("%d:%02d:%02d:%03d %d", hours, minutes, seconds, 418 base::StringPrintf("%d:%02d:%02d:%03d %d", hours, minutes, seconds,
377 milliseconds, frame_count); 419 milliseconds, frame_count);
378 canvas.scale(3, 3); 420 canvas.scale(3, 3);
379 canvas.drawText(time_string.data(), time_string.length(), 30, 20, paint); 421 canvas.drawText(time_string.data(), time_string.length(), 30, 20, paint);
380 422
381 if (pixel_format_ == PIXEL_FORMAT_Y16) { 423 if (pixel_format_ == Format::Y16) {
382 // Use 8 bit bitmap rendered to first half of the buffer as high byte values 424 // Use 8 bit bitmap rendered to first half of the buffer as high byte values
383 // for the whole buffer. Low byte values are not important. 425 // for the whole buffer. Low byte values are not important.
384 for (int i = (width * height) - 1; i >= 0; --i) 426 for (int i = (width * height) - 1; i >= 0; --i)
385 target_buffer[i * 2 + 1] = target_buffer[i]; 427 target_buffer[i * 2 + 1] = target_buffer[i];
386 } 428 }
387 } 429 }
388 430
389 FakePhotoDevice::FakePhotoDevice( 431 FakePhotoDevice::FakePhotoDevice(std::unique_ptr<PacmanFramePainter> painter,
390 std::unique_ptr<PacmanFramePainter> argb_painter, 432 const FakeDeviceState* fake_device_state)
391 const FakeDeviceState* fake_device_state) 433 : painter_(std::move(painter)), fake_device_state_(fake_device_state) {}
392 : argb_painter_(std::move(argb_painter)),
393 fake_device_state_(fake_device_state) {}
394 434
395 FakePhotoDevice::~FakePhotoDevice() = default; 435 FakePhotoDevice::~FakePhotoDevice() = default;
396 436
397 void FakePhotoDevice::TakePhoto(VideoCaptureDevice::TakePhotoCallback callback, 437 void FakePhotoDevice::TakePhoto(VideoCaptureDevice::TakePhotoCallback callback,
398 base::TimeDelta elapsed_time) { 438 base::TimeDelta elapsed_time) {
399 // Create a PNG-encoded frame and send it back to |callback|. 439 // Create a PNG-encoded frame and send it back to |callback|.
400 std::unique_ptr<uint8_t[]> buffer(new uint8_t[VideoFrame::AllocationSize( 440 auto required_sk_n32_buffer_size = VideoFrame::AllocationSize(
401 PIXEL_FORMAT_ARGB, fake_device_state_->format.frame_size)]); 441 PIXEL_FORMAT_ARGB, fake_device_state_->format.frame_size);
402 argb_painter_->PaintFrame(elapsed_time, buffer.get()); 442 std::unique_ptr<uint8_t[]> buffer(new uint8_t[required_sk_n32_buffer_size]);
443 memset(buffer.get(), 0, required_sk_n32_buffer_size);
444 painter_->PaintFrame(elapsed_time, buffer.get());
403 mojom::BlobPtr blob = mojom::Blob::New(); 445 mojom::BlobPtr blob = mojom::Blob::New();
404 const bool result = 446 const gfx::PNGCodec::ColorFormat encoding_source_format =
405 gfx::PNGCodec::Encode(buffer.get(), gfx::PNGCodec::FORMAT_RGBA, 447 (kN32_SkColorType == kRGBA_8888_SkColorType) ? gfx::PNGCodec::FORMAT_RGBA
406 fake_device_state_->format.frame_size, 448 : gfx::PNGCodec::FORMAT_BGRA;
407 fake_device_state_->format.frame_size.width() * 4, 449 const bool result = gfx::PNGCodec::Encode(
408 true /* discard_transparency */, 450 buffer.get(), encoding_source_format,
409 std::vector<gfx::PNGCodec::Comment>(), &blob->data); 451 fake_device_state_->format.frame_size,
452 VideoFrame::RowBytes(0 /* plane */, PIXEL_FORMAT_ARGB,
453 fake_device_state_->format.frame_size.width()),
454 true /* discard_transparency */, std::vector<gfx::PNGCodec::Comment>(),
455 &blob->data);
410 DCHECK(result); 456 DCHECK(result);
411 457
412 blob->mime_type = "image/png"; 458 blob->mime_type = "image/png";
413 callback.Run(std::move(blob)); 459 callback.Run(std::move(blob));
414 } 460 }
415 461
416 FakeVideoCaptureDevice::FakeVideoCaptureDevice( 462 FakeVideoCaptureDevice::FakeVideoCaptureDevice(
417 std::unique_ptr<FrameDeliverer> frame_delivery_strategy, 463 std::unique_ptr<FrameDeliverer> frame_delivery_strategy,
418 std::unique_ptr<FakePhotoDevice> photo_device, 464 std::unique_ptr<FakePhotoDevice> photo_device,
419 std::unique_ptr<FakeDeviceState> device_state) 465 std::unique_ptr<FakeDeviceState> device_state)
(...skipping 97 matching lines...)
517 OwnBufferFrameDeliverer::OwnBufferFrameDeliverer( 563 OwnBufferFrameDeliverer::OwnBufferFrameDeliverer(
518 std::unique_ptr<PacmanFramePainter> frame_painter) 564 std::unique_ptr<PacmanFramePainter> frame_painter)
519 : FrameDeliverer(std::move(frame_painter)) {} 565 : FrameDeliverer(std::move(frame_painter)) {}
520 566
521 OwnBufferFrameDeliverer::~OwnBufferFrameDeliverer() = default; 567 OwnBufferFrameDeliverer::~OwnBufferFrameDeliverer() = default;
522 568
523 void OwnBufferFrameDeliverer::Initialize( 569 void OwnBufferFrameDeliverer::Initialize(
524 VideoPixelFormat pixel_format, 570 VideoPixelFormat pixel_format,
525 std::unique_ptr<VideoCaptureDevice::Client> client, 571 std::unique_ptr<VideoCaptureDevice::Client> client,
526 const FakeDeviceState* device_state) { 572 const FakeDeviceState* device_state) {
527 client_ = std::move(client); 573 FrameDeliverer::Initialize(pixel_format, std::move(client), device_state);
528 device_state_ = device_state;
529 buffer_.reset(new uint8_t[VideoFrame::AllocationSize( 574 buffer_.reset(new uint8_t[VideoFrame::AllocationSize(
530 pixel_format, device_state_->format.frame_size)]); 575 pixel_format, device_state->format.frame_size)]);
531 client_->OnStarted();
532 } 576 }
533 577
534 void OwnBufferFrameDeliverer::Uninitialize() { 578 void OwnBufferFrameDeliverer::Uninitialize() {
535 client_.reset(); 579 FrameDeliverer::Uninitialize();
536 device_state_ = nullptr;
537 buffer_.reset(); 580 buffer_.reset();
538 } 581 }
539 582
540 void OwnBufferFrameDeliverer::PaintAndDeliverNextFrame( 583 void OwnBufferFrameDeliverer::PaintAndDeliverNextFrame(
541 base::TimeDelta timestamp_to_paint) { 584 base::TimeDelta timestamp_to_paint) {
542 if (!client_) 585 if (!client())
543 return; 586 return;
544 const size_t frame_size = device_state_->format.ImageAllocationSize(); 587 const size_t frame_size = device_state()->format.ImageAllocationSize();
545 memset(buffer_.get(), 0, frame_size); 588 memset(buffer_.get(), 0, frame_size);
546 frame_painter_->PaintFrame(timestamp_to_paint, buffer_.get()); 589 frame_painter()->PaintFrame(timestamp_to_paint, buffer_.get());
547 base::TimeTicks now = base::TimeTicks::Now(); 590 base::TimeTicks now = base::TimeTicks::Now();
548 client_->OnIncomingCapturedData(buffer_.get(), frame_size, 591 client()->OnIncomingCapturedData(buffer_.get(), frame_size,
549 device_state_->format, 0 /* rotation */, now, 592 device_state()->format, 0 /* rotation */,
550 CalculateTimeSinceFirstInvocation(now)); 593 now, CalculateTimeSinceFirstInvocation(now));
551 } 594 }
552 595
553 ClientBufferFrameDeliverer::ClientBufferFrameDeliverer( 596 ClientBufferFrameDeliverer::ClientBufferFrameDeliverer(
554 std::unique_ptr<PacmanFramePainter> frame_painter) 597 std::unique_ptr<PacmanFramePainter> frame_painter)
555 : FrameDeliverer(std::move(frame_painter)) {} 598 : FrameDeliverer(std::move(frame_painter)) {}
556 599
557 ClientBufferFrameDeliverer::~ClientBufferFrameDeliverer() = default; 600 ClientBufferFrameDeliverer::~ClientBufferFrameDeliverer() = default;
558 601
559 void ClientBufferFrameDeliverer::Initialize(
560 VideoPixelFormat,
561 std::unique_ptr<VideoCaptureDevice::Client> client,
562 const FakeDeviceState* device_state) {
563 client_ = std::move(client);
564 device_state_ = device_state;
565 client_->OnStarted();
566 }
567
568 void ClientBufferFrameDeliverer::Uninitialize() {
569 client_.reset();
570 device_state_ = nullptr;
571 }
572
573 void ClientBufferFrameDeliverer::PaintAndDeliverNextFrame( 602 void ClientBufferFrameDeliverer::PaintAndDeliverNextFrame(
574 base::TimeDelta timestamp_to_paint) { 603 base::TimeDelta timestamp_to_paint) {
575 if (client_ == nullptr) 604 if (!client())
576 return; 605 return;
577 606
578 const int arbitrary_frame_feedback_id = 0; 607 const int arbitrary_frame_feedback_id = 0;
579 auto capture_buffer = client_->ReserveOutputBuffer( 608 auto capture_buffer = client()->ReserveOutputBuffer(
580 device_state_->format.frame_size, device_state_->format.pixel_format, 609 device_state()->format.frame_size, device_state()->format.pixel_format,
581 device_state_->format.pixel_storage, arbitrary_frame_feedback_id); 610 device_state()->format.pixel_storage, arbitrary_frame_feedback_id);
582 DLOG_IF(ERROR, !capture_buffer.is_valid()) 611 DLOG_IF(ERROR, !capture_buffer.is_valid())
583 << "Couldn't allocate Capture Buffer"; 612 << "Couldn't allocate Capture Buffer";
584 auto buffer_access = 613 auto buffer_access =
585 capture_buffer.handle_provider->GetHandleForInProcessAccess(); 614 capture_buffer.handle_provider->GetHandleForInProcessAccess();
586 DCHECK(buffer_access->data()) << "Buffer has NO backing memory"; 615 DCHECK(buffer_access->data()) << "Buffer has NO backing memory";
587 616
588 DCHECK_EQ(PIXEL_STORAGE_CPU, device_state_->format.pixel_storage); 617 DCHECK_EQ(PIXEL_STORAGE_CPU, device_state()->format.pixel_storage);
589 618
590 uint8_t* data_ptr = buffer_access->data(); 619 uint8_t* data_ptr = buffer_access->data();
591 memset(data_ptr, 0, buffer_access->mapped_size()); 620 memset(data_ptr, 0, buffer_access->mapped_size());
592 frame_painter_->PaintFrame(timestamp_to_paint, data_ptr); 621 frame_painter()->PaintFrame(timestamp_to_paint, data_ptr);
593 622
594 base::TimeTicks now = base::TimeTicks::Now(); 623 base::TimeTicks now = base::TimeTicks::Now();
595 client_->OnIncomingCapturedBuffer(std::move(capture_buffer), 624 client()->OnIncomingCapturedBuffer(std::move(capture_buffer),
596 device_state_->format, now, 625 device_state()->format, now,
597 CalculateTimeSinceFirstInvocation(now)); 626 CalculateTimeSinceFirstInvocation(now));
627 }
628
629 JpegEncodingFrameDeliverer::JpegEncodingFrameDeliverer(
630 std::unique_ptr<PacmanFramePainter> frame_painter)
631 : FrameDeliverer(std::move(frame_painter)) {}
632
633 JpegEncodingFrameDeliverer::~JpegEncodingFrameDeliverer() = default;
634
635 void JpegEncodingFrameDeliverer::Uninitialize() {
636 FrameDeliverer::Uninitialize();
637 sk_n32_buffer_.clear();
638 jpeg_buffer_.clear();
639 }
640
641 void JpegEncodingFrameDeliverer::PaintAndDeliverNextFrame(
642 base::TimeDelta timestamp_to_paint) {
643 if (!client())
644 return;
645
646 auto required_sk_n32_buffer_size = VideoFrame::AllocationSize(
647 PIXEL_FORMAT_ARGB, device_state()->format.frame_size);
648 sk_n32_buffer_.resize(required_sk_n32_buffer_size);
649 memset(&sk_n32_buffer_[0], 0, required_sk_n32_buffer_size);
650
651 frame_painter()->PaintFrame(timestamp_to_paint, &sk_n32_buffer_[0]);
652
653 static const int kQuality = 75;
654 const gfx::JPEGCodec::ColorFormat encoding_source_format =
655 (kN32_SkColorType == kRGBA_8888_SkColorType)
656 ? gfx::JPEGCodec::FORMAT_RGBA
657 : gfx::JPEGCodec::FORMAT_BGRA;
658 bool success = gfx::JPEGCodec::Encode(
659 &sk_n32_buffer_[0], encoding_source_format,
660 device_state()->format.frame_size.width(),
661 device_state()->format.frame_size.height(),
662 VideoFrame::RowBytes(0 /* plane */, PIXEL_FORMAT_ARGB,
663 device_state()->format.frame_size.width()),
664 kQuality, &jpeg_buffer_);
665 if (!success) {
666 DLOG(ERROR) << "Jpeg encoding failed";
667 return;
668 }
669
670 const size_t frame_size = jpeg_buffer_.size();
671 base::TimeTicks now = base::TimeTicks::Now();
672 client()->OnIncomingCapturedData(&jpeg_buffer_[0], frame_size,
673 device_state()->format, 0 /* rotation */,
674 now, CalculateTimeSinceFirstInvocation(now));
598 } 675 }
599 676
600 void FakeVideoCaptureDevice::BeepAndScheduleNextCapture( 677 void FakeVideoCaptureDevice::BeepAndScheduleNextCapture(
601 base::TimeTicks expected_execution_time) { 678 base::TimeTicks expected_execution_time) {
602 DCHECK(thread_checker_.CalledOnValidThread()); 679 DCHECK(thread_checker_.CalledOnValidThread());
603 const base::TimeDelta beep_interval = 680 const base::TimeDelta beep_interval =
604 base::TimeDelta::FromMilliseconds(kBeepInterval); 681 base::TimeDelta::FromMilliseconds(kBeepInterval);
605 const base::TimeDelta frame_interval = 682 const base::TimeDelta frame_interval =
606 base::TimeDelta::FromMicroseconds(1e6 / device_state_->format.frame_rate); 683 base::TimeDelta::FromMicroseconds(1e6 / device_state_->format.frame_rate);
607 beep_time_ += frame_interval; 684 beep_time_ += frame_interval;
608 elapsed_time_ += frame_interval; 685 elapsed_time_ += frame_interval;
609 686
610 // Generate a synchronized beep twice per second. 687 // Generate a synchronized beep twice per second.
611 if (beep_time_ >= beep_interval) { 688 if (beep_time_ >= beep_interval) {
612 FakeAudioInputStream::BeepOnce(); 689 FakeAudioInputStream::BeepOnce();
613 beep_time_ -= beep_interval; 690 beep_time_ -= beep_interval;
614 } 691 }
615 692
616 // Reschedule next CaptureTask. 693 // Reschedule next CaptureTask.
617 const base::TimeTicks current_time = base::TimeTicks::Now(); 694 const base::TimeTicks current_time = base::TimeTicks::Now();
618 // Don't accumulate any debt if we are lagging behind - just post the next 695 // Don't accumulate any debt if we are lagging behind - just post the next
619 // frame immediately and continue as normal. 696 // frame immediately and continue as normal.
620 const base::TimeTicks next_execution_time = 697 const base::TimeTicks next_execution_time =
621 std::max(current_time, expected_execution_time + frame_interval); 698 std::max(current_time, expected_execution_time + frame_interval);
622 const base::TimeDelta delay = next_execution_time - current_time; 699 const base::TimeDelta delay = next_execution_time - current_time;
623 base::ThreadTaskRunnerHandle::Get()->PostDelayedTask( 700 base::ThreadTaskRunnerHandle::Get()->PostDelayedTask(
624 FROM_HERE, base::Bind(&FakeVideoCaptureDevice::OnNextFrameDue, 701 FROM_HERE,
625 weak_factory_.GetWeakPtr(), next_execution_time, 702 base::Bind(&FakeVideoCaptureDevice::OnNextFrameDue,
626 current_session_id_), 703 weak_factory_.GetWeakPtr(), next_execution_time,
704 current_session_id_),
627 delay); 705 delay);
628 } 706 }
629 707
630 void FakeVideoCaptureDevice::OnNextFrameDue( 708 void FakeVideoCaptureDevice::OnNextFrameDue(
631 base::TimeTicks expected_execution_time, 709 base::TimeTicks expected_execution_time,
632 int session_id) { 710 int session_id) {
633 DCHECK(thread_checker_.CalledOnValidThread()); 711 DCHECK(thread_checker_.CalledOnValidThread());
634 if (session_id != current_session_id_) 712 if (session_id != current_session_id_)
635 return; 713 return;
636 714
637 frame_deliverer_->PaintAndDeliverNextFrame(elapsed_time_); 715 frame_deliverer_->PaintAndDeliverNextFrame(elapsed_time_);
638 BeepAndScheduleNextCapture(expected_execution_time); 716 BeepAndScheduleNextCapture(expected_execution_time);
639 } 717 }
640 718
641 } // namespace media 719 } // namespace media
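
Usage note (not part of the patch): a minimal sketch of how a test might construct the fake device in the new MJPEG mode, assuming the FakeVideoCaptureDeviceMaker declaration in fake_video_capture_device.h nests the PixelFormat and DeliveryMode enums used by MakeInstance() in this diff; the spellings below come from the diff itself but are otherwise unverified.

#include <memory>
#include "media/capture/video/fake_video_capture_device.h"

// Constructs a fake device that emits JPEG-encoded frames at 30 fps.
// MJPEG only works with device-internal buffers; combining it with
// USE_CLIENT_PROVIDED_BUFFERS makes MakeInstance() return nullptr
// (see the delivery-mode switch in MakeInstance() above).
std::unique_ptr<media::VideoCaptureDevice> MakeMjpegFakeDeviceForTest() {
  return media::FakeVideoCaptureDeviceMaker::MakeInstance(
      media::FakeVideoCaptureDeviceMaker::PixelFormat::MJPEG,
      media::FakeVideoCaptureDeviceMaker::DeliveryMode::
          USE_DEVICE_INTERNAL_BUFFERS,
      30.0f /* frame_rate */);
}

// Each delivered frame then arrives via Client::OnIncomingCapturedData() as a
// JPEG-encoded buffer produced by JpegEncodingFrameDeliverer.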