Chromium Code Reviews
Side by Side Diff: media/capture/video/fake_video_capture_device.cc

Issue 2700173002: Add MJPEG support to FakeVideoCaptureDevice (Closed)
Patch Set: Add a const (created 3 years, 9 months ago)
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "media/capture/video/fake_video_capture_device.h" 5 #include "media/capture/video/fake_video_capture_device.h"
6 6
7 #include <stddef.h> 7 #include <stddef.h>
8 #include <algorithm> 8 #include <algorithm>
9 #include <utility> 9 #include <utility>
10 10
11 #include "base/bind.h" 11 #include "base/bind.h"
12 #include "base/location.h" 12 #include "base/location.h"
13 #include "base/macros.h" 13 #include "base/macros.h"
14 #include "base/memory/weak_ptr.h" 14 #include "base/memory/weak_ptr.h"
15 #include "base/single_thread_task_runner.h" 15 #include "base/single_thread_task_runner.h"
16 #include "base/strings/stringprintf.h" 16 #include "base/strings/stringprintf.h"
17 #include "base/threading/thread_checker.h" 17 #include "base/threading/thread_checker.h"
18 #include "base/threading/thread_task_runner_handle.h" 18 #include "base/threading/thread_task_runner_handle.h"
19 #include "base/time/time.h" 19 #include "base/time/time.h"
20 #include "media/audio/fake_audio_input_stream.h" 20 #include "media/audio/fake_audio_input_stream.h"
21 #include "media/base/video_frame.h" 21 #include "media/base/video_frame.h"
22 #include "third_party/skia/include/core/SkBitmap.h" 22 #include "third_party/skia/include/core/SkBitmap.h"
23 #include "third_party/skia/include/core/SkCanvas.h" 23 #include "third_party/skia/include/core/SkCanvas.h"
24 #include "third_party/skia/include/core/SkMatrix.h" 24 #include "third_party/skia/include/core/SkMatrix.h"
25 #include "third_party/skia/include/core/SkPaint.h" 25 #include "third_party/skia/include/core/SkPaint.h"
26 #include "ui/gfx/codec/jpeg_codec.h"
26 #include "ui/gfx/codec/png_codec.h" 27 #include "ui/gfx/codec/png_codec.h"
27 28
28 namespace media { 29 namespace media {
29 30
30 namespace { 31 namespace {
31 // Sweep at 600 deg/sec. 32 // Sweep at 600 deg/sec.
32 static const float kPacmanAngularVelocity = 600; 33 static const float kPacmanAngularVelocity = 600;
33 // Beep every 500 ms. 34 // Beep every 500 ms.
34 static const int kBeepInterval = 500; 35 static const int kBeepInterval = 500;
35 // Gradient travels from bottom to top in 5 seconds. 36 // Gradient travels from bottom to top in 5 seconds.
36 static const float kGradientFrequency = 1.f / 5; 37 static const float kGradientFrequency = 1.f / 5;
37 38
38 static const double kMinZoom = 100.0; 39 static const double kMinZoom = 100.0;
39 static const double kMaxZoom = 400.0; 40 static const double kMaxZoom = 400.0;
40 static const double kZoomStep = 1.0; 41 static const double kZoomStep = 1.0;
41 static const double kInitialZoom = 100.0; 42 static const double kInitialZoom = 100.0;
42 43
43 static const gfx::Size kSupportedSizesOrderedByIncreasingWidth[] = { 44 static const gfx::Size kSupportedSizesOrderedByIncreasingWidth[] = {
44 gfx::Size(96, 96), gfx::Size(320, 240), gfx::Size(640, 480), 45 gfx::Size(96, 96), gfx::Size(320, 240), gfx::Size(640, 480),
45 gfx::Size(1280, 720), gfx::Size(1920, 1080)}; 46 gfx::Size(1280, 720), gfx::Size(1920, 1080)};
46 static const int kSupportedSizesCount = 47 static const int kSupportedSizesCount =
47 arraysize(kSupportedSizesOrderedByIncreasingWidth); 48 arraysize(kSupportedSizesOrderedByIncreasingWidth);
48 49
49 static const VideoPixelFormat kSupportedPixelFormats[] = {
50 PIXEL_FORMAT_I420, PIXEL_FORMAT_Y16, PIXEL_FORMAT_ARGB};
51
52 static gfx::Size SnapToSupportedSize(const gfx::Size& requested_size) { 50 static gfx::Size SnapToSupportedSize(const gfx::Size& requested_size) {
53 for (const gfx::Size& supported_size : 51 for (const gfx::Size& supported_size :
54 kSupportedSizesOrderedByIncreasingWidth) { 52 kSupportedSizesOrderedByIncreasingWidth) {
55 if (requested_size.width() <= supported_size.width()) 53 if (requested_size.width() <= supported_size.width())
56 return supported_size; 54 return supported_size;
57 } 55 }
58 return kSupportedSizesOrderedByIncreasingWidth[kSupportedSizesCount - 1]; 56 return kSupportedSizesOrderedByIncreasingWidth[kSupportedSizesCount - 1];
59 } 57 }
60 58
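SnapToSupportedSize() picks the first entry whose width is at least the requested width and caps everything wider at 1920x1080. A standalone illustration of that rule (using a plain struct in place of gfx::Size, so it compiles outside Chromium):

// Illustration only: a stand-in for gfx::Size and the snapping loop above.
#include <cassert>

struct Size { int width; int height; };

static const Size kSizes[] = {{96, 96}, {320, 240}, {640, 480},
                              {1280, 720}, {1920, 1080}};

Size Snap(int requested_width) {
  for (const Size& s : kSizes) {
    if (requested_width <= s.width)
      return s;
  }
  return kSizes[4];  // Widest supported size.
}

int main() {
  assert(Snap(100).width == 320);    // 96 < 100, so snap up to 320x240.
  assert(Snap(640).width == 640);    // Exact match stays at 640x480.
  assert(Snap(3840).width == 1920);  // Wider than all entries: cap at 1080p.
  return 0;
}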
61 // Represents the current state of a FakeVideoCaptureDevice. 59 // Represents the current state of a FakeVideoCaptureDevice.
62 // This is a separate struct because read-access to it is shared with several 60 // This is a separate struct because read-access to it is shared with several
63 // collaborating classes. 61 // collaborating classes.
64 struct FakeDeviceState { 62 struct FakeDeviceState {
65 FakeDeviceState(float zoom, float frame_rate, VideoPixelFormat pixel_format) 63 FakeDeviceState(float zoom, float frame_rate, VideoPixelFormat pixel_format)
66 : zoom(zoom), 64 : zoom(zoom),
67 format(gfx::Size(), frame_rate, pixel_format, PIXEL_STORAGE_CPU) {} 65 format(gfx::Size(), frame_rate, pixel_format, PIXEL_STORAGE_CPU) {}
68 66
69 uint32_t zoom; 67 uint32_t zoom;
70 VideoCaptureFormat format; 68 VideoCaptureFormat format;
71 }; 69 };
72 70
73 // Paints a "pacman-like" animated circle including textual information such 71 // Paints a "pacman-like" animated circle including textual information such
74 // as a frame count and timer. 72 // as a frame count and timer.
75 class PacmanFramePainter { 73 class PacmanFramePainter {
76 public: 74 public:
77 // Currently, only the following values are supported for |pixel_format|: 75 enum class Format { I420, SK_N32, Y16 };
78 // PIXEL_FORMAT_I420 76 PacmanFramePainter(Format pixel_format,
79 // PIXEL_FORMAT_Y16
80 // PIXEL_FORMAT_ARGB
81 PacmanFramePainter(VideoPixelFormat pixel_format,
82 const FakeDeviceState* fake_device_state); 77 const FakeDeviceState* fake_device_state);
83 78
84 void PaintFrame(base::TimeDelta elapsed_time, uint8_t* target_buffer); 79 void PaintFrame(base::TimeDelta elapsed_time, uint8_t* target_buffer);
85 80
86 private: 81 private:
87 void DrawGradientSquares(base::TimeDelta elapsed_time, 82 void DrawGradientSquares(base::TimeDelta elapsed_time,
88 uint8_t* target_buffer); 83 uint8_t* target_buffer);
89 84
90 void DrawPacman(base::TimeDelta elapsed_time, uint8_t* target_buffer); 85 void DrawPacman(base::TimeDelta elapsed_time, uint8_t* target_buffer);
91 86
92 const VideoPixelFormat pixel_format_; 87 const Format pixel_format_;
93 const FakeDeviceState* fake_device_state_ = nullptr; 88 const FakeDeviceState* fake_device_state_ = nullptr;
94 }; 89 };
95 90
96 // Paints and delivers frames to a client, which is set via Initialize(). 91 // Paints and delivers frames to a client, which is set via Initialize().
97 class FrameDeliverer { 92 class FrameDeliverer {
98 public: 93 public:
99 FrameDeliverer(std::unique_ptr<PacmanFramePainter> frame_painter) 94 FrameDeliverer(std::unique_ptr<PacmanFramePainter> frame_painter)
100 : frame_painter_(std::move(frame_painter)) {} 95 : frame_painter_(std::move(frame_painter)) {}
101 virtual ~FrameDeliverer() {} 96 virtual ~FrameDeliverer() {}
102 virtual void Initialize(VideoPixelFormat pixel_format, 97 virtual void Initialize(VideoPixelFormat pixel_format,
103 std::unique_ptr<VideoCaptureDevice::Client> client, 98 std::unique_ptr<VideoCaptureDevice::Client> client,
104 const FakeDeviceState* device_state) = 0; 99 const FakeDeviceState* device_state) {
105 virtual void Uninitialize() = 0; 100 client_ = std::move(client);
101 device_state_ = device_state;
102 }
103 virtual void Uninitialize() {
104 client_.reset();
105 device_state_ = nullptr;
106 }
106 virtual void PaintAndDeliverNextFrame(base::TimeDelta timestamp_to_paint) = 0; 107 virtual void PaintAndDeliverNextFrame(base::TimeDelta timestamp_to_paint) = 0;
107 108
108 protected: 109 protected:
109 base::TimeDelta CalculateTimeSinceFirstInvocation(base::TimeTicks now) { 110 base::TimeDelta CalculateTimeSinceFirstInvocation(base::TimeTicks now) {
110 if (first_ref_time_.is_null()) 111 if (first_ref_time_.is_null())
111 first_ref_time_ = now; 112 first_ref_time_ = now;
112 return now - first_ref_time_; 113 return now - first_ref_time_;
113 } 114 }
114 115
116 PacmanFramePainter* frame_painter() { return frame_painter_.get(); }
117 const FakeDeviceState* device_state() { return device_state_; }
118 VideoCaptureDevice::Client* client() { return client_.get(); }
119
120 private:
115 const std::unique_ptr<PacmanFramePainter> frame_painter_; 121 const std::unique_ptr<PacmanFramePainter> frame_painter_;
116 const FakeDeviceState* device_state_ = nullptr; 122 const FakeDeviceState* device_state_ = nullptr;
117 std::unique_ptr<VideoCaptureDevice::Client> client_; 123 std::unique_ptr<VideoCaptureDevice::Client> client_;
118
119 private:
120 base::TimeTicks first_ref_time_; 124 base::TimeTicks first_ref_time_;
121 }; 125 };
122 126
123 // Delivers frames using its own buffers via OnIncomingCapturedData(). 127 // Delivers frames using its own buffers via OnIncomingCapturedData().
124 class OwnBufferFrameDeliverer : public FrameDeliverer { 128 class OwnBufferFrameDeliverer : public FrameDeliverer {
125 public: 129 public:
126 OwnBufferFrameDeliverer(std::unique_ptr<PacmanFramePainter> frame_painter); 130 OwnBufferFrameDeliverer(std::unique_ptr<PacmanFramePainter> frame_painter);
127 ~OwnBufferFrameDeliverer() override; 131 ~OwnBufferFrameDeliverer() override;
128 132
129 // Implementation of FrameDeliverer 133 // Implementation of FrameDeliverer
130 void Initialize(VideoPixelFormat pixel_format, 134 void Initialize(VideoPixelFormat pixel_format,
131 std::unique_ptr<VideoCaptureDevice::Client> client, 135 std::unique_ptr<VideoCaptureDevice::Client> client,
132 const FakeDeviceState* device_state) override; 136 const FakeDeviceState* device_state) override;
133 void Uninitialize() override; 137 void Uninitialize() override;
134 void PaintAndDeliverNextFrame(base::TimeDelta timestamp_to_paint) override; 138 void PaintAndDeliverNextFrame(base::TimeDelta timestamp_to_paint) override;
135 139
136 private: 140 private:
137 std::unique_ptr<uint8_t[]> buffer_; 141 std::unique_ptr<uint8_t[]> buffer_;
138 }; 142 };
139 143
140 // Delivers frames using buffers provided by the client via 144 // Delivers frames using buffers provided by the client via
141 // OnIncomingCapturedBuffer(). 145 // OnIncomingCapturedBuffer().
142 class ClientBufferFrameDeliverer : public FrameDeliverer { 146 class ClientBufferFrameDeliverer : public FrameDeliverer {
143 public: 147 public:
144 ClientBufferFrameDeliverer(std::unique_ptr<PacmanFramePainter> frame_painter); 148 ClientBufferFrameDeliverer(std::unique_ptr<PacmanFramePainter> frame_painter);
145 ~ClientBufferFrameDeliverer() override; 149 ~ClientBufferFrameDeliverer() override;
146 150
147 // Implementation of FrameDeliverer 151 // Implementation of FrameDeliverer
148 void Initialize(VideoPixelFormat pixel_format, 152 void PaintAndDeliverNextFrame(base::TimeDelta timestamp_to_paint) override;
149 std::unique_ptr<VideoCaptureDevice::Client> client, 153 };
150 const FakeDeviceState* device_state) override; 154
155 class JpegEncodingFrameDeliverer : public FrameDeliverer {
156 public:
157 JpegEncodingFrameDeliverer(std::unique_ptr<PacmanFramePainter> frame_painter);
158 ~JpegEncodingFrameDeliverer() override;
159
 160 // Implementation of FrameDeliverer
151 void Uninitialize() override; 161 void Uninitialize() override;
152 void PaintAndDeliverNextFrame(base::TimeDelta timestamp_to_paint) override; 162 void PaintAndDeliverNextFrame(base::TimeDelta timestamp_to_paint) override;
163
164 private:
165 std::vector<uint8_t> sk_n32_buffer_;
166 std::vector<unsigned char> jpeg_buffer_;
153 }; 167 };
154 168
155 // Implements the photo functionality of a VideoCaptureDevice 169 // Implements the photo functionality of a VideoCaptureDevice
156 class FakePhotoDevice { 170 class FakePhotoDevice {
157 public: 171 public:
158 FakePhotoDevice(std::unique_ptr<PacmanFramePainter> argb_painter, 172 FakePhotoDevice(std::unique_ptr<PacmanFramePainter> painter,
159 const FakeDeviceState* fake_device_state); 173 const FakeDeviceState* fake_device_state);
160 ~FakePhotoDevice(); 174 ~FakePhotoDevice();
161 175
162 void GetPhotoCapabilities( 176 void GetPhotoCapabilities(
163 VideoCaptureDevice::GetPhotoCapabilitiesCallback callback); 177 VideoCaptureDevice::GetPhotoCapabilitiesCallback callback);
164 void TakePhoto(VideoCaptureDevice::TakePhotoCallback callback, 178 void TakePhoto(VideoCaptureDevice::TakePhotoCallback callback,
165 base::TimeDelta elapsed_time); 179 base::TimeDelta elapsed_time);
166 180
167 private: 181 private:
168 const std::unique_ptr<PacmanFramePainter> argb_painter_; 182 const std::unique_ptr<PacmanFramePainter> painter_;
169 const FakeDeviceState* const fake_device_state_; 183 const FakeDeviceState* const fake_device_state_;
170 }; 184 };
171 185
172 // Implementation of VideoCaptureDevice that generates test frames. This is 186 // Implementation of VideoCaptureDevice that generates test frames. This is
173 // useful for testing the video capture components without having to use real 187 // useful for testing the video capture components without having to use real
174 // devices. The implementation schedules delayed tasks to itself to generate and 188 // devices. The implementation schedules delayed tasks to itself to generate and
175 // deliver frames at the requested rate. 189 // deliver frames at the requested rate.
176 class FakeVideoCaptureDevice : public VideoCaptureDevice { 190 class FakeVideoCaptureDevice : public VideoCaptureDevice {
177 public: 191 public:
178 FakeVideoCaptureDevice( 192 FakeVideoCaptureDevice(
(...skipping 38 matching lines...)
217 231
218 // static 232 // static
219 void FakeVideoCaptureDeviceMaker::GetSupportedSizes( 233 void FakeVideoCaptureDeviceMaker::GetSupportedSizes(
220 std::vector<gfx::Size>* supported_sizes) { 234 std::vector<gfx::Size>* supported_sizes) {
221 for (int i = 0; i < kSupportedSizesCount; i++) 235 for (int i = 0; i < kSupportedSizesCount; i++)
222 supported_sizes->push_back(kSupportedSizesOrderedByIncreasingWidth[i]); 236 supported_sizes->push_back(kSupportedSizesOrderedByIncreasingWidth[i]);
223 } 237 }
224 238
225 // static 239 // static
226 std::unique_ptr<VideoCaptureDevice> FakeVideoCaptureDeviceMaker::MakeInstance( 240 std::unique_ptr<VideoCaptureDevice> FakeVideoCaptureDeviceMaker::MakeInstance(
227 VideoPixelFormat pixel_format, 241 PixelFormat pixel_format,
228 DeliveryMode delivery_mode, 242 DeliveryMode delivery_mode,
229 float frame_rate) { 243 float frame_rate) {
230 bool pixel_format_supported = false; 244 auto device_state = base::MakeUnique<FakeDeviceState>(
231 for (const auto& supported_pixel_format : kSupportedPixelFormats) { 245 kInitialZoom, frame_rate,
232 if (pixel_format == supported_pixel_format) { 246 static_cast<media::VideoPixelFormat>(pixel_format));
233 pixel_format_supported = true; 247 PacmanFramePainter::Format painter_format;
248 switch (pixel_format) {
249 case PixelFormat::I420:
250 painter_format = PacmanFramePainter::Format::I420;
234 break; 251 break;
235 } 252 case PixelFormat::Y16:
253 painter_format = PacmanFramePainter::Format::Y16;
254 break;
255 case PixelFormat::MJPEG:
256 painter_format = PacmanFramePainter::Format::SK_N32;
257 break;
236 } 258 }
237 if (!pixel_format_supported) { 259 auto video_frame_painter =
238 DLOG(ERROR) << "Requested an unsupported pixel format " 260 base::MakeUnique<PacmanFramePainter>(painter_format, device_state.get());
239 << VideoPixelFormatToString(pixel_format);
240 return nullptr;
241 }
242 261
243 auto device_state =
244 base::MakeUnique<FakeDeviceState>(kInitialZoom, frame_rate, pixel_format);
245 auto video_frame_painter =
246 base::MakeUnique<PacmanFramePainter>(pixel_format, device_state.get());
247 std::unique_ptr<FrameDeliverer> frame_delivery_strategy; 262 std::unique_ptr<FrameDeliverer> frame_delivery_strategy;
248 switch (delivery_mode) { 263 switch (delivery_mode) {
249 case DeliveryMode::USE_DEVICE_INTERNAL_BUFFERS: 264 case DeliveryMode::USE_DEVICE_INTERNAL_BUFFERS:
250 frame_delivery_strategy = base::MakeUnique<OwnBufferFrameDeliverer>( 265 if (pixel_format == PixelFormat::MJPEG) {
251 std::move(video_frame_painter)); 266 frame_delivery_strategy = base::MakeUnique<JpegEncodingFrameDeliverer>(
267 std::move(video_frame_painter));
268 } else {
269 frame_delivery_strategy = base::MakeUnique<OwnBufferFrameDeliverer>(
270 std::move(video_frame_painter));
271 }
252 break; 272 break;
253 case DeliveryMode::USE_CLIENT_PROVIDED_BUFFERS: 273 case DeliveryMode::USE_CLIENT_PROVIDED_BUFFERS:
274 if (pixel_format == PixelFormat::MJPEG) {
275 DLOG(ERROR) << "PixelFormat::MJPEG cannot be used in combination with "
276 << "USE_CLIENT_PROVIDED_BUFFERS.";
277 return nullptr;
278 }
254 frame_delivery_strategy = base::MakeUnique<ClientBufferFrameDeliverer>( 279 frame_delivery_strategy = base::MakeUnique<ClientBufferFrameDeliverer>(
255 std::move(video_frame_painter)); 280 std::move(video_frame_painter));
256 break; 281 break;
257 } 282 }
258 283
259 auto photo_frame_painter = base::MakeUnique<PacmanFramePainter>( 284 auto photo_frame_painter = base::MakeUnique<PacmanFramePainter>(
260 PIXEL_FORMAT_ARGB, device_state.get()); 285 PacmanFramePainter::Format::SK_N32, device_state.get());
261 auto photo_device = base::MakeUnique<FakePhotoDevice>( 286 auto photo_device = base::MakeUnique<FakePhotoDevice>(
262 std::move(photo_frame_painter), device_state.get()); 287 std::move(photo_frame_painter), device_state.get());
263 288
264 return base::MakeUnique<FakeVideoCaptureDevice>( 289 return base::MakeUnique<FakeVideoCaptureDevice>(
265 std::move(frame_delivery_strategy), std::move(photo_device), 290 std::move(frame_delivery_strategy), std::move(photo_device),
266 std::move(device_state)); 291 std::move(device_state));
267 } 292 }
268 293
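For reviewers trying the change out: after this patch, MJPEG can be requested from the maker, but only together with device-internal buffers; MJPEG plus USE_CLIENT_PROVIDED_BUFFERS makes MakeInstance() return nullptr, per the switch above. A minimal usage sketch, assuming the PixelFormat and DeliveryMode enums are nested in FakeVideoCaptureDeviceMaker as the header change suggests:

// Sketch only; the authoritative declarations live in
// media/capture/video/fake_video_capture_device.h.
#include <memory>
#include "media/capture/video/fake_video_capture_device.h"

std::unique_ptr<media::VideoCaptureDevice> MakeMjpegFakeDevice() {
  using Maker = media::FakeVideoCaptureDeviceMaker;
  // MJPEG frames are delivered via OnIncomingCapturedData(), so this format
  // requires USE_DEVICE_INTERNAL_BUFFERS; the client-provided-buffers path
  // rejects it.
  return Maker::MakeInstance(Maker::PixelFormat::MJPEG,
                             Maker::DeliveryMode::USE_DEVICE_INTERNAL_BUFFERS,
                             30.0f /* frame_rate */);
}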
269 PacmanFramePainter::PacmanFramePainter(VideoPixelFormat pixel_format, 294 PacmanFramePainter::PacmanFramePainter(Format pixel_format,
270 const FakeDeviceState* fake_device_state) 295 const FakeDeviceState* fake_device_state)
271 : pixel_format_(pixel_format), fake_device_state_(fake_device_state) { 296 : pixel_format_(pixel_format), fake_device_state_(fake_device_state) {}
272 DCHECK(pixel_format == PIXEL_FORMAT_I420 ||
273 pixel_format == PIXEL_FORMAT_Y16 || pixel_format == PIXEL_FORMAT_ARGB);
274 }
275 297
276 void PacmanFramePainter::PaintFrame(base::TimeDelta elapsed_time, 298 void PacmanFramePainter::PaintFrame(base::TimeDelta elapsed_time,
277 uint8_t* target_buffer) { 299 uint8_t* target_buffer) {
278 DrawPacman(elapsed_time, target_buffer); 300 DrawPacman(elapsed_time, target_buffer);
279 DrawGradientSquares(elapsed_time, target_buffer); 301 DrawGradientSquares(elapsed_time, target_buffer);
280 } 302 }
281 303
282 // Starting from top left, -45 deg gradient. Value at point (row, column) is 304 // Starting from top left, -45 deg gradient. Value at point (row, column) is
283 // calculated as (top_left_value + (row + column) * step) % MAX_VALUE, where 305 // calculated as (top_left_value + (row + column) * step) % MAX_VALUE, where
284 // step is MAX_VALUE / (width + height). MAX_VALUE is 255 (for 8 bit per 306 // step is MAX_VALUE / (width + height). MAX_VALUE is 255 (for 8 bit per
(...skipping 13 matching lines...)
298 const float start = 320 const float start =
299 fmod(65536 * elapsed_time.InSecondsF() * kGradientFrequency, 65536); 321 fmod(65536 * elapsed_time.InSecondsF() * kGradientFrequency, 65536);
300 const float color_step = 65535 / static_cast<float>(width + height); 322 const float color_step = 65535 / static_cast<float>(width + height);
301 for (const auto& corner : squares) { 323 for (const auto& corner : squares) {
302 for (int y = corner.y(); y < corner.y() + side; ++y) { 324 for (int y = corner.y(); y < corner.y() + side; ++y) {
303 for (int x = corner.x(); x < corner.x() + side; ++x) { 325 for (int x = corner.x(); x < corner.x() + side; ++x) {
304 const unsigned int value = 326 const unsigned int value =
305 static_cast<unsigned int>(start + (x + y) * color_step) & 0xFFFF; 327 static_cast<unsigned int>(start + (x + y) * color_step) & 0xFFFF;
306 size_t offset = (y * width) + x; 328 size_t offset = (y * width) + x;
307 switch (pixel_format_) { 329 switch (pixel_format_) {
308 case PIXEL_FORMAT_Y16: 330 case Format::Y16:
309 target_buffer[offset * sizeof(uint16_t)] = value & 0xFF; 331 target_buffer[offset * sizeof(uint16_t)] = value & 0xFF;
310 target_buffer[offset * sizeof(uint16_t) + 1] = value >> 8; 332 target_buffer[offset * sizeof(uint16_t) + 1] = value >> 8;
311 break; 333 break;
312 case PIXEL_FORMAT_ARGB: 334 case Format::SK_N32:
313 target_buffer[offset * sizeof(uint32_t) + 1] = value >> 8; 335 target_buffer[offset * sizeof(uint32_t) + 1] = value >> 8;
314 target_buffer[offset * sizeof(uint32_t) + 2] = value >> 8; 336 target_buffer[offset * sizeof(uint32_t) + 2] = value >> 8;
315 target_buffer[offset * sizeof(uint32_t) + 3] = value >> 8; 337 target_buffer[offset * sizeof(uint32_t) + 3] = value >> 8;
316 break; 338 break;
317 default: 339 case Format::I420:
318 target_buffer[offset] = value >> 8; 340 target_buffer[offset] = value >> 8;
319 break; 341 break;
320 } 342 }
321 } 343 }
322 } 344 }
323 } 345 }
324 } 346 }
325 347
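The gradient values are computed in 16-bit range (start + (x + y) * color_step, masked to 0xFFFF) and then packed differently per format: both bytes for Y16, only the high byte for I420, and the high byte replicated into three channels for SK_N32. A standalone sketch of the Y16 and I420 packing (illustration only, with hard-coded frame geometry):

// Illustration only: how one gradient sample is packed, mirroring the
// switch in DrawGradientSquares() above.
#include <cstdint>
#include <cstdio>

int main() {
  const int width = 640, height = 480;
  const float start = 0.f;  // elapsed_time == 0 in the formula above.
  const float color_step = 65535 / static_cast<float>(width + height);
  const int x = 100, y = 20;
  const unsigned int value =
      static_cast<unsigned int>(start + (x + y) * color_step) & 0xFFFF;

  uint8_t y16[2];         // Little-endian 16-bit luminance sample.
  y16[0] = value & 0xFF;
  y16[1] = value >> 8;

  const uint8_t i420_y = value >> 8;  // Only the Y plane gets the high byte.

  std::printf("value=%u  Y16=[%02x %02x]  I420 Y=%02x\n",
              value, y16[0], y16[1], i420_y);
  return 0;
}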
326 void PacmanFramePainter::DrawPacman(base::TimeDelta elapsed_time, 348 void PacmanFramePainter::DrawPacman(base::TimeDelta elapsed_time,
327 uint8_t* target_buffer) { 349 uint8_t* target_buffer) {
328 const int width = fake_device_state_->format.frame_size.width(); 350 const int width = fake_device_state_->format.frame_size.width();
329 const int height = fake_device_state_->format.frame_size.height(); 351 const int height = fake_device_state_->format.frame_size.height();
330 352
331 // |kN32_SkColorType| stands for the appropriate RGBA/BGRA format. 353 SkColorType colorspace = kAlpha_8_SkColorType;
332 const SkColorType colorspace = (pixel_format_ == PIXEL_FORMAT_ARGB) 354 switch (pixel_format_) {
333 ? kN32_SkColorType 355 case Format::I420:
334 : kAlpha_8_SkColorType; 356 // Skia doesn't support painting in I420. Instead, paint an 8bpp
335 // Skia doesn't support 16 bit alpha rendering, so we 8 bit alpha and then use 357 // monochrome image to the beginning of |target_buffer|. This section of
336 // this as high byte values in 16 bit pixels. 358 // |target_buffer| corresponds to the Y-plane of the YUV image. Do not
359 // touch the U or V planes of |target_buffer|. Assuming they have been
360 // initialized to 0, which corresponds to a green color tone, the result
 361 // will be a green-ish monochrome frame.
362 colorspace = kAlpha_8_SkColorType;
363 break;
364 case Format::SK_N32:
365 // SkColorType is RGBA on some platforms and BGRA on others.
366 colorspace = kN32_SkColorType;
367 break;
368 case Format::Y16:
369 // Skia doesn't support painting in Y16. Instead, paint an 8bpp monochrome
370 // image to the beginning of |target_buffer|. Later, move the 8bit pixel
371 // values to a position corresponding to the high byte values of 16bit
372 // pixel values (assuming the byte order is little-endian).
373 colorspace = kAlpha_8_SkColorType;
374 break;
375 }
376
337 const SkImageInfo info = 377 const SkImageInfo info =
338 SkImageInfo::Make(width, height, colorspace, kOpaque_SkAlphaType); 378 SkImageInfo::Make(width, height, colorspace, kOpaque_SkAlphaType);
339 SkBitmap bitmap; 379 SkBitmap bitmap;
340 bitmap.setInfo(info); 380 bitmap.setInfo(info);
341 bitmap.setPixels(target_buffer); 381 bitmap.setPixels(target_buffer);
342 SkPaint paint; 382 SkPaint paint;
343 paint.setStyle(SkPaint::kFill_Style); 383 paint.setStyle(SkPaint::kFill_Style);
344 SkCanvas canvas(bitmap); 384 SkCanvas canvas(bitmap);
345 385
346 const SkScalar unscaled_zoom = fake_device_state_->zoom / 100.f; 386 const SkScalar unscaled_zoom = fake_device_state_->zoom / 100.f;
347 SkMatrix matrix; 387 SkMatrix matrix;
348 matrix.setScale(unscaled_zoom, unscaled_zoom, width / 2, height / 2); 388 matrix.setScale(unscaled_zoom, unscaled_zoom, width / 2, height / 2);
349 canvas.setMatrix(matrix); 389 canvas.setMatrix(matrix);
350 390
351 // Equalize Alpha_8 that has light green background while RGBA has white. 391 // For the SK_N32 case, match the green color tone produced by the
352 if (pixel_format_ == PIXEL_FORMAT_ARGB) { 392 // I420 case.
393 if (pixel_format_ == Format::SK_N32) {
353 const SkRect full_frame = SkRect::MakeWH(width, height); 394 const SkRect full_frame = SkRect::MakeWH(width, height);
354 paint.setARGB(255, 0, 127, 0); 395 paint.setARGB(255, 0, 127, 0);
355 canvas.drawRect(full_frame, paint); 396 canvas.drawRect(full_frame, paint);
397 paint.setColor(SK_ColorGREEN);
356 } 398 }
357 paint.setColor(SK_ColorGREEN);
358 399
359 // Draw a sweeping circle to show an animation. 400 // Draw a sweeping circle to show an animation.
360 const float end_angle = 401 const float end_angle =
361 fmod(kPacmanAngularVelocity * elapsed_time.InSecondsF(), 361); 402 fmod(kPacmanAngularVelocity * elapsed_time.InSecondsF(), 361);
362 const int radius = std::min(width, height) / 4; 403 const int radius = std::min(width, height) / 4;
363 const SkRect rect = SkRect::MakeXYWH(width / 2 - radius, height / 2 - radius, 404 const SkRect rect = SkRect::MakeXYWH(width / 2 - radius, height / 2 - radius,
364 2 * radius, 2 * radius); 405 2 * radius, 2 * radius);
365 canvas.drawArc(rect, 0, end_angle, true, paint); 406 canvas.drawArc(rect, 0, end_angle, true, paint);
366 407
367 // Draw current time. 408 // Draw current time.
368 const int milliseconds = elapsed_time.InMilliseconds() % 1000; 409 const int milliseconds = elapsed_time.InMilliseconds() % 1000;
369 const int seconds = elapsed_time.InSeconds() % 60; 410 const int seconds = elapsed_time.InSeconds() % 60;
370 const int minutes = elapsed_time.InMinutes() % 60; 411 const int minutes = elapsed_time.InMinutes() % 60;
371 const int hours = elapsed_time.InHours(); 412 const int hours = elapsed_time.InHours();
372 const int frame_count = elapsed_time.InMilliseconds() * 413 const int frame_count = elapsed_time.InMilliseconds() *
373 fake_device_state_->format.frame_rate / 1000; 414 fake_device_state_->format.frame_rate / 1000;
374 415
375 const std::string time_string = 416 const std::string time_string =
376 base::StringPrintf("%d:%02d:%02d:%03d %d", hours, minutes, seconds, 417 base::StringPrintf("%d:%02d:%02d:%03d %d", hours, minutes, seconds,
377 milliseconds, frame_count); 418 milliseconds, frame_count);
378 canvas.scale(3, 3); 419 canvas.scale(3, 3);
379 canvas.drawText(time_string.data(), time_string.length(), 30, 20, paint); 420 canvas.drawText(time_string.data(), time_string.length(), 30, 20, paint);
380 421
381 if (pixel_format_ == PIXEL_FORMAT_Y16) { 422 if (pixel_format_ == Format::Y16) {
382 // Use 8 bit bitmap rendered to first half of the buffer as high byte values 423 // Use 8 bit bitmap rendered to first half of the buffer as high byte values
383 // for the whole buffer. Low byte values are not important. 424 // for the whole buffer. Low byte values are not important.
384 for (int i = (width * height) - 1; i >= 0; --i) 425 for (int i = (width * height) - 1; i >= 0; --i)
385 target_buffer[i * 2 + 1] = target_buffer[i]; 426 target_buffer[i * 2 + 1] = target_buffer[i];
386 } 427 }
387 } 428 }
388 429
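The Y16 path above paints an 8-bit image into the front half of the buffer and then spreads it in place so each sample becomes the high byte of a little-endian 16-bit pixel. Iterating backwards is what makes the in-place expansion safe: going forwards would overwrite samples that have not been copied yet. A tiny standalone sketch of the same loop:

// Illustration only: the in-place 8-bit -> 16-bit high-byte expansion used
// for Y16 above.
#include <cstdint>
#include <cstdio>

int main() {
  // Pretend this is a 2x2 frame: the painter wrote 4 8-bit samples into the
  // front of a buffer sized for 16-bit pixels (2 bytes per pixel).
  uint8_t buffer[8] = {10, 20, 30, 40, 0, 0, 0, 0};
  const int num_pixels = 4;
  for (int i = num_pixels - 1; i >= 0; --i)
    buffer[i * 2 + 1] = buffer[i];  // High byte of little-endian 16-bit pixel.
  // buffer is now {10, 10, 30, 20, 0, 30, 0, 40}: every odd byte holds the
  // original sample; the low bytes are "not important", as the comment says.
  for (uint8_t b : buffer)
    std::printf("%u ", b);
  std::printf("\n");
  return 0;
}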
389 FakePhotoDevice::FakePhotoDevice( 430 FakePhotoDevice::FakePhotoDevice(std::unique_ptr<PacmanFramePainter> painter,
390 std::unique_ptr<PacmanFramePainter> argb_painter, 431 const FakeDeviceState* fake_device_state)
391 const FakeDeviceState* fake_device_state) 432 : painter_(std::move(painter)), fake_device_state_(fake_device_state) {}
392 : argb_painter_(std::move(argb_painter)),
393 fake_device_state_(fake_device_state) {}
394 433
395 FakePhotoDevice::~FakePhotoDevice() = default; 434 FakePhotoDevice::~FakePhotoDevice() = default;
396 435
397 void FakePhotoDevice::TakePhoto(VideoCaptureDevice::TakePhotoCallback callback, 436 void FakePhotoDevice::TakePhoto(VideoCaptureDevice::TakePhotoCallback callback,
398 base::TimeDelta elapsed_time) { 437 base::TimeDelta elapsed_time) {
399 // Create a PNG-encoded frame and send it back to |callback|. 438 // Create a PNG-encoded frame and send it back to |callback|.
400 std::unique_ptr<uint8_t[]> buffer(new uint8_t[VideoFrame::AllocationSize( 439 auto required_sk_n32_buffer_size = VideoFrame::AllocationSize(
401 PIXEL_FORMAT_ARGB, fake_device_state_->format.frame_size)]); 440 PIXEL_FORMAT_ARGB, fake_device_state_->format.frame_size);
402 argb_painter_->PaintFrame(elapsed_time, buffer.get()); 441 std::unique_ptr<uint8_t[]> buffer(new uint8_t[required_sk_n32_buffer_size]);
442 memset(buffer.get(), 0, required_sk_n32_buffer_size);
443 painter_->PaintFrame(elapsed_time, buffer.get());
403 mojom::BlobPtr blob = mojom::Blob::New(); 444 mojom::BlobPtr blob = mojom::Blob::New();
404 const bool result = 445 const gfx::PNGCodec::ColorFormat encoding_source_format =
405 gfx::PNGCodec::Encode(buffer.get(), gfx::PNGCodec::FORMAT_RGBA, 446 (kN32_SkColorType == kRGBA_8888_SkColorType) ? gfx::PNGCodec::FORMAT_RGBA
406 fake_device_state_->format.frame_size, 447 : gfx::PNGCodec::FORMAT_BGRA;
407 fake_device_state_->format.frame_size.width() * 4, 448 const bool result = gfx::PNGCodec::Encode(
408 true /* discard_transparency */, 449 buffer.get(), encoding_source_format,
409 std::vector<gfx::PNGCodec::Comment>(), &blob->data); 450 fake_device_state_->format.frame_size,
451 VideoFrame::RowBytes(0 /* plane */, PIXEL_FORMAT_ARGB,
452 fake_device_state_->format.frame_size.width()),
453 true /* discard_transparency */, std::vector<gfx::PNGCodec::Comment>(),
454 &blob->data);
410 DCHECK(result); 455 DCHECK(result);
411 456
412 blob->mime_type = "image/png"; 457 blob->mime_type = "image/png";
413 callback.Run(std::move(blob)); 458 callback.Run(std::move(blob));
414 } 459 }
415 460
416 FakeVideoCaptureDevice::FakeVideoCaptureDevice( 461 FakeVideoCaptureDevice::FakeVideoCaptureDevice(
417 std::unique_ptr<FrameDeliverer> frame_delivery_strategy, 462 std::unique_ptr<FrameDeliverer> frame_delivery_strategy,
418 std::unique_ptr<FakePhotoDevice> photo_device, 463 std::unique_ptr<FakePhotoDevice> photo_device,
419 std::unique_ptr<FakeDeviceState> device_state) 464 std::unique_ptr<FakeDeviceState> device_state)
(...skipping 97 matching lines...)
517 OwnBufferFrameDeliverer::OwnBufferFrameDeliverer( 562 OwnBufferFrameDeliverer::OwnBufferFrameDeliverer(
518 std::unique_ptr<PacmanFramePainter> frame_painter) 563 std::unique_ptr<PacmanFramePainter> frame_painter)
519 : FrameDeliverer(std::move(frame_painter)) {} 564 : FrameDeliverer(std::move(frame_painter)) {}
520 565
521 OwnBufferFrameDeliverer::~OwnBufferFrameDeliverer() = default; 566 OwnBufferFrameDeliverer::~OwnBufferFrameDeliverer() = default;
522 567
523 void OwnBufferFrameDeliverer::Initialize( 568 void OwnBufferFrameDeliverer::Initialize(
524 VideoPixelFormat pixel_format, 569 VideoPixelFormat pixel_format,
525 std::unique_ptr<VideoCaptureDevice::Client> client, 570 std::unique_ptr<VideoCaptureDevice::Client> client,
526 const FakeDeviceState* device_state) { 571 const FakeDeviceState* device_state) {
527 client_ = std::move(client); 572 FrameDeliverer::Initialize(pixel_format, std::move(client), device_state);
528 device_state_ = device_state;
529 buffer_.reset(new uint8_t[VideoFrame::AllocationSize( 573 buffer_.reset(new uint8_t[VideoFrame::AllocationSize(
530 pixel_format, device_state_->format.frame_size)]); 574 pixel_format, device_state->format.frame_size)]);
531 } 575 }
532 576
533 void OwnBufferFrameDeliverer::Uninitialize() { 577 void OwnBufferFrameDeliverer::Uninitialize() {
534 client_.reset(); 578 FrameDeliverer::Uninitialize();
535 device_state_ = nullptr;
536 buffer_.reset(); 579 buffer_.reset();
537 } 580 }
538 581
539 void OwnBufferFrameDeliverer::PaintAndDeliverNextFrame( 582 void OwnBufferFrameDeliverer::PaintAndDeliverNextFrame(
540 base::TimeDelta timestamp_to_paint) { 583 base::TimeDelta timestamp_to_paint) {
541 if (!client_) 584 if (!client())
542 return; 585 return;
543 const size_t frame_size = device_state_->format.ImageAllocationSize(); 586 const size_t frame_size = device_state()->format.ImageAllocationSize();
544 memset(buffer_.get(), 0, frame_size); 587 memset(buffer_.get(), 0, frame_size);
545 frame_painter_->PaintFrame(timestamp_to_paint, buffer_.get()); 588 frame_painter()->PaintFrame(timestamp_to_paint, buffer_.get());
546 base::TimeTicks now = base::TimeTicks::Now(); 589 base::TimeTicks now = base::TimeTicks::Now();
547 client_->OnIncomingCapturedData(buffer_.get(), frame_size, 590 client()->OnIncomingCapturedData(buffer_.get(), frame_size,
548 device_state_->format, 0 /* rotation */, now, 591 device_state()->format, 0 /* rotation */,
549 CalculateTimeSinceFirstInvocation(now)); 592 now, CalculateTimeSinceFirstInvocation(now));
550 } 593 }
551 594
552 ClientBufferFrameDeliverer::ClientBufferFrameDeliverer( 595 ClientBufferFrameDeliverer::ClientBufferFrameDeliverer(
553 std::unique_ptr<PacmanFramePainter> frame_painter) 596 std::unique_ptr<PacmanFramePainter> frame_painter)
554 : FrameDeliverer(std::move(frame_painter)) {} 597 : FrameDeliverer(std::move(frame_painter)) {}
555 598
556 ClientBufferFrameDeliverer::~ClientBufferFrameDeliverer() = default; 599 ClientBufferFrameDeliverer::~ClientBufferFrameDeliverer() = default;
557 600
558 void ClientBufferFrameDeliverer::Initialize(
559 VideoPixelFormat,
560 std::unique_ptr<VideoCaptureDevice::Client> client,
561 const FakeDeviceState* device_state) {
562 client_ = std::move(client);
563 device_state_ = device_state;
564 }
565
566 void ClientBufferFrameDeliverer::Uninitialize() {
567 client_.reset();
568 device_state_ = nullptr;
569 }
570
571 void ClientBufferFrameDeliverer::PaintAndDeliverNextFrame( 601 void ClientBufferFrameDeliverer::PaintAndDeliverNextFrame(
572 base::TimeDelta timestamp_to_paint) { 602 base::TimeDelta timestamp_to_paint) {
573 if (client_ == nullptr) 603 if (!client())
574 return; 604 return;
575 605
576 const int arbitrary_frame_feedback_id = 0; 606 const int arbitrary_frame_feedback_id = 0;
577 auto capture_buffer = client_->ReserveOutputBuffer( 607 auto capture_buffer = client()->ReserveOutputBuffer(
578 device_state_->format.frame_size, device_state_->format.pixel_format, 608 device_state()->format.frame_size, device_state()->format.pixel_format,
579 device_state_->format.pixel_storage, arbitrary_frame_feedback_id); 609 device_state()->format.pixel_storage, arbitrary_frame_feedback_id);
580 DLOG_IF(ERROR, !capture_buffer.is_valid()) 610 DLOG_IF(ERROR, !capture_buffer.is_valid())
581 << "Couldn't allocate Capture Buffer"; 611 << "Couldn't allocate Capture Buffer";
582 auto buffer_access = 612 auto buffer_access =
583 capture_buffer.handle_provider->GetHandleForInProcessAccess(); 613 capture_buffer.handle_provider->GetHandleForInProcessAccess();
584 DCHECK(buffer_access->data()) << "Buffer has NO backing memory"; 614 DCHECK(buffer_access->data()) << "Buffer has NO backing memory";
585 615
586 DCHECK_EQ(PIXEL_STORAGE_CPU, device_state_->format.pixel_storage); 616 DCHECK_EQ(PIXEL_STORAGE_CPU, device_state()->format.pixel_storage);
587 617
588 uint8_t* data_ptr = buffer_access->data(); 618 uint8_t* data_ptr = buffer_access->data();
589 memset(data_ptr, 0, buffer_access->mapped_size()); 619 memset(data_ptr, 0, buffer_access->mapped_size());
590 frame_painter_->PaintFrame(timestamp_to_paint, data_ptr); 620 frame_painter()->PaintFrame(timestamp_to_paint, data_ptr);
591 621
592 base::TimeTicks now = base::TimeTicks::Now(); 622 base::TimeTicks now = base::TimeTicks::Now();
593 client_->OnIncomingCapturedBuffer(std::move(capture_buffer), 623 client()->OnIncomingCapturedBuffer(std::move(capture_buffer),
594 device_state_->format, now, 624 device_state()->format, now,
595 CalculateTimeSinceFirstInvocation(now)); 625 CalculateTimeSinceFirstInvocation(now));
626 }
627
628 JpegEncodingFrameDeliverer::JpegEncodingFrameDeliverer(
629 std::unique_ptr<PacmanFramePainter> frame_painter)
630 : FrameDeliverer(std::move(frame_painter)) {}
631
632 JpegEncodingFrameDeliverer::~JpegEncodingFrameDeliverer() = default;
633
634 void JpegEncodingFrameDeliverer::Uninitialize() {
635 FrameDeliverer::Uninitialize();
636 sk_n32_buffer_.clear();
637 jpeg_buffer_.clear();
638 }
639
640 void JpegEncodingFrameDeliverer::PaintAndDeliverNextFrame(
641 base::TimeDelta timestamp_to_paint) {
642 if (!client())
643 return;
644
645 auto required_sk_n32_buffer_size = VideoFrame::AllocationSize(
646 PIXEL_FORMAT_ARGB, device_state()->format.frame_size);
647 sk_n32_buffer_.resize(required_sk_n32_buffer_size);
648 memset(&sk_n32_buffer_[0], 0, required_sk_n32_buffer_size);
649
650 frame_painter()->PaintFrame(timestamp_to_paint, &sk_n32_buffer_[0]);
651
652 static const int kQuality = 75;
653 const gfx::JPEGCodec::ColorFormat encoding_source_format =
654 (kN32_SkColorType == kRGBA_8888_SkColorType)
655 ? gfx::JPEGCodec::FORMAT_RGBA
656 : gfx::JPEGCodec::FORMAT_BGRA;
657 bool success = gfx::JPEGCodec::Encode(
658 &sk_n32_buffer_[0], encoding_source_format,
659 device_state()->format.frame_size.width(),
660 device_state()->format.frame_size.height(),
661 VideoFrame::RowBytes(0 /* plane */, PIXEL_FORMAT_ARGB,
662 device_state()->format.frame_size.width()),
663 kQuality, &jpeg_buffer_);
664 if (!success) {
665 DLOG(ERROR) << "Jpeg encoding failed";
666 return;
667 }
668
669 const size_t frame_size = jpeg_buffer_.size();
670 base::TimeTicks now = base::TimeTicks::Now();
671 client()->OnIncomingCapturedData(&jpeg_buffer_[0], frame_size,
672 device_state()->format, 0 /* rotation */,
673 now, CalculateTimeSinceFirstInvocation(now));
596 } 674 }
597 675
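When the fake device runs in MJPEG mode, each call to OnIncomingCapturedData() carries a complete JPEG image rather than raw pixels. A cheap structural check a test might apply to the delivered bytes (illustration only; a real test would decode the stream, e.g. via gfx::JPEGCodec):

// Illustration only: verify the delivered buffer looks like a JPEG stream.
#include <cstddef>
#include <cstdint>

bool LooksLikeJpeg(const uint8_t* data, size_t size) {
  // Every JPEG stream starts with the SOI marker 0xFF 0xD8 and ends with the
  // EOI marker 0xFF 0xD9.
  return size >= 4 && data[0] == 0xFF && data[1] == 0xD8 &&
         data[size - 2] == 0xFF && data[size - 1] == 0xD9;
}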
598 void FakeVideoCaptureDevice::BeepAndScheduleNextCapture( 676 void FakeVideoCaptureDevice::BeepAndScheduleNextCapture(
599 base::TimeTicks expected_execution_time) { 677 base::TimeTicks expected_execution_time) {
600 DCHECK(thread_checker_.CalledOnValidThread()); 678 DCHECK(thread_checker_.CalledOnValidThread());
601 const base::TimeDelta beep_interval = 679 const base::TimeDelta beep_interval =
602 base::TimeDelta::FromMilliseconds(kBeepInterval); 680 base::TimeDelta::FromMilliseconds(kBeepInterval);
603 const base::TimeDelta frame_interval = 681 const base::TimeDelta frame_interval =
604 base::TimeDelta::FromMicroseconds(1e6 / device_state_->format.frame_rate); 682 base::TimeDelta::FromMicroseconds(1e6 / device_state_->format.frame_rate);
605 beep_time_ += frame_interval; 683 beep_time_ += frame_interval;
606 elapsed_time_ += frame_interval; 684 elapsed_time_ += frame_interval;
607 685
608 // Generate a synchronized beep twice per second. 686 // Generate a synchronized beep twice per second.
609 if (beep_time_ >= beep_interval) { 687 if (beep_time_ >= beep_interval) {
610 FakeAudioInputStream::BeepOnce(); 688 FakeAudioInputStream::BeepOnce();
611 beep_time_ -= beep_interval; 689 beep_time_ -= beep_interval;
612 } 690 }
613 691
614 // Reschedule next CaptureTask. 692 // Reschedule next CaptureTask.
615 const base::TimeTicks current_time = base::TimeTicks::Now(); 693 const base::TimeTicks current_time = base::TimeTicks::Now();
616 // Don't accumulate any debt if we are lagging behind - just post the next 694 // Don't accumulate any debt if we are lagging behind - just post the next
617 // frame immediately and continue as normal. 695 // frame immediately and continue as normal.
618 const base::TimeTicks next_execution_time = 696 const base::TimeTicks next_execution_time =
619 std::max(current_time, expected_execution_time + frame_interval); 697 std::max(current_time, expected_execution_time + frame_interval);
620 const base::TimeDelta delay = next_execution_time - current_time; 698 const base::TimeDelta delay = next_execution_time - current_time;
621 base::ThreadTaskRunnerHandle::Get()->PostDelayedTask( 699 base::ThreadTaskRunnerHandle::Get()->PostDelayedTask(
622 FROM_HERE, base::Bind(&FakeVideoCaptureDevice::OnNextFrameDue, 700 FROM_HERE,
623 weak_factory_.GetWeakPtr(), next_execution_time, 701 base::Bind(&FakeVideoCaptureDevice::OnNextFrameDue,
624 current_session_id_), 702 weak_factory_.GetWeakPtr(), next_execution_time,
703 current_session_id_),
625 delay); 704 delay);
626 } 705 }
627 706
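BeepAndScheduleNextCapture() deliberately avoids building up timing debt: the next frame is scheduled for expected_execution_time + frame_interval, but never in the past, so a late frame fires immediately instead of triggering a burst of catch-up frames. The same rule with plain numbers (illustration only):

// Illustration only: the "no debt" rescheduling rule above, with doubles
// standing in for base::TimeTicks.
#include <algorithm>
#include <cstdio>

int main() {
  const double frame_interval_ms = 1000.0 / 30.0;  // ~33.3 ms at 30 fps.

  // On time: expected 100 ms, now 101 ms -> next frame fires at ~133.3 ms.
  double expected = 100.0, now = 101.0;
  double next = std::max(now, expected + frame_interval_ms);
  std::printf("on time: next=%.1f delay=%.1f\n", next, next - now);

  // Lagging: expected 100 ms, now 150 ms -> fire immediately (delay 0)
  // instead of trying to catch up on the missed slots.
  now = 150.0;
  next = std::max(now, expected + frame_interval_ms);
  std::printf("lagging: next=%.1f delay=%.1f\n", next, next - now);
  return 0;
}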
628 void FakeVideoCaptureDevice::OnNextFrameDue( 707 void FakeVideoCaptureDevice::OnNextFrameDue(
629 base::TimeTicks expected_execution_time, 708 base::TimeTicks expected_execution_time,
630 int session_id) { 709 int session_id) {
631 DCHECK(thread_checker_.CalledOnValidThread()); 710 DCHECK(thread_checker_.CalledOnValidThread());
632 if (session_id != current_session_id_) 711 if (session_id != current_session_id_)
633 return; 712 return;
634 713
635 frame_deliverer_->PaintAndDeliverNextFrame(elapsed_time_); 714 frame_deliverer_->PaintAndDeliverNextFrame(elapsed_time_);
636 BeepAndScheduleNextCapture(expected_execution_time); 715 BeepAndScheduleNextCapture(expected_execution_time);
637 } 716 }
638 717
639 } // namespace media 718 } // namespace media