OLD | NEW |
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/browser/renderer_host/media/video_capture_controller.h" | 5 #include "content/browser/renderer_host/media/video_capture_controller.h" |
6 | 6 |
7 #include <set> | 7 #include <set> |
8 | 8 |
9 #include "base/bind.h" | 9 #include "base/bind.h" |
10 #include "base/debug/trace_event.h" | 10 #include "base/debug/trace_event.h" |
11 #include "base/memory/scoped_ptr.h" | 11 #include "base/memory/scoped_ptr.h" |
12 #include "base/stl_util.h" | 12 #include "base/stl_util.h" |
13 #include "content/browser/renderer_host/media/media_stream_manager.h" | 13 #include "content/browser/renderer_host/media/media_stream_manager.h" |
14 #include "content/browser/renderer_host/media/video_capture_manager.h" | 14 #include "content/browser/renderer_host/media/video_capture_manager.h" |
15 #include "content/public/browser/browser_thread.h" | 15 #include "content/public/browser/browser_thread.h" |
16 #include "media/base/video_frame.h" | 16 #include "media/base/video_frame.h" |
17 #include "media/base/video_util.h" | 17 #include "media/base/video_util.h" |
18 #include "media/base/yuv_convert.h" | 18 #include "media/base/yuv_convert.h" |
19 | 19 |
20 #if !defined(OS_IOS) && !defined(OS_ANDROID) | 20 #if !defined(OS_IOS) && !defined(OS_ANDROID) |
21 #include "third_party/libyuv/include/libyuv.h" | 21 #include "third_party/libyuv/include/libyuv.h" |
22 #endif | 22 #endif |
23 | 23 |
24 namespace { | 24 namespace { |
25 | 25 |
| 26 #if defined(OS_IOS) || defined(OS_ANDROID) |
26 // TODO(wjia): Support stride. | 27 // TODO(wjia): Support stride. |
27 void RotatePackedYV12Frame( | 28 void RotatePackedYV12Frame( |
28 const uint8* src, | 29 const uint8* src, |
29 uint8* dest_yplane, | 30 uint8* dest_yplane, |
30 uint8* dest_uplane, | 31 uint8* dest_uplane, |
31 uint8* dest_vplane, | 32 uint8* dest_vplane, |
32 int width, | 33 int width, |
33 int height, | 34 int height, |
34 int rotation, | 35 int rotation, |
35 bool flip_vert, | 36 bool flip_vert, |
36 bool flip_horiz) { | 37 bool flip_horiz) { |
37 media::RotatePlaneByPixels( | 38 media::RotatePlaneByPixels( |
38 src, dest_yplane, width, height, rotation, flip_vert, flip_horiz); | 39 src, dest_yplane, width, height, rotation, flip_vert, flip_horiz); |
39 int y_size = width * height; | 40 int y_size = width * height; |
40 src += y_size; | 41 src += y_size; |
41 media::RotatePlaneByPixels( | 42 media::RotatePlaneByPixels( |
42 src, dest_uplane, width/2, height/2, rotation, flip_vert, flip_horiz); | 43 src, dest_uplane, width/2, height/2, rotation, flip_vert, flip_horiz); |
43 src += y_size/4; | 44 src += y_size/4; |
44 media::RotatePlaneByPixels( | 45 media::RotatePlaneByPixels( |
45 src, dest_vplane, width/2, height/2, rotation, flip_vert, flip_horiz); | 46 src, dest_vplane, width/2, height/2, rotation, flip_vert, flip_horiz); |
46 } | 47 } |
| 48 #endif // #if defined(OS_IOS) || defined(OS_ANDROID) |
47 | 49 |
48 } // namespace | 50 } // namespace |
49 | 51 |
50 namespace content { | 52 namespace content { |
51 | 53 |
52 // The number of buffers that VideoCaptureBufferPool should allocate. | 54 // The number of buffers that VideoCaptureBufferPool should allocate. |
53 static const int kNoOfBuffers = 3; | 55 static const int kNoOfBuffers = 3; |
54 | 56 |
55 struct VideoCaptureController::ControllerClient { | 57 struct VideoCaptureController::ControllerClient { |
56 ControllerClient( | 58 ControllerClient( |
(...skipping 194 matching lines...)
251 if (!buffer_pool_.get()) | 253 if (!buffer_pool_.get()) |
252 return NULL; | 254 return NULL; |
253 return buffer_pool_->ReserveI420VideoFrame(gfx::Size(frame_info_.width, | 255 return buffer_pool_->ReserveI420VideoFrame(gfx::Size(frame_info_.width, |
254 frame_info_.height), | 256 frame_info_.height), |
255 0); | 257 0); |
256 } | 258 } |
257 | 259 |
258 // Implements VideoCaptureDevice::EventHandler. | 260 // Implements VideoCaptureDevice::EventHandler. |
259 // OnIncomingCapturedFrame is called on the thread running the capture device, | 261 // OnIncomingCapturedFrame is called on the thread running the capture device, |
260 // i.e. the DirectShow thread on Windows and the v4l2_thread on Linux. | 262 // i.e. the DirectShow thread on Windows and the v4l2_thread on Linux. |
| 263 #if !defined(OS_IOS) && !defined(OS_ANDROID) |
261 void VideoCaptureController::OnIncomingCapturedFrame( | 264 void VideoCaptureController::OnIncomingCapturedFrame( |
262 const uint8* data, | 265 const uint8* data, |
263 int length, | 266 int length, |
| 267 base::Time timestamp, |
| 268 int rotation, |
| 269 bool flip_vert, |
| 270 bool flip_horiz) { |
| 271 TRACE_EVENT0("video", "VideoCaptureController::OnIncomingCapturedFrame"); |
| 272 |
| 273 scoped_refptr<media::VideoFrame> dst; |
| 274 { |
| 275 base::AutoLock lock(buffer_pool_lock_); |
| 276 if (!buffer_pool_.get()) |
| 277 return; |
| 278 dst = buffer_pool_->ReserveI420VideoFrame( |
| 279 gfx::Size(frame_info_.width, frame_info_.height), rotation); |
| 280 } |
| 281 |
| 282 if (!dst.get()) |
| 283 return; |
| 284 |
| 285 uint8* yplane = dst->data(media::VideoFrame::kYPlane); |
| 286 uint8* uplane = dst->data(media::VideoFrame::kUPlane); |
| 287 uint8* vplane = dst->data(media::VideoFrame::kVPlane); |
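| // The destination is an I420 frame: the Y plane uses the full frame |
| // width as its stride, and the chroma strides are half the width, |
| // rounded up so odd frame widths are still covered. |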
| 288 int yplane_stride = frame_info_.width; |
| 289 int uv_plane_stride = (frame_info_.width + 1) / 2; |
| 290 int crop_x = 0; |
| 291 int crop_y = 0; |
| 292 libyuv::FourCC origin_colorspace = libyuv::FOURCC_ANY; |
| 293 // Assuming rotation happens first and flips next, we can consolidate both |
| 294 // vertical and horizontal flips together with rotation into two variables: |
| 295 // new_rotation = (rotation + 180 * vertical_flip) modulo 360 |
| 296 // new_vertical_flip = horizontal_flip XOR vertical_flip |
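| // For example, rotation = 90 with vertical_flip = true gives |
| // new_rotation = (90 + 180) % 360 = 270, i.e. kRotate270, leaving |
| // only horizontal_flip XOR vertical_flip to apply as a vertical flip. |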
| 297 int new_rotation_angle = (rotation + 180 * flip_vert) % 360; |
| 298 libyuv::RotationMode rotation_mode = libyuv::kRotate0; |
| 299 if (new_rotation_angle == 90) |
| 300 rotation_mode = libyuv::kRotate90; |
| 301 else if (new_rotation_angle == 180) |
| 302 rotation_mode = libyuv::kRotate180; |
| 303 else if (new_rotation_angle == 270) |
| 304 rotation_mode = libyuv::kRotate270; |
| 305 |
| 306 switch (frame_info_.color) { |
| 307 case media::VideoCaptureCapability::kColorUnknown: // Color format not set. |
| 308 break; |
| 309 case media::VideoCaptureCapability::kI420: |
| 310 DCHECK(!chopped_width_ && !chopped_height_); |
| 311 origin_colorspace = libyuv::FOURCC_I420; |
| 312 break; |
| 313 case media::VideoCaptureCapability::kYV12: |
| 314 DCHECK(!chopped_width_ && !chopped_height_); |
| 315 origin_colorspace = libyuv::FOURCC_YV12; |
| 316 break; |
| 317 case media::VideoCaptureCapability::kNV21: |
| 318 DCHECK(!chopped_width_ && !chopped_height_); |
| 319 origin_colorspace = libyuv::FOURCC_NV12; |
| 320 break; |
| 321 case media::VideoCaptureCapability::kYUY2: |
| 322 DCHECK(!chopped_width_ && !chopped_height_); |
| 323 origin_colorspace = libyuv::FOURCC_YUY2; |
| 324 break; |
| 325 case media::VideoCaptureCapability::kRGB24: |
| 326 origin_colorspace = libyuv::FOURCC_RAW; |
| 327 break; |
| 328 case media::VideoCaptureCapability::kARGB: |
| 329 origin_colorspace = libyuv::FOURCC_ARGB; |
| 330 break; |
| 331 case media::VideoCaptureCapability::kMJPEG: |
| 332 origin_colorspace = libyuv::FOURCC_MJPG; |
| 333 break; |
| 334 default: |
| 335 NOTREACHED(); |
| 336 } |
| 337 |
| 338 bool need_convert_rgb24_on_win = false; |
| 339 #if defined(OS_WIN) |
| 340 // kRGB24 on Windows starts at the bottom line and has a negative stride. This |
| 341 // is not supported by libyuv, so the media API is used instead. |
| 342 if (frame_info_.color == media::VideoCaptureCapability::kRGB24) { |
| 343 // Rotation and flipping are not supported for kRGB24 on Windows. |
| 344 DCHECK(!rotation && !flip_vert && !flip_horiz); |
| 345 need_convert_rgb24_on_win = true; |
| 346 } |
| 347 #endif |
| 348 if (need_convert_rgb24_on_win) { |
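| // Point at the last row and use a negative stride so the bottom-up |
| // kRGB24 frame is read top-down during conversion. |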
| 349 int rgb_stride = -3 * (frame_info_.width + chopped_width_); |
| 350 const uint8* rgb_src = |
| 351 data + 3 * (frame_info_.width + chopped_width_) * |
| 352 (frame_info_.height - 1 + chopped_height_); |
| 353 media::ConvertRGB24ToYUV(rgb_src, |
| 354 yplane, |
| 355 uplane, |
| 356 vplane, |
| 357 frame_info_.width, |
| 358 frame_info_.height, |
| 359 rgb_stride, |
| 360 yplane_stride, |
| 361 uv_plane_stride); |
| 362 } else { |
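| // A negative source height tells libyuv to flip the frame vertically, |
| // applying the consolidated flip computed above. |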
| 363 libyuv::ConvertToI420( |
| 364 data, |
| 365 length, |
| 366 yplane, |
| 367 yplane_stride, |
| 368 uplane, |
| 369 uv_plane_stride, |
| 370 vplane, |
| 371 uv_plane_stride, |
| 372 crop_x, |
| 373 crop_y, |
| 374 frame_info_.width, |
| 375 frame_info_.height * (flip_vert ^ flip_horiz ? -1 : 1), |
| 376 frame_info_.width, |
| 377 frame_info_.height, |
| 378 rotation_mode, |
| 379 origin_colorspace); |
| 380 } |
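| // Deliver the filled destination frame on the IO thread. |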
| 381 BrowserThread::PostTask( |
| 382 BrowserThread::IO, |
| 383 FROM_HERE, |
| 384 base::Bind(&VideoCaptureController::DoIncomingCapturedFrameOnIOThread, |
| 385 this, |
| 386 dst, |
| 387 timestamp)); |
| 388 } |
| 389 #else |
| 390 void VideoCaptureController::OnIncomingCapturedFrame( |
| 391 const uint8* data, |
| 392 int length, |
264 base::Time timestamp, | 393 base::Time timestamp, |
265 int rotation, | 394 int rotation, |
266 bool flip_vert, | 395 bool flip_vert, |
267 bool flip_horiz) { | 396 bool flip_horiz) { |
268 DCHECK(frame_info_.color == media::VideoCaptureCapability::kI420 || | 397 DCHECK(frame_info_.color == media::VideoCaptureCapability::kI420 || |
269 frame_info_.color == media::VideoCaptureCapability::kYV12 || | 398 frame_info_.color == media::VideoCaptureCapability::kYV12 || |
270 (rotation == 0 && !flip_vert && !flip_horiz)); | 399 (rotation == 0 && !flip_vert && !flip_horiz)); |
271 | 400 |
272 TRACE_EVENT0("video", "VideoCaptureController::OnIncomingCapturedFrame"); | 401 TRACE_EVENT0("video", "VideoCaptureController::OnIncomingCapturedFrame"); |
273 | 402 |
(...skipping 35 matching lines...)
309 media::ConvertNV21ToYUV(data, yplane, uplane, vplane, frame_info_.width, | 438 media::ConvertNV21ToYUV(data, yplane, uplane, vplane, frame_info_.width, |
310 frame_info_.height); | 439 frame_info_.height); |
311 break; | 440 break; |
312 case media::VideoCaptureCapability::kYUY2: | 441 case media::VideoCaptureCapability::kYUY2: |
313 DCHECK(!chopped_width_ && !chopped_height_); | 442 DCHECK(!chopped_width_ && !chopped_height_); |
314 if (frame_info_.width * frame_info_.height * 2 != length) { | 443 if (frame_info_.width * frame_info_.height * 2 != length) { |
315 // If |length| of |data| does not match the expected width and height | 444 // If |length| of |data| does not match the expected width and height |
316 // we can't convert the frame to I420. YUY2 is 2 bytes per pixel. | 445 // we can't convert the frame to I420. YUY2 is 2 bytes per pixel. |
317 break; | 446 break; |
318 } | 447 } |
319 | |
320 media::ConvertYUY2ToYUV(data, yplane, uplane, vplane, frame_info_.width, | 448 media::ConvertYUY2ToYUV(data, yplane, uplane, vplane, frame_info_.width, |
321 frame_info_.height); | 449 frame_info_.height); |
322 break; | 450 break; |
323 case media::VideoCaptureCapability::kRGB24: { | 451 case media::VideoCaptureCapability::kRGB24: { |
324 int ystride = frame_info_.width; | 452 int ystride = frame_info_.width; |
325 int uvstride = frame_info_.width / 2; | 453 int uvstride = frame_info_.width / 2; |
326 #if defined(OS_WIN) // RGB on Windows start at the bottom line. | |
327 int rgb_stride = -3 * (frame_info_.width + chopped_width_); | |
328 const uint8* rgb_src = data + 3 * (frame_info_.width + chopped_width_) * | |
329 (frame_info_.height -1 + chopped_height_); | |
330 #else | |
331 int rgb_stride = 3 * (frame_info_.width + chopped_width_); | 454 int rgb_stride = 3 * (frame_info_.width + chopped_width_); |
332 const uint8* rgb_src = data; | 455 const uint8* rgb_src = data; |
333 #endif | |
334 media::ConvertRGB24ToYUV(rgb_src, yplane, uplane, vplane, | 456 media::ConvertRGB24ToYUV(rgb_src, yplane, uplane, vplane, |
335 frame_info_.width, frame_info_.height, | 457 frame_info_.width, frame_info_.height, |
336 rgb_stride, ystride, uvstride); | 458 rgb_stride, ystride, uvstride); |
337 break; | 459 break; |
338 } | 460 } |
339 case media::VideoCaptureCapability::kARGB: | 461 case media::VideoCaptureCapability::kARGB: |
340 media::ConvertRGB32ToYUV(data, yplane, uplane, vplane, frame_info_.width, | 462 media::ConvertRGB32ToYUV(data, yplane, uplane, vplane, frame_info_.width, |
341 frame_info_.height, | 463 frame_info_.height, |
342 (frame_info_.width + chopped_width_) * 4, | 464 (frame_info_.width + chopped_width_) * 4, |
343 frame_info_.width, frame_info_.width / 2); | 465 frame_info_.width, frame_info_.width / 2); |
344 break; | 466 break; |
345 #if !defined(OS_IOS) && !defined(OS_ANDROID) | |
346 case media::VideoCaptureCapability::kMJPEG: { | |
347 int yplane_stride = frame_info_.width; | |
348 int uv_plane_stride = (frame_info_.width + 1) / 2; | |
349 int crop_x = 0; | |
350 int crop_y = 0; | |
351 libyuv::ConvertToI420(data, length, yplane, yplane_stride, uplane, | |
352 uv_plane_stride, vplane, uv_plane_stride, crop_x, | |
353 crop_y, frame_info_.width, frame_info_.height, | |
354 frame_info_.width, frame_info_.height, | |
355 libyuv::kRotate0, libyuv::FOURCC_MJPG); | |
356 break; | |
357 } | |
358 #endif | |
359 default: | 467 default: |
360 NOTREACHED(); | 468 NOTREACHED(); |
361 } | 469 } |
362 | 470 |
363 BrowserThread::PostTask(BrowserThread::IO, | 471 BrowserThread::PostTask(BrowserThread::IO, |
364 FROM_HERE, | 472 FROM_HERE, |
365 base::Bind(&VideoCaptureController::DoIncomingCapturedFrameOnIOThread, | 473 base::Bind(&VideoCaptureController::DoIncomingCapturedFrameOnIOThread, |
366 this, dst, timestamp)); | 474 this, dst, timestamp)); |
367 } | 475 } |
| 476 #endif // #if !defined(OS_IOS) && !defined(OS_ANDROID) |
368 | 477 |
369 // OnIncomingCapturedVideoFrame is called on the thread running the capture device. | 478 // OnIncomingCapturedVideoFrame is called on the thread running the capture device. |
370 void VideoCaptureController::OnIncomingCapturedVideoFrame( | 479 void VideoCaptureController::OnIncomingCapturedVideoFrame( |
371 const scoped_refptr<media::VideoFrame>& frame, | 480 const scoped_refptr<media::VideoFrame>& frame, |
372 base::Time timestamp) { | 481 base::Time timestamp) { |
373 | 482 |
374 scoped_refptr<media::VideoFrame> target; | 483 scoped_refptr<media::VideoFrame> target; |
375 { | 484 { |
376 base::AutoLock lock(buffer_pool_lock_); | 485 base::AutoLock lock(buffer_pool_lock_); |
377 | 486 |
(...skipping 351 matching lines...)
729 controller_clients_.push_back((*client_it)); | 838 controller_clients_.push_back((*client_it)); |
730 pending_clients_.erase(client_it++); | 839 pending_clients_.erase(client_it++); |
731 } | 840 } |
732 // Request the manager to start the actual capture. | 841 // Request the manager to start the actual capture. |
733 video_capture_manager_->Start(current_params_, this); | 842 video_capture_manager_->Start(current_params_, this); |
734 state_ = VIDEO_CAPTURE_STATE_STARTED; | 843 state_ = VIDEO_CAPTURE_STATE_STARTED; |
735 device_in_use_ = true; | 844 device_in_use_ = true; |
736 } | 845 } |
737 | 846 |
738 } // namespace content | 847 } // namespace content |