// Copyright 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "cc/video_layer_impl.h"

#include "base/logging.h"
#include "cc/base/math_util.h"
#include "cc/output/renderer.h"
#include "cc/quad_sink.h"
#include "cc/quads/io_surface_draw_quad.h"
#include "cc/quads/stream_video_draw_quad.h"
#include "cc/quads/texture_draw_quad.h"
#include "cc/quads/yuv_video_draw_quad.h"
#include "cc/resources/resource_provider.h"
#include "cc/trees/layer_tree_impl.h"
#include "cc/video_frame_provider_client_impl.h"
#include "gpu/GLES2/gl2extchromium.h"
#include "media/filters/skcanvas_video_renderer.h"
#include "third_party/khronos/GLES2/gl2.h"
#include "third_party/khronos/GLES2/gl2ext.h"

#if defined(GOOGLE_TV)
#include "cc/quads/solid_color_draw_quad.h"
#endif

namespace cc {

// static
scoped_ptr<VideoLayerImpl> VideoLayerImpl::Create(
    LayerTreeImpl* tree_impl,
    int id,
    VideoFrameProvider* provider) {
  scoped_ptr<VideoLayerImpl> layer(new VideoLayerImpl(tree_impl, id));
  layer->SetProviderClientImpl(VideoFrameProviderClientImpl::Create(provider));
  DCHECK(tree_impl->proxy()->IsImplThread());
  DCHECK(tree_impl->proxy()->IsMainThreadBlocked());
  return layer.Pass();
}

VideoLayerImpl::VideoLayerImpl(LayerTreeImpl* tree_impl, int id)
    : LayerImpl(tree_impl, id),
      frame_(NULL),
      format_(GL_INVALID_VALUE),
      convert_yuv_(false),
      external_texture_resource_(0) {}

VideoLayerImpl::~VideoLayerImpl() {
  if (!provider_client_impl_->Stopped()) {
    // With impl-side painting, the pending and active layers may both be
    // associated with the video provider at the same time. Both hold a ref
    // on the VideoFrameProviderClientImpl, but we stop when the first
    // LayerImpl (the one on the pending tree) is destroyed, since we know
    // the main thread is blocked for this commit.
    DCHECK(layer_tree_impl()->proxy()->IsImplThread());
    DCHECK(layer_tree_impl()->proxy()->IsMainThreadBlocked());
    provider_client_impl_->Stop();
  }
  FreePlaneData(layer_tree_impl()->resource_provider());

#ifndef NDEBUG
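  // FreePlaneData() above should have released every plane resource, and
  // DidDraw() should have released any external texture resource.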
  for (size_t i = 0; i < media::VideoFrame::kMaxPlanes; ++i)
    DCHECK(!frame_planes_[i].resource_id);
  DCHECK(!external_texture_resource_);
#endif
}

scoped_ptr<LayerImpl> VideoLayerImpl::CreateLayerImpl(
    LayerTreeImpl* tree_impl) {
  return scoped_ptr<LayerImpl>(new VideoLayerImpl(tree_impl, id()));
}

void VideoLayerImpl::PushPropertiesTo(LayerImpl* layer) {
  LayerImpl::PushPropertiesTo(layer);

  VideoLayerImpl* other = static_cast<VideoLayerImpl*>(layer);
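  // The pending and active layers share the same provider client; see the
  // destructor for how it is eventually stopped.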
  other->SetProviderClientImpl(provider_client_impl_);
}

void VideoLayerImpl::DidBecomeActive() {
  provider_client_impl_->set_active_video_layer(this);
}

// Convert media::VideoFrame::Format to OpenGL enum values.
static GLenum ConvertVFCFormatToGLenum(const media::VideoFrame& frame) {
  switch (frame.format()) {
    case media::VideoFrame::YV12:
    case media::VideoFrame::YV16:
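      // Planar YUV frames: each plane is uploaded as a single-channel
      // (luminance) texture.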
      return GL_LUMINANCE;
    case media::VideoFrame::NATIVE_TEXTURE:
      return frame.texture_target();
#if defined(GOOGLE_TV)
    case media::VideoFrame::HOLE:
      return GL_INVALID_VALUE;
#endif
    case media::VideoFrame::INVALID:
    case media::VideoFrame::RGB32:
    case media::VideoFrame::EMPTY:
    case media::VideoFrame::I420:
      NOTREACHED();
      break;
  }
  return GL_INVALID_VALUE;
}

size_t VideoLayerImpl::NumPlanes() const {
  if (!frame_)
    return 0;

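  // When the frame is converted from YUV to RGBA in software, it occupies a
  // single RGBA plane.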
  if (convert_yuv_)
    return 1;

  return media::VideoFrame::NumPlanes(frame_->format());
}

void VideoLayerImpl::WillDraw(ResourceProvider* resource_provider) {
  LayerImpl::WillDraw(resource_provider);

  // Explicitly acquire and release the provider mutex so it can be held from
  // WillDraw() to DidDraw(). Since the compositor thread is in the middle of
  // drawing, the layer will not be destroyed before DidDraw() is called.
  // Therefore, the only thing that will prevent this lock from being released
  // is the GPU process locking it. As the GPU process can't cause the
  // destruction of the provider (by calling StopUsingProvider()), holding this
  // lock should not cause a deadlock.
  frame_ = provider_client_impl_->AcquireLockAndCurrentFrame();

  WillDrawInternal(resource_provider);
  FreeUnusedPlaneData(resource_provider);

  if (!frame_)
    provider_client_impl_->ReleaseLock();
}

void VideoLayerImpl::WillDrawInternal(ResourceProvider* resource_provider) {
  DCHECK(!external_texture_resource_);

  if (!frame_)
    return;

#if defined(GOOGLE_TV)
  if (frame_->format() == media::VideoFrame::HOLE)
    return;
#endif

  format_ = ConvertVFCFormatToGLenum(*frame_);

  // If these fail, we'll have to add draw logic that handles offset bitmap/
  // texture UVs. For now, just expect a (0, 0) offset, since none of our
  // decoders produce offset frames.
  DCHECK_EQ(frame_->visible_rect().x(), 0);
  DCHECK_EQ(frame_->visible_rect().y(), 0);

  if (format_ == GL_INVALID_VALUE) {
    provider_client_impl_->PutCurrentFrame(frame_);
    frame_ = NULL;
    return;
  }

  // TODO: If we're in software compositing mode, we do the YUV -> RGB
  // conversion here. That involves an extra copy of each frame to a bitmap.
  // Obviously, this is suboptimal and should be addressed once ubercompositor
  // starts shaping up.
  convert_yuv_ =
      resource_provider->default_resource_type() == ResourceProvider::Bitmap &&
      (frame_->format() == media::VideoFrame::YV12 ||
       frame_->format() == media::VideoFrame::YV16);

  if (convert_yuv_)
    format_ = GL_RGBA;

  if (!AllocatePlaneData(resource_provider)) {
    provider_client_impl_->PutCurrentFrame(frame_);
    frame_ = NULL;
    return;
  }

  if (!CopyPlaneData(resource_provider)) {
    provider_client_impl_->PutCurrentFrame(frame_);
    frame_ = NULL;
    return;
  }

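  // For 2D textures, wrap the frame's GL texture in a resource so that it can
  // be referenced by a TextureDrawQuad.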
  if (format_ == GL_TEXTURE_2D) {
    external_texture_resource_ =
        resource_provider->CreateResourceFromExternalTexture(
            frame_->texture_id());
  }
}

void VideoLayerImpl::AppendQuads(QuadSink* quad_sink,
                                 AppendQuadsData* append_quads_data) {
  if (!frame_)
    return;

  SharedQuadState* shared_quad_state =
      quad_sink->UseSharedQuadState(CreateSharedQuadState());
  AppendDebugBorderQuad(quad_sink, shared_quad_state, append_quads_data);

  // TODO: When we pass quads out of process, we need to double-buffer, or
  // otherwise synchronize use of all textures in the quad.

  gfx::Rect quad_rect(content_bounds());
  gfx::Rect opaque_rect(contents_opaque() ? quad_rect : gfx::Rect());
  gfx::Rect visible_rect = frame_->visible_rect();
  gfx::Size coded_size = frame_->coded_size();

  // Scale the texture coordinates so that only the frame's visible region is
  // sampled; the coded size can include extra padding pixels for macroblocked
  // formats.
  float tex_width_scale =
      static_cast<float>(visible_rect.width()) / coded_size.width();
  float tex_height_scale =
      static_cast<float>(visible_rect.height()) / coded_size.height();

#if defined(GOOGLE_TV)
  // This block, and the other blocks guarded by #if defined(GOOGLE_TV), are
  // not maintained by the general compositor team. Please contact the
  // following people instead:
  //
  // wonsik@chromium.org
  // ycheo@chromium.org

  if (frame_->format() == media::VideoFrame::HOLE) {
    scoped_ptr<SolidColorDrawQuad> solid_color_draw_quad =
        SolidColorDrawQuad::Create();
    // Create a solid color quad with transparent black and force no
    // blending.
    solid_color_draw_quad->SetAll(
        shared_quad_state, quad_rect, quad_rect, quad_rect, false,
        SK_ColorTRANSPARENT);
    quad_sink->Append(solid_color_draw_quad.PassAs<DrawQuad>(),
                      append_quads_data);
    return;
  }
#endif

  switch (format_) {
    case GL_LUMINANCE: {
      // YUV software decoder.
      const FramePlane& y_plane = frame_planes_[media::VideoFrame::kYPlane];
      const FramePlane& u_plane = frame_planes_[media::VideoFrame::kUPlane];
      const FramePlane& v_plane = frame_planes_[media::VideoFrame::kVPlane];
      gfx::SizeF tex_scale(tex_width_scale, tex_height_scale);
      scoped_ptr<YUVVideoDrawQuad> yuv_video_quad = YUVVideoDrawQuad::Create();
      yuv_video_quad->SetNew(shared_quad_state,
                             quad_rect,
                             opaque_rect,
                             tex_scale,
                             y_plane,
                             u_plane,
                             v_plane);
      quad_sink->Append(yuv_video_quad.PassAs<DrawQuad>(), append_quads_data);
      break;
    }
    case GL_RGBA: {
      // RGBA software decoder.
      const FramePlane& plane = frame_planes_[media::VideoFrame::kRGBPlane];
      bool premultiplied_alpha = true;
      gfx::PointF uv_top_left(0.f, 0.f);
      gfx::PointF uv_bottom_right(tex_width_scale, tex_height_scale);
      float opacity[] = {1.0f, 1.0f, 1.0f, 1.0f};
      bool flipped = false;
      scoped_ptr<TextureDrawQuad> texture_quad = TextureDrawQuad::Create();
      texture_quad->SetNew(shared_quad_state,
                           quad_rect,
                           opaque_rect,
                           plane.resource_id,
                           premultiplied_alpha,
                           uv_top_left,
                           uv_bottom_right,
                           opacity,
                           flipped);
      quad_sink->Append(texture_quad.PassAs<DrawQuad>(), append_quads_data);
      break;
    }
    case GL_TEXTURE_2D: {
      // NativeTexture hardware decoder.
      bool premultiplied_alpha = true;
      gfx::PointF uv_top_left(0.f, 0.f);
      gfx::PointF uv_bottom_right(tex_width_scale, tex_height_scale);
      float opacity[] = {1.0f, 1.0f, 1.0f, 1.0f};
      bool flipped = false;
      scoped_ptr<TextureDrawQuad> texture_quad = TextureDrawQuad::Create();
      texture_quad->SetNew(shared_quad_state,
                           quad_rect,
                           opaque_rect,
                           external_texture_resource_,
                           premultiplied_alpha,
                           uv_top_left,
                           uv_bottom_right,
                           opacity,
                           flipped);
      quad_sink->Append(texture_quad.PassAs<DrawQuad>(), append_quads_data);
      break;
    }
    case GL_TEXTURE_RECTANGLE_ARB: {
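      // IOSurface-backed texture (hardware decoder on Mac).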
      gfx::Size visible_size(visible_rect.width(), visible_rect.height());
      scoped_ptr<IOSurfaceDrawQuad> io_surface_quad =
          IOSurfaceDrawQuad::Create();
      io_surface_quad->SetNew(shared_quad_state,
                              quad_rect,
                              opaque_rect,
                              visible_size,
                              frame_->texture_id(),
                              IOSurfaceDrawQuad::UNFLIPPED);
      quad_sink->Append(io_surface_quad.PassAs<DrawQuad>(), append_quads_data);
      break;
    }
    case GL_TEXTURE_EXTERNAL_OES: {
      // StreamTexture hardware decoder.
      gfx::Transform transform(provider_client_impl_->stream_texture_matrix());
      transform.Scale(tex_width_scale, tex_height_scale);
      scoped_ptr<StreamVideoDrawQuad> stream_video_quad =
          StreamVideoDrawQuad::Create();
      stream_video_quad->SetNew(shared_quad_state,
                                quad_rect,
                                opaque_rect,
                                frame_->texture_id(),
                                transform);
      quad_sink->Append(stream_video_quad.PassAs<DrawQuad>(),
                        append_quads_data);
      break;
    }
    default:
      // Someone updated ConvertVFCFormatToGLenum() above without updating
      // this switch.
      NOTREACHED();
      break;
  }
}

void VideoLayerImpl::DidDraw(ResourceProvider* resource_provider) {
  LayerImpl::DidDraw(resource_provider);

  if (!frame_)
    return;

  if (format_ == GL_TEXTURE_2D) {
    DCHECK(external_texture_resource_);
    // TODO: the following assert will not be true when sending resources to a
    // parent compositor. We will probably need to hold on to frame_ for
    // longer, and have several "current frames" in the pipeline.
    DCHECK(!resource_provider->InUseByConsumer(external_texture_resource_));
    resource_provider->DeleteResource(external_texture_resource_);
    external_texture_resource_ = 0;
  }

  provider_client_impl_->PutCurrentFrame(frame_);
  frame_ = NULL;

  provider_client_impl_->ReleaseLock();
}

static gfx::Size VideoFrameDimension(media::VideoFrame* frame, int plane) {
  gfx::Size dimensions = frame->coded_size();
  switch (frame->format()) {
    case media::VideoFrame::YV12:
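      // YV12 is 4:2:0: each chroma plane is half the luma plane's width and
      // height.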
      if (plane != media::VideoFrame::kYPlane) {
        dimensions.set_width(dimensions.width() / 2);
        dimensions.set_height(dimensions.height() / 2);
      }
      break;
    case media::VideoFrame::YV16:
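      // YV16 is 4:2:2: each chroma plane is half the luma plane's width but
      // full height.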
      if (plane != media::VideoFrame::kYPlane)
        dimensions.set_width(dimensions.width() / 2);
      break;
    default:
      break;
  }
  return dimensions;
}

bool VideoLayerImpl::FramePlane::AllocateData(
    ResourceProvider* resource_provider) {
  if (resource_id)
    return true;

  resource_id = resource_provider->CreateResource(
      size, format, ResourceProvider::TextureUsageAny);
  return resource_id;
}

void VideoLayerImpl::FramePlane::FreeData(ResourceProvider* resource_provider) {
  if (!resource_id)
    return;

  resource_provider->DeleteResource(resource_id);
  resource_id = 0;
}

bool VideoLayerImpl::AllocatePlaneData(ResourceProvider* resource_provider) {
  int max_texture_size = resource_provider->max_texture_size();
  size_t plane_count = NumPlanes();
  for (size_t plane_index = 0; plane_index < plane_count; ++plane_index) {
    VideoLayerImpl::FramePlane* plane = &frame_planes_[plane_index];

    gfx::Size required_texture_size = VideoFrameDimension(frame_, plane_index);
    // TODO: Remove the test against max_texture_size when tiled layers are
    // implemented.
    if (required_texture_size.IsEmpty() ||
        required_texture_size.width() > max_texture_size ||
        required_texture_size.height() > max_texture_size)
      return false;

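    // Drop and reallocate the plane's resource if its required size or format
    // has changed.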
    if (plane->size != required_texture_size || plane->format != format_) {
      plane->FreeData(resource_provider);
      plane->size = required_texture_size;
      plane->format = format_;
    }

    if (!plane->AllocateData(resource_provider))
      return false;
  }
  return true;
}

bool VideoLayerImpl::CopyPlaneData(ResourceProvider* resource_provider) {
  size_t plane_count = NumPlanes();
  if (!plane_count)
    return true;

  if (convert_yuv_) {
    if (!video_renderer_)
      video_renderer_.reset(new media::SkCanvasVideoRenderer);
    const VideoLayerImpl::FramePlane& plane =
        frame_planes_[media::VideoFrame::kRGBPlane];
    ResourceProvider::ScopedWriteLockSoftware lock(resource_provider,
                                                   plane.resource_id);
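    // Convert and paint the YUV frame into the RGBA plane's software bitmap.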
    video_renderer_->Paint(frame_,
                           lock.sk_canvas(),
                           frame_->visible_rect(),
                           0xff);
    return true;
  }

  for (size_t plane_index = 0; plane_index < plane_count; ++plane_index) {
    const VideoLayerImpl::FramePlane& plane = frame_planes_[plane_index];
    // Only non-NATIVE_TEXTURE frames should need a software upload.
    DCHECK_EQ(plane.format, GL_LUMINANCE);
    const uint8_t* software_plane_pixels = frame_->data(plane_index);
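    // image_rect describes the full source bitmap, whose row length is the
    // plane's stride; source_rect restricts the upload to the plane's actual
    // size.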
    gfx::Rect image_rect(0,
                         0,
                         frame_->stride(plane_index),
                         plane.size.height());
    gfx::Rect source_rect(plane.size);
    resource_provider->SetPixels(plane.resource_id,
                                 software_plane_pixels,
                                 image_rect,
                                 source_rect,
                                 gfx::Vector2d());
  }
  return true;
}

void VideoLayerImpl::FreePlaneData(ResourceProvider* resource_provider) {
  for (size_t i = 0; i < media::VideoFrame::kMaxPlanes; ++i)
    frame_planes_[i].FreeData(resource_provider);
}

void VideoLayerImpl::FreeUnusedPlaneData(ResourceProvider* resource_provider) {
  size_t first_unused_plane = NumPlanes();
  for (size_t i = first_unused_plane; i < media::VideoFrame::kMaxPlanes; ++i)
    frame_planes_[i].FreeData(resource_provider);
}

void VideoLayerImpl::DidLoseOutputSurface() {
  FreePlaneData(layer_tree_impl()->resource_provider());
}

void VideoLayerImpl::SetNeedsRedraw() {
  layer_tree_impl()->SetNeedsRedraw();
}

void VideoLayerImpl::SetProviderClientImpl(
    scoped_refptr<VideoFrameProviderClientImpl> provider_client_impl) {
  provider_client_impl_ = provider_client_impl;
}

const char* VideoLayerImpl::LayerTypeAsString() const {
  return "VideoLayer";
}

}  // namespace cc