OLD | NEW |
1 // Copyright 2011 The Chromium Authors. All rights reserved. | 1 // Copyright 2011 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "cc/video_layer_impl.h" | 5 #include "cc/video_layer_impl.h" |
6 | 6 |
7 #include "base/logging.h" | 7 #include "base/logging.h" |
8 #include "cc/io_surface_draw_quad.h" | 8 #include "cc/io_surface_draw_quad.h" |
9 #include "cc/layer_tree_impl.h" | 9 #include "cc/layer_tree_impl.h" |
10 #include "cc/math_util.h" | 10 #include "cc/math_util.h" |
11 #include "cc/quad_sink.h" | 11 #include "cc/quad_sink.h" |
12 #include "cc/renderer.h" | 12 #include "cc/renderer.h" |
13 #include "cc/resource_provider.h" | 13 #include "cc/resource_provider.h" |
14 #include "cc/stream_video_draw_quad.h" | 14 #include "cc/stream_video_draw_quad.h" |
15 #include "cc/texture_draw_quad.h" | 15 #include "cc/texture_draw_quad.h" |
| 16 #include "cc/video_frame_provider_client_impl.h" |
16 #include "cc/yuv_video_draw_quad.h" | 17 #include "cc/yuv_video_draw_quad.h" |
17 #include "gpu/GLES2/gl2extchromium.h" | 18 #include "gpu/GLES2/gl2extchromium.h" |
18 #include "media/filters/skcanvas_video_renderer.h" | 19 #include "media/filters/skcanvas_video_renderer.h" |
19 #include "third_party/khronos/GLES2/gl2.h" | 20 #include "third_party/khronos/GLES2/gl2.h" |
20 #include "third_party/khronos/GLES2/gl2ext.h" | 21 #include "third_party/khronos/GLES2/gl2ext.h" |
21 | 22 |
22 namespace cc { | 23 namespace cc { |
23 | 24 |
24 VideoLayerImpl::VideoLayerImpl(LayerTreeImpl* treeImpl, int id, VideoFrameProvider* provider) | 25 // static |
| 26 scoped_ptr<VideoLayerImpl> VideoLayerImpl::create(LayerTreeImpl* treeImpl, int id, VideoFrameProvider* provider) |
| 27 { |
| 28 scoped_ptr<VideoLayerImpl> layer(new VideoLayerImpl(treeImpl, id)); |
| 29 layer->setProviderClientImpl(VideoFrameProviderClientImpl::Create(provider)); |
| 30 DCHECK(treeImpl->proxy()->isImplThread()); |
| 31 DCHECK(treeImpl->proxy()->isMainThreadBlocked()); |
| 32 return layer.Pass(); |
| 33 } |
| 34 |
| 35 VideoLayerImpl::VideoLayerImpl(LayerTreeImpl* treeImpl, int id) |
25 : LayerImpl(treeImpl, id) | 36 : LayerImpl(treeImpl, id) |
26 , m_provider(provider) | |
27 , m_frame(0) | 37 , m_frame(0) |
28 , m_format(GL_INVALID_VALUE) | 38 , m_format(GL_INVALID_VALUE) |
29 , m_convertYUV(false) | 39 , m_convertYUV(false) |
30 , m_externalTextureResource(0) | 40 , m_externalTextureResource(0) |
31 { | 41 { |
32 // This matrix is the default transformation for stream textures, and flips on the Y axis. | |
33 m_streamTextureMatrix = gfx::Transform( | |
34 1.0, 0.0, 0.0, 0.0, | |
35 0.0, -1.0, 0.0, 1.0, | |
36 0.0, 0.0, 1.0, 0.0, | |
37 0.0, 0.0, 0.0, 1.0); | |
38 | |
39 // This only happens during a commit on the compositor thread while the main | |
40 // thread is blocked. That makes this a thread-safe call to set the video | |
41 // frame provider client that does not require a lock. The same is true of | |
42 // the call in the destructor. | |
43 m_provider->SetVideoFrameProviderClient(this); | |
44 } | 42 } |
45 | 43 |
46 VideoLayerImpl::~VideoLayerImpl() | 44 VideoLayerImpl::~VideoLayerImpl() |
47 { | 45 { |
48 // See comment in constructor for why this doesn't need a lock. | 46 if (!m_providerClientImpl->Stopped()) { |
49 if (m_provider) { | 47 // In impl side painting, we may have a pending and active layer |
50 m_provider->SetVideoFrameProviderClient(0); | 48 // associated with the video provider at the same time. Both have a ref |
51 m_provider = 0; | 49 // on the VideoFrameProviderClientImpl, but we stop when the first |
| 50 // LayerImpl (the one on the pending tree) is destroyed since we know |
| 51 // the main thread is blocked for this commit. |
| 52 DCHECK(layerTreeImpl()->proxy()->isImplThread()); |
| 53 DCHECK(layerTreeImpl()->proxy()->isMainThreadBlocked()); |
| 54 m_providerClientImpl->Stop(); |
52 } | 55 } |
53 freePlaneData(layerTreeImpl()->resource_provider()); | 56 freePlaneData(layerTreeImpl()->resource_provider()); |
54 | 57 |
55 #ifndef NDEBUG | 58 #ifndef NDEBUG |
56 for (size_t i = 0; i < media::VideoFrame::kMaxPlanes; ++i) | 59 for (size_t i = 0; i < media::VideoFrame::kMaxPlanes; ++i) |
57 DCHECK(!m_framePlanes[i].resourceId); | 60 DCHECK(!m_framePlanes[i].resourceId); |
58 DCHECK(!m_externalTextureResource); | 61 DCHECK(!m_externalTextureResource); |
59 #endif | 62 #endif |
60 } | 63 } |
61 | 64 |
62 void VideoLayerImpl::StopUsingProvider() | 65 scoped_ptr<LayerImpl> VideoLayerImpl::createLayerImpl(LayerTreeImpl* treeImpl) |
63 { | 66 { |
64 // Block the provider from shutting down until this client is done | 67 return scoped_ptr<LayerImpl>(new VideoLayerImpl(treeImpl, id())); |
65 // using the frame. | 68 } |
66 base::AutoLock locker(m_providerLock); | 69 |
67 DCHECK(!m_frame); | 70 void VideoLayerImpl::pushPropertiesTo(LayerImpl* layer) |
68 m_provider = 0; | 71 { |
| 72 LayerImpl::pushPropertiesTo(layer); |
| 73 |
| 74 VideoLayerImpl* other = static_cast<VideoLayerImpl*>(layer); |
| 75 other->setProviderClientImpl(m_providerClientImpl); |
| 76 } |
| 77 |
| 78 void VideoLayerImpl::didBecomeActive() |
| 79 { |
| 80 m_providerClientImpl->set_active_video_layer(this); |
69 } | 81 } |
70 | 82 |
71 // Convert media::VideoFrame::Format to OpenGL enum values. | 83 // Convert media::VideoFrame::Format to OpenGL enum values. |
72 static GLenum convertVFCFormatToGLenum(const media::VideoFrame& frame) | 84 static GLenum convertVFCFormatToGLenum(const media::VideoFrame& frame) |
73 { | 85 { |
74 switch (frame.format()) { | 86 switch (frame.format()) { |
75 case media::VideoFrame::YV12: | 87 case media::VideoFrame::YV12: |
76 case media::VideoFrame::YV16: | 88 case media::VideoFrame::YV16: |
77 return GL_LUMINANCE; | 89 return GL_LUMINANCE; |
78 case media::VideoFrame::NATIVE_TEXTURE: | 90 case media::VideoFrame::NATIVE_TEXTURE: |
(...skipping 30 matching lines...)
109 return 0; | 121 return 0; |
110 } | 122 } |
111 NOTREACHED(); | 123 NOTREACHED(); |
112 return 0; | 124 return 0; |
113 } | 125 } |
114 | 126 |
115 void VideoLayerImpl::willDraw(ResourceProvider* resourceProvider) | 127 void VideoLayerImpl::willDraw(ResourceProvider* resourceProvider) |
116 { | 128 { |
117 LayerImpl::willDraw(resourceProvider); | 129 LayerImpl::willDraw(resourceProvider); |
118 | 130 |
| 131 |
119 // Explicitly acquire and release the provider mutex so it can be held from | 132 // Explicitly acquire and release the provider mutex so it can be held from |
120 // willDraw to didDraw. Since the compositor thread is in the middle of | 133 // willDraw to didDraw. Since the compositor thread is in the middle of |
121 // drawing, the layer will not be destroyed before didDraw is called. | 134 // drawing, the layer will not be destroyed before didDraw is called. |
122 // Therefore, the only thing that will prevent this lock from being released | 135 // Therefore, the only thing that will prevent this lock from being released |
123 // is the GPU process locking it. As the GPU process can't cause the | 136 // is the GPU process locking it. As the GPU process can't cause the |
124 // destruction of the provider (calling stopUsingProvider), holding this | 137 // destruction of the provider (calling stopUsingProvider), holding this |
125 // lock should not cause a deadlock. | 138 // lock should not cause a deadlock. |
126 m_providerLock.Acquire(); | 139 m_frame = m_providerClientImpl->AcquireLockAndCurrentFrame(); |
127 | 140 |
128 willDrawInternal(resourceProvider); | 141 willDrawInternal(resourceProvider); |
129 freeUnusedPlaneData(resourceProvider); | 142 freeUnusedPlaneData(resourceProvider); |
130 | 143 |
131 if (!m_frame) | 144 if (!m_frame) |
132 m_providerLock.Release(); | 145 m_providerClientImpl->ReleaseLock(); |
133 } | 146 } |
134 | 147 |
135 void VideoLayerImpl::willDrawInternal(ResourceProvider* resourceProvider) | 148 void VideoLayerImpl::willDrawInternal(ResourceProvider* resourceProvider) |
136 { | 149 { |
137 DCHECK(!m_externalTextureResource); | 150 DCHECK(!m_externalTextureResource); |
138 | 151 |
139 if (!m_provider) { | |
140 m_frame = 0; | |
141 return; | |
142 } | |
143 | |
144 m_frame = m_provider->GetCurrentFrame(); | |
145 | |
146 if (!m_frame) | 152 if (!m_frame) |
147 return; | 153 return; |
148 | 154 |
149 m_format = convertVFCFormatToGLenum(*m_frame); | 155 m_format = convertVFCFormatToGLenum(*m_frame); |
150 | 156 |
151 // If these fail, we'll have to add draw logic that handles offset bitmap/ | 157 // If these fail, we'll have to add draw logic that handles offset bitmap/ |
152 // texture UVs. For now, just expect (0, 0) offset, since all our decoders | 158 // texture UVs. For now, just expect (0, 0) offset, since all our decoders |
153 // so far don't offset. | 159 // so far don't offset. |
154 DCHECK_EQ(m_frame->visible_rect().x(), 0); | 160 DCHECK_EQ(m_frame->visible_rect().x(), 0); |
155 DCHECK_EQ(m_frame->visible_rect().y(), 0); | 161 DCHECK_EQ(m_frame->visible_rect().y(), 0); |
156 | 162 |
157 if (m_format == GL_INVALID_VALUE) { | 163 if (m_format == GL_INVALID_VALUE) { |
158 m_provider->PutCurrentFrame(m_frame); | 164 m_providerClientImpl->PutCurrentFrame(m_frame); |
159 m_frame = 0; | 165 m_frame = 0; |
160 return; | 166 return; |
161 } | 167 } |
162 | 168 |
163 // FIXME: If we're in software compositing mode, we do the YUV -> RGB | 169 // FIXME: If we're in software compositing mode, we do the YUV -> RGB |
164 // conversion here. That involves an extra copy of each frame to a bitmap. | 170 // conversion here. That involves an extra copy of each frame to a bitmap. |
165 // Obviously, this is suboptimal and should be addressed once ubercompositor | 171 // Obviously, this is suboptimal and should be addressed once ubercompositor |
166 // starts shaping up. | 172 // starts shaping up. |
167 m_convertYUV = resourceProvider->defaultResourceType() == ResourceProvider::Bitmap && | 173 m_convertYUV = resourceProvider->defaultResourceType() == ResourceProvider::Bitmap && |
168 (m_frame->format() == media::VideoFrame::YV12 || | 174 (m_frame->format() == media::VideoFrame::YV12 || |
169 m_frame->format() == media::VideoFrame::YV16); | 175 m_frame->format() == media::VideoFrame::YV16); |
170 | 176 |
171 if (m_convertYUV) | 177 if (m_convertYUV) |
172 m_format = GL_RGBA; | 178 m_format = GL_RGBA; |
173 | 179 |
174 if (!allocatePlaneData(resourceProvider)) { | 180 if (!allocatePlaneData(resourceProvider)) { |
175 m_provider->PutCurrentFrame(m_frame); | 181 m_providerClientImpl->PutCurrentFrame(m_frame); |
176 m_frame = 0; | 182 m_frame = 0; |
177 return; | 183 return; |
178 } | 184 } |
179 | 185 |
180 if (!copyPlaneData(resourceProvider)) { | 186 if (!copyPlaneData(resourceProvider)) { |
181 m_provider->PutCurrentFrame(m_frame); | 187 m_providerClientImpl->PutCurrentFrame(m_frame); |
182 m_frame = 0; | 188 m_frame = 0; |
183 return; | 189 return; |
184 } | 190 } |
185 | 191 |
186 if (m_format == GL_TEXTURE_2D) | 192 if (m_format == GL_TEXTURE_2D) |
187 m_externalTextureResource = resourceProvider->createResourceFromExternalTexture(m_frame->texture_id()); | 193 m_externalTextureResource = resourceProvider->createResourceFromExternalTexture(m_frame->texture_id()); |
188 } | 194 } |
189 | 195 |
190 void VideoLayerImpl::appendQuads(QuadSink& quadSink, AppendQuadsData& appendQuadsData) | 196 void VideoLayerImpl::appendQuads(QuadSink& quadSink, AppendQuadsData& appendQuadsData) |
191 { | 197 { |
(...skipping 56 matching lines...)
248 } | 254 } |
249 case GL_TEXTURE_RECTANGLE_ARB: { | 255 case GL_TEXTURE_RECTANGLE_ARB: { |
250 gfx::Size visibleSize(visibleRect.width(), visibleRect.height()); | 256 gfx::Size visibleSize(visibleRect.width(), visibleRect.height()); |
251 scoped_ptr<IOSurfaceDrawQuad> ioSurfaceQuad = IOSurfaceDrawQuad::Create(); | 257 scoped_ptr<IOSurfaceDrawQuad> ioSurfaceQuad = IOSurfaceDrawQuad::Create(); |
252 ioSurfaceQuad->SetNew(sharedQuadState, quadRect, opaqueRect, visibleSize, m_frame->texture_id(), IOSurfaceDrawQuad::UNFLIPPED); | 258 ioSurfaceQuad->SetNew(sharedQuadState, quadRect, opaqueRect, visibleSize, m_frame->texture_id(), IOSurfaceDrawQuad::UNFLIPPED); |
253 quadSink.append(ioSurfaceQuad.PassAs<DrawQuad>(), appendQuadsData); | 259 quadSink.append(ioSurfaceQuad.PassAs<DrawQuad>(), appendQuadsData); |
254 break; | 260 break; |
255 } | 261 } |
256 case GL_TEXTURE_EXTERNAL_OES: { | 262 case GL_TEXTURE_EXTERNAL_OES: { |
257 // StreamTexture hardware decoder. | 263 // StreamTexture hardware decoder. |
258 gfx::Transform transform(m_streamTextureMatrix); | 264 gfx::Transform transform(m_providerClientImpl->stream_texture_matrix()); |
259 transform.Scale(texWidthScale, texHeightScale); | 265 transform.Scale(texWidthScale, texHeightScale); |
260 scoped_ptr<StreamVideoDrawQuad> streamVideoQuad = StreamVideoDrawQuad::Create(); | 266 scoped_ptr<StreamVideoDrawQuad> streamVideoQuad = StreamVideoDrawQuad::Create(); |
261 streamVideoQuad->SetNew(sharedQuadState, quadRect, opaqueRect, m_frame->texture_id(), transform); | 267 streamVideoQuad->SetNew(sharedQuadState, quadRect, opaqueRect, m_frame->texture_id(), transform); |
262 quadSink.append(streamVideoQuad.PassAs<DrawQuad>(), appendQuadsData); | 268 quadSink.append(streamVideoQuad.PassAs<DrawQuad>(), appendQuadsData); |
263 break; | 269 break; |
264 } | 270 } |
265 default: | 271 default: |
266 NOTREACHED(); // Someone updated convertVFCFormatToGLenum above but update this! | 272 NOTREACHED(); // Someone updated convertVFCFormatToGLenum above but update this! |
267 break; | 273 break; |
268 } | 274 } |
269 } | 275 } |
270 | 276 |
271 void VideoLayerImpl::didDraw(ResourceProvider* resourceProvider) | 277 void VideoLayerImpl::didDraw(ResourceProvider* resourceProvider) |
272 { | 278 { |
273 LayerImpl::didDraw(resourceProvider); | 279 LayerImpl::didDraw(resourceProvider); |
274 | 280 |
275 if (!m_frame) | 281 if (!m_frame) |
276 return; | 282 return; |
277 | 283 |
278 if (m_format == GL_TEXTURE_2D) { | 284 if (m_format == GL_TEXTURE_2D) { |
279 DCHECK(m_externalTextureResource); | 285 DCHECK(m_externalTextureResource); |
280 // FIXME: the following assert will not be true when sending resources to a | 286 // FIXME: the following assert will not be true when sending resources to a |
281 // parent compositor. We will probably need to hold on to m_frame for | 287 // parent compositor. We will probably need to hold on to m_frame for |
282 // longer, and have several "current frames" in the pipeline. | 288 // longer, and have several "current frames" in the pipeline. |
283 DCHECK(!resourceProvider->inUseByConsumer(m_externalTextureResource)); | 289 DCHECK(!resourceProvider->inUseByConsumer(m_externalTextureResource)); |
284 resourceProvider->deleteResource(m_externalTextureResource); | 290 resourceProvider->deleteResource(m_externalTextureResource); |
285 m_externalTextureResource = 0; | 291 m_externalTextureResource = 0; |
286 } | 292 } |
287 | 293 |
288 m_provider->PutCurrentFrame(m_frame); | 294 m_providerClientImpl->PutCurrentFrame(m_frame); |
289 m_frame = 0; | 295 m_frame = 0; |
290 | 296 |
291 m_providerLock.Release(); | 297 m_providerClientImpl->ReleaseLock(); |
292 } | 298 } |
293 | 299 |
294 static gfx::Size videoFrameDimension(media::VideoFrame* frame, int plane) { | 300 static gfx::Size videoFrameDimension(media::VideoFrame* frame, int plane) { |
295 gfx::Size dimensions = frame->coded_size(); | 301 gfx::Size dimensions = frame->coded_size(); |
296 switch (frame->format()) { | 302 switch (frame->format()) { |
297 case media::VideoFrame::YV12: | 303 case media::VideoFrame::YV12: |
298 if (plane != media::VideoFrame::kYPlane) { | 304 if (plane != media::VideoFrame::kYPlane) { |
299 dimensions.set_width(dimensions.width() / 2); | 305 dimensions.set_width(dimensions.width() / 2); |
300 dimensions.set_height(dimensions.height() / 2); | 306 dimensions.set_height(dimensions.height() / 2); |
301 } | 307 } |
(...skipping 88 matching lines...)
390 m_framePlanes[i].freeData(resourceProvider); | 396 m_framePlanes[i].freeData(resourceProvider); |
391 } | 397 } |
392 | 398 |
393 void VideoLayerImpl::freeUnusedPlaneData(ResourceProvider* resourceProvider) | 399 void VideoLayerImpl::freeUnusedPlaneData(ResourceProvider* resourceProvider) |
394 { | 400 { |
395 size_t firstUnusedPlane = numPlanes(); | 401 size_t firstUnusedPlane = numPlanes(); |
396 for (size_t i = firstUnusedPlane; i < media::VideoFrame::kMaxPlanes; ++i) | 402 for (size_t i = firstUnusedPlane; i < media::VideoFrame::kMaxPlanes; ++i) |
397 m_framePlanes[i].freeData(resourceProvider); | 403 m_framePlanes[i].freeData(resourceProvider); |
398 } | 404 } |
399 | 405 |
400 void VideoLayerImpl::DidReceiveFrame() | |
401 { | |
402 setNeedsRedraw(); | |
403 } | |
404 | |
405 void VideoLayerImpl::DidUpdateMatrix(const float matrix[16]) | |
406 { | |
407 m_streamTextureMatrix = gfx::Transform( | |
408 matrix[0], matrix[4], matrix[8], matrix[12], | |
409 matrix[1], matrix[5], matrix[9], matrix[13], | |
410 matrix[2], matrix[6], matrix[10], matrix[14], | |
411 matrix[3], matrix[7], matrix[11], matrix[15]); | |
412 setNeedsRedraw(); | |
413 } | |
414 | |
415 void VideoLayerImpl::didLoseOutputSurface() | 406 void VideoLayerImpl::didLoseOutputSurface() |
416 { | 407 { |
417 freePlaneData(layerTreeImpl()->resource_provider()); | 408 freePlaneData(layerTreeImpl()->resource_provider()); |
418 } | 409 } |
419 | 410 |
420 void VideoLayerImpl::setNeedsRedraw() | 411 void VideoLayerImpl::setNeedsRedraw() |
421 { | 412 { |
422 layerTreeImpl()->SetNeedsRedraw(); | 413 layerTreeImpl()->SetNeedsRedraw(); |
423 } | 414 } |
424 | 415 |
| 416 void VideoLayerImpl::setProviderClientImpl(scoped_refptr<VideoFrameProviderClientImpl> providerClientImpl) |
| 417 { |
| 418 m_providerClientImpl = providerClientImpl; |
| 419 } |
| 420 |
425 const char* VideoLayerImpl::layerTypeAsString() const | 421 const char* VideoLayerImpl::layerTypeAsString() const |
426 { | 422 { |
427 return "VideoLayer"; | 423 return "VideoLayer"; |
428 } | 424 } |
429 | 425 |
430 } // namespace cc | 426 } // namespace cc |
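
Note: the patch above routes all VideoFrameProvider interaction through a shared, ref-counted VideoFrameProviderClientImpl. For orientation only, the calls made on m_providerClientImpl in this file imply an interface roughly like the sketch below; it is inferred from the call sites, not copied from cc/video_frame_provider_client_impl.h, so the actual return types, base classes, and ownership details may differ.

    // Sketch (inferred from usage in video_layer_impl.cc; not the real header).
    class VideoFrameProviderClientImpl {
    public:
        // Created at commit time and shared by the pending and active VideoLayerImpl.
        static scoped_refptr<VideoFrameProviderClientImpl> Create(VideoFrameProvider* provider);

        // Detaches from the provider; called by the first layer destroyed while
        // the main thread is blocked.
        void Stop();
        bool Stopped() const;

        // The lock is acquired in willDraw and released in didDraw, bracketing
        // all use of the returned frame.
        media::VideoFrame* AcquireLockAndCurrentFrame();
        void PutCurrentFrame(media::VideoFrame* frame);
        void ReleaseLock();

        // The active-tree layer is the one redrawn on new frames and matrix updates.
        void set_active_video_layer(VideoLayerImpl* layer);
        const gfx::Transform& stream_texture_matrix() const;
    };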