Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(500)

Side by Side Diff: cc/video_layer_impl.cc

Issue 11274017: Added support for YUV videos to the software compositor. (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src
Patch Set: More cosmetics. Created 8 years, 2 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
OLDNEW
1 // Copyright 2011 The Chromium Authors. All rights reserved. 1 // Copyright 2011 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "config.h" 5 #include "config.h"
6 6
7 #include "cc/video_layer_impl.h" 7 #include "cc/video_layer_impl.h"
8 8
9 #include "NotImplemented.h" 9 #include "base/logging.h"
10 #include "cc/io_surface_draw_quad.h" 10 #include "cc/io_surface_draw_quad.h"
11 #include "cc/layer_tree_host_impl.h" 11 #include "cc/layer_tree_host_impl.h"
12 #include "cc/proxy.h" 12 #include "cc/proxy.h"
13 #include "cc/quad_sink.h" 13 #include "cc/quad_sink.h"
14 #include "cc/resource_provider.h" 14 #include "cc/resource_provider.h"
15 #include "cc/stream_video_draw_quad.h" 15 #include "cc/stream_video_draw_quad.h"
16 #include "cc/texture_draw_quad.h" 16 #include "cc/texture_draw_quad.h"
17 #include "cc/video_frame_draw_quad.h"
17 #include "cc/yuv_video_draw_quad.h" 18 #include "cc/yuv_video_draw_quad.h"
18 #include "third_party/khronos/GLES2/gl2.h" 19 #include "third_party/khronos/GLES2/gl2.h"
19 #include "third_party/khronos/GLES2/gl2ext.h" 20 #include "third_party/khronos/GLES2/gl2ext.h"
20 #include <public/WebVideoFrame.h>
21 21
22 namespace cc { 22 namespace cc {
23 23
24 VideoLayerImpl::VideoLayerImpl(int id, WebKit::WebVideoFrameProvider* provider) 24 VideoLayerImpl::VideoLayerImpl(int id, WebKit::WebVideoFrameProvider* provider,
25 const FrameUnwrapper& unwrapper)
25 : LayerImpl(id) 26 : LayerImpl(id)
26 , m_provider(provider) 27 , m_provider(provider)
28 , m_unwrapper(unwrapper)
29 , m_webFrame(0)
27 , m_frame(0) 30 , m_frame(0)
31 , m_format(GL_INVALID_VALUE)
32 , m_emitFrameQuad(false)
28 , m_externalTextureResource(0) 33 , m_externalTextureResource(0)
29 { 34 {
30 // This matrix is the default transformation for stream textures, and flips on the Y axis. 35 // This matrix is the default transformation for stream textures, and flips on the Y axis.
31 m_streamTextureMatrix = WebKit::WebTransformationMatrix( 36 m_streamTextureMatrix = WebKit::WebTransformationMatrix(
32 1, 0, 0, 0, 37 1, 0, 0, 0,
33 0, -1, 0, 0, 38 0, -1, 0, 0,
34 0, 0, 1, 0, 39 0, 0, 1, 0,
35 0, 1, 0, 1); 40 0, 1, 0, 1);
36 41
37 // This only happens during a commit on the compositor thread while the main 42 // This only happens during a commit on the compositor thread while the main
38 // thread is blocked. That makes this a thread-safe call to set the video 43 // thread is blocked. That makes this a thread-safe call to set the video
39 // frame provider client that does not require a lock. The same is true of 44 // frame provider client that does not require a lock. The same is true of
40 // the call in the destructor. 45 // the call in the destructor.
41 DCHECK(Proxy::isMainThreadBlocked()); 46 DCHECK(Proxy::isMainThreadBlocked());
42 m_provider->setVideoFrameProviderClient(this); 47 m_provider->setVideoFrameProviderClient(this);
43 } 48 }
44 49
45 VideoLayerImpl::~VideoLayerImpl() 50 VideoLayerImpl::~VideoLayerImpl()
46 { 51 {
47 // See comment in constructor for why this doesn't need a lock. 52 // See comment in constructor for why this doesn't need a lock.
48 DCHECK(Proxy::isMainThreadBlocked()); 53 DCHECK(Proxy::isMainThreadBlocked());
49 if (m_provider) { 54 if (m_provider) {
50 m_provider->setVideoFrameProviderClient(0); 55 m_provider->setVideoFrameProviderClient(0);
51 m_provider = 0; 56 m_provider = 0;
52 } 57 }
53 freePlaneData(layerTreeHostImpl()->resourceProvider()); 58 freePlaneData(layerTreeHostImpl()->resourceProvider());
54 59
55 #ifndef NDEBUG 60 #ifndef NDEBUG
56 for (unsigned i = 0; i < WebKit::WebVideoFrame::maxPlanes; ++i) 61 for (size_t i = 0; i < media::VideoFrame::kMaxPlanes; ++i)
57 DCHECK(!m_framePlanes[i].resourceId); 62 DCHECK(!m_framePlanes[i].resourceId);
58 DCHECK(!m_externalTextureResource); 63 DCHECK(!m_externalTextureResource);
59 #endif 64 #endif
60 } 65 }
61 66
62 void VideoLayerImpl::stopUsingProvider() 67 void VideoLayerImpl::stopUsingProvider()
63 { 68 {
64 // Block the provider from shutting down until this client is done 69 // Block the provider from shutting down until this client is done
65 // using the frame. 70 // using the frame.
66 base::AutoLock locker(m_providerLock); 71 base::AutoLock locker(m_providerLock);
67 DCHECK(!m_frame); 72 DCHECK(!m_frame);
68 m_provider = 0; 73 m_provider = 0;
69 } 74 }
70 75
71 // Convert WebKit::WebVideoFrame::Format to GraphicsContext3D's format enum valu es. 76 // Convert media::VideoFrame::Format to OpenGL enum values.
72 static GLenum convertVFCFormatToGC3DFormat(const WebKit::WebVideoFrame& frame) 77 static GLenum convertVFCFormatToGLenum(const media::VideoFrame& frame)
73 { 78 {
74 switch (frame.format()) { 79 switch (frame.format()) {
75 case WebKit::WebVideoFrame::FormatYV12: 80 case media::VideoFrame::YV12:
76 case WebKit::WebVideoFrame::FormatYV16: 81 case media::VideoFrame::YV16:
77 return GL_LUMINANCE; 82 return GL_LUMINANCE;
78 case WebKit::WebVideoFrame::FormatNativeTexture: 83 case media::VideoFrame::NATIVE_TEXTURE:
79 return frame.textureTarget(); 84 return frame.texture_target();
80 case WebKit::WebVideoFrame::FormatInvalid: 85 case media::VideoFrame::INVALID:
81 case WebKit::WebVideoFrame::FormatRGB32: 86 case media::VideoFrame::RGB32:
82 case WebKit::WebVideoFrame::FormatEmpty: 87 case media::VideoFrame::EMPTY:
83 case WebKit::WebVideoFrame::FormatI420: 88 case media::VideoFrame::I420:
84 notImplemented(); 89 NOTREACHED();
90 break;
85 } 91 }
86 return GL_INVALID_VALUE; 92 return GL_INVALID_VALUE;
87 } 93 }
88 94
95 static size_t numPlanes(const media::VideoFrame& frame)
96 {
97 switch (frame.format()) {
98 case media::VideoFrame::RGB32:
99 return 1;
100 case media::VideoFrame::YV12:
101 case media::VideoFrame::YV16:
102 return 3;
103 case media::VideoFrame::INVALID:
104 case media::VideoFrame::EMPTY:
105 case media::VideoFrame::I420:
106 break;
107 case media::VideoFrame::NATIVE_TEXTURE:
108 return 0;
109 }
110 NOTREACHED();
111 return 0;
112 }
113
89 void VideoLayerImpl::willDraw(ResourceProvider* resourceProvider) 114 void VideoLayerImpl::willDraw(ResourceProvider* resourceProvider)
90 { 115 {
91 DCHECK(Proxy::isImplThread()); 116 DCHECK(Proxy::isImplThread());
92 LayerImpl::willDraw(resourceProvider); 117 LayerImpl::willDraw(resourceProvider);
93 118
94 // Explicitly acquire and release the provider mutex so it can be held from 119 // Explicitly acquire and release the provider mutex so it can be held from
95 // willDraw to didDraw. Since the compositor thread is in the middle of 120 // willDraw to didDraw. Since the compositor thread is in the middle of
96 // drawing, the layer will not be destroyed before didDraw is called. 121 // drawing, the layer will not be destroyed before didDraw is called.
97 // Therefore, the only thing that will prevent this lock from being released 122 // Therefore, the only thing that will prevent this lock from being released
98 // is the GPU process locking it. As the GPU process can't cause the 123 // is the GPU process locking it. As the GPU process can't cause the
(...skipping 11 matching lines...) Expand all
110 void VideoLayerImpl::willDrawInternal(ResourceProvider* resourceProvider) 135 void VideoLayerImpl::willDrawInternal(ResourceProvider* resourceProvider)
111 { 136 {
112 DCHECK(Proxy::isImplThread()); 137 DCHECK(Proxy::isImplThread());
113 DCHECK(!m_externalTextureResource); 138 DCHECK(!m_externalTextureResource);
114 139
115 if (!m_provider) { 140 if (!m_provider) {
116 m_frame = 0; 141 m_frame = 0;
117 return; 142 return;
118 } 143 }
119 144
120 m_frame = m_provider->getCurrentFrame(); 145 m_webFrame = m_provider->getCurrentFrame();
146 m_frame = m_unwrapper.Run(m_webFrame);
121 147
122 if (!m_frame) 148 if (!m_frame)
123 return; 149 return;
124 150
125 m_format = convertVFCFormatToGC3DFormat(*m_frame); 151 m_format = convertVFCFormatToGLenum(*m_frame);
126 152
127 if (m_format == GL_INVALID_VALUE) { 153 if (m_format == GL_INVALID_VALUE) {
128 m_provider->putCurrentFrame(m_frame); 154 m_provider->putCurrentFrame(m_webFrame);
129 m_frame = 0; 155 m_frame = 0;
130 return; 156 return;
131 } 157 }
132 158
133 if (m_frame->planes() > WebKit::WebVideoFrame::maxPlanes) { 159 m_emitFrameQuad = resourceProvider->defaultResourceType() == ResourceProvide r::Bitmap ||
134 m_provider->putCurrentFrame(m_frame); 160 m_frame->format() == media::VideoFrame::YV12 ||
135 m_frame = 0; 161 m_frame->format() == media::VideoFrame::YV16;
162
163 // Don't allocate plane data if we'll emit the frame itself
164 if (m_emitFrameQuad)
136 return; 165 return;
137 }
138 166
139 if (!allocatePlaneData(resourceProvider)) { 167 if (!allocatePlaneData(resourceProvider)) {
140 m_provider->putCurrentFrame(m_frame); 168 m_provider->putCurrentFrame(m_webFrame);
141 m_frame = 0; 169 m_frame = 0;
142 return; 170 return;
143 } 171 }
144 172
145 if (!copyPlaneData(resourceProvider)) { 173 if (!copyPlaneData(resourceProvider)) {
146 m_provider->putCurrentFrame(m_frame); 174 m_provider->putCurrentFrame(m_webFrame);
147 m_frame = 0; 175 m_frame = 0;
148 return; 176 return;
149 } 177 }
150 178
151 if (m_format == GL_TEXTURE_2D) 179 if (m_format == GL_TEXTURE_2D)
152 m_externalTextureResource = resourceProvider->createResourceFromExternal Texture(m_frame->textureId()); 180 m_externalTextureResource = resourceProvider->createResourceFromExternal Texture(m_frame->texture_id());
153 } 181 }
154 182
155 void VideoLayerImpl::appendQuads(QuadSink& quadSink, AppendQuadsData& appendQuad sData) 183 void VideoLayerImpl::appendQuads(QuadSink& quadSink, AppendQuadsData& appendQuad sData)
156 { 184 {
157 DCHECK(Proxy::isImplThread()); 185 DCHECK(Proxy::isImplThread());
158 186
159 if (!m_frame) 187 if (!m_frame)
160 return; 188 return;
161 189
162 SharedQuadState* sharedQuadState = quadSink.useSharedQuadState(createSharedQ uadState()); 190 SharedQuadState* sharedQuadState = quadSink.useSharedQuadState(createSharedQ uadState());
163 appendDebugBorderQuad(quadSink, sharedQuadState, appendQuadsData); 191 appendDebugBorderQuad(quadSink, sharedQuadState, appendQuadsData);
164 192
165 // FIXME: When we pass quads out of process, we need to double-buffer, or 193 // FIXME: When we pass quads out of process, we need to double-buffer, or
166 // otherwise synchronize use of all textures in the quad. 194 // otherwise synchronize use of all textures in the quad.
167 195
168 IntRect quadRect(IntPoint(), contentBounds()); 196 gfx::Rect quadRect(contentBounds());
197
198 if (m_emitFrameQuad) {
199 scoped_ptr<VideoFrameDrawQuad> drawQuad =
200 VideoFrameDrawQuad::create(sharedQuadState, quadRect, m_frame);
201 quadSink.append(drawQuad.PassAs<DrawQuad>(), appendQuadsData);
202 return;
203 }
169 204
170 switch (m_format) { 205 switch (m_format) {
171 case GL_LUMINANCE: { 206 case GL_LUMINANCE: {
172 // YUV software decoder. 207 // YUV software decoder.
173 const FramePlane& yPlane = m_framePlanes[WebKit::WebVideoFrame::yPlane]; 208 const FramePlane& yPlane = m_framePlanes[media::VideoFrame::kYPlane];
174 const FramePlane& uPlane = m_framePlanes[WebKit::WebVideoFrame::uPlane]; 209 const FramePlane& uPlane = m_framePlanes[media::VideoFrame::kUPlane];
175 const FramePlane& vPlane = m_framePlanes[WebKit::WebVideoFrame::vPlane]; 210 const FramePlane& vPlane = m_framePlanes[media::VideoFrame::kVPlane];
176 scoped_ptr<YUVVideoDrawQuad> yuvVideoQuad = YUVVideoDrawQuad::create(sha redQuadState, quadRect, yPlane, uPlane, vPlane); 211 scoped_ptr<YUVVideoDrawQuad> yuvVideoQuad = YUVVideoDrawQuad::create(sha redQuadState, quadRect, yPlane, uPlane, vPlane);
177 quadSink.append(yuvVideoQuad.PassAs<DrawQuad>(), appendQuadsData); 212 quadSink.append(yuvVideoQuad.PassAs<DrawQuad>(), appendQuadsData);
178 break; 213 break;
179 } 214 }
180 case GL_RGBA: { 215 case GL_RGBA: {
181 // RGBA software decoder. 216 // RGBA software decoder.
182 const FramePlane& plane = m_framePlanes[WebKit::WebVideoFrame::rgbPlane] ; 217 const FramePlane& plane = m_framePlanes[media::VideoFrame::kRGBPlane];
183 float widthScaleFactor = static_cast<float>(plane.visibleSize.width()) / plane.size.width(); 218 float widthScaleFactor = static_cast<float>(plane.visibleSize.width()) / plane.size.width();
184 219
185 bool premultipliedAlpha = true; 220 bool premultipliedAlpha = true;
186 FloatRect uvRect(0, 0, widthScaleFactor, 1); 221 FloatRect uvRect(0, 0, widthScaleFactor, 1);
187 bool flipped = false; 222 bool flipped = false;
188 scoped_ptr<TextureDrawQuad> textureQuad = TextureDrawQuad::create(shared QuadState, quadRect, plane.resourceId, premultipliedAlpha, uvRect, flipped); 223 scoped_ptr<TextureDrawQuad> textureQuad = TextureDrawQuad::create(shared QuadState, quadRect, plane.resourceId, premultipliedAlpha, uvRect, flipped);
189 quadSink.append(textureQuad.PassAs<DrawQuad>(), appendQuadsData); 224 quadSink.append(textureQuad.PassAs<DrawQuad>(), appendQuadsData);
190 break; 225 break;
191 } 226 }
192 case GL_TEXTURE_2D: { 227 case GL_TEXTURE_2D: {
193 // NativeTexture hardware decoder. 228 // NativeTexture hardware decoder.
194 bool premultipliedAlpha = true; 229 bool premultipliedAlpha = true;
195 FloatRect uvRect(0, 0, 1, 1); 230 FloatRect uvRect(0, 0, 1, 1);
196 bool flipped = false; 231 bool flipped = false;
197 scoped_ptr<TextureDrawQuad> textureQuad = TextureDrawQuad::create(shared QuadState, quadRect, m_externalTextureResource, premultipliedAlpha, uvRect, flip ped); 232 scoped_ptr<TextureDrawQuad> textureQuad = TextureDrawQuad::create(shared QuadState, quadRect, m_externalTextureResource, premultipliedAlpha, uvRect, flip ped);
198 quadSink.append(textureQuad.PassAs<DrawQuad>(), appendQuadsData); 233 quadSink.append(textureQuad.PassAs<DrawQuad>(), appendQuadsData);
199 break; 234 break;
200 } 235 }
201 case GL_TEXTURE_RECTANGLE_ARB: { 236 case GL_TEXTURE_RECTANGLE_ARB: {
202 IntSize textureSize(m_frame->width(), m_frame->height()); 237 IntSize textureSize(m_frame->data_size().width(), m_frame->data_size().h eight());
203 scoped_ptr<IOSurfaceDrawQuad> ioSurfaceQuad = IOSurfaceDrawQuad::create( sharedQuadState, quadRect, textureSize, m_frame->textureId(), IOSurfaceDrawQuad: :Unflipped); 238 scoped_ptr<IOSurfaceDrawQuad> ioSurfaceQuad = IOSurfaceDrawQuad::create( sharedQuadState, quadRect, textureSize, m_frame->texture_id(), IOSurfaceDrawQuad ::Unflipped);
204 quadSink.append(ioSurfaceQuad.PassAs<DrawQuad>(), appendQuadsData); 239 quadSink.append(ioSurfaceQuad.PassAs<DrawQuad>(), appendQuadsData);
205 break; 240 break;
206 } 241 }
207 case GL_TEXTURE_EXTERNAL_OES: { 242 case GL_TEXTURE_EXTERNAL_OES: {
208 // StreamTexture hardware decoder. 243 // StreamTexture hardware decoder.
209 scoped_ptr<StreamVideoDrawQuad> streamVideoQuad = StreamVideoDrawQuad::c reate(sharedQuadState, quadRect, m_frame->textureId(), m_streamTextureMatrix); 244 scoped_ptr<StreamVideoDrawQuad> streamVideoQuad = StreamVideoDrawQuad::c reate(sharedQuadState, quadRect, m_frame->texture_id(), m_streamTextureMatrix);
210 quadSink.append(streamVideoQuad.PassAs<DrawQuad>(), appendQuadsData); 245 quadSink.append(streamVideoQuad.PassAs<DrawQuad>(), appendQuadsData);
211 break; 246 break;
212 } 247 }
213 default: 248 default:
214 CRASH(); // Someone updated convertVFCFormatToGC3DFormat above but updat e this! 249 NOTREACHED(); // Someone updated convertVFCFormatToGLenum above but upd ate this!
250 break;
215 } 251 }
216 } 252 }
217 253
218 void VideoLayerImpl::didDraw(ResourceProvider* resourceProvider) 254 void VideoLayerImpl::didDraw(ResourceProvider* resourceProvider)
219 { 255 {
220 DCHECK(Proxy::isImplThread()); 256 DCHECK(Proxy::isImplThread());
221 LayerImpl::didDraw(resourceProvider); 257 LayerImpl::didDraw(resourceProvider);
222 258
223 if (!m_frame) 259 if (!m_frame)
224 return; 260 return;
225 261
226 if (m_format == GL_TEXTURE_2D) { 262 if (m_format == GL_TEXTURE_2D) {
227 DCHECK(m_externalTextureResource); 263 DCHECK(m_externalTextureResource);
228 // FIXME: the following assert will not be true when sending resources t o a 264 // FIXME: the following assert will not be true when sending resources t o a
229 // parent compositor. We will probably need to hold on to m_frame for 265 // parent compositor. We will probably need to hold on to m_frame for
230 // longer, and have several "current frames" in the pipeline. 266 // longer, and have several "current frames" in the pipeline.
231 DCHECK(!resourceProvider->inUseByConsumer(m_externalTextureResource)); 267 DCHECK(!resourceProvider->inUseByConsumer(m_externalTextureResource));
232 resourceProvider->deleteResource(m_externalTextureResource); 268 resourceProvider->deleteResource(m_externalTextureResource);
233 m_externalTextureResource = 0; 269 m_externalTextureResource = 0;
234 } 270 }
235 271
236 m_provider->putCurrentFrame(m_frame); 272 m_provider->putCurrentFrame(m_webFrame);
237 m_frame = 0; 273 m_frame = 0;
238 274
239 m_providerLock.Release(); 275 m_providerLock.Release();
240 } 276 }
241 277
242 static int videoFrameDimension(int originalDimension, unsigned plane, int format ) 278 static int videoFrameDimension(int originalDimension, size_t plane, int format)
243 { 279 {
244 if (format == WebKit::WebVideoFrame::FormatYV12 && plane != WebKit::WebVideo Frame::yPlane) 280 if (format == media::VideoFrame::YV12 && plane != media::VideoFrame::kYPlane )
245 return originalDimension / 2; 281 return originalDimension / 2;
246 return originalDimension; 282 return originalDimension;
247 } 283 }
248 284
249 static bool hasPaddingBytes(const WebKit::WebVideoFrame& frame, unsigned plane) 285 static bool hasPaddingBytes(const media::VideoFrame& frame, size_t plane)
250 { 286 {
251 return frame.stride(plane) > videoFrameDimension(frame.width(), plane, frame .format()); 287 return frame.stride(plane) > videoFrameDimension(frame.data_size().width(), plane, frame.format());
252 } 288 }
253 289
254 IntSize VideoLayerImpl::computeVisibleSize(const WebKit::WebVideoFrame& frame, u nsigned plane) 290 IntSize computeVisibleSize(const media::VideoFrame& frame, size_t plane)
255 { 291 {
256 int visibleWidth = videoFrameDimension(frame.width(), plane, frame.format()) ; 292 int visibleWidth = videoFrameDimension(frame.data_size().width(), plane, fra me.format());
257 int originalWidth = visibleWidth; 293 int originalWidth = visibleWidth;
258 int visibleHeight = videoFrameDimension(frame.height(), plane, frame.format( )); 294 int visibleHeight = videoFrameDimension(frame.data_size().height(), plane, f rame.format());
259 295
260 // When there are dead pixels at the edge of the texture, decrease 296 // When there are dead pixels at the edge of the texture, decrease
261 // the frame width by 1 to prevent the rightmost pixels from 297 // the frame width by 1 to prevent the rightmost pixels from
262 // interpolating with the dead pixels. 298 // interpolating with the dead pixels.
263 if (hasPaddingBytes(frame, plane)) 299 if (hasPaddingBytes(frame, plane))
264 --visibleWidth; 300 --visibleWidth;
265 301
266 // In YV12, every 2x2 square of Y values corresponds to one U and 302 // In YV12, every 2x2 square of Y values corresponds to one U and
267 // one V value. If we decrease the width of the UV plane, we must decrease t he 303 // one V value. If we decrease the width of the UV plane, we must decrease t he
268 // width of the Y texture by 2 for proper alignment. This must happen 304 // width of the Y texture by 2 for proper alignment. This must happen
269 // always, even if Y's texture does not have padding bytes. 305 // always, even if Y's texture does not have padding bytes.
270 if (plane == WebKit::WebVideoFrame::yPlane && frame.format() == WebKit::WebV ideoFrame::FormatYV12) { 306 if (plane == media::VideoFrame::kYPlane && frame.format() == media::VideoFra me::YV12) {
271 if (hasPaddingBytes(frame, WebKit::WebVideoFrame::uPlane)) 307 if (hasPaddingBytes(frame, media::VideoFrame::kUPlane))
272 visibleWidth = originalWidth - 2; 308 visibleWidth = originalWidth - 2;
273 } 309 }
274 310
275 return IntSize(visibleWidth, visibleHeight); 311 return IntSize(visibleWidth, visibleHeight);
276 } 312 }
277 313
278 bool VideoLayerImpl::FramePlane::allocateData(ResourceProvider* resourceProvider ) 314 bool VideoLayerImpl::FramePlane::allocateData(ResourceProvider* resourceProvider )
279 { 315 {
280 if (resourceId) 316 if (resourceId)
281 return true; 317 return true;
282 318
283 resourceId = resourceProvider->createResource(Renderer::ImplPool, size, form at, ResourceProvider::TextureUsageAny); 319 resourceId = resourceProvider->createResource(Renderer::ImplPool, size, form at, ResourceProvider::TextureUsageAny);
284 return resourceId; 320 return resourceId;
285 } 321 }
286 322
287 void VideoLayerImpl::FramePlane::freeData(ResourceProvider* resourceProvider) 323 void VideoLayerImpl::FramePlane::freeData(ResourceProvider* resourceProvider)
288 { 324 {
289 if (!resourceId) 325 if (!resourceId)
290 return; 326 return;
291 327
292 resourceProvider->deleteResource(resourceId); 328 resourceProvider->deleteResource(resourceId);
293 resourceId = 0; 329 resourceId = 0;
294 } 330 }
295 331
296 bool VideoLayerImpl::allocatePlaneData(ResourceProvider* resourceProvider) 332 bool VideoLayerImpl::allocatePlaneData(ResourceProvider* resourceProvider)
297 { 333 {
298 int maxTextureSize = resourceProvider->maxTextureSize(); 334 DCHECK(!m_emitFrameQuad);
299 for (unsigned planeIndex = 0; planeIndex < m_frame->planes(); ++planeIndex) { 335
336 const int maxTextureSize = resourceProvider->maxTextureSize();
337 const size_t planeCount = numPlanes(*m_frame);
338 for (size_t planeIndex = 0; planeIndex < planeCount; ++planeIndex) {
300 VideoLayerImpl::FramePlane& plane = m_framePlanes[planeIndex]; 339 VideoLayerImpl::FramePlane& plane = m_framePlanes[planeIndex];
301 340
302 IntSize requiredTextureSize(m_frame->stride(planeIndex), videoFrameDimen sion(m_frame->height(), planeIndex, m_frame->format())); 341 IntSize requiredTextureSize(m_frame->stride(planeIndex), videoFrameDimen sion(m_frame->data_size().height(), planeIndex, m_frame->format()));
303 // FIXME: Remove the test against maxTextureSize when tiled layers are i mplemented. 342 // FIXME: Remove the test against maxTextureSize when tiled layers are i mplemented.
304 if (requiredTextureSize.isZero() || requiredTextureSize.width() > maxTex tureSize || requiredTextureSize.height() > maxTextureSize) 343 if (requiredTextureSize.isZero() || requiredTextureSize.width() > maxTex tureSize || requiredTextureSize.height() > maxTextureSize)
305 return false; 344 return false;
306 345
307 if (plane.size != requiredTextureSize || plane.format != m_format) { 346 if (plane.size != requiredTextureSize || plane.format != m_format) {
308 plane.freeData(resourceProvider); 347 plane.freeData(resourceProvider);
309 plane.size = requiredTextureSize; 348 plane.size = requiredTextureSize;
310 plane.format = m_format; 349 plane.format = m_format;
311 } 350 }
312 351
313 if (!plane.resourceId) { 352 if (!plane.resourceId) {
314 if (!plane.allocateData(resourceProvider)) 353 if (!plane.allocateData(resourceProvider))
315 return false; 354 return false;
316 plane.visibleSize = computeVisibleSize(*m_frame, planeIndex); 355 plane.visibleSize = computeVisibleSize(*m_frame, planeIndex);
317 } 356 }
318 } 357 }
319 return true; 358 return true;
320 } 359 }
321 360
322 bool VideoLayerImpl::copyPlaneData(ResourceProvider* resourceProvider) 361 bool VideoLayerImpl::copyPlaneData(ResourceProvider* resourceProvider)
323 { 362 {
324 size_t softwarePlaneCount = m_frame->planes(); 363 DCHECK(!m_emitFrameQuad);
325 if (!softwarePlaneCount) 364
365 const size_t planeCount = numPlanes(*m_frame);
366 if (!planeCount)
326 return true; 367 return true;
327 368
328 for (size_t softwarePlaneIndex = 0; softwarePlaneIndex < softwarePlaneCount; ++softwarePlaneIndex) { 369 for (size_t planeIndex = 0; planeIndex < planeCount; ++planeIndex) {
329 VideoLayerImpl::FramePlane& plane = m_framePlanes[softwarePlaneIndex]; 370 VideoLayerImpl::FramePlane& plane = m_framePlanes[planeIndex];
330 const uint8_t* softwarePlanePixels = static_cast<const uint8_t*>(m_frame ->data(softwarePlaneIndex)); 371 const uint8_t* softwarePlanePixels = m_frame->data(planeIndex);
331 IntRect planeRect(IntPoint(), plane.size); 372 IntRect planeRect(IntPoint(), plane.size);
332 resourceProvider->upload(plane.resourceId, softwarePlanePixels, planeRec t, planeRect, IntSize()); 373 resourceProvider->upload(plane.resourceId, softwarePlanePixels, planeRec t, planeRect, IntSize());
333 } 374 }
334 return true; 375 return true;
335 } 376 }
336 377
337 void VideoLayerImpl::freePlaneData(ResourceProvider* resourceProvider) 378 void VideoLayerImpl::freePlaneData(ResourceProvider* resourceProvider)
338 { 379 {
339 for (unsigned i = 0; i < WebKit::WebVideoFrame::maxPlanes; ++i) 380 for (size_t i = 0; i < media::VideoFrame::kMaxPlanes; ++i)
340 m_framePlanes[i].freeData(resourceProvider); 381 m_framePlanes[i].freeData(resourceProvider);
341 } 382 }
342 383
343 void VideoLayerImpl::freeUnusedPlaneData(ResourceProvider* resourceProvider) 384 void VideoLayerImpl::freeUnusedPlaneData(ResourceProvider* resourceProvider)
344 { 385 {
345 unsigned firstUnusedPlane = m_frame ? m_frame->planes() : 0; 386 size_t firstUnusedPlane = 0;
346 for (unsigned i = firstUnusedPlane; i < WebKit::WebVideoFrame::maxPlanes; ++ i) 387 if (m_frame && !m_emitFrameQuad)
388 firstUnusedPlane = numPlanes(*m_frame);
389 for (size_t i = firstUnusedPlane; i < media::VideoFrame::kMaxPlanes; ++i)
347 m_framePlanes[i].freeData(resourceProvider); 390 m_framePlanes[i].freeData(resourceProvider);
348 } 391 }
349 392
350 void VideoLayerImpl::didReceiveFrame() 393 void VideoLayerImpl::didReceiveFrame()
351 { 394 {
352 setNeedsRedraw(); 395 setNeedsRedraw();
353 } 396 }
354 397
355 void VideoLayerImpl::didUpdateMatrix(const float matrix[16]) 398 void VideoLayerImpl::didUpdateMatrix(const float matrix[16])
356 { 399 {
(...skipping 21 matching lines...) Expand all
378 str->append("video layer\n"); 421 str->append("video layer\n");
379 LayerImpl::dumpLayerProperties(str, indent); 422 LayerImpl::dumpLayerProperties(str, indent);
380 } 423 }
381 424
382 const char* VideoLayerImpl::layerTypeAsString() const 425 const char* VideoLayerImpl::layerTypeAsString() const
383 { 426 {
384 return "VideoLayer"; 427 return "VideoLayer";
385 } 428 }
386 429
387 } 430 }
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698