Index: cc/video_layer_impl.cc |
diff --git a/cc/video_layer_impl.cc b/cc/video_layer_impl.cc |
index 86cf037eb8baa87676451710629a73fdde0bc619..c37d0394f6b71a0146458594a32f05c32a7822ab 100644 |
--- a/cc/video_layer_impl.cc |
+++ b/cc/video_layer_impl.cc |
@@ -122,6 +122,11 @@ void VideoLayerImpl::willDrawInternal(ResourceProvider* resourceProvider) |
if (!m_frame) |
return; |
+ // If these fail, we'll have to add draw logic that handles offset bitmap/ |
+ // texture UVs. For now, just expect (0, 0) offset. |
Ami GONE FROM CHROMIUM (2012/10/25 17:13:34):
IIUC you're saying none of the downstream creators
sheu (2012/10/25 21:44:13):
Yep, everything generates 0,0 so far. Updating co
+ DCHECK_EQ(m_frame->visibleRect().x, 0); |
+ DCHECK_EQ(m_frame->visibleRect().y, 0); |
+ |
m_format = convertVFCFormatToGC3DFormat(*m_frame); |
if (m_format == GL_INVALID_VALUE) { |
@@ -166,6 +171,14 @@ void VideoLayerImpl::appendQuads(QuadSink& quadSink, AppendQuadsData& appendQuad |
// otherwise synchonize use of all textures in the quad. |
IntRect quadRect(IntPoint(), contentBounds()); |
+ WebKit::WebRect visibleRect = m_frame->visibleRect(); |
slavi (2012/10/25 04:44:31):
I have a pending CL http://codereview.chromium.org
sheu (2012/10/25 21:44:13):
I am lazy and didn't do this :-P. The upstream We
+ WebKit::WebSize textureSize = m_frame->textureSize(); |
+ |
+ // The texture may be larger than the visible region, due to alignment |
+ // padding pixels for macroblocked formats. |
+ const float texWidthScale = |
+ static_cast<float>(visibleRect.width) / textureSize.width; |
+ const float texHeightScale = |
+ static_cast<float>(visibleRect.height) / textureSize.height; |
switch (m_format) { |
case GL_LUMINANCE: { |
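(Illustration, not part of the patch: the hypothetical numbers below show the texture-coordinate scaling computed above, where a macroblock-aligned texture is larger than the visible frame and UVs are scaled by visibleRect/textureSize.)

// Standalone sketch of the UV scale computation above (made-up dimensions).
// A 1920x1080 frame stored in a 2048x1088 macroblock-aligned texture needs
// its texture coordinates scaled so sampling stops at the visible edge.
#include <cstdio>

int main() {
    const int visibleWidth = 1920, visibleHeight = 1080;   // visibleRect
    const int textureWidth = 2048, textureHeight = 1088;   // textureSize

    const float texWidthScale =
        static_cast<float>(visibleWidth) / textureWidth;    // 0.9375
    const float texHeightScale =
        static_cast<float>(visibleHeight) / textureHeight;  // ~0.9926

    // uvRect(0, 0, texWidthScale, texHeightScale) maps the quad onto only
    // the visible portion of the texture.
    std::printf("uvRect = (0, 0, %f, %f)\n", texWidthScale, texHeightScale);
    return 0;
}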
@@ -173,17 +186,17 @@ void VideoLayerImpl::appendQuads(QuadSink& quadSink, AppendQuadsData& appendQuad |
const FramePlane& yPlane = m_framePlanes[WebKit::WebVideoFrame::yPlane]; |
const FramePlane& uPlane = m_framePlanes[WebKit::WebVideoFrame::uPlane]; |
const FramePlane& vPlane = m_framePlanes[WebKit::WebVideoFrame::vPlane]; |
- scoped_ptr<YUVVideoDrawQuad> yuvVideoQuad = YUVVideoDrawQuad::create(sharedQuadState, quadRect, yPlane, uPlane, vPlane); |
+ FloatSize texScale(texWidthScale, texHeightScale); |
+ scoped_ptr<YUVVideoDrawQuad> yuvVideoQuad = YUVVideoDrawQuad::create( |
+ sharedQuadState, quadRect, texScale, yPlane, uPlane, vPlane); |
quadSink.append(yuvVideoQuad.PassAs<DrawQuad>(), appendQuadsData); |
break; |
} |
case GL_RGBA: { |
// RGBA software decoder. |
const FramePlane& plane = m_framePlanes[WebKit::WebVideoFrame::rgbPlane]; |
- float widthScaleFactor = static_cast<float>(plane.visibleSize.width()) / plane.size.width(); |
- |
bool premultipliedAlpha = true; |
- FloatRect uvRect(0, 0, widthScaleFactor, 1); |
+ FloatRect uvRect(0, 0, texWidthScale, texHeightScale); |
bool flipped = false; |
scoped_ptr<TextureDrawQuad> textureQuad = TextureDrawQuad::create(sharedQuadState, quadRect, plane.resourceId, premultipliedAlpha, uvRect, flipped); |
quadSink.append(textureQuad.PassAs<DrawQuad>(), appendQuadsData); |
@@ -192,21 +205,26 @@ void VideoLayerImpl::appendQuads(QuadSink& quadSink, AppendQuadsData& appendQuad |
case GL_TEXTURE_2D: { |
// NativeTexture hardware decoder. |
bool premultipliedAlpha = true; |
- FloatRect uvRect(0, 0, 1, 1); |
+ FloatRect uvRect(0, 0, texWidthScale, texHeightScale); |
bool flipped = false; |
scoped_ptr<TextureDrawQuad> textureQuad = TextureDrawQuad::create(sharedQuadState, quadRect, m_externalTextureResource, premultipliedAlpha, uvRect, flipped); |
quadSink.append(textureQuad.PassAs<DrawQuad>(), appendQuadsData); |
break; |
} |
case GL_TEXTURE_RECTANGLE_ARB: { |
- IntSize textureSize(m_frame->width(), m_frame->height()); |
- scoped_ptr<IOSurfaceDrawQuad> ioSurfaceQuad = IOSurfaceDrawQuad::create(sharedQuadState, quadRect, textureSize, m_frame->textureId(), IOSurfaceDrawQuad::Unflipped); |
+ IntSize visibleSize(visibleRect.width, visibleRect.height); |
+ scoped_ptr<IOSurfaceDrawQuad> ioSurfaceQuad = IOSurfaceDrawQuad::create(sharedQuadState, quadRect, visibleSize, m_frame->textureId(), IOSurfaceDrawQuad::Unflipped); |
quadSink.append(ioSurfaceQuad.PassAs<DrawQuad>(), appendQuadsData); |
break; |
} |
case GL_TEXTURE_EXTERNAL_OES: { |
// StreamTexture hardware decoder. |
- scoped_ptr<StreamVideoDrawQuad> streamVideoQuad = StreamVideoDrawQuad::create(sharedQuadState, quadRect, m_frame->textureId(), m_streamTextureMatrix); |
+ WebKit::WebTransformationMatrix transform(m_streamTextureMatrix); |
+ transform.scaleNonUniform(texWidthScale, texHeightScale); |
+ scoped_ptr<StreamVideoDrawQuad> streamVideoQuad = |
+ StreamVideoDrawQuad::create(sharedQuadState, quadRect, |
+ m_frame->textureId(), |
+ transform); |
quadSink.append(streamVideoQuad.PassAs<DrawQuad>(), appendQuadsData); |
break; |
} |
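(Illustration, not part of the patch: a sketch of why transform.scaleNonUniform(texWidthScale, texHeightScale) crops sampling to the visible region; the plain UV struct below stands in for how WebTransformationMatrix is applied to texture coordinates, which is an assumption.)

// Sketch: the quad's corner UVs span [0, 1]; after the non-uniform scale
// they span only the visible fraction of the texture.
#include <cstdio>

struct UV { float u, v; };

UV scaleUV(UV uv, float widthScale, float heightScale) {
    return { uv.u * widthScale, uv.v * heightScale };
}

int main() {
    const float texWidthScale = 0.9375f, texHeightScale = 0.9926f;  // example
    const UV corners[4] = { {0, 0}, {1, 0}, {0, 1}, {1, 1} };
    for (const UV& c : corners) {
        UV s = scaleUV(c, texWidthScale, texHeightScale);
        std::printf("(%g, %g) -> (%g, %g)\n", c.u, c.v, s.u, s.v);
    }
    return 0;
}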
@@ -239,43 +257,8 @@ void VideoLayerImpl::didDraw(ResourceProvider* resourceProvider) |
m_providerLock.Release(); |
} |
-static int videoFrameDimension(int originalDimension, unsigned plane, int format) |
-{ |
- if (format == WebKit::WebVideoFrame::FormatYV12 && plane != WebKit::WebVideoFrame::yPlane) |
- return originalDimension / 2; |
- return originalDimension; |
-} |
- |
-static bool hasPaddingBytes(const WebKit::WebVideoFrame& frame, unsigned plane) |
-{ |
- return frame.stride(plane) > videoFrameDimension(frame.width(), plane, frame.format()); |
-} |
- |
-IntSize VideoLayerImpl::computeVisibleSize(const WebKit::WebVideoFrame& frame, unsigned plane) |
-{ |
- int visibleWidth = videoFrameDimension(frame.width(), plane, frame.format()); |
- int originalWidth = visibleWidth; |
- int visibleHeight = videoFrameDimension(frame.height(), plane, frame.format()); |
- |
- // When there are dead pixels at the edge of the texture, decrease |
- // the frame width by 1 to prevent the rightmost pixels from |
- // interpolating with the dead pixels. |
- if (hasPaddingBytes(frame, plane)) |
- --visibleWidth; |
- |
- // In YV12, every 2x2 square of Y values corresponds to one U and |
- // one V value. If we decrease the width of the UV plane, we must decrease the |
- // width of the Y texture by 2 for proper alignment. This must happen |
- // always, even if Y's texture does not have padding bytes. |
- if (plane == WebKit::WebVideoFrame::yPlane && frame.format() == WebKit::WebVideoFrame::FormatYV12) { |
- if (hasPaddingBytes(frame, WebKit::WebVideoFrame::uPlane)) |
- visibleWidth = originalWidth - 2; |
- } |
- |
- return IntSize(visibleWidth, visibleHeight); |
-} |
- |
-bool VideoLayerImpl::FramePlane::allocateData(ResourceProvider* resourceProvider) |
+bool VideoLayerImpl::FramePlane::allocateData( |
+ ResourceProvider* resourceProvider) |
{ |
if (resourceId) |
return true; |
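(Illustration, not part of the patch: the removed computeVisibleSize() logic above relies on YV12's 2x2 chroma subsampling; this made-up example shows the per-plane dimensions that reasoning refers to.)

// Sketch of YV12 plane sizing: the U and V planes are subsampled 2x in each
// dimension relative to the Y plane, so one U and one V sample cover a 2x2
// block of Y samples.
#include <cstdio>

int main() {
    const int frameWidth = 1920, frameHeight = 1080;  // made-up dimensions

    const int yWidth = frameWidth,      yHeight = frameHeight;
    const int uvWidth = frameWidth / 2, uvHeight = frameHeight / 2;

    std::printf("Y: %dx%d, U/V: %dx%d\n", yWidth, yHeight, uvWidth, uvHeight);
    return 0;
}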
@@ -295,13 +278,17 @@ void VideoLayerImpl::FramePlane::freeData(ResourceProvider* resourceProvider) |
bool VideoLayerImpl::allocatePlaneData(ResourceProvider* resourceProvider) |
{ |
+ WebKit::WebSize textureSize = m_frame->textureSize(); |
int maxTextureSize = resourceProvider->maxTextureSize(); |
- for (unsigned planeIndex = 0; planeIndex < m_frame->planes(); ++planeIndex) { |
- VideoLayerImpl::FramePlane& plane = m_framePlanes[planeIndex]; |
- |
- IntSize requiredTextureSize(m_frame->stride(planeIndex), videoFrameDimension(m_frame->height(), planeIndex, m_frame->format())); |
- // FIXME: Remove the test against maxTextureSize when tiled layers are implemented. |
- if (requiredTextureSize.isZero() || requiredTextureSize.width() > maxTextureSize || requiredTextureSize.height() > maxTextureSize) |
+ for (unsigned planeIdx = 0; planeIdx < m_frame->planes(); ++planeIdx) { |
+ VideoLayerImpl::FramePlane& plane = m_framePlanes[planeIdx]; |
+ |
+ IntSize requiredTextureSize(textureSize.width, textureSize.height); |
+ // FIXME: Remove the test against maxTextureSize when tiled layers are |
+ // implemented. |
+ if (requiredTextureSize.isZero() || |
+ requiredTextureSize.width() > maxTextureSize || |
+ requiredTextureSize.height() > maxTextureSize) |
return false; |
if (plane.size != requiredTextureSize || plane.format != m_format) { |
@@ -310,26 +297,26 @@ bool VideoLayerImpl::allocatePlaneData(ResourceProvider* resourceProvider) |
plane.format = m_format; |
} |
- if (!plane.resourceId) { |
- if (!plane.allocateData(resourceProvider)) |
- return false; |
- plane.visibleSize = computeVisibleSize(*m_frame, planeIndex); |
- } |
+ if (!plane.allocateData(resourceProvider)) |
+ return false; |
} |
return true; |
} |
bool VideoLayerImpl::copyPlaneData(ResourceProvider* resourceProvider) |
{ |
- size_t softwarePlaneCount = m_frame->planes(); |
- if (!softwarePlaneCount) |
- return true; |
- |
- for (size_t softwarePlaneIndex = 0; softwarePlaneIndex < softwarePlaneCount; ++softwarePlaneIndex) { |
- VideoLayerImpl::FramePlane& plane = m_framePlanes[softwarePlaneIndex]; |
- const uint8_t* softwarePlanePixels = static_cast<const uint8_t*>(m_frame->data(softwarePlaneIndex)); |
- IntRect planeRect(IntPoint(), plane.size); |
- resourceProvider->upload(plane.resourceId, softwarePlanePixels, planeRect, planeRect, IntSize()); |
+ WebKit::WebSize textureSize = m_frame->textureSize(); |
+ for (unsigned planeIdx = 0; planeIdx < m_frame->planes(); ++planeIdx) { |
+ VideoLayerImpl::FramePlane& plane = m_framePlanes[planeIdx]; |
+ const uint8_t* planePixels = static_cast<const uint8_t*>(m_frame->data(planeIdx)); |
+ |
+ // Only non-FormatNativeTexture planes should need upload. |
+ DCHECK_EQ(plane.format, GL_LUMINANCE); |
+ |
+ IntRect planeRect(0, 0, plane.size.width(), plane.size.height()); |
+ IntRect visibleRect(0, 0, textureSize.width, textureSize.height); |
+ resourceProvider->upload(plane.resourceId, planePixels, planeRect, |
+ visibleRect, IntSize()); |
} |
return true; |
} |
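(Illustration, not part of the patch: a sketch of the per-plane upload loop in copyPlaneData(); uploadPlane() below is a stand-in for ResourceProvider::upload, whose full signature is not shown in this diff, and the plane sizes are made up.)

// Sketch: each software-decoded plane's pixels are handed off for upload
// over the full plane rectangle.
#include <cstdint>
#include <cstdio>
#include <vector>

struct Plane {
    unsigned resourceId;
    int width, height;
};

// Stand-in for ResourceProvider::upload.
void uploadPlane(const Plane& plane, const uint8_t* pixels) {
    std::printf("upload resource %u: %dx%d plane at %p\n",
                plane.resourceId, plane.width, plane.height,
                static_cast<const void*>(pixels));
}

int main() {
    // Y, U, V planes of a software-decoded YV12 frame (made-up sizes).
    std::vector<Plane> planes = {{1, 2048, 1088}, {2, 1024, 544}, {3, 1024, 544}};
    std::vector<std::vector<uint8_t>> data(planes.size());
    for (size_t i = 0; i < planes.size(); ++i) {
        data[i].resize(static_cast<size_t>(planes[i].width) * planes[i].height);
        uploadPlane(planes[i], data[i].data());
    }
    return 0;
}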