Index: content/common/gpu/media/gles2_texture_to_egl_image_translator.cc
===================================================================
--- content/common/gpu/media/gles2_texture_to_egl_image_translator.cc (revision 120554)
+++ content/common/gpu/media/gles2_texture_to_egl_image_translator.cc (working copy)
@@ -13,9 +13,13 @@
static PFNEGLDESTROYIMAGEKHRPROC egl_destroy_image_khr =
    reinterpret_cast<PFNEGLDESTROYIMAGEKHRPROC>(
        eglGetProcAddress("eglDestroyImageKHR"));
+static PFNGLEGLIMAGETARGETTEXTURE2DOESPROC glEGL_ImageTargetTexture2DOES =
Ami GONE FROM CHROMIUM
2012/02/07 17:05:40
please follow the naming conventions above.
+    reinterpret_cast<PFNGLEGLIMAGETARGETTEXTURE2DOESPROC>(
+        eglGetProcAddress("glEGLImageTargetTexture2DOES"));
Ami GONE FROM CHROMIUM
2012/02/07 17:05:40
This is an extension that's not guaranteed to exist ...
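A sketch of what the rename asked for above might look like, matching the lower_case_with_underscores style of the neighboring egl_create_image_khr / egl_destroy_image_khr pointers (the exact name is an assumption, not the patch author's choice):

    static PFNGLEGLIMAGETARGETTEXTURE2DOESPROC gl_egl_image_target_texture_2d_oes =
        reinterpret_cast<PFNGLEGLIMAGETARGETTEXTURE2DOESPROC>(
            eglGetProcAddress("glEGLImageTargetTexture2DOES"));

Because GL_OES_EGL_image is an extension, eglGetProcAddress() can return NULL here, which is why the pointer also has to be checked in AreEGLExtensionsInitialized() below.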
static bool AreEGLExtensionsInitialized() {
-  return (egl_create_image_khr && egl_destroy_image_khr);
+  return (egl_create_image_khr && egl_destroy_image_khr
+      && glEGL_ImageTargetTexture2DOES);
Ami GONE FROM CHROMIUM
2012/02/07 17:05:40
&& goes on previous line, and indent is wrong.
Please ...
}
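For reference, a version of the check with the && moved to the end of the previous line and the continuation aligned under the open paren, as the reviewer requests (this uses the renamed pointer from the sketch above and is an illustration, not the author's revision):

    static bool AreEGLExtensionsInitialized() {
      return (egl_create_image_khr && egl_destroy_image_khr &&
              gl_egl_image_target_texture_2d_oes);
    }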
Gles2TextureToEglImageTranslator::Gles2TextureToEglImageTranslator() {
@@ -47,6 +51,37 @@
  return hEglImage;
}
+EGLImageKHR Gles2TextureToEglImageTranslator::CreateEglImage(
+    Display* x_display, EGLDisplay egl_display, EGLContext egl_context,
+    EGLSurface egl_surface, uint32 texture, int width, int height) {
+
+  if (!egl_create_image_khr)
+    return EGL_NO_IMAGE_KHR;
+  eglMakeCurrent(egl_display, egl_surface, egl_surface, egl_context);
+
+  EGLint image_attrs[] = { EGL_IMAGE_PRESERVED_KHR, 1 , EGL_NONE };
+
+  glActiveTexture(GL_TEXTURE0);
+  glBindTexture(GL_TEXTURE_2D, texture);
Ami GONE FROM CHROMIUM
2012/02/07 17:05:40
Given the calls below (which seem to bind the eglImage ...
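Context for the comment above: glEGLImageTargetTexture2DOES() (from GL_OES_EGL_image) respecifies the texture currently bound to the given target, so the glActiveTexture()/glBindTexture() pair here is what ties |texture| to the EGLImage created further down. A minimal illustration of that pairing, not taken from the patch:

    glBindTexture(GL_TEXTURE_2D, texture);               // make |texture| current on GL_TEXTURE_2D
    glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, image);  // back the bound texture with |image|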
+
+  Pixmap pixmap = XCreatePixmap(x_display, RootWindow(x_display, 0),
+                                width, height, 32);
+
+  pixmap_.push_back(pixmap);
+
+  EGLImageKHR hEglImage = egl_create_image_khr( egl_display,
+                                                EGL_NO_CONTEXT,
+                                                EGL_NATIVE_PIXMAP_KHR,
+                                                (EGLClientBuffer)pixmap,
+                                                image_attrs);
+  CHECK(hEglImage) << "Failed to eglCreateImageKHR for " << texture
+                   << ", error: 0x" << std::hex << eglGetError();
+
+  glEGL_ImageTargetTexture2DOES( GL_TEXTURE_2D, hEglImage );
+
+  return hEglImage;
+}
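A hedged sketch of how a caller might drive the new overload end to end; the translator and texture_id names are placeholders, not code from this CL:

    Gles2TextureToEglImageTranslator* translator = ...;  // hypothetical caller-side pointer
    EGLImageKHR image = translator->CreateEglImage(
        x_display, egl_display, egl_context, egl_surface,
        texture_id, width, height);
    // ... render/decode into the pixmap-backed texture ...
    translator->DestroyEglImage(x_display, egl_display, image);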
+
uint32 Gles2TextureToEglImageTranslator::TranslateToTexture(
    EGLImageKHR egl_image) {
  // TODO(vhiremath@nvidia.com)
@@ -57,12 +92,27 @@
}
void Gles2TextureToEglImageTranslator::DestroyEglImage(
-    EGLDisplay egl_display, EGLImageKHR egl_image) {
+    Display* x_display, EGLDisplay egl_display, EGLImageKHR egl_image) {
  // Clients of this class will call this method for each EGLImage handle.
  // Actual destroying of the handles is done here.
+  unsigned int i;
  if (!egl_destroy_image_khr) {
    DLOG(ERROR) << "egl_destroy_image_khr failed";
    return;
  }
  egl_destroy_image_khr(egl_display, egl_image);
+
+  for(i=0; i<pixmap_.size(); i++)
Ami GONE FROM CHROMIUM
2012/02/07 17:05:40
IIUC, this is completely broken.
Why not instead s
+  {
+    if(pixmap_[i])
+    {
+      XFreePixmap(x_display, pixmap_[i]);
+      pixmap_[i++] = 0;
+      break;
+    }
+  }
+
+  if(i == pixmap_.size())
+    pixmap_.clear();
+
}
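On the reviewer's point that the bookkeeping above is broken: the loop frees the first non-zero entry in pixmap_ rather than the pixmap that backs egl_image, and the trailing clear() drops any remaining Pixmap handles without ever calling XFreePixmap() on them. The reviewer's suggested alternative is cut off, but one plausible shape for it, sketched here as an assumption (a pixmaps_ map keyed by EGLImageKHR, populated in CreateEglImage()), would be:

    // Hypothetical member, not in this CL: std::map<EGLImageKHR, Pixmap> pixmaps_;
    void Gles2TextureToEglImageTranslator::DestroyEglImage(
        Display* x_display, EGLDisplay egl_display, EGLImageKHR egl_image) {
      if (!egl_destroy_image_khr) {
        DLOG(ERROR) << "eglDestroyImageKHR unavailable";
        return;
      }
      egl_destroy_image_khr(egl_display, egl_image);
      // Free exactly the pixmap that backed this image, then forget it.
      std::map<EGLImageKHR, Pixmap>::iterator it = pixmaps_.find(egl_image);
      if (it != pixmaps_.end()) {
        XFreePixmap(x_display, it->second);
        pixmaps_.erase(it);
      }
    }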