Index: gpu/command_buffer/service/gles2_cmd_decoder.cc |
diff --git a/gpu/command_buffer/service/gles2_cmd_decoder.cc b/gpu/command_buffer/service/gles2_cmd_decoder.cc |
index fba0b90ed602633fe7e2a6d5b1fb67304fa51fa9..22b58c00ae3112c671b57b8d8aa88a3053953263 100644 |
--- a/gpu/command_buffer/service/gles2_cmd_decoder.cc |
+++ b/gpu/command_buffer/service/gles2_cmd_decoder.cc |
@@ -218,6 +218,50 @@ bool CombineAdjacentRects(const gfx::Rect& rect1, |
return false; |
} |
+// Picks the ANGLE translator output language (ShShaderOutput) matching the |
+// given GL context: ES contexts get ESSL; desktop GL contexts get the |
+// core-profile GLSL version introduced with that GL version (GL 3.0, 3.1, |
+// 3.2 map to GLSL 1.30, 1.40, 1.50; GL 3.3 and 4.x map to the |
+// like-numbered GLSL). Anything older or unrecognized falls back to |
+// compatibility-profile GLSL. |
+ShShaderOutput GetShaderOutputLanguageForContext( |
Ken Russell (switch to Gerrit)
2015/08/05 18:29:37
Please figure out some way to test this code. Perh
Kimmo Kinnunen
2015/08/10 08:36:34
I struggled a bit adding the return type (ShShader
|
+ const gfx::GLVersionInfo& version_info) { |
+ if (version_info.is_es) { |
+ // Every OpenGL ES context compiles to ESSL, regardless of version. |
+ return SH_ESSL_OUTPUT; |
+ } |
+ |
+ // Determine the GLSL version based on OpenGL specification. |
+ |
+ // Encode major.minor as e.g. 4.3 -> 430 so the value lines up with the |
+ // GLSL version numbers compared against below. |
+ unsigned context_version = |
+ version_info.major_version * 100 + version_info.minor_version * 10; |
+ if (context_version >= 450) { |
+ // OpenGL specs from 4.2 on specify that the core profile is "also |
+ // guaranteed to support all previous versions of the OpenGL Shading |
+ // Language back to version 1.40". For simplicity, we assume future |
+ // specs do not unspecify this. If they did, they could unspecify |
+ // glGetStringi(GL_SHADING_LANGUAGE_VERSION, k), too. |
+ // Since current context >= 4.5, use GLSL 4.50 core. |
+ return SH_GLSL_450_CORE_OUTPUT; |
+ } else if (context_version == 440) { |
+ return SH_GLSL_440_CORE_OUTPUT; |
+ } else if (context_version == 430) { |
+ return SH_GLSL_430_CORE_OUTPUT; |
+ } else if (context_version == 420) { |
+ return SH_GLSL_420_CORE_OUTPUT; |
+ } else if (context_version == 410) { |
+ return SH_GLSL_410_CORE_OUTPUT; |
+ } else if (context_version == 400) { |
+ return SH_GLSL_400_CORE_OUTPUT; |
+ } else if (context_version == 330) { |
+ return SH_GLSL_330_CORE_OUTPUT; |
+ } else if (context_version == 320) { |
+ // GL versions before 3.3 predate the matching-number scheme: GL 3.2 |
+ // introduced GLSL 1.50, 3.1 introduced 1.40, 3.0 introduced 1.30. |
+ return SH_GLSL_150_CORE_OUTPUT; |
+ } else if (context_version == 310) { |
+ return SH_GLSL_140_OUTPUT; |
+ } else if (context_version == 300) { |
+ return SH_GLSL_130_OUTPUT; |
+ } |
+ |
+ // Before OpenGL 3.0 we use compatibility profile. Also for future |
+ // specs between OpenGL 3.3 and OpenGL 4.0, at the time of writing, |
+ // we use compatibility profile. |
+ return SH_GLSL_COMPATIBILITY_OUTPUT; |
+} |
+ |
} // namespace |
class GLES2DecoderImpl; |
@@ -3239,9 +3283,10 @@ bool GLES2DecoderImpl::InitializeShaderTranslator() { |
resources.HashFunction = &CityHash64; |
else |
resources.HashFunction = NULL; |
- ShaderTranslatorInterface::GlslImplementationType implementation_type = |
- gfx::GetGLImplementation() == gfx::kGLImplementationEGLGLES2 ? |
- ShaderTranslatorInterface::kGlslES : ShaderTranslatorInterface::kGlsl; |
+ |
+ ShShaderOutput shader_output_language = |
+ GetShaderOutputLanguageForContext(feature_info_->gl_version_info()); |
+ |
int driver_bug_workarounds = 0; |
if (workarounds().needs_glsl_built_in_function_emulation) |
driver_bug_workarounds |= SH_EMULATE_BUILT_IN_FUNCTIONS; |
@@ -3265,10 +3310,7 @@ bool GLES2DecoderImpl::InitializeShaderTranslator() { |
resources.WEBGL_debug_shader_precision = true; |
vertex_translator_ = shader_translator_cache()->GetTranslator( |
- GL_VERTEX_SHADER, |
- shader_spec, |
- &resources, |
- implementation_type, |
+ GL_VERTEX_SHADER, shader_spec, &resources, shader_output_language, |
static_cast<ShCompileOptions>(driver_bug_workarounds)); |
if (!vertex_translator_.get()) { |
LOG(ERROR) << "Could not initialize vertex shader translator."; |
@@ -3277,10 +3319,7 @@ bool GLES2DecoderImpl::InitializeShaderTranslator() { |
} |
fragment_translator_ = shader_translator_cache()->GetTranslator( |
- GL_FRAGMENT_SHADER, |
- shader_spec, |
- &resources, |
- implementation_type, |
+ GL_FRAGMENT_SHADER, shader_spec, &resources, shader_output_language, |
static_cast<ShCompileOptions>(driver_bug_workarounds)); |
if (!fragment_translator_.get()) { |
LOG(ERROR) << "Could not initialize fragment shader translator."; |