OLD | NEW |
| (Empty) |
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | |
2 // Use of this source code is governed by a BSD-style license that can be | |
3 // found in the LICENSE file. | |
4 | |
5 #include "ui/gfx/gl/gl_context_cgl.h" | |
6 | |
7 #include <OpenGL/CGLRenderers.h> | |
8 #include <vector> | |
9 | |
10 #include "base/debug/trace_event.h" | |
11 #include "base/logging.h" | |
12 #include "ui/gfx/gl/gl_bindings.h" | |
13 #include "ui/gfx/gl/gl_implementation.h" | |
14 #include "ui/gfx/gl/gl_surface_cgl.h" | |
15 | |
16 namespace gfx { | |
17 | |
18 GLContextCGL::GLContextCGL(GLShareGroup* share_group) | |
19 : GLContext(share_group), | |
20 context_(NULL), | |
21 gpu_preference_(PreferIntegratedGpu) { | |
22 } | |
23 | |
// Creates the native CGL context for |compatible_surface|, honoring
// |gpu_preference| and the share group.  Returns false if the preference
// conflicts with a context already in the share group or any CGL call fails.
bool GLContextCGL::Initialize(GLSurface* compatible_surface,
                              GpuPreference gpu_preference) {
  DCHECK(compatible_surface);

  GLContextCGL* share_context = share_group() ?
      static_cast<GLContextCGL*>(share_group()->GetContext()) : NULL;
  if (SupportsDualGpus()) {
    // Ensure the GPU preference is compatible with contexts already in the
    // share group.
    if (share_context && gpu_preference != share_context->GetGpuPreference())
      return false;
  }

  std::vector<CGLPixelFormatAttribute> attribs;
  // On dual-GPU machines, allowing offline renderers lets the pixel format
  // match the integrated GPU instead of forcing the discrete one on.
  bool using_offline_renderer =
      SupportsDualGpus() && gpu_preference == PreferIntegratedGpu;
  if (using_offline_renderer) {
    attribs.push_back(kCGLPFAAllowOfflineRenderers);
  }
  if (GetGLImplementation() == kGLImplementationAppleGL) {
    // Pin the renderer to Apple's generic (software) float renderer when the
    // AppleGL implementation is requested.
    attribs.push_back(kCGLPFARendererID);
    attribs.push_back((CGLPixelFormatAttribute) kCGLRendererGenericFloatID);
  }
  // CGL expects a zero-terminated attribute list.
  attribs.push_back((CGLPixelFormatAttribute) 0);

  CGLPixelFormatObj format;
  GLint num_pixel_formats;
  if (CGLChoosePixelFormat(&attribs.front(),
                           &format,
                           &num_pixel_formats) != kCGLNoError) {
    LOG(ERROR) << "Error choosing pixel format.";
    return false;
  }
  if (!format) {
    LOG(ERROR) << "format == 0.";
    return false;
  }
  DCHECK_NE(num_pixel_formats, 0);

  CGLError res = CGLCreateContext(
      format,
      share_context ?
          static_cast<CGLContextObj>(share_context->GetHandle()) : NULL,
      reinterpret_cast<CGLContextObj*>(&context_));
  // The pixel format is no longer needed once context creation has been
  // attempted, whether or not it succeeded.
  CGLReleasePixelFormat(format);
  if (res != kCGLNoError) {
    LOG(ERROR) << "Error creating context.";
    Destroy();
    return false;
  }

  gpu_preference_ = gpu_preference;
  return true;
}
78 | |
79 void GLContextCGL::Destroy() { | |
80 if (context_) { | |
81 CGLDestroyContext(static_cast<CGLContextObj>(context_)); | |
82 context_ = NULL; | |
83 } | |
84 } | |
85 | |
86 bool GLContextCGL::MakeCurrent(GLSurface* surface) { | |
87 DCHECK(context_); | |
88 if (IsCurrent(surface)) | |
89 return true; | |
90 | |
91 TRACE_EVENT0("gpu", "GLContextCGL::MakeCurrent"); | |
92 | |
93 if (CGLSetCurrentContext( | |
94 static_cast<CGLContextObj>(context_)) != kCGLNoError) { | |
95 LOG(ERROR) << "Unable to make gl context current."; | |
96 return false; | |
97 } | |
98 | |
99 SetCurrent(this, surface); | |
100 if (!InitializeExtensionBindings()) { | |
101 ReleaseCurrent(surface); | |
102 return false; | |
103 } | |
104 | |
105 if (!surface->OnMakeCurrent(this)) { | |
106 LOG(ERROR) << "Unable to make gl context current."; | |
107 return false; | |
108 } | |
109 | |
110 return true; | |
111 } | |
112 | |
113 void GLContextCGL::ReleaseCurrent(GLSurface* surface) { | |
114 if (!IsCurrent(surface)) | |
115 return; | |
116 | |
117 SetCurrent(NULL, NULL); | |
118 CGLSetCurrentContext(NULL); | |
119 } | |
120 | |
121 bool GLContextCGL::IsCurrent(GLSurface* surface) { | |
122 bool native_context_is_current = CGLGetCurrentContext() == context_; | |
123 | |
124 // If our context is current then our notion of which GLContext is | |
125 // current must be correct. On the other hand, third-party code | |
126 // using OpenGL might change the current context. | |
127 DCHECK(!native_context_is_current || (GetCurrent() == this)); | |
128 | |
129 if (!native_context_is_current) | |
130 return false; | |
131 | |
132 return true; | |
133 } | |
134 | |
// Returns the underlying CGLContextObj as an opaque pointer (NULL before
// Initialize() or after Destroy()).
void* GLContextCGL::GetHandle() {
  return context_;
}
138 | |
139 void GLContextCGL::SetSwapInterval(int interval) { | |
140 DCHECK(IsCurrent(NULL)); | |
141 LOG(WARNING) << "GLContex: GLContextCGL::SetSwapInterval is ignored."; | |
142 } | |
143 | |
// Ensures the native CGL context is destroyed when the wrapper goes away.
GLContextCGL::~GLContextCGL() {
  Destroy();
}
147 | |
// Returns the GPU preference recorded by Initialize() (defaults to
// PreferIntegratedGpu before initialization).
GpuPreference GLContextCGL::GetGpuPreference() {
  return gpu_preference_;
}
151 | |
// Wakes the discrete GPU by creating a pixel format whose attribute list
// omits kCGLPFAAllowOfflineRenderers (contrast Initialize(), which adds that
// attribute to stay on the integrated GPU).  The format object is kept alive
// for the lifetime of the process — presumably so the discrete GPU stays
// powered on; releasing it would undo the effect.
void GLContextCGL::ForceUseOfDiscreteGPU() {
  // Non-NULL |format| means a previous call already did the work.
  // NOTE(review): function-local static init is not thread-safe pre-C++11;
  // assumes callers are on a single thread — confirm.
  static CGLPixelFormatObj format = NULL;
  if (format)
    return;
  CGLPixelFormatAttribute attribs[1];
  attribs[0] = static_cast<CGLPixelFormatAttribute>(0);  // Empty list.
  GLint num_pixel_formats = 0;
  CGLChoosePixelFormat(attribs, &format, &num_pixel_formats);
  // format is deliberately leaked.
}
162 | |
163 } // namespace gfx | |
OLD | NEW |