OLD | NEW |
1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "media/gpu/android_deferred_rendering_backing_strategy.h" | 5 #include "media/gpu/android_deferred_rendering_backing_strategy.h" |
6 | 6 |
7 #include <EGL/egl.h> | 7 #include <EGL/egl.h> |
8 #include <EGL/eglext.h> | 8 #include <EGL/eglext.h> |
9 | 9 |
10 #include "base/android/build_info.h" | 10 #include "base/android/build_info.h" |
(...skipping 31 matching lines...) |
42 shared_state_ = new AVDASharedState(); | 42 shared_state_ = new AVDASharedState(); |
43 | 43 |
44 bool using_virtual_context = false; | 44 bool using_virtual_context = false; |
45 if (gl::GLContext* context = gl::GLContext::GetCurrent()) { | 45 if (gl::GLContext* context = gl::GLContext::GetCurrent()) { |
46 if (gl::GLShareGroup* share_group = context->share_group()) | 46 if (gl::GLShareGroup* share_group = context->share_group()) |
47 using_virtual_context = !!share_group->GetSharedContext(); | 47 using_virtual_context = !!share_group->GetSharedContext(); |
48 } | 48 } |
49 UMA_HISTOGRAM_BOOLEAN("Media.AVDA.VirtualContext", using_virtual_context); | 49 UMA_HISTOGRAM_BOOLEAN("Media.AVDA.VirtualContext", using_virtual_context); |
50 | 50 |
51 // Acquire the SurfaceView surface if given a valid id. | 51 // Acquire the SurfaceView surface if given a valid id. |
52 if (surface_view_id != media::VideoDecodeAccelerator::Config::kNoSurfaceID) { | 52 if (surface_view_id != VideoDecodeAccelerator::Config::kNoSurfaceID) { |
53 return gpu::GpuSurfaceLookup::GetInstance()->AcquireJavaSurface( | 53 return gpu::GpuSurfaceLookup::GetInstance()->AcquireJavaSurface( |
54 surface_view_id); | 54 surface_view_id); |
55 } | 55 } |
56 | 56 |
57 // Create a SurfaceTexture. | 57 // Create a SurfaceTexture. |
58 GLuint service_id = 0; | 58 GLuint service_id = 0; |
59 surface_texture_ = state_provider_->CreateAttachedSurfaceTexture(&service_id); | 59 surface_texture_ = state_provider_->CreateAttachedSurfaceTexture(&service_id); |
60 shared_state_->SetSurfaceTexture(surface_texture_, service_id); | 60 shared_state_->SetSurfaceTexture(surface_texture_, service_id); |
61 return gl::ScopedJavaSurface(surface_texture_.get()); | 61 return gl::ScopedJavaSurface(surface_texture_.get()); |
62 } | 62 } |
(...skipping 27 matching lines...) |
90 // For SurfaceView, request a 1x1 2D texture to reduce memory during | 90 // For SurfaceView, request a 1x1 2D texture to reduce memory during |
91 // initialization. For SurfaceTexture, allocate a picture buffer that is the | 91 // initialization. For SurfaceTexture, allocate a picture buffer that is the |
92 // actual frame size. Note that it will be an external texture anyway, so it | 92 // actual frame size. Note that it will be an external texture anyway, so it |
93 // doesn't allocate an image of that size. However, it's still important to | 93 // doesn't allocate an image of that size. However, it's still important to |
94 // get the coded size right, so that VideoLayerImpl doesn't try to scale the | 94 // get the coded size right, so that VideoLayerImpl doesn't try to scale the |
95 // texture when building the quad for it. | 95 // texture when building the quad for it. |
96 return surface_texture_ ? state_provider_->GetSize() : gfx::Size(1, 1); | 96 return surface_texture_ ? state_provider_->GetSize() : gfx::Size(1, 1); |
97 } | 97 } |
98 | 98 |
99 void AndroidDeferredRenderingBackingStrategy::SetImageForPicture( | 99 void AndroidDeferredRenderingBackingStrategy::SetImageForPicture( |
100 const media::PictureBuffer& picture_buffer, | 100 const PictureBuffer& picture_buffer, |
101 const scoped_refptr<gpu::gles2::GLStreamTextureImage>& image) { | 101 const scoped_refptr<gpu::gles2::GLStreamTextureImage>& image) { |
102 gpu::gles2::TextureRef* texture_ref = | 102 gpu::gles2::TextureRef* texture_ref = |
103 state_provider_->GetTextureForPicture(picture_buffer); | 103 state_provider_->GetTextureForPicture(picture_buffer); |
104 RETURN_IF_NULL(texture_ref); | 104 RETURN_IF_NULL(texture_ref); |
105 | 105 |
106 gpu::gles2::TextureManager* texture_manager = | 106 gpu::gles2::TextureManager* texture_manager = |
107 state_provider_->GetGlDecoder()->GetContextGroup()->texture_manager(); | 107 state_provider_->GetGlDecoder()->GetContextGroup()->texture_manager(); |
108 RETURN_IF_NULL(texture_manager); | 108 RETURN_IF_NULL(texture_manager); |
109 | 109 |
110 // Default to zero which will clear the stream texture service id if one was | 110 // Default to zero which will clear the stream texture service id if one was |
(...skipping 27 matching lines...) |
138 // matter. | 138 // matter. |
139 if (image && !surface_texture_) | 139 if (image && !surface_texture_) |
140 image_state = gpu::gles2::Texture::BOUND; | 140 image_state = gpu::gles2::Texture::BOUND; |
141 texture_manager->SetLevelStreamTextureImage(texture_ref, GetTextureTarget(), | 141 texture_manager->SetLevelStreamTextureImage(texture_ref, GetTextureTarget(), |
142 0, image.get(), image_state, | 142 0, image.get(), image_state, |
143 stream_texture_service_id); | 143 stream_texture_service_id); |
144 } | 144 } |
145 | 145 |
146 void AndroidDeferredRenderingBackingStrategy::UseCodecBufferForPictureBuffer( | 146 void AndroidDeferredRenderingBackingStrategy::UseCodecBufferForPictureBuffer( |
147 int32_t codec_buf_index, | 147 int32_t codec_buf_index, |
148 const media::PictureBuffer& picture_buffer) { | 148 const PictureBuffer& picture_buffer) { |
149 // Make sure that the decoder is available. | 149 // Make sure that the decoder is available. |
150 RETURN_IF_NULL(state_provider_->GetGlDecoder()); | 150 RETURN_IF_NULL(state_provider_->GetGlDecoder()); |
151 | 151 |
152 // Notify the AVDACodecImage for picture_buffer that it should use the | 152 // Notify the AVDACodecImage for picture_buffer that it should use the |
153 // decoded buffer codec_buf_index to render this frame. | 153 // decoded buffer codec_buf_index to render this frame. |
154 AVDACodecImage* avda_image = | 154 AVDACodecImage* avda_image = |
155 shared_state_->GetImageForPicture(picture_buffer.id()); | 155 shared_state_->GetImageForPicture(picture_buffer.id()); |
156 RETURN_IF_NULL(avda_image); | 156 RETURN_IF_NULL(avda_image); |
157 | 157 |
158 // Note that this is not a race, since we do not re-use a PictureBuffer | 158 // Note that this is not a race, since we do not re-use a PictureBuffer |
159 // until after the CC is done drawing it. | 159 // until after the CC is done drawing it. |
160 pictures_out_for_display_.push_back(picture_buffer.id()); | 160 pictures_out_for_display_.push_back(picture_buffer.id()); |
161 avda_image->set_media_codec_buffer_index(codec_buf_index); | 161 avda_image->set_media_codec_buffer_index(codec_buf_index); |
162 avda_image->set_size(state_provider_->GetSize()); | 162 avda_image->set_size(state_provider_->GetSize()); |
163 | 163 |
164 MaybeRenderEarly(); | 164 MaybeRenderEarly(); |
165 } | 165 } |
166 | 166 |
167 void AndroidDeferredRenderingBackingStrategy::AssignOnePictureBuffer( | 167 void AndroidDeferredRenderingBackingStrategy::AssignOnePictureBuffer( |
168 const media::PictureBuffer& picture_buffer, | 168 const PictureBuffer& picture_buffer, |
169 bool have_context) { | 169 bool have_context) { |
170 // Attach a GLImage to each texture that will use the surface texture. | 170 // Attach a GLImage to each texture that will use the surface texture. |
171 // We use a refptr here in case SetImageForPicture fails. | 171 // We use a refptr here in case SetImageForPicture fails. |
172 scoped_refptr<gpu::gles2::GLStreamTextureImage> gl_image = | 172 scoped_refptr<gpu::gles2::GLStreamTextureImage> gl_image = |
173 new AVDACodecImage(picture_buffer.id(), shared_state_, media_codec_, | 173 new AVDACodecImage(picture_buffer.id(), shared_state_, media_codec_, |
174 state_provider_->GetGlDecoder(), surface_texture_); | 174 state_provider_->GetGlDecoder(), surface_texture_); |
175 SetImageForPicture(picture_buffer, gl_image); | 175 SetImageForPicture(picture_buffer, gl_image); |
176 | 176 |
177 if (!surface_texture_ && have_context) { | 177 if (!surface_texture_ && have_context) { |
178 // To make devtools work, we're using a 2D texture. Make it transparent, | 178 // To make devtools work, we're using a 2D texture. Make it transparent, |
179 // so that it draws a hole for the SV to show through. This is only | 179 // so that it draws a hole for the SV to show through. This is only |
180 // because devtools draws and reads back, which skips overlay processing. | 180 // because devtools draws and reads back, which skips overlay processing. |
181 // It's unclear why devtools renders twice -- once normally, and once | 181 // It's unclear why devtools renders twice -- once normally, and once |
182 // including a readback layer. The result is that the device screen | 182 // including a readback layer. The result is that the device screen |
183 // flashes as we alternately draw the overlay hole and this texture, | 183 // flashes as we alternately draw the overlay hole and this texture, |
184 // unless we make the texture transparent. | 184 // unless we make the texture transparent. |
185 static const uint8_t rgba[] = {0, 0, 0, 0}; | 185 static const uint8_t rgba[] = {0, 0, 0, 0}; |
186 const gfx::Size size(1, 1); | 186 const gfx::Size size(1, 1); |
187 DCHECK_LE(1u, picture_buffer.texture_ids().size()); | 187 DCHECK_LE(1u, picture_buffer.texture_ids().size()); |
188 glBindTexture(GL_TEXTURE_2D, picture_buffer.texture_ids()[0]); | 188 glBindTexture(GL_TEXTURE_2D, picture_buffer.texture_ids()[0]); |
189 glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, size.width(), size.height(), 0, | 189 glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, size.width(), size.height(), 0, |
190 GL_RGBA, GL_UNSIGNED_BYTE, rgba); | 190 GL_RGBA, GL_UNSIGNED_BYTE, rgba); |
191 } | 191 } |
192 } | 192 } |
193 | 193 |
194 void AndroidDeferredRenderingBackingStrategy::ReleaseCodecBufferForPicture( | 194 void AndroidDeferredRenderingBackingStrategy::ReleaseCodecBufferForPicture( |
195 const media::PictureBuffer& picture_buffer) { | 195 const PictureBuffer& picture_buffer) { |
196 AVDACodecImage* avda_image = | 196 AVDACodecImage* avda_image = |
197 shared_state_->GetImageForPicture(picture_buffer.id()); | 197 shared_state_->GetImageForPicture(picture_buffer.id()); |
198 RETURN_IF_NULL(avda_image); | 198 RETURN_IF_NULL(avda_image); |
199 avda_image->UpdateSurface(AVDACodecImage::UpdateMode::DISCARD_CODEC_BUFFER); | 199 avda_image->UpdateSurface(AVDACodecImage::UpdateMode::DISCARD_CODEC_BUFFER); |
200 } | 200 } |
201 | 201 |
202 void AndroidDeferredRenderingBackingStrategy::ReuseOnePictureBuffer( | 202 void AndroidDeferredRenderingBackingStrategy::ReuseOnePictureBuffer( |
203 const media::PictureBuffer& picture_buffer) { | 203 const PictureBuffer& picture_buffer) { |
204 pictures_out_for_display_.erase( | 204 pictures_out_for_display_.erase( |
205 std::remove(pictures_out_for_display_.begin(), | 205 std::remove(pictures_out_for_display_.begin(), |
206 pictures_out_for_display_.end(), picture_buffer.id()), | 206 pictures_out_for_display_.end(), picture_buffer.id()), |
207 pictures_out_for_display_.end()); | 207 pictures_out_for_display_.end()); |
208 | 208 |
209 // At this point, the CC must be done with the picture. We can't really | 209 // At this point, the CC must be done with the picture. We can't really |
210 // check for that here directly. It's guaranteed in gpu_video_decoder.cc, | 210 // check for that here directly. It's guaranteed in gpu_video_decoder.cc, |
211 // when it waits on the sync point before releasing the mailbox. That sync | 211 // when it waits on the sync point before releasing the mailbox. That sync |
212 // point is inserted by destroying the resource in VideoLayerImpl::DidDraw. | 212 // point is inserted by destroying the resource in VideoLayerImpl::DidDraw. |
213 ReleaseCodecBufferForPicture(picture_buffer); | 213 ReleaseCodecBufferForPicture(picture_buffer); |
214 MaybeRenderEarly(); | 214 MaybeRenderEarly(); |
215 } | 215 } |
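ReuseOnePictureBuffer relies on the standard erase-remove idiom at lines 204-207: std::remove only compacts the surviving elements toward the front and returns the new logical end, and the container's erase() call is what actually shrinks the vector. A minimal standalone sketch of the same pattern (the names here are illustrative, not taken from the patch):

    #include <algorithm>
    #include <cstdint>
    #include <vector>

    // Drop every occurrence of |id| from |out_for_display|. std::remove shifts
    // the kept elements to the front and returns an iterator to the new
    // logical end; erase() then trims the leftover tail.
    void ErasePictureId(std::vector<int32_t>* out_for_display, int32_t id) {
      out_for_display->erase(
          std::remove(out_for_display->begin(), out_for_display->end(), id),
          out_for_display->end());
    }

Erasing with the iterator returned by std::remove keeps the whole operation a single O(n) pass over the vector.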
216 | 216 |
217 void AndroidDeferredRenderingBackingStrategy::ReleaseCodecBuffers( | 217 void AndroidDeferredRenderingBackingStrategy::ReleaseCodecBuffers( |
218 const AndroidVideoDecodeAccelerator::OutputBufferMap& buffers) { | 218 const AndroidVideoDecodeAccelerator::OutputBufferMap& buffers) { |
219 for (const std::pair<int, media::PictureBuffer>& entry : buffers) | 219 for (const std::pair<int, PictureBuffer>& entry : buffers) |
220 ReleaseCodecBufferForPicture(entry.second); | 220 ReleaseCodecBufferForPicture(entry.second); |
221 } | 221 } |
222 | 222 |
223 void AndroidDeferredRenderingBackingStrategy::MaybeRenderEarly() { | 223 void AndroidDeferredRenderingBackingStrategy::MaybeRenderEarly() { |
224 if (pictures_out_for_display_.empty()) | 224 if (pictures_out_for_display_.empty()) |
225 return; | 225 return; |
226 | 226 |
227 // See if we can consume the front buffer / render to the SurfaceView. Iterate | 227 // See if we can consume the front buffer / render to the SurfaceView. Iterate |
228 // in reverse to find the most recent front buffer. If none is found, the | 228 // in reverse to find the most recent front buffer. If none is found, the |
229 // |front_index| will point to the beginning of the array. | 229 // |front_index| will point to the beginning of the array. |
(...skipping 39 matching lines...) |
269 return; | 269 return; |
270 } | 270 } |
271 | 271 |
272 // Due to the loop at the beginning, this should never be true. | 272 // Due to the loop at the beginning, this should never be true. |
273 DCHECK(!first_renderable_image->was_rendered_to_front_buffer()); | 273 DCHECK(!first_renderable_image->was_rendered_to_front_buffer()); |
274 first_renderable_image->UpdateSurface( | 274 first_renderable_image->UpdateSurface( |
275 AVDACodecImage::UpdateMode::RENDER_TO_BACK_BUFFER); | 275 AVDACodecImage::UpdateMode::RENDER_TO_BACK_BUFFER); |
276 } | 276 } |
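Most of MaybeRenderEarly's body is elided in this hunk. Purely as an illustration of the reverse scan described in the comment at lines 227-229, and not a reproduction of the skipped code, here is a self-contained sketch that finds the most recent image already rendered to the front buffer and falls back to the beginning of the array when none is found; the Image struct and FindFrontIndex name are hypothetical stand-ins:

    #include <cstddef>
    #include <vector>

    struct Image {
      bool rendered_to_front = false;
      bool was_rendered_to_front_buffer() const { return rendered_to_front; }
    };

    // Walk the out-for-display list from newest to oldest; the last image that
    // already reached the front buffer marks where early rendering can resume.
    // If no such image exists, |front_index| stays at the start of the array.
    size_t FindFrontIndex(const std::vector<Image>& images) {
      size_t front_index = 0;
      for (size_t i = images.size(); i > 0; --i) {
        if (images[i - 1].was_rendered_to_front_buffer()) {
          front_index = i - 1;
          break;
        }
      }
      return front_index;
    }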
277 | 277 |
278 void AndroidDeferredRenderingBackingStrategy::CodecChanged( | 278 void AndroidDeferredRenderingBackingStrategy::CodecChanged( |
279 media::VideoCodecBridge* codec) { | 279 VideoCodecBridge* codec) { |
280 media_codec_ = codec; | 280 media_codec_ = codec; |
281 shared_state_->CodecChanged(codec); | 281 shared_state_->CodecChanged(codec); |
282 } | 282 } |
283 | 283 |
284 void AndroidDeferredRenderingBackingStrategy::OnFrameAvailable() { | 284 void AndroidDeferredRenderingBackingStrategy::OnFrameAvailable() { |
285 shared_state_->SignalFrameAvailable(); | 285 shared_state_->SignalFrameAvailable(); |
286 } | 286 } |
287 | 287 |
288 bool AndroidDeferredRenderingBackingStrategy::ArePicturesOverlayable() { | 288 bool AndroidDeferredRenderingBackingStrategy::ArePicturesOverlayable() { |
289 // SurfaceView frames are always overlayable because that's the only way to | 289 // SurfaceView frames are always overlayable because that's the only way to |
290 // display them. | 290 // display them. |
291 return !surface_texture_; | 291 return !surface_texture_; |
292 } | 292 } |
293 | 293 |
294 void AndroidDeferredRenderingBackingStrategy::UpdatePictureBufferSize( | 294 void AndroidDeferredRenderingBackingStrategy::UpdatePictureBufferSize( |
295 media::PictureBuffer* picture_buffer, | 295 PictureBuffer* picture_buffer, |
296 const gfx::Size& new_size) { | 296 const gfx::Size& new_size) { |
297 // This strategy uses EGL images which manage the texture size for us. We | 297 // This strategy uses EGL images which manage the texture size for us. We |
298 // simply update the PictureBuffer meta-data and leave the texture as-is. | 298 // simply update the PictureBuffer meta-data and leave the texture as-is. |
299 picture_buffer->set_size(new_size); | 299 picture_buffer->set_size(new_size); |
300 } | 300 } |
301 | 301 |
302 } // namespace media | 302 } // namespace media |