Chromium Code Reviews

Unified Diff: content/common/gpu/media/v4l2_video_decode_accelerator.cc

Issue 137023008: Add support for Tegra V4L2 VDA (Closed)
Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Created 6 years, 9 months ago
Index: content/common/gpu/media/v4l2_video_decode_accelerator.cc
diff --git a/content/common/gpu/media/v4l2_video_decode_accelerator.cc b/content/common/gpu/media/v4l2_video_decode_accelerator.cc
index 68baaa85c845c91dda916e544ddd9e15e27431a3..b4a603c0e8b41a308ad98e85b07b9a9560076134 100644
--- a/content/common/gpu/media/v4l2_video_decode_accelerator.cc
+++ b/content/common/gpu/media/v4l2_video_decode_accelerator.cc
@@ -357,9 +357,8 @@ void V4L2VideoDecodeAccelerator::AssignPictureBuffers(
attrs[13] = output_record.fds[1];
attrs[15] = 0;
attrs[17] = frame_buffer_size_.width();
-
- EGLImageKHR egl_image = eglCreateImageKHR(
- egl_display_, EGL_NO_CONTEXT, EGL_LINUX_DMA_BUF_EXT, NULL, attrs);
+ EGLImageKHR egl_image = device_->CreateEGLImage(
+ egl_display_, attrs, buffers[i].texture_id(), i);
if (egl_image == EGL_NO_IMAGE_KHR) {
DLOG(ERROR) << "AssignPictureBuffers(): could not create EGLImageKHR";
// Ownership of EGLImages allocated in previous iterations of this loop
@@ -369,9 +368,6 @@ void V4L2VideoDecodeAccelerator::AssignPictureBuffers(
return;
}
- glBindTexture(GL_TEXTURE_EXTERNAL_OES, buffers[i].texture_id());
- glEGLImageTargetTexture2DOES(GL_TEXTURE_EXTERNAL_OES, egl_image);
-
output_record.egl_image = egl_image;
output_record.picture_id = buffers[i].id();
free_output_buffers_.push(i);
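
[Note on the hunk above: the removed lines inlined the Exynos-only path — create an EGLImage from the exported dmabuf, then attach it to the client's texture. A minimal sketch of what the new V4L2Device::CreateEGLImage hook could look like on Exynos, reconstructed from the removed code; the class name and body are assumptions based on this diff, not necessarily what landed:

// Sketch only: an Exynos-style implementation of the new hook, reconstructed
// from the code this patch removes.
EGLImageKHR ExynosV4L2Device::CreateEGLImage(EGLDisplay egl_display,
                                             EGLint* attrs,
                                             GLuint texture_id,
                                             unsigned int /* buffer_index */) {
  // Wrap the exported dmabuf plane(s) in an EGLImage, as the old inline
  // code did.
  EGLImageKHR egl_image = eglCreateImageKHR(
      egl_display, EGL_NO_CONTEXT, EGL_LINUX_DMA_BUF_EXT, NULL, attrs);
  if (egl_image == EGL_NO_IMAGE_KHR)
    return EGL_NO_IMAGE_KHR;
  // Attach the image to the client's texture; previously the caller did
  // this itself in AssignPictureBuffers().
  glBindTexture(GL_TEXTURE_EXTERNAL_OES, texture_id);
  glEGLImageTargetTexture2DOES(GL_TEXTURE_EXTERNAL_OES, egl_image);
  return egl_image;
}

A Tegra device would presumably attach the image through its own driver path behind the same hook, which is what makes the indirection worthwhile.]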
@@ -712,10 +708,18 @@ bool V4L2VideoDecodeAccelerator::DecodeBufferInitial(
DCHECK(!device_poll_thread_.IsRunning());
// Initial decode. We haven't been able to get output stream format info yet.
// Get it, and start decoding.
+ bool no_free_input_buffers = false;
// Copy in and send to HW.
- if (!AppendToInputFrame(data, size))
+ if (!AppendToInputFrame(data, size)) {
+ if (free_input_buffers_.empty()) {
+ DVLOG(2) << "AppendToInputFrame(): no free input buffers";
+ // No free input buffers, try and see if format info is set.
+ no_free_input_buffers = true;
+ goto chk_format_info;
sheu 2014/03/07 00:18:08 We seem to be allergic to "goto" statements in Chromium…
shivdasp 2014/03/07 17:31:40 Jumping to GetFormatInfo() will move us out of kIn…
sheu 2014/03/07 20:31:50 Ah, I see how it is. So if I have this right: the…
sheu 2014/03/07 21:36:29 Random thought for posciak@: if we make the decode…
shivdasp 2014/03/10 05:58:13 Ohh that's why this never happens on Exynos. Alright…
+ }
return false;
+ }
// If we only have a partial frame, don't flush and process yet.
if (decoder_partial_frame_pending_)
@@ -727,6 +731,7 @@ bool V4L2VideoDecodeAccelerator::DecodeBufferInitial(
// Recycle buffers.
Dequeue();
+chk_format_info:
// Check and see if we have format info yet.
struct v4l2_format format;
bool again = false;
@@ -735,6 +740,11 @@ bool V4L2VideoDecodeAccelerator::DecodeBufferInitial(
if (again) {
// Need more stream to decode format, return true and schedule next buffer.
+ if (no_free_input_buffers) {
+ // This buffer has not been consumed, so schedule another one.
+ *endpos = 0;
+ return true;
+ }
*endpos = size;
return true;
}
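
[Note on the goto thread above: a hedged sketch of one goto-free shape for this flow, tracking how much of the buffer was consumed instead of jumping to the format check. Names and the *endpos convention are taken from this diff and the GetFormatInfo() helper mentioned in the comments; the structure is only a suggestion, not what ultimately landed:

// Sketch only: a goto-free variant of the flow in these hunks.
bool V4L2VideoDecodeAccelerator::DecodeBufferInitial(
    const void* data, size_t size, size_t* endpos) {
  size_t consumed = size;

  if (!AppendToInputFrame(data, size)) {
    if (!free_input_buffers_.empty())
      return false;  // A real failure, not input-queue back-pressure.
    DVLOG(2) << "AppendToInputFrame(): no free input buffers";
    // Every input buffer is queued to HW, so the driver may already have
    // enough stream to report the output format. Fall through to the
    // format check without consuming this buffer.
    consumed = 0;
  } else {
    // If we only have a partial frame, don't flush and process yet.
    if (decoder_partial_frame_pending_) {
      *endpos = size;
      return true;
    }
    if (!FlushInputFrame())
      return false;
    Dequeue();  // Recycle buffers.
  }

  // Check and see if we have format info yet.
  struct v4l2_format format;
  bool again = false;
  if (!GetFormatInfo(&format, &again))
    return false;
  if (again) {
    // Need more stream to decode format; reschedule. *endpos == 0 means
    // this buffer was not consumed and will be resubmitted.
    *endpos = consumed;
    return true;
  }
  // Format is known: create output buffers and start decoding, as in the
  // remainder of the original function.
  *endpos = consumed;
  return true;
}]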
@@ -1758,7 +1768,7 @@ bool V4L2VideoDecodeAccelerator::CreateOutputBuffers() {
client_,
output_buffer_map_.size(),
frame_buffer_size_,
- GL_TEXTURE_EXTERNAL_OES));
+ device_->GetTextureTarget()));
// Wait for the client to call AssignPictureBuffers() on the Child thread.
// We do this, because if we continue decoding without finishing buffer
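
[Note on the last hunk: the texture target also becomes device-specific. A minimal sketch of the hook, where the Exynos value comes from the line this patch replaces and the Tegra value is an assumption, on the premise that Tegra's driver-backed images attach to ordinary 2D textures rather than external-image ones:

// Sketch only: per-device texture target, so the client creates textures
// that the device's EGLImages can bind to.
uint32 ExynosV4L2Device::GetTextureTarget() {
  return GL_TEXTURE_EXTERNAL_OES;  // dmabuf-backed EGLImages (replaced line).
}

uint32 TegraV4L2Device::GetTextureTarget() {
  return GL_TEXTURE_2D;  // Assumed: Tegra decodes into regular 2D textures.
}]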
