Chromium Code Reviews

Side by Side Diff: content/common/gpu/media/exynos_video_decode_accelerator.cc

Issue 11198060: VDA implementation for Exynos, using V4L2 (Closed) Base URL: https://git.chromium.org/git/chromium/src@git-svn
Patch Set: Bug number update. Created 7 years, 11 months ago
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include <dlfcn.h>
6 #include <errno.h>
7 #include <fcntl.h>
8 #include <linux/videodev2.h>
9 #include <poll.h>
10 #include <sys/eventfd.h>
11 #include <sys/ioctl.h>
12 #include <sys/mman.h>
13
14 #include "base/bind.h"
15 #include "base/debug/trace_event.h"
16 #include "base/message_loop.h"
17 #include "base/message_loop_proxy.h"
18 #include "base/shared_memory.h"
19 #include "content/common/gpu/media/exynos_video_decode_accelerator.h"
20 #include "content/common/gpu/media/h264_parser.h"
21 #include "third_party/angle/include/GLES2/gl2.h"
22
23 namespace content {
24
25 #define EXYNOS_MFC_DEVICE "/dev/mfc-dec"
piman 2013/01/12 03:24:58 nit: const char [] kExynosMfcDevice = "/dev/mfc-de
sheu 2013/01/14 23:49:49 Done.
26 #define EXYNOS_GSC_DEVICE "/dev/gsc1"
27 #define EXYNOS_MALI_DRIVER "libmali.so"
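
A minimal sketch of the constant style piman suggests in the thread above, replacing the #defines with typed constants (kExynosMfcDevice is the reviewer's name; the other two names are assumed by analogy):

  const char kExynosMfcDevice[] = "/dev/mfc-dec";   // MFC hardware decoder node
  const char kExynosGscDevice[] = "/dev/gsc1";      // GSC post-processor node
  const char kExynosMaliDriver[] = "libmali.so";    // Mali driver to dlopen() for EGL/GLES entry points
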
28
29 #define NOTIFY_ERROR(x) \
30 do { \
31 SetDecoderState(kError); \
32 DLOG(ERROR) << "calling NotifyError(): " << x; \
33 NotifyError(x); \
34 } while (0)
35
36 #define IOCTL_OR_ERROR_RETURN(fd, type, arg) \
37 do { \
38 if (ioctl(fd, type, arg) != 0) { \
39 DPLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \
40 NOTIFY_ERROR(PLATFORM_FAILURE); \
41 return; \
42 } \
43 } while (0)
44
45 #define IOCTL_OR_ERROR_RETURN_FALSE(fd, type, arg) \
46 do { \
47 if (ioctl(fd, type, arg) != 0) { \
48 DPLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \
49 NOTIFY_ERROR(PLATFORM_FAILURE); \
50 return false; \
51 } \
52 } while (0)
53
54 #define POSTSANDBOX_DLSYM(lib, func, type, name) \
55 func = reinterpret_cast<type>(dlsym(lib, name)); \
56 if (func == NULL) { \
57 DPLOG(ERROR) << "PostSandboxInitialization(): failed to dlsym() " \
58 << name << ": " << dlerror(); \
59 return false; \
60 }
61
62 struct ExynosVideoDecodeAccelerator::BitstreamBufferRef {
63 BitstreamBufferRef(
64 base::WeakPtr<Client>& client,
65 scoped_refptr<base::MessageLoopProxy>& client_message_loop_proxy,
66 base::SharedMemory* shm,
67 size_t size,
68 int32 input_id);
69 ~BitstreamBufferRef();
70 const base::WeakPtr<Client> client;
71 const scoped_refptr<base::MessageLoopProxy> client_message_loop_proxy;
72 const scoped_ptr<base::SharedMemory> shm;
73 const size_t size;
74 off_t bytes_used;
75 const int32 input_id;
76 };
77
78 struct ExynosVideoDecodeAccelerator::PictureBufferArrayRef {
79 PictureBufferArrayRef(EGLDisplay egl_display, size_t count);
80 ~PictureBufferArrayRef();
81
82 struct PictureBufferRef {
83 EGLImageKHR egl_image;
84 int egl_image_fd;
85 int32 client_id;
86 };
87
88 EGLDisplay const egl_display;
89 std::vector<PictureBufferRef> picture_buffers;
90 };
91
92 struct ExynosVideoDecodeAccelerator::EGLSyncKHRRef {
93 EGLSyncKHRRef(EGLDisplay egl_display, EGLSyncKHR egl_sync);
94 ~EGLSyncKHRRef();
95 EGLDisplay const egl_display;
96 EGLSyncKHR egl_sync;
97 };
98
99 // TODO(sheu): fix OpenGL ES header includes, remove unnecessary redefinitions.
100 // http://crbug.com/169433
101 typedef void* GLeglImageOES;
102 typedef EGLBoolean (*MaliEglImageGetBufferExtPhandleFunc)(EGLImageKHR, EGLint*,
103 void*);
104 typedef EGLImageKHR (*EglCreateImageKhrFunc)(EGLDisplay, EGLContext, EGLenum,
105 EGLClientBuffer, const EGLint*);
106 typedef EGLBoolean (*EglDestroyImageKhrFunc)(EGLDisplay, EGLImageKHR);
107 typedef EGLSyncKHR (*EglCreateSyncKhrFunc)(EGLDisplay, EGLenum, const EGLint*);
108 typedef EGLBoolean (*EglDestroySyncKhrFunc)(EGLDisplay, EGLSyncKHR);
109 typedef EGLint (*EglClientWaitSyncKhrFunc)(EGLDisplay, EGLSyncKHR, EGLint,
110 EGLTimeKHR);
111 typedef void (*GlEglImageTargetTexture2dOesFunc)(GLenum, GLeglImageOES);
112
113 static void* libmali_handle = NULL;
114 static MaliEglImageGetBufferExtPhandleFunc
115 mali_egl_image_get_buffer_ext_phandle = NULL;
116 static EglCreateImageKhrFunc egl_create_image_khr = NULL;
117 static EglDestroyImageKhrFunc egl_destroy_image_khr = NULL;
118 static EglCreateSyncKhrFunc egl_create_sync_khr = NULL;
119 static EglDestroySyncKhrFunc egl_destroy_sync_khr = NULL;
120 static EglClientWaitSyncKhrFunc egl_client_wait_sync_khr = NULL;
121 static GlEglImageTargetTexture2dOesFunc
122 gl_egl_image_target_texture_2d_oes = NULL;
piman 2013/01/12 03:24:58 nit: you can put all of these into an anonymous na
sheu 2013/01/14 23:49:49 Done.
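
A sketch of the anonymous-namespace form piman suggests above for these file-local pointers; the declarations stay the same, they just gain internal linkage without the static keyword:

  namespace {

  void* libmali_handle = NULL;
  MaliEglImageGetBufferExtPhandleFunc mali_egl_image_get_buffer_ext_phandle = NULL;
  EglCreateImageKhrFunc egl_create_image_khr = NULL;
  // ... and the remaining dlsym()'d entry points listed above ...

  }  // namespace
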
123
124 ExynosVideoDecodeAccelerator::BitstreamBufferRef::BitstreamBufferRef(
125 base::WeakPtr<Client>& client,
126 scoped_refptr<base::MessageLoopProxy>& client_message_loop_proxy,
127 base::SharedMemory* shm, size_t size, int32 input_id)
128 : client(client),
129 client_message_loop_proxy(client_message_loop_proxy),
130 shm(shm),
131 size(size),
132 bytes_used(0),
133 input_id(input_id) {
134 }
135
136 ExynosVideoDecodeAccelerator::BitstreamBufferRef::~BitstreamBufferRef() {
137 if (input_id >= 0)
piman 2013/01/12 03:24:58 nit: need {} if the statement spans more than 1 li
sheu 2013/01/14 23:49:49 Done.
138 client_message_loop_proxy->PostTask(FROM_HERE, base::Bind(
139 &Client::NotifyEndOfBitstreamBuffer, client, input_id));
140 }
141
142 ExynosVideoDecodeAccelerator::PictureBufferArrayRef::PictureBufferArrayRef(
143 EGLDisplay egl_display, size_t count)
144 : egl_display(egl_display),
145 picture_buffers(count) {
146 for (size_t i = 0; i < picture_buffers.size(); ++i) {
147 PictureBufferRef& buffer = picture_buffers[i];
148 buffer.egl_image = EGL_NO_IMAGE_KHR;
149 buffer.egl_image_fd = -1;
150 buffer.client_id = -1;
151 }
152 }
153
154 ExynosVideoDecodeAccelerator::PictureBufferArrayRef::~PictureBufferArrayRef() {
155 for (size_t i = 0; i < picture_buffers.size(); ++i) {
156 PictureBufferRef& buffer = picture_buffers[i];
157 if (buffer.egl_image != EGL_NO_IMAGE_KHR)
158 egl_destroy_image_khr(egl_display, buffer.egl_image);
159 if (buffer.egl_image_fd != -1)
160 close(buffer.egl_image_fd);
piman 2013/01/12 03:24:58 Use HANDLE_EINTR from base/posix/eintr_wrapper.h o
sheu 2013/01/14 23:49:49 Done.
161 }
162 }
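
For the close() here and the other close()/open() calls piman flags below, a sketch of the suggested form, assuming the HANDLE_EINTR macro from base/posix/eintr_wrapper.h (it retries the wrapped call while it fails with EINTR):

  if (buffer.egl_image_fd != -1)
    HANDLE_EINTR(close(buffer.egl_image_fd));
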
163
164 ExynosVideoDecodeAccelerator::EGLSyncKHRRef::EGLSyncKHRRef(
165 EGLDisplay egl_display, EGLSyncKHR egl_sync)
166 : egl_display(egl_display),
167 egl_sync(egl_sync) {
168 }
169
170 ExynosVideoDecodeAccelerator::EGLSyncKHRRef::~EGLSyncKHRRef() {
171 if (egl_sync != EGL_NO_SYNC_KHR)
172 egl_destroy_sync_khr(egl_display, egl_sync);
173 }
174
175 ExynosVideoDecodeAccelerator::MfcInputRecord::MfcInputRecord()
176 : at_device(false),
177 address(NULL),
178 length(0),
179 bytes_used(0),
180 input_id(-1) {
181 }
182
183 ExynosVideoDecodeAccelerator::MfcInputRecord::~MfcInputRecord() {
184 }
185
186 ExynosVideoDecodeAccelerator::MfcOutputRecord::MfcOutputRecord()
187 : at_device(false),
188 input_id(-1) {
189 bytes_used[0] = 0;
190 bytes_used[1] = 0;
191 address[0] = NULL;
192 address[1] = NULL;
193 length[0] = 0;
194 length[1] = 0;
195 }
196
197 ExynosVideoDecodeAccelerator::MfcOutputRecord::~MfcOutputRecord() {
198 }
199
200 ExynosVideoDecodeAccelerator::GscInputRecord::GscInputRecord()
201 : at_device(false),
202 mfc_output(-1) {
203 }
204
205 ExynosVideoDecodeAccelerator::GscInputRecord::~GscInputRecord() {
206 }
207
208 ExynosVideoDecodeAccelerator::GscOutputRecord::GscOutputRecord()
209 : at_device(false),
210 at_client(false),
211 fd(-1),
212 egl_image(EGL_NO_IMAGE_KHR),
213 egl_sync(EGL_NO_SYNC_KHR),
214 picture_id(-1) {
215 }
216
217 ExynosVideoDecodeAccelerator::GscOutputRecord::~GscOutputRecord() {
218 }
219
220 ExynosVideoDecodeAccelerator::ExynosVideoDecodeAccelerator(
221 EGLDisplay egl_display,
222 EGLContext egl_context,
223 Client* client,
224 const base::Callback<bool(void)>& make_context_current)
225 : child_message_loop_proxy_(base::MessageLoopProxy::current()),
226 weak_this_(base::AsWeakPtr(this)),
227 client_ptr_factory_(client),
228 client_(client_ptr_factory_.GetWeakPtr()),
229 decoder_thread_("ExynosDecoderThread"),
230 decoder_state_(kUninitialized),
231 decoder_current_bitstream_buffer_(NULL),
232 decoder_delay_bitstream_buffer_id_(-1),
233 decoder_current_input_buffer_(-1),
234 decoder_decode_buffer_tasks_scheduled_(0),
235 decoder_frames_at_client_(0),
236 decoder_flushing_(false),
237 mfc_fd_(-1),
238 mfc_input_streamon_(false),
239 mfc_input_buffer_count_(0),
240 mfc_input_buffer_queued_count_(0),
241 mfc_output_streamon_(false),
242 mfc_output_buffer_count_(0),
243 mfc_output_buffer_queued_count_(0),
244 mfc_output_buffer_pixelformat_(0),
245 gsc_fd_(-1),
246 gsc_input_streamon_(false),
247 gsc_input_buffer_count_(0),
248 gsc_input_buffer_queued_count_(0),
249 gsc_output_streamon_(false),
250 gsc_output_buffer_count_(0),
251 gsc_output_buffer_queued_count_(0),
252 device_poll_thread_("ExynosDevicePollThread"),
253 device_poll_interrupt_fd_(-1),
254 make_context_current_(make_context_current),
255 egl_display_(egl_display),
256 egl_context_(egl_context),
257 video_profile_(media::VIDEO_CODEC_PROFILE_UNKNOWN) {
258 }
259
260 ExynosVideoDecodeAccelerator::~ExynosVideoDecodeAccelerator() {
261 DCHECK(!decoder_thread_.IsRunning());
262 DCHECK(!device_poll_thread_.IsRunning());
263 // Nuke the entire site from orbit -- it's the only way to be sure.
piman 2013/01/12 03:24:58 nit: This comment is pretty alien to me, I'm not s
sheu 2013/01/14 23:49:49 Not sure if being ironic or just hilarious coincid
piman 2013/01/15 01:36:51 It has to be a coincidence, there's no other conce
sheu 2013/01/15 05:05:43 Done.
264 if (device_poll_interrupt_fd_ != -1) {
265 close(device_poll_interrupt_fd_);
piman 2013/01/12 03:24:58 HANDLE_EINTR
sheu 2013/01/14 23:49:49 Done.
266 device_poll_interrupt_fd_ = -1;
267 }
268 if (gsc_fd_ != -1) {
269 DestroyGscInputBuffers();
270 DestroyGscOutputBuffers();
271 close(gsc_fd_);
piman 2013/01/12 03:24:58 HANDLE_EINTR
sheu 2013/01/14 23:49:49 Done.
272 gsc_fd_ = -1;
273 }
274 if (mfc_fd_ != -1) {
275 DestroyMfcInputBuffers();
276 DestroyMfcOutputBuffers();
277 close(mfc_fd_);
piman 2013/01/12 03:24:58 HANDLE_EINTR
sheu 2013/01/14 23:49:49 Done.
278 mfc_fd_ = -1;
279 }
280
281 // These maps have members that should be manually destroyed, e.g. file
282 // descriptors, mmap() segments, etc.
283 DCHECK(mfc_input_buffer_map_.empty());
284 DCHECK(mfc_output_buffer_map_.empty());
285 DCHECK(gsc_input_buffer_map_.empty());
286 DCHECK(gsc_output_buffer_map_.empty());
287 }
288
289 bool ExynosVideoDecodeAccelerator::Initialize(
290 media::VideoCodecProfile profile) {
291 DVLOG(3) << "Initialize()";
292 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
293 DCHECK_EQ(decoder_state_, kUninitialized);
294
295 switch (profile) {
296 case media::H264PROFILE_BASELINE:
297 DVLOG(2) << "Initialize(): profile H264PROFILE_BASELINE";
298 break;
299 case media::H264PROFILE_MAIN:
300 DVLOG(2) << "Initialize(): profile H264PROFILE_MAIN";
301 break;
302 case media::H264PROFILE_HIGH:
303 DVLOG(2) << "Initialize(): profile H264PROFILE_HIGH";
304 break;
305 case media::VP8PROFILE_MAIN:
306 DVLOG(2) << "Initialize(): profile VP8PROFILE_MAIN";
307 break;
308 default:
309 DLOG(ERROR) << "Initialize(): unsupported profile=" << profile;
310 return false;
311 };
312 video_profile_ = profile;
313
314 static bool sandbox_initialized = PostSandboxInitialization();
315 if (!sandbox_initialized) {
316 DLOG(ERROR) << "Initialize(): PostSandboxInitialization() failed";
317 NOTIFY_ERROR(PLATFORM_FAILURE);
318 return false;
319 }
320
321 if (egl_display_ == EGL_NO_DISPLAY) {
322 DLOG(ERROR) << "Initialize(): could not get EGLDisplay";
323 NOTIFY_ERROR(PLATFORM_FAILURE);
324 return false;
325 }
326
327 if (egl_context_ == EGL_NO_CONTEXT) {
328 DLOG(ERROR) << "Initialize(): could not get EGLContext";
329 NOTIFY_ERROR(PLATFORM_FAILURE);
330 return false;
331 }
332
333 // Open the video devices.
334 DVLOG(2) << "Initialize(): opening MFC device: " << EXYNOS_MFC_DEVICE;
335 mfc_fd_ = open(EXYNOS_MFC_DEVICE, O_RDWR | O_NONBLOCK | O_CLOEXEC);
piman 2013/01/12 03:24:58 HANDLE_EINTR
sheu 2013/01/14 23:49:49 Done.
336 if (mfc_fd_ == -1) {
337 DPLOG(ERROR) << "Initialize(): could not open MFC device: "
338 << EXYNOS_MFC_DEVICE;
339 NOTIFY_ERROR(PLATFORM_FAILURE);
340 return false;
341 }
342 DVLOG(2) << "Initialize(): opening GSC device: " << EXYNOS_GSC_DEVICE;
343 gsc_fd_ = open(EXYNOS_GSC_DEVICE, O_RDWR | O_NONBLOCK | O_CLOEXEC);
piman 2013/01/12 03:24:58 HANDLE_EINTR
sheu 2013/01/14 23:49:49 Done.
344 if (gsc_fd_ == -1) {
345 DPLOG(ERROR) << "Initialize(): could not open GSC device: "
346 << EXYNOS_GSC_DEVICE;
347 NOTIFY_ERROR(PLATFORM_FAILURE);
348 return false;
349 }
350
351 // Create the interrupt fd.
352 DCHECK_EQ(device_poll_interrupt_fd_, -1);
353 device_poll_interrupt_fd_ = eventfd(0, EFD_NONBLOCK | EFD_CLOEXEC);
piman 2013/01/12 03:24:58 Is HANDLE_EINTR needed?
sheu 2013/01/14 23:49:49 eventfd does not return EINTR.
354 if (device_poll_interrupt_fd_ == -1) {
355 DPLOG(ERROR) << "Initialize(): eventfd() failed";
356 NOTIFY_ERROR(PLATFORM_FAILURE);
357 return false;
358 }
359
360 // Capabilities check.
361 struct v4l2_capability caps;
362 const __u32 kCapsRequired =
363 V4L2_CAP_VIDEO_CAPTURE_MPLANE |
364 V4L2_CAP_VIDEO_OUTPUT_MPLANE |
365 V4L2_CAP_STREAMING;
366 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_QUERYCAP, &caps);
piman 2013/01/12 03:24:58 Should HANDLE_EINTR be used on this syscall, and/o
sheu 2013/01/14 23:49:49 The V4L2 API docs don't mention their ioctls being
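
If EINTR protection were wanted here despite sheu's point that the V4L2 documentation doesn't list it for these ioctls, one place it could live is inside the existing macros rather than at each call site; a sketch, assuming the same HANDLE_EINTR macro mentioned above:

  #define IOCTL_OR_ERROR_RETURN_FALSE(fd, type, arg)                 \
    do {                                                             \
      if (HANDLE_EINTR(ioctl(fd, type, arg)) != 0) {                 \
        DPLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \
        NOTIFY_ERROR(PLATFORM_FAILURE);                              \
        return false;                                                \
      }                                                              \
    } while (0)
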
367 if ((caps.capabilities & kCapsRequired) != kCapsRequired) {
368 DLOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP"
369 ", caps check failed: 0x" << std::hex << caps.capabilities;
370 NOTIFY_ERROR(PLATFORM_FAILURE);
371 return false;
372 }
373 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_QUERYCAP, &caps);
374 if ((caps.capabilities & kCapsRequired) != kCapsRequired) {
375 DLOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP"
376 ", caps check failed: 0x" << std::hex << caps.capabilities;
377 NOTIFY_ERROR(PLATFORM_FAILURE);
378 return false;
379 }
380
381 // Some random ioctls that Exynos requires.
382 struct v4l2_control control;
383 memset(&control, 0, sizeof(control));
384 control.id = V4L2_CID_MPEG_MFC51_VIDEO_DECODER_H264_DISPLAY_DELAY; // also VP8
385 control.value = 8; // Magic number from Samsung folks.
386 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_S_CTRL, &control);
387
388 if (!make_context_current_.Run()) {
389 DLOG(ERROR) << "Initialize(): could not make context current";
390 NOTIFY_ERROR(PLATFORM_FAILURE);
391 return false;
392 }
393
394 if (!CreateMfcInputBuffers())
395 return false;
396
397 // MFC output format has to be setup before streaming starts.
398 struct v4l2_format format;
399 memset(&format, 0, sizeof(format));
400 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
401 format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_NV12MT_16X16;
402 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_S_FMT, &format);
403
404 // Initialize format-specific bits.
405 if (video_profile_ >= media::H264PROFILE_MIN &&
406 video_profile_ <= media::H264PROFILE_MAX) {
407 decoder_h264_parser_.reset(new content::H264Parser());
408 }
409
410 if (!decoder_thread_.Start()) {
411 DLOG(ERROR) << "Initialize(): decoder thread failed to start";
412 NOTIFY_ERROR(PLATFORM_FAILURE);
413 return false;
414 }
415
416 SetDecoderState(kInitialized);
417
418 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
419 &Client::NotifyInitializeDone, client_));
420 return true;
421 }
422
423 void ExynosVideoDecodeAccelerator::Decode(
424 const media::BitstreamBuffer& bitstream_buffer) {
425 DVLOG(1) << "Decode(): input_id=" << bitstream_buffer.id()
426 << ", size=" << bitstream_buffer.size();
427 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
428
429 scoped_ptr<BitstreamBufferRef> bitstream_record(new BitstreamBufferRef(
430 client_, child_message_loop_proxy_,
431 new base::SharedMemory(bitstream_buffer.handle(), true),
432 bitstream_buffer.size(), bitstream_buffer.id()));
433 if (!bitstream_record->shm->Map(bitstream_buffer.size())) {
piman 2013/01/12 03:24:58 Would it make sense to do the Map on the decoder t
sheu 2013/01/14 23:49:49 Since the decoder thread also services the device,
piman 2013/01/15 01:36:51 Ok, it's a minor thing in any case, but I was thin
434 DLOG(ERROR) << "Decode(): could not map bitstream_buffer";
435 NOTIFY_ERROR(UNREADABLE_INPUT);
436 return;
437 }
438 DVLOG(3) << "Decode(): mapped to addr=" << bitstream_record->shm->memory();
439
440 // DecodeTask() will take care of running a DecodeBufferTask().
441 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
442 &ExynosVideoDecodeAccelerator::DecodeTask, base::Unretained(this),
443 base::Passed(&bitstream_record)));
444 }
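
On the thread above about where to Map(): a hypothetical variant in which Decode() only wraps the shared memory and DecodeTask() performs the mapping on the decoder thread; not what this patch does, just an illustration of the alternative discussed:

  // In DecodeTask(), before pushing onto decoder_input_queue_ (hypothetical):
  if (!bitstream_record->shm->Map(bitstream_record->size)) {
    DLOG(ERROR) << "DecodeTask(): could not map bitstream_buffer";
    NOTIFY_ERROR(UNREADABLE_INPUT);
    return;
  }
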
445
446 void ExynosVideoDecodeAccelerator::AssignPictureBuffers(
447 const std::vector<media::PictureBuffer>& buffers) {
448 DVLOG(3) << "AssignPictureBuffers(): buffer_count=" << buffers.size();
449 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
450
451 if (static_cast<int>(buffers.size()) != gsc_output_buffer_count_) {
452 DLOG(ERROR) << "AssignPictureBuffers(): invalid buffer_count";
453 NOTIFY_ERROR(INVALID_ARGUMENT);
454 return;
455 }
456
457 if (!make_context_current_.Run()) {
458 DLOG(ERROR) << "AssignPictureBuffers(): could not make context current";
459 NOTIFY_ERROR(PLATFORM_FAILURE);
460 return;
461 }
462
463 scoped_ptr<PictureBufferArrayRef> pic_buffers_ref(
464 new PictureBufferArrayRef(egl_display_, buffers.size()));
465
466 const static EGLint kImageAttrs[] = {
467 EGL_IMAGE_PRESERVED_KHR, 0,
468 EGL_NONE,
469 };
470 Display* x_display = base::MessagePumpForUI::GetDefaultXDisplay();
471 glActiveTexture(GL_TEXTURE0);
piman 2013/01/12 03:24:58 If you change the state of the current context (ac
472 for (size_t i = 0; i < pic_buffers_ref->picture_buffers.size(); ++i) {
473 PictureBufferArrayRef::PictureBufferRef& buffer =
474 pic_buffers_ref->picture_buffers[i];
475 // Create the X pixmap and then create an EGLImageKHR from it, so we can
476 // get dma_buf backing.
477 Pixmap pixmap = XCreatePixmap(x_display, RootWindow(x_display, 0),
478 buffers[i].size().width(), buffers[i].size().height(), 32);
479 if (!pixmap) {
480 DLOG(ERROR) << "AssignPictureBuffers(): could not create X pixmap";
481 NOTIFY_ERROR(PLATFORM_FAILURE);
482 return;
483 }
484 glBindTexture(GL_TEXTURE_2D, buffers[i].texture_id());
485 EGLImageKHR egl_image;
486 egl_image = egl_create_image_khr(
piman 2013/01/12 03:24:58 nit: declaration + initialization on same statemen
sheu 2013/01/14 23:49:49 Done.
487 egl_display_, EGL_NO_CONTEXT, EGL_NATIVE_PIXMAP_KHR,
488 (EGLClientBuffer)pixmap, kImageAttrs);
489 // We can free the X pixmap immediately -- according to the
490 // EGL_KHR_image_base spec, the backing storage does not go away until the
491 // last referencing EGLImage is destroyed.
492 XFreePixmap(x_display, pixmap);
493 if (egl_image == EGL_NO_IMAGE_KHR) {
494 DLOG(ERROR) << "AssignPictureBuffers(): could not create EGLImageKHR";
495 NOTIFY_ERROR(PLATFORM_FAILURE);
496 return;
497 }
498 buffer.egl_image = egl_image;
499 int fd;
500 if (!mali_egl_image_get_buffer_ext_phandle(buffer.egl_image, NULL, &fd)) {
501 DLOG(ERROR) << "AssignPictureBuffers(): "
502 << "could not get EGLImageKHR dmabuf fd";
503 NOTIFY_ERROR(PLATFORM_FAILURE);
504 return;
505 }
506 buffer.egl_image_fd = fd;
507 gl_egl_image_target_texture_2d_oes(GL_TEXTURE_2D, egl_image);
508 buffer.client_id = buffers[i].id();
509 }
510 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
511 &ExynosVideoDecodeAccelerator::AssignPictureBuffersTask,
512 base::Unretained(this), base::Passed(&pic_buffers_ref)));
513 }
514
515 void ExynosVideoDecodeAccelerator::ReusePictureBuffer(int32 picture_buffer_id) {
516 DVLOG(3) << "ReusePictureBuffer(): picture_buffer_id=" << picture_buffer_id;
517 // Must be run on child thread, as we'll insert a sync in the EGL context.
518 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
519
520 if (!make_context_current_.Run()) {
521 DLOG(ERROR) << "ReusePictureBuffer(): could not make context current";
522 NOTIFY_ERROR(PLATFORM_FAILURE);
523 return;
524 }
525
526 EGLSyncKHR egl_sync;
527 egl_sync = egl_create_sync_khr(egl_display_, EGL_SYNC_FENCE_KHR, NULL);
piman 2013/01/12 03:24:58 nit: declaration + initialization on same statemen
sheu 2013/01/14 23:49:49 Done.
528 if (egl_sync == EGL_NO_SYNC_KHR) {
529 DLOG(ERROR) << "ReusePictureBuffer(): eglCreateSyncKHR() failed";
530 NOTIFY_ERROR(PLATFORM_FAILURE);
531 return;
532 }
533
534 scoped_ptr<EGLSyncKHRRef> egl_sync_ref(new EGLSyncKHRRef(
535 egl_display_, egl_sync));
536 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
537 &ExynosVideoDecodeAccelerator::ReusePictureBufferTask,
538 base::Unretained(this), picture_buffer_id, base::Passed(&egl_sync_ref)));
539 }
540
541 void ExynosVideoDecodeAccelerator::Flush() {
542 DVLOG(3) << "Flush()";
543 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
544 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
545 &ExynosVideoDecodeAccelerator::FlushTask, base::Unretained(this)));
546 }
547
548 void ExynosVideoDecodeAccelerator::Reset() {
549 DVLOG(3) << "Reset()";
550 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
551 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
552 &ExynosVideoDecodeAccelerator::ResetTask, base::Unretained(this)));
553 }
554
555 void ExynosVideoDecodeAccelerator::Destroy() {
556 DVLOG(3) << "Destroy()";
557 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
558
559 // We're destroying; cancel all callbacks.
560 client_ptr_factory_.InvalidateWeakPtrs();
561
562 // If the decoder thread is running, destroy using posted task.
563 if (decoder_thread_.IsRunning()) {
564 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
565 &ExynosVideoDecodeAccelerator::DestroyTask, base::Unretained(this)));
566 // DestroyTask() will cause the decoder_thread_ to flush all tasks.
567 decoder_thread_.Stop();
568 } else {
569 // Otherwise, call the destroy task directly.
570 DestroyTask();
571 }
572
573 // Set to kError state just in case.
574 SetDecoderState(kError);
575
576 delete this;
577 }
578
579 // static
580 void ExynosVideoDecodeAccelerator::PreSandboxInitialization() {
581 DVLOG(3) << "PreSandboxInitialization()";
582 dlerror();
583
584 libmali_handle = dlopen(EXYNOS_MALI_DRIVER, RTLD_LAZY | RTLD_LOCAL);
585 if (libmali_handle == NULL) {
586 DPLOG(ERROR) << "failed to dlopen() " << EXYNOS_MALI_DRIVER
587 << ": " << dlerror();
588 }
589 }
590
591 // static
592 bool ExynosVideoDecodeAccelerator::PostSandboxInitialization() {
593 DVLOG(3) << "PostSandboxInitialization()";
594 if (libmali_handle == NULL) {
595 DLOG(ERROR) << "PostSandboxInitialization(): no " << EXYNOS_MALI_DRIVER
596 << " driver handle";
597 return false;
598 }
599
600 dlerror();
601
602 POSTSANDBOX_DLSYM(libmali_handle,
603 mali_egl_image_get_buffer_ext_phandle,
604 MaliEglImageGetBufferExtPhandleFunc,
605 "mali_egl_image_get_buffer_ext_phandle");
606
607 POSTSANDBOX_DLSYM(libmali_handle,
608 egl_create_image_khr,
609 EglCreateImageKhrFunc,
610 "eglCreateImageKHR");
611
612 POSTSANDBOX_DLSYM(libmali_handle,
613 egl_destroy_image_khr,
614 EglDestroyImageKhrFunc,
615 "eglDestroyImageKHR");
616
617 POSTSANDBOX_DLSYM(libmali_handle,
618 egl_create_sync_khr,
619 EglCreateSyncKhrFunc,
620 "eglCreateSyncKHR");
621
622 POSTSANDBOX_DLSYM(libmali_handle,
623 egl_destroy_sync_khr,
624 EglDestroySyncKhrFunc,
625 "eglDestroySyncKHR");
626
627 POSTSANDBOX_DLSYM(libmali_handle,
628 egl_client_wait_sync_khr,
629 EglClientWaitSyncKhrFunc,
630 "eglClientWaitSyncKHR");
631
632 POSTSANDBOX_DLSYM(libmali_handle,
633 gl_egl_image_target_texture_2d_oes,
634 GlEglImageTargetTexture2dOesFunc,
635 "glEGLImageTargetTexture2DOES");
636
637 return true;
638 }
639
640 void ExynosVideoDecodeAccelerator::DecodeTask(
641 scoped_ptr<BitstreamBufferRef> bitstream_record) {
642 DVLOG(3) << "DecodeTask(): input_id=" << bitstream_record->input_id;
643 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
644 DCHECK_NE(decoder_state_, kUninitialized);
645 TRACE_EVENT1("Video Decoder", "EVDA::DecodeTask", "input_id",
646 bitstream_record->input_id);
647
648 if (decoder_state_ == kResetting || decoder_flushing_) {
649 // In the case that we're resetting or flushing, we need to delay decoding
650 // the BitstreamBuffers that come after the Reset() or Flush() call. When
651 // we're here, we know that this DecodeTask() was scheduled by a Decode()
652 // call that came after (in the client thread) the Reset() or Flush() call;
653 // thus set up the delay if necessary.
654 if (decoder_delay_bitstream_buffer_id_ == -1)
655 decoder_delay_bitstream_buffer_id_ = bitstream_record->input_id;
656 } else if (decoder_state_ == kError) {
657 DVLOG(2) << "DecodeTask(): early out: kError state";
658 return;
659 }
660
661 decoder_input_queue_.push_back(
662 linked_ptr<BitstreamBufferRef>(bitstream_record.release()));
663 decoder_decode_buffer_tasks_scheduled_++;
664 DecodeBufferTask();
665 }
666
667 void ExynosVideoDecodeAccelerator::DecodeBufferTask() {
668 DVLOG(3) << "DecodeBufferTask()";
669 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
670 DCHECK_NE(decoder_state_, kUninitialized);
671 TRACE_EVENT0("Video Decoder", "EVDA::DecodeBufferTask");
672
673 decoder_decode_buffer_tasks_scheduled_--;
674
675 if (decoder_state_ == kResetting) {
676 DVLOG(2) << "DecodeBufferTask(): early out: kResetting state";
677 return;
678 } else if (decoder_state_ == kError) {
679 DVLOG(2) << "DecodeBufferTask(): early out: kError state";
680 return;
681 }
682
683 if (decoder_current_bitstream_buffer_ == NULL) {
684 if (decoder_input_queue_.empty()) {
685 // We're waiting for a new buffer -- exit without scheduling a new task.
686 return;
687 }
688 linked_ptr<BitstreamBufferRef>& buffer_ref = decoder_input_queue_.front();
689 if (decoder_delay_bitstream_buffer_id_ == buffer_ref->input_id) {
690 // We're asked to delay decoding on this and subsequent buffers.
691 return;
692 }
693
694 // Setup to use the next buffer.
695 decoder_current_bitstream_buffer_.reset(buffer_ref.release());
696 decoder_input_queue_.pop_front();
697 DVLOG(3) << "DecodeBufferTask(): reading input_id="
698 << decoder_current_bitstream_buffer_->input_id
699 << ", addr=" << decoder_current_bitstream_buffer_->shm->memory()
700 << ", size=" << decoder_current_bitstream_buffer_->size;
701 }
702 bool schedule_task = false;
703 const size_t size = decoder_current_bitstream_buffer_->size;
704 size_t decoded_size;
piman 2013/01/12 03:24:58 please initialize here - it's hard to track that a
sheu 2013/01/14 23:49:49 Done.
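
The fix piman asks for above amounts to initializing at the point of declaration:

  size_t decoded_size = 0;
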
705 if (size == 0) {
706 const int32 input_id = decoder_current_bitstream_buffer_->input_id;
707 decoded_size = 0;
708 if (input_id >= 0) {
709 // This is a buffer queued from the client that has zero size. Skip.
710 schedule_task = true;
711 } else {
712 schedule_task = true;
piman 2013/01/12 03:24:58 nit: this looks redundant with all the paths below
sheu 2013/01/14 23:49:49 Not quite redundant. I've moved it down a few lin
713 // This is a buffer of zero size, queued to flush the pipe. Flush.
714 DCHECK_EQ(decoder_current_bitstream_buffer_->shm.get(),
715 static_cast<base::SharedMemory*>(NULL));
716 // Enqueue a buffer guaranteed to be empty. To do that, we flush the
717 // current input, enqueue no data to the next frame, then flush that down.
718 if (decoder_current_input_buffer_ != -1 &&
719 mfc_input_buffer_map_[decoder_current_input_buffer_].input_id !=
720 kFlushBufferId)
721 schedule_task = FlushInputFrame();
722
723 if (schedule_task && AppendToInputFrame(NULL, 0) && FlushInputFrame()) {
724 DVLOG(2) << "DecodeBufferTask(): enqueued flush buffer";
725 schedule_task = true;
726 } else {
727 // If we failed to enqueue the empty buffer (due to pipeline
728 // backpressure), don't advance the bitstream buffer queue, and don't
729 // schedule the next task. This bitstream buffer queue entry will get
730 // reprocessed when the pipeline frees up.
731 schedule_task = false;
732 }
733 }
734 } else {
735 // This is a buffer queued from the client, with actual contents. Decode.
736 const void* const data =
piman 2013/01/12 03:24:58 make this a const uint8* const data, it saves a ca
sheu 2013/01/14 23:49:49 Yeah. Leftover from fischman@'s earlier review.
737 reinterpret_cast<const uint8*>(
738 decoder_current_bitstream_buffer_->shm->memory()) +
739 decoder_current_bitstream_buffer_->bytes_used;
740 const size_t data_size =
741 decoder_current_bitstream_buffer_->size -
742 decoder_current_bitstream_buffer_->bytes_used;
743 if (!FindFrameFragment(reinterpret_cast<const uint8*>(data), data_size,
744 &decoded_size)) {
745 NOTIFY_ERROR(UNREADABLE_INPUT);
746 return;
747 }
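
With piman's suggestion above (declare data as a const uint8* rather than a const void*), the cast at the FindFrameFragment() call goes away; a sketch:

  const uint8* const data = reinterpret_cast<const uint8*>(
      decoder_current_bitstream_buffer_->shm->memory()) +
      decoder_current_bitstream_buffer_->bytes_used;
  // ...
  if (!FindFrameFragment(data, data_size, &decoded_size)) {  // no cast needed
    NOTIFY_ERROR(UNREADABLE_INPUT);
    return;
  }
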
748
749 switch (decoder_state_) {
750 case kInitialized:
751 case kAfterReset:
752 schedule_task = DecodeBufferInitial(data, decoded_size, &decoded_size);
753 break;
754 case kDecoding:
755 schedule_task = DecodeBufferContinue(data, decoded_size);
756 break;
757 default:
758 NOTIFY_ERROR(ILLEGAL_STATE);
759 return;
760 }
761 }
762 if (decoder_state_ == kError) {
763 // Failed during decode.
764 return;
765 }
766
767 if (schedule_task) {
768 decoder_current_bitstream_buffer_->bytes_used += decoded_size;
769 if (decoder_current_bitstream_buffer_->bytes_used ==
770 decoder_current_bitstream_buffer_->size) {
771 // Our current bitstream buffer is done; return it.
772 int32 input_id = decoder_current_bitstream_buffer_->input_id;
773 DVLOG(3) << "DecodeBufferTask(): finished input_id=" << input_id;
774 // BitstreamBufferRef destructor calls NotifyEndOfBitstreamBuffer().
775 decoder_current_bitstream_buffer_.reset();
776 }
777 ScheduleDecodeBufferTaskIfNeeded();
778 }
779 }
780
781 bool ExynosVideoDecodeAccelerator::FindFrameFragment(
piman 2013/01/12 03:24:58 Could we have a unit test for this function?
sheu 2013/01/14 23:49:49 vda_unittests already tests multi-fragment decode.
piman 2013/01/15 01:36:51 The intent is that here, you have a bit parsing fu
sheu 2013/01/15 05:05:43 video_decode_accelerator_unittest covers the multi
782 const uint8* data,
783 size_t size,
784 size_t* endpos) {
785 if (video_profile_ >= media::H264PROFILE_MIN &&
786 video_profile_ <= media::H264PROFILE_MAX) {
787 // For H264, we need to feed HW one frame at a time. This is going to take
788 // some parsing of our input stream.
789 decoder_h264_parser_->SetStream(data, size);
790 content::H264NALU nalu;
791 content::H264Parser::Result result;
792
793 // Find the first NAL.
794 result = decoder_h264_parser_->AdvanceToNextNALU(&nalu);
795 if (result == content::H264Parser::kInvalidStream ||
796 result == content::H264Parser::kUnsupportedStream)
797 return false;
798 *endpos = (nalu.data + nalu.size) - data;
799 if (result == content::H264Parser::kEOStream)
800 return true;
801
802 // Keep on peeking the next NALs while they don't indicate a frame
803 // boundary.
804 for (;;) {
805 result = decoder_h264_parser_->AdvanceToNextNALU(&nalu);
806 if (result == content::H264Parser::kInvalidStream ||
807 result == content::H264Parser::kUnsupportedStream)
808 return false;
809 if (result == content::H264Parser::kEOStream)
810 return true;
811 switch (nalu.nal_unit_type) {
812 case content::H264NALU::kNonIDRSlice:
813 case content::H264NALU::kIDRSlice:
814 // For these two, if the "first_mb_in_slice" field is zero, start a
815 // new frame and return. This field is Exp-Golomb coded starting on
816 // the eighth data bit of the NAL; a zero value is encoded with a
817 // leading '1' bit in the byte, which we can detect as the byte being
818 // (unsigned) greater than or equal to 0x80.
819 if (nalu.data[1] >= 0x80)
820 return true;
821 break;
822 case content::H264NALU::kSPS:
823 case content::H264NALU::kPPS:
824 case content::H264NALU::kEOSeq:
825 case content::H264NALU::kEOStream:
826 // These unconditionally signal a frame boundary.
827 return true;
828 default:
829 // For all others, keep going.
830 break;
831 }
832 *endpos = (nalu.data + nalu.size) - reinterpret_cast<const uint8*>(data);
833 }
834 NOTREACHED();
835 return false;
836 } else {
837 DCHECK_GE(video_profile_, media::VP8PROFILE_MIN);
838 DCHECK_LE(video_profile_, media::VP8PROFILE_MAX);
839 // For VP8, we can just dump the entire buffer. No fragmentation needed.
840 *endpos = size;
841 return true;
842 }
843 }
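
On piman's unit-test question above: as written the parsing is a private method of the accelerator, so a direct test would drag in EGL and V4L2 setup. If the H.264 boundary search were factored out into a free function (FindH264FrameBoundary below is a hypothetical name for such a helper), a gtest could feed it a hand-built Annex B stream; a sketch:

  // Hypothetical test, assuming the parsing logic were extracted as
  //   bool FindH264FrameBoundary(const uint8* data, size_t size, size_t* endpos);
  TEST(ExynosFrameFragmentTest, SplitsTwoIdrSlices) {
    // Two IDR slice NALUs (header byte 0x65); each first payload byte has the
    // high bit set, i.e. first_mb_in_slice == 0, so the second NALU starts a
    // new frame and the reported boundary must fall strictly inside the stream.
    const uint8 kStream[] = {
      0x00, 0x00, 0x00, 0x01, 0x65, 0x88, 0x80, 0x11,  // frame 1
      0x00, 0x00, 0x00, 0x01, 0x65, 0x88, 0x80, 0x11,  // frame 2
    };
    size_t endpos = 0;
    EXPECT_TRUE(FindH264FrameBoundary(kStream, sizeof(kStream), &endpos));
    EXPECT_GT(endpos, 0u);
    EXPECT_LT(endpos, sizeof(kStream));
  }
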
844
845 void ExynosVideoDecodeAccelerator::ScheduleDecodeBufferTaskIfNeeded() {
846 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
847
848 // If we're behind on tasks, schedule another one.
849 int buffers_to_decode = decoder_input_queue_.size();
850 if (decoder_current_bitstream_buffer_ != NULL)
851 buffers_to_decode++;
852 if (decoder_decode_buffer_tasks_scheduled_ < buffers_to_decode) {
853 decoder_decode_buffer_tasks_scheduled_++;
854 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
855 &ExynosVideoDecodeAccelerator::DecodeBufferTask,
856 base::Unretained(this)));
857 }
858 }
859
860 bool ExynosVideoDecodeAccelerator::DecodeBufferInitial(
861 const void* data, size_t size, size_t* endpos) {
862 DVLOG(3) << "DecodeBufferInitial(): data=" << data << ", size=" << size;
863 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
864 DCHECK_NE(decoder_state_, kUninitialized);
865 DCHECK_NE(decoder_state_, kDecoding);
866 DCHECK(!device_poll_thread_.IsRunning());
867 // Initial decode. We haven't been able to get output stream format info yet.
868 // Get it, and start decoding.
869
870 // Copy in and send to HW.
871 if (!AppendToInputFrame(data, size) || !FlushInputFrame())
872 return false;
873
874 // Recycle buffers.
875 DequeueMfc();
876
877 // Check and see if we have format info yet.
878 struct v4l2_format format;
879 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
880 if (ioctl(mfc_fd_, VIDIOC_G_FMT, &format) != 0) {
881 if (errno == EINVAL) {
882 // We will get EINVAL if we haven't seen sufficient stream to decode the
883 // format. Return true and schedule the next buffer.
884 *endpos = size;
885 return true;
886 } else {
887 DPLOG(ERROR) << "DecodeBufferInitial(): ioctl() failed: VIDIOC_G_FMT";
888 NOTIFY_ERROR(PLATFORM_FAILURE);
889 return false;
890 }
891 }
892
893 // Run this initialization only on first startup.
894 if (decoder_state_ == kInitialized) {
895 DVLOG(3) << "DecodeBufferInitial(): running one-time initialization";
896 // Success! Setup our parameters.
897 CHECK_EQ(format.fmt.pix_mp.num_planes, 2);
898 frame_buffer_size_.SetSize(
899 format.fmt.pix_mp.width, format.fmt.pix_mp.height);
900 mfc_output_buffer_size_[0] = format.fmt.pix_mp.plane_fmt[0].sizeimage;
901 mfc_output_buffer_size_[1] = format.fmt.pix_mp.plane_fmt[1].sizeimage;
902 mfc_output_buffer_pixelformat_ = format.fmt.pix_mp.pixelformat;
903 DCHECK_EQ(mfc_output_buffer_pixelformat_, V4L2_PIX_FMT_NV12MT_16X16);
904
905 // Create our other buffers.
906 if (!CreateMfcOutputBuffers() || !CreateGscInputBuffers() ||
907 !CreateGscOutputBuffers())
908 return false;
909
910 // MFC expects to process the initial buffer once during stream init to
911 // configure stream parameters, but will not consume the stream data on that
912 // iteration. Subsequent iterations (including after reset) do not require
913 // the stream init step.
914 *endpos = 0;
915 } else {
916 *endpos = size;
917 }
918
919 // StartDevicePoll will raise the error if there is one.
920 if (!StartDevicePoll())
921 return false;
922
923 decoder_state_ = kDecoding;
924 ScheduleDecodeBufferTaskIfNeeded();
925 return true;
926 }
927
928 bool ExynosVideoDecodeAccelerator::DecodeBufferContinue(
929 const void* data, size_t size) {
930 DVLOG(3) << "DecodeBufferContinue(): data=" << data << ", size=" << size;
931 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
932 DCHECK_EQ(decoder_state_, kDecoding);
933
934 // Both of these calls will set kError state if they fail.
935 return (AppendToInputFrame(data, size) && FlushInputFrame());
936 }
937
938 bool ExynosVideoDecodeAccelerator::AppendToInputFrame(
939 const void* data, size_t size) {
940 DVLOG(3) << "AppendToInputFrame()";
941 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
942 DCHECK_NE(decoder_state_, kUninitialized);
943 DCHECK_NE(decoder_state_, kResetting);
944 DCHECK_NE(decoder_state_, kError);
945 // This routine can handle data == NULL and size == 0, which occurs when
946 // we queue an empty buffer for the purposes of flushing the pipe.
947
948 // Flush if we're too big
949 if (decoder_current_input_buffer_ != -1) {
950 MfcInputRecord& input_record =
951 mfc_input_buffer_map_[decoder_current_input_buffer_];
952 if (input_record.bytes_used + size > input_record.length) {
953 if (!FlushInputFrame())
954 return false;
955 decoder_current_input_buffer_ = -1;
956 }
957 }
958
959 // Try to get an available input buffer
960 if (decoder_current_input_buffer_ == -1) {
961 if (mfc_free_input_buffers_.empty()) {
962 // See if we can get more free buffers from HW
963 DequeueMfc();
964 if (mfc_free_input_buffers_.empty()) {
965 // Nope!
966 DVLOG(2) << "AppendToInputFrame(): stalled for input buffers";
967 return false;
968 }
969 }
970 decoder_current_input_buffer_ = mfc_free_input_buffers_.back();
971 mfc_free_input_buffers_.pop_back();
972 MfcInputRecord& input_record =
973 mfc_input_buffer_map_[decoder_current_input_buffer_];
974 DCHECK_EQ(input_record.bytes_used, 0);
975 DCHECK_EQ(input_record.input_id, -1);
976 DCHECK(decoder_current_bitstream_buffer_ != NULL);
977 input_record.input_id = decoder_current_bitstream_buffer_->input_id;
978 }
979
980 DCHECK_EQ(data == NULL, size == 0);
981 if (size == 0) {
982 // If we asked for an empty buffer, return now. We return only after
983 // getting the next input buffer, since we might actually want an empty
984 // input buffer for flushing purposes.
985 return true;
986 }
987
988 // Copy in to the buffer.
989 MfcInputRecord& input_record =
990 mfc_input_buffer_map_[decoder_current_input_buffer_];
991 if (size > input_record.length - input_record.bytes_used) {
992 LOG(ERROR) << "AppendToInputFrame(): over-size frame, erroring";
993 NOTIFY_ERROR(UNREADABLE_INPUT);
994 return false;
995 }
996 memcpy((char*)input_record.address + input_record.bytes_used, data, size);
piman 2013/01/12 03:24:58 nit: no c-style cast please
sheu 2013/01/14 23:49:49 Done.
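
The cast fix piman asks for above would use a static_cast (assuming address is a void*, per the NULL initialization in MfcInputRecord earlier in this file):

  memcpy(static_cast<char*>(input_record.address) + input_record.bytes_used,
         data, size);
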
997 input_record.bytes_used += size;
998
999 return true;
1000 }
1001
1002 bool ExynosVideoDecodeAccelerator::FlushInputFrame() {
1003 DVLOG(3) << "FlushInputFrame()";
1004 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1005 DCHECK_NE(decoder_state_, kUninitialized);
1006 DCHECK_NE(decoder_state_, kResetting);
1007 DCHECK_NE(decoder_state_, kError);
1008
1009 if (decoder_current_input_buffer_ == -1)
1010 return true;
1011
1012 MfcInputRecord& input_record =
1013 mfc_input_buffer_map_[decoder_current_input_buffer_];
1014 DCHECK_NE(input_record.input_id, -1);
1015 DCHECK_EQ(input_record.input_id == kFlushBufferId,
1016 input_record.bytes_used == 0);
1017 // * if input_id >= 0, this input buffer was prompted by a bitstream buffer we
1018 // got from the client. We can skip it if it is empty.
1019 // * if input_id < 0 (should be kFlushBufferId in this case), this input
1020 // buffer was prompted by a flush buffer, and should be queued even when
1021 // empty.
1022 if (input_record.input_id >= 0 && input_record.bytes_used == 0) {
1023 input_record.input_id = -1;
1024 mfc_free_input_buffers_.push_back(decoder_current_input_buffer_);
1025 decoder_current_input_buffer_ = -1;
1026 return true;
1027 }
1028
1029 // Queue it to MFC.
1030 mfc_input_ready_queue_.push_back(decoder_current_input_buffer_);
1031 decoder_current_input_buffer_ = -1;
1032 DVLOG(3) << "FlushInputFrame(): submitting input_id="
1033 << input_record.input_id;
1034 // Kick the MFC once since there's new available input for it.
1035 EnqueueMfc();
1036
1037 return (decoder_state_ != kError);
1038 }
1039
1040 void ExynosVideoDecodeAccelerator::AssignPictureBuffersTask(
1041 scoped_ptr<PictureBufferArrayRef> pic_buffers) {
1042 DVLOG(3) << "AssignPictureBuffersTask()";
1043 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1044 DCHECK_NE(decoder_state_, kUninitialized);
1045 TRACE_EVENT0("Video Decoder", "EVDA::AssignPictureBuffersTask");
1046
1047 // We run AssignPictureBuffersTask even if we're in kResetting.
1048 if (decoder_state_ == kError) {
1049 DVLOG(2) << "AssignPictureBuffersTask(): early out: kError state";
1050 return;
1051 }
1052
1053 DCHECK_EQ(pic_buffers->picture_buffers.size(), gsc_output_buffer_map_.size());
1054 for (size_t i = 0; i < gsc_output_buffer_map_.size(); ++i) {
1055 // We should be blank right now.
1056 GscOutputRecord& output_record = gsc_output_buffer_map_[i];
1057 DCHECK_EQ(output_record.fd, -1);
1058 DCHECK_EQ(output_record.egl_image, EGL_NO_IMAGE_KHR);
1059 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
1060 DCHECK_EQ(output_record.picture_id, -1);
1061 PictureBufferArrayRef::PictureBufferRef& buffer =
1062 pic_buffers->picture_buffers[i];
1063 output_record.fd = buffer.egl_image_fd;
1064 output_record.egl_image = buffer.egl_image;
1065 output_record.picture_id = buffer.client_id;
1066
1067 // Take ownership of the EGLImage and fd.
1068 buffer.egl_image = EGL_NO_IMAGE_KHR;
1069 buffer.egl_image_fd = -1;
1070 // And add this buffer to the free list.
1071 gsc_free_output_buffers_.push_back(i);
1072 }
1073
1074 // We got buffers! Kick the GSC.
1075 EnqueueGsc();
1076 }
1077
1078 void ExynosVideoDecodeAccelerator::ServiceDeviceTask() {
1079 DVLOG(3) << "ServiceDeviceTask()";
1080 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1081 DCHECK_NE(decoder_state_, kUninitialized);
1082 DCHECK_NE(decoder_state_, kInitialized);
1083 DCHECK_NE(decoder_state_, kAfterReset);
1084 TRACE_EVENT0("Video Decoder", "EVDA::ServiceDeviceTask");
1085
1086 if (decoder_state_ == kResetting) {
1087 DVLOG(2) << "ServiceDeviceTask(): early out: kResetting state";
1088 return;
1089 } else if (decoder_state_ == kError) {
1090 DVLOG(2) << "ServiceDeviceTask(): early out: kError state";
1091 return;
1092 }
1093
1094 DequeueMfc();
1095 DequeueGsc();
1096 EnqueueMfc();
1097 EnqueueGsc();
1098
1099 // Clear the interrupt fd.
1100 if (!ClearDevicePollInterrupt())
1101 return;
1102
1103 unsigned int poll_fds = 0;
1104 // Add MFC fd, if we should poll on it.
1105 // MFC can be polled as soon as either input or output buffers are queued.
1106 if (mfc_input_buffer_queued_count_ + mfc_output_buffer_queued_count_ > 0)
1107 poll_fds |= kPollMfc;
1108 // Add GSC fd, if we should poll on it.
1109 // GSC has to wait until both input and output buffers are queued.
1110 if (gsc_input_buffer_queued_count_ > 0 && gsc_output_buffer_queued_count_ > 0)
1111 poll_fds |= kPollGsc;
1112
1113 // ServiceDeviceTask() should only ever be scheduled from DevicePollTask(),
1114 // so either:
1115 // * device_poll_thread_ is running normally
1116 // * device_poll_thread_ scheduled us, but then a ResetTask() or DestroyTask()
1117 // shut it down, in which case we're either in kResetting or kError states
1118 // respectively, and we should have early-outed already.
1119 DCHECK(device_poll_thread_.message_loop());
1120 // Queue the DevicePollTask() now.
1121 device_poll_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
1122 &ExynosVideoDecodeAccelerator::DevicePollTask,
1123 base::Unretained(this),
1124 poll_fds));
1125
1126 DVLOG(1) << "ServiceDeviceTask(): buffer counts: DEC["
1127 << decoder_input_queue_.size() << "->"
1128 << mfc_input_ready_queue_.size() << "] => MFC["
1129 << mfc_free_input_buffers_.size() << "+"
1130 << mfc_input_buffer_queued_count_ << "/"
1131 << mfc_input_buffer_count_ << "->"
1132 << mfc_free_output_buffers_.size() << "+"
1133 << mfc_output_buffer_queued_count_ << "/"
1134 << mfc_output_buffer_count_ << "] => "
1135 << mfc_output_gsc_input_queue_.size() << " => GSC["
1136 << gsc_free_input_buffers_.size() << "+"
1137 << gsc_input_buffer_queued_count_ << "/"
1138 << gsc_input_buffer_count_ << "->"
1139 << gsc_free_output_buffers_.size() << "+"
1140 << gsc_output_buffer_queued_count_ << "/"
1141 << gsc_output_buffer_count_ << "] => VDA["
1142 << decoder_frames_at_client_ << "]";
1143
1144 ScheduleDecodeBufferTaskIfNeeded();
1145 }
1146
1147 void ExynosVideoDecodeAccelerator::EnqueueMfc() {
1148 DVLOG(3) << "EnqueueMfc()";
1149 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1150 DCHECK_NE(decoder_state_, kUninitialized);
1151 TRACE_EVENT0("Video Decoder", "EVDA::EnqueueMfc");
1152
1153 // Drain the pipe of completed decode buffers.
1154 const int old_mfc_inputs_queued = mfc_input_buffer_queued_count_;
1155 while (!mfc_input_ready_queue_.empty()) {
1156 if (!EnqueueMfcInputRecord())
1157 return;
1158 }
1159 if (old_mfc_inputs_queued == 0 && mfc_input_buffer_queued_count_ != 0) {
1160 // We just started up a previously empty queue.
1161 // Queue state changed; signal interrupt.
1162 if (!SetDevicePollInterrupt())
1163 return;
1164 // Start VIDIOC_STREAMON if we haven't yet.
1165 if (!mfc_input_streamon_) {
1166 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1167 IOCTL_OR_ERROR_RETURN(mfc_fd_, VIDIOC_STREAMON, &type);
1168 mfc_input_streamon_ = true;
1169 }
1170 }
1171
1172 // Enqueue all the MFC outputs we can.
1173 const int old_mfc_outputs_queued = mfc_output_buffer_queued_count_;
1174 while (!mfc_free_output_buffers_.empty()) {
1175 if (!EnqueueMfcOutputRecord())
1176 return;
1177 }
1178 if (old_mfc_outputs_queued == 0 && mfc_output_buffer_queued_count_ != 0) {
1179 // We just started up a previously empty queue.
1180 // Queue state changed; signal interrupt.
1181 if (!SetDevicePollInterrupt())
1182 return;
1183 // Start VIDIOC_STREAMON if we haven't yet.
1184 if (!mfc_output_streamon_) {
1185 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1186 IOCTL_OR_ERROR_RETURN(mfc_fd_, VIDIOC_STREAMON, &type);
1187 mfc_output_streamon_ = true;
1188 }
1189 }
1190 }
1191
1192 void ExynosVideoDecodeAccelerator::DequeueMfc() {
1193 DVLOG(3) << "DequeueMfc()";
1194 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1195 DCHECK_NE(decoder_state_, kUninitialized);
1196 TRACE_EVENT0("Video Decoder", "EVDA::DequeueMfc");
1197
1198 // Dequeue completed MFC input (VIDEO_OUTPUT) buffers, and recycle to the free
1199 // list.
1200 struct v4l2_buffer dqbuf;
1201 struct v4l2_plane planes[2];
1202 while (mfc_input_buffer_queued_count_ > 0) {
1203 DCHECK(mfc_input_streamon_);
1204 memset(&dqbuf, 0, sizeof(dqbuf));
1205 dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1206 dqbuf.memory = V4L2_MEMORY_MMAP;
1207 if (ioctl(mfc_fd_, VIDIOC_DQBUF, &dqbuf) != 0) {
1208 if (errno == EAGAIN) {
1209 // EAGAIN if we're just out of buffers to dequeue.
1210 break;
1211 }
1212 DPLOG(ERROR) << "DequeueMfc(): ioctl() failed: VIDIOC_DQBUF";
1213 NOTIFY_ERROR(PLATFORM_FAILURE);
1214 return;
1215 }
1216 MfcInputRecord& input_record = mfc_input_buffer_map_[dqbuf.index];
1217 DCHECK(input_record.at_device);
1218 mfc_free_input_buffers_.push_back(dqbuf.index);
1219 input_record.at_device = false;
1220 input_record.bytes_used = 0;
1221 input_record.input_id = -1;
1222 mfc_input_buffer_queued_count_--;
1223 }
1224
1225 // Dequeue completed MFC output (VIDEO_CAPTURE) buffers, and queue to the
1226 // completed queue.
1227 while (mfc_output_buffer_queued_count_ > 0) {
1228 DCHECK(mfc_output_streamon_);
1229 memset(&dqbuf, 0, sizeof(dqbuf));
1230 memset(planes, 0, sizeof(planes));
1231 dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1232 dqbuf.memory = V4L2_MEMORY_MMAP;
1233 dqbuf.m.planes = planes;
1234 dqbuf.length = 2;
1235 if (ioctl(mfc_fd_, VIDIOC_DQBUF, &dqbuf) != 0) {
1236 if (errno == EAGAIN) {
1237 // EAGAIN if we're just out of buffers to dequeue.
1238 break;
1239 }
1240 DPLOG(ERROR) << "DequeueMfc(): ioctl() failed: VIDIOC_DQBUF";
1241 NOTIFY_ERROR(PLATFORM_FAILURE);
1242 return;
1243 }
1244 MfcOutputRecord& output_record = mfc_output_buffer_map_[dqbuf.index];
1245 DCHECK(output_record.at_device);
1246 output_record.at_device = false;
1247 output_record.bytes_used[0] = dqbuf.m.planes[0].bytesused;
1248 output_record.bytes_used[1] = dqbuf.m.planes[1].bytesused;
1249 if (output_record.bytes_used[0] + output_record.bytes_used[1] == 0) {
1250 // This is an empty output buffer returned as part of a flush.
1251 mfc_free_output_buffers_.push_back(dqbuf.index);
1252 output_record.input_id = -1;
1253 } else {
1254 // This is an output buffer with contents to pass down the pipe.
1255 mfc_output_gsc_input_queue_.push_back(dqbuf.index);
1256 output_record.input_id = dqbuf.timestamp.tv_sec;
1257 DCHECK(output_record.input_id >= 0);
1258 DVLOG(3) << "DequeueMfc(): dequeued input_id=" << output_record.input_id;
1259 // We don't count this output buffer dequeued yet, or add it to the free
1260 // list, as it has data GSC needs to process.
1261
1262 // We have new frames in mfc_output_gsc_input_queue_. Kick the pipe.
1263 SetDevicePollInterrupt();
1264 }
1265 mfc_output_buffer_queued_count_--;
1266 }
1267
1268 NotifyFlushDoneIfNeeded();
1269 }
1270
1271 void ExynosVideoDecodeAccelerator::EnqueueGsc() {
1272 DVLOG(3) << "EnqueueGsc()";
1273 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1274 DCHECK_NE(decoder_state_, kUninitialized);
1275 DCHECK_NE(decoder_state_, kInitialized);
1276 TRACE_EVENT0("Video Decoder", "EVDA::EnqueueGsc");
1277
1278 // Drain the pipe of completed MFC output buffers.
1279 const int old_gsc_inputs_queued = gsc_input_buffer_queued_count_;
1280 while (!mfc_output_gsc_input_queue_.empty() &&
1281 !gsc_free_input_buffers_.empty()) {
1282 if (!EnqueueGscInputRecord())
1283 return;
1284 }
1285 if (old_gsc_inputs_queued == 0 && gsc_input_buffer_queued_count_ != 0) {
1286 // We just started up a previously empty queue.
1287 // Queue state changed; signal interrupt.
1288 if (!SetDevicePollInterrupt())
1289 return;
1290 // Start VIDIOC_STREAMON if we haven't yet.
1291 if (!gsc_input_streamon_) {
1292 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1293 IOCTL_OR_ERROR_RETURN(gsc_fd_, VIDIOC_STREAMON, &type);
1294 gsc_input_streamon_ = true;
1295 }
1296 }
1297
1298 // Enqueue a GSC output, only if we need one
1299 if (gsc_input_buffer_queued_count_ != 0 &&
1300 gsc_output_buffer_queued_count_ == 0 &&
1301 !gsc_free_output_buffers_.empty()) {
1302 const int old_gsc_outputs_queued = gsc_output_buffer_queued_count_;
1303 if (!EnqueueGscOutputRecord())
1304 return;
1305 if (old_gsc_outputs_queued == 0 && gsc_output_buffer_queued_count_ != 0) {
1306 // We just started up a previously empty queue.
1307 // Queue state changed; signal interrupt.
1308 if (!SetDevicePollInterrupt())
1309 return;
1310 // Start VIDIOC_STREAMON if we haven't yet.
1311 if (!gsc_output_streamon_) {
1312 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1313 IOCTL_OR_ERROR_RETURN(gsc_fd_, VIDIOC_STREAMON, &type);
1314 gsc_output_streamon_ = true;
1315 }
1316 }
1317 }
1318 // Bug check: GSC is liable to race conditions if more than one buffer is
1319 // simultaneously queued.
1320 DCHECK_GE(1, gsc_output_buffer_queued_count_);
1321 }
1322
1323 void ExynosVideoDecodeAccelerator::DequeueGsc() {
1324 DVLOG(3) << "DequeueGsc()";
1325 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1326 DCHECK_NE(decoder_state_, kUninitialized);
1327 DCHECK_NE(decoder_state_, kInitialized);
1328 DCHECK_NE(decoder_state_, kAfterReset);
1329 TRACE_EVENT0("Video Decoder", "EVDA::DequeueGsc");
1330
1331 // Dequeue completed GSC input (VIDEO_OUTPUT) buffers, and recycle to the free
1332 // list. Also recycle the corresponding MFC output buffers at this time.
1333 struct v4l2_buffer dqbuf;
1334 while (gsc_input_buffer_queued_count_ > 0) {
1335 DCHECK(gsc_input_streamon_);
1336 memset(&dqbuf, 0, sizeof(dqbuf));
1337 dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1338 dqbuf.memory = V4L2_MEMORY_DMABUF;
1339 if (ioctl(gsc_fd_, VIDIOC_DQBUF, &dqbuf) != 0) {
1340 if (errno == EAGAIN) {
1341 // EAGAIN if we're just out of buffers to dequeue.
1342 break;
1343 }
1344 DPLOG(ERROR) << "DequeueGsc(): ioctl() failed: VIDIOC_DQBUF";
1345 NOTIFY_ERROR(PLATFORM_FAILURE);
1346 return;
1347 }
1348 GscInputRecord& input_record = gsc_input_buffer_map_[dqbuf.index];
1349 MfcOutputRecord& output_record =
1350 mfc_output_buffer_map_[input_record.mfc_output];
1351 DCHECK(input_record.at_device);
1352 gsc_free_input_buffers_.push_back(dqbuf.index);
1353 mfc_free_output_buffers_.push_back(input_record.mfc_output);
1354 input_record.at_device = false;
1355 input_record.mfc_output = -1;
1356 output_record.input_id = -1;
1357 gsc_input_buffer_queued_count_--;
1358 }
1359
1360 // Dequeue completed GSC output (VIDEO_CAPTURE) buffers, and send them off to
1361 // the client. Don't recycle to its free list yet -- we can't do that until
1362 // ReusePictureBuffer() returns it to us.
1363 while (gsc_output_buffer_queued_count_ > 0) {
1364 DCHECK(gsc_output_streamon_);
1365 memset(&dqbuf, 0, sizeof(dqbuf));
1366 dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1367 dqbuf.memory = V4L2_MEMORY_DMABUF;
1368 if (ioctl(gsc_fd_, VIDIOC_DQBUF, &dqbuf) != 0) {
1369 if (errno == EAGAIN) {
1370 // EAGAIN if we're just out of buffers to dequeue.
1371 break;
1372 }
1373 DPLOG(ERROR) << "DequeueGsc(): ioctl() failed: VIDIOC_DQBUF";
1374 NOTIFY_ERROR(PLATFORM_FAILURE);
1375 return;
1376 }
1377 GscOutputRecord& output_record = gsc_output_buffer_map_[dqbuf.index];
1378 DCHECK(output_record.at_device);
1379 DCHECK(!output_record.at_client);
1380 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
1381 output_record.at_device = false;
1382 output_record.at_client = true;
1383 gsc_output_buffer_queued_count_--;
1384 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
1385 &Client::PictureReady, client_, media::Picture(
1386 output_record.picture_id, dqbuf.timestamp.tv_sec)));
1387 decoder_frames_at_client_++;
1388 }
1389
1390 NotifyFlushDoneIfNeeded();
1391 }
1392
1393 bool ExynosVideoDecodeAccelerator::EnqueueMfcInputRecord() {
1394 DVLOG(3) << "EnqueueMfcInputRecord()";
1395 DCHECK(!mfc_input_ready_queue_.empty());
1396
1397 // Enqueue a MFC input (VIDEO_OUTPUT) buffer.
1398 const int buffer = mfc_input_ready_queue_.back();
1399 MfcInputRecord& input_record = mfc_input_buffer_map_[buffer];
1400 DCHECK(!input_record.at_device);
1401 struct v4l2_buffer qbuf;
1402 struct v4l2_plane qbuf_plane;
1403 memset(&qbuf, 0, sizeof(qbuf));
1404 memset(&qbuf_plane, 0, sizeof(qbuf_plane));
1405 qbuf.index = buffer;
1406 qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1407 qbuf.timestamp.tv_sec = input_record.input_id;
1408 qbuf.memory = V4L2_MEMORY_MMAP;
1409 qbuf.m.planes = &qbuf_plane;
1410 qbuf.m.planes[0].bytesused = input_record.bytes_used;
1411 qbuf.length = 1;
1412 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_QBUF, &qbuf);
1413 mfc_input_ready_queue_.pop_back();
1414 input_record.at_device = true;
1415 mfc_input_buffer_queued_count_++;
1416 DVLOG(3) << "EnqueueMfcInputRecord(): enqueued input_id="
1417 << input_record.input_id;
1418 return true;
1419 }
1420
1421 bool ExynosVideoDecodeAccelerator::EnqueueMfcOutputRecord() {
1422 DVLOG(3) << "EnqueueMfcOutputRecord()";
1423 DCHECK(!mfc_free_output_buffers_.empty());
1424
1425 // Enqueue a MFC output (VIDEO_CAPTURE) buffer.
1426 const int buffer = mfc_free_output_buffers_.back();
1427 MfcOutputRecord& output_record = mfc_output_buffer_map_[buffer];
1428 DCHECK(!output_record.at_device);
1429 DCHECK_EQ(output_record.input_id, -1);
1430 struct v4l2_buffer qbuf;
1431 struct v4l2_plane qbuf_planes[2];
1432 memset(&qbuf, 0, sizeof(qbuf));
1433 memset(qbuf_planes, 0, sizeof(qbuf_planes));
1434 qbuf.index = buffer;
1435 qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1436 qbuf.memory = V4L2_MEMORY_MMAP;
1437 qbuf.m.planes = qbuf_planes;
1438 qbuf.length = 2;
1439 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_QBUF, &qbuf);
1440 mfc_free_output_buffers_.pop_back();
1441 output_record.at_device = true;
1442 mfc_output_buffer_queued_count_++;
1443 return true;
1444 }
1445
1446 bool ExynosVideoDecodeAccelerator::EnqueueGscInputRecord() {
1447 DVLOG(3) << "EnqueueGscInputRecord()";
1448 DCHECK(!gsc_free_input_buffers_.empty());
1449
1450 // Enqueue a GSC input (VIDEO_OUTPUT) buffer for a complete MFC output
1451 // (VIDEO_CAPTURE) buffer.
1452 const int mfc_buffer = mfc_output_gsc_input_queue_.front();
1453 const int gsc_buffer = gsc_free_input_buffers_.back();
1454 MfcOutputRecord& output_record = mfc_output_buffer_map_[mfc_buffer];
1455 DCHECK(!output_record.at_device);
1456 GscInputRecord& input_record = gsc_input_buffer_map_[gsc_buffer];
1457 DCHECK(!input_record.at_device);
1458 DCHECK_EQ(input_record.mfc_output, -1);
1459 struct v4l2_buffer qbuf;
1460 struct v4l2_plane qbuf_planes[2];
1461 memset(&qbuf, 0, sizeof(qbuf));
1462 memset(qbuf_planes, 0, sizeof(qbuf_planes));
1463 qbuf.index = gsc_buffer;
1464 qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1465 qbuf.timestamp.tv_sec = output_record.input_id;
1466 qbuf.memory = V4L2_MEMORY_USERPTR;
1467 qbuf.m.planes = qbuf_planes;
1468 qbuf.m.planes[0].bytesused = output_record.bytes_used[0];
1469 qbuf.m.planes[0].length = mfc_output_buffer_size_[0];
1470 qbuf.m.planes[0].m.userptr = (unsigned long)output_record.address[0];
1471 qbuf.m.planes[1].bytesused = output_record.bytes_used[1];
1472 qbuf.m.planes[1].length = mfc_output_buffer_size_[1];
1473 qbuf.m.planes[1].m.userptr = (unsigned long)output_record.address[1];
1474 qbuf.length = 2;
1475 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_QBUF, &qbuf);
1476 mfc_output_gsc_input_queue_.pop_front();
1477 gsc_free_input_buffers_.pop_back();
1478 input_record.at_device = true;
1479 input_record.mfc_output = mfc_buffer;
1480 output_record.bytes_used[0] = 0;
1481 output_record.bytes_used[1] = 0;
1482 gsc_input_buffer_queued_count_++;
1483 DVLOG(3) << "EnqueueGscInputRecord(): enqueued input_id="
1484 << output_record.input_id;
1485 return true;
1486 }
1487
1488 bool ExynosVideoDecodeAccelerator::EnqueueGscOutputRecord() {
1489 DVLOG(3) << "EnqueueGscOutputRecord()";
1490 DCHECK(!gsc_free_output_buffers_.empty());
1491
1492 // Enqueue a GSC output (VIDEO_CAPTURE) buffer.
1493 const int buffer = gsc_free_output_buffers_.front();
1494 GscOutputRecord& output_record = gsc_output_buffer_map_[buffer];
1495 DCHECK(!output_record.at_device);
1496 DCHECK(!output_record.at_client);
1497 if (output_record.egl_sync != EGL_NO_SYNC_KHR) {
1498 TRACE_EVENT0(
1499 "Video Decoder",
1500 "EVDA::EnqueueGscOutputRecord: eglClientWaitSyncKHR");
1501 // If we have to wait for completion, wait. Note that
1502 // gsc_free_output_buffers_ is a FIFO queue, so we always wait on the
1503 // buffer that has been in the queue the longest.
1504 egl_client_wait_sync_khr(egl_display_, output_record.egl_sync, 0,
1505 EGL_FOREVER_KHR);
1506 egl_destroy_sync_khr(egl_display_, output_record.egl_sync);
1507 output_record.egl_sync = EGL_NO_SYNC_KHR;
1508 }
1509 struct v4l2_buffer qbuf;
1510 struct v4l2_plane qbuf_plane;
1511 memset(&qbuf, 0, sizeof(qbuf));
1512 memset(&qbuf_plane, 0, sizeof(qbuf_plane));
1513 qbuf.index = buffer;
1514 qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1515 qbuf.memory = V4L2_MEMORY_DMABUF;
1516 qbuf.m.planes = &qbuf_plane;
1517 qbuf.m.planes[0].m.fd = output_record.fd;
1518 qbuf.length = 1;
1519 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_QBUF, &qbuf);
1520 gsc_free_output_buffers_.pop_front();
1521 output_record.at_device = true;
1522 gsc_output_buffer_queued_count_++;
1523 return true;
1524 }
1525
1526 void ExynosVideoDecodeAccelerator::ReusePictureBufferTask(
1527 int32 picture_buffer_id, scoped_ptr<EGLSyncKHRRef> egl_sync_ref) {
1528 DVLOG(3) << "ReusePictureBufferTask(): picture_buffer_id="
1529 << picture_buffer_id;
1530 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1531 TRACE_EVENT0("Video Decoder", "EVDA::ReusePictureBufferTask");
1532
1533 // We run ReusePictureBufferTask even if we're in kResetting.
1534 if (decoder_state_ == kError) {
1535 DVLOG(2) << "ReusePictureBufferTask(): early out: kError state";
1536 return;
1537 }
1538
1539 size_t index;
1540 for (index = 0; index < gsc_output_buffer_map_.size(); ++index)
1541 if (gsc_output_buffer_map_[index].picture_id == picture_buffer_id)
1542 break;
1543
1544 if (index >= gsc_output_buffer_map_.size()) {
1545 DLOG(ERROR) << "ReusePictureBufferTask(): picture_buffer_id not found";
1546 NOTIFY_ERROR(INVALID_ARGUMENT);
1547 return;
1548 }
1549
1550 GscOutputRecord& output_record = gsc_output_buffer_map_[index];
1551 if (output_record.at_device || !output_record.at_client) {
1552 DLOG(ERROR) << "ReusePictureBufferTask(): picture_buffer_id not reusable";
1553 NOTIFY_ERROR(INVALID_ARGUMENT);
1554 return;
1555 }
1556
1557 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
1558 output_record.at_client = false;
1559 output_record.egl_sync = egl_sync_ref->egl_sync;
1560 gsc_free_output_buffers_.push_back(index);
1561 decoder_frames_at_client_--;
1562 // Take ownership of the EGLSync.
1563 egl_sync_ref->egl_sync = EGL_NO_SYNC_KHR;
1564 // We got a buffer back, so kick the GSC.
1565 EnqueueGsc();
1566 }
1567
1568 void ExynosVideoDecodeAccelerator::FlushTask() {
1569 DVLOG(3) << "FlushTask()";
1570 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1571 TRACE_EVENT0("Video Decoder", "EVDA::FlushTask");
1572
1573 // Flush outstanding buffers.
1574 if (decoder_state_ == kInitialized || decoder_state_ == kAfterReset) {
1575 // There's nothing in the pipe, so return done immediately.
1576 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
1577 &Client::NotifyFlushDone, client_));
1578 return;
1579 } else if (decoder_state_ == kError) {
1580 DVLOG(2) << "FlushTask(): early out: kError state";
1581 return;
1582 }
1583
1584 // We don't support stacked flushing.
1585 DCHECK(!decoder_flushing_);
1586
1587 // Queue up an empty buffer -- this triggers the flush.
1588 decoder_input_queue_.push_back(linked_ptr<BitstreamBufferRef>(
1589 new BitstreamBufferRef(client_, child_message_loop_proxy_, NULL, 0,
1590 kFlushBufferId)));
1591 decoder_flushing_ = true;
1592
1593 ScheduleDecodeBufferTaskIfNeeded();
1594 }
1595
1596 void ExynosVideoDecodeAccelerator::NotifyFlushDoneIfNeeded() {
1597 if (!decoder_flushing_)
1598 return;
1599
1600 // Pipeline is empty when:
1601 // * Decoder input queue is empty of non-delayed buffers.
1602 // * There is no currently filling input buffer.
1603 // * MFC input holding queue is empty.
1604 // * All MFC input (VIDEO_OUTPUT) buffers are returned.
1605 // * MFC -> GSC holding queue is empty.
1606 // * All GSC input (VIDEO_OUTPUT) buffers are returned.
1607 if (!decoder_input_queue_.empty()) {
1608 if (decoder_input_queue_.front()->input_id !=
1609 decoder_delay_bitstream_buffer_id_)
1610 return;
1611 }
1612 if (decoder_current_input_buffer_ != -1)
1613 return;
1614 if ((mfc_input_ready_queue_.size() +
1615 mfc_input_buffer_queued_count_ + mfc_output_gsc_input_queue_.size() +
1616       gsc_input_buffer_queued_count_ + gsc_output_buffer_queued_count_) != 0)
1617 return;
1618
1619 decoder_delay_bitstream_buffer_id_ = -1;
1620 decoder_flushing_ = false;
1621 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
1622 &Client::NotifyFlushDone, client_));
1623
1624 // While we were flushing, we early-outed DecodeBufferTask()s.
1625 ScheduleDecodeBufferTaskIfNeeded();
1626 }
1627
1628 void ExynosVideoDecodeAccelerator::ResetTask() {
1629 DVLOG(3) << "ResetTask()";
1630 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1631 TRACE_EVENT0("Video Decoder", "EVDA::ResetTask");
1632
1633 if (decoder_state_ == kError) {
1634 DVLOG(2) << "ResetTask(): early out: kError state";
1635 return;
1636 }
1637
1638 // We stop streaming, but we _don't_ destroy our buffers.
1639 if (!StopDevicePoll())
1640 return;
1641
1642 decoder_current_bitstream_buffer_.reset();
1643 decoder_input_queue_.clear();
1644
1645 decoder_current_input_buffer_ = -1;
1646 decoder_decode_buffer_tasks_scheduled_ = 0;
piman 2013/01/12 03:24:58 This is suspicious... It's incremented when schedu
sheu 2013/01/14 23:49:49 Aaaand... that's a bug. Thanks. Done.
1647
1648 // If we were flushing, we'll never return any more BitstreamBuffers or
1649 // PictureBuffers; they have all been dropped and returned by now.
1650 NotifyFlushDoneIfNeeded();
1651
1652 // Mark that we're resetting, then enqueue a ResetDoneTask(). All intervening
1653 // jobs will early-out in the kResetting state.
1654 decoder_state_ = kResetting;
1655 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
1656 &ExynosVideoDecodeAccelerator::ResetDoneTask, base::Unretained(this)));
1657 }
1658
1659 void ExynosVideoDecodeAccelerator::ResetDoneTask() {
1660 DVLOG(3) << "ResetDoneTask()";
1661 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1662 TRACE_EVENT0("Video Decoder", "EVDA::ResetDoneTask");
1663
1664 if (decoder_state_ == kError) {
1665 DVLOG(2) << "ResetDoneTask(): early out: kError state";
1666 return;
1667 }
1668
1669 // Reset format-specific bits.
1670 if (video_profile_ >= media::H264PROFILE_MIN &&
1671 video_profile_ <= media::H264PROFILE_MAX) {
1672 decoder_h264_parser_.reset(new content::H264Parser());
1673 }
1674
1675 // Jobs drained, we're finished resetting.
1676 DCHECK_EQ(decoder_state_, kResetting);
1677 decoder_state_ = kAfterReset;
1678 decoder_delay_bitstream_buffer_id_ = -1;
1679 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
1680 &Client::NotifyResetDone, client_));
1681
1682 // While we were resetting, we early-outed DecodeBufferTask()s.
1683 ScheduleDecodeBufferTaskIfNeeded();
1684 }
1685
1686 void ExynosVideoDecodeAccelerator::DestroyTask() {
1687 DVLOG(3) << "DestroyTask()";
1688 TRACE_EVENT0("Video Decoder", "EVDA::DestroyTask");
1689
1690 // DestroyTask() should run regardless of decoder_state_.
1691
1692 // Stop streaming and the device_poll_thread_.
1693 StopDevicePoll();
1694
1695 decoder_current_bitstream_buffer_.reset();
1696 decoder_current_input_buffer_ = -1;
1697 decoder_decode_buffer_tasks_scheduled_ = 0;
1698 decoder_frames_at_client_ = 0;
1699 decoder_input_queue_.clear();
1700 decoder_flushing_ = false;
1701
1702 // Set our state to kError. Just in case.
1703 decoder_state_ = kError;
1704 }
1705
1706 bool ExynosVideoDecodeAccelerator::StartDevicePoll() {
1707 DVLOG(3) << "StartDevicePoll()";
1708 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1709 DCHECK(!device_poll_thread_.IsRunning());
1710
1711 // Start up the device poll thread and schedule its first DevicePollTask().
1712 if (!device_poll_thread_.Start()) {
1713 DLOG(ERROR) << "StartDevicePoll(): Device thread failed to start";
1714 NOTIFY_ERROR(PLATFORM_FAILURE);
1715 return false;
1716 }
1717 device_poll_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
1718 &ExynosVideoDecodeAccelerator::DevicePollTask,
1719 base::Unretained(this),
1720 0));
1721
1722 return true;
1723 }
1724
1725 bool ExynosVideoDecodeAccelerator::StopDevicePoll() {
1726 DVLOG(3) << "StopDevicePoll()";
1727 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1728
1729 // Signal the DevicePollTask() to stop, and stop the device poll thread.
1730 if (!SetDevicePollInterrupt())
1731 return false;
1732 device_poll_thread_.Stop();
1733 // Clear the interrupt now, to be sure.
1734 if (!ClearDevicePollInterrupt())
1735 return false;
1736
1737 // Stop streaming.
1738 if (mfc_input_streamon_) {
1739 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1740 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_STREAMOFF, &type);
1741 }
1742 mfc_input_streamon_ = false;
1743 if (mfc_output_streamon_) {
1744 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1745 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_STREAMOFF, &type);
1746 }
1747 mfc_output_streamon_ = false;
1748 if (gsc_input_streamon_) {
1749 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1750 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_STREAMOFF, &type);
1751 }
1752 gsc_input_streamon_ = false;
1753 if (gsc_output_streamon_) {
1754 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1755 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_STREAMOFF, &type);
1756 }
1757 gsc_output_streamon_ = false;
1758
1759 // Reset all our accounting info.
1760 mfc_input_ready_queue_.clear();
1761 mfc_free_input_buffers_.clear();
1762 DCHECK_EQ(mfc_input_buffer_count_,
1763 static_cast<int>(mfc_input_buffer_map_.size()));
1764 for (size_t i = 0; i < mfc_input_buffer_map_.size(); ++i) {
1765 mfc_free_input_buffers_.push_back(i);
1766 mfc_input_buffer_map_[i].at_device = false;
1767 mfc_input_buffer_map_[i].bytes_used = 0;
1768 mfc_input_buffer_map_[i].input_id = -1;
1769 }
1770 mfc_input_buffer_queued_count_ = 0;
1771 mfc_free_output_buffers_.clear();
1772 DCHECK_EQ(mfc_output_buffer_count_,
1773 static_cast<int>(mfc_output_buffer_map_.size()));
1774 for (size_t i = 0; i < mfc_output_buffer_map_.size(); ++i) {
1775 mfc_free_output_buffers_.push_back(i);
1776 mfc_output_buffer_map_[i].at_device = false;
1777 mfc_output_buffer_map_[i].input_id = -1;
1778 }
1779 mfc_output_buffer_queued_count_ = 0;
1780 mfc_output_gsc_input_queue_.clear();
1781 gsc_free_input_buffers_.clear();
1782 DCHECK_EQ(gsc_input_buffer_count_,
1783 static_cast<int>(gsc_input_buffer_map_.size()));
1784 for (size_t i = 0; i < gsc_input_buffer_map_.size(); ++i) {
1785 gsc_free_input_buffers_.push_back(i);
1786 gsc_input_buffer_map_[i].at_device = false;
1787 gsc_input_buffer_map_[i].mfc_output = -1;
1788 }
1789 gsc_input_buffer_queued_count_ = 0;
1790 gsc_free_output_buffers_.clear();
1791 DCHECK_EQ(gsc_output_buffer_count_,
1792 static_cast<int>(gsc_output_buffer_map_.size()));
1793 for (size_t i = 0; i < gsc_output_buffer_map_.size(); ++i) {
1794     // Only mark those free that aren't being held by the client.
1795 if (!gsc_output_buffer_map_[i].at_client) {
1796 gsc_free_output_buffers_.push_back(i);
1797 gsc_output_buffer_map_[i].at_device = false;
1798 }
1799 }
1800 gsc_output_buffer_queued_count_ = 0;
1801
1802 DVLOG(3) << "StopDevicePoll(): device poll stopped";
1803 return true;
1804 }
1805
1806 bool ExynosVideoDecodeAccelerator::SetDevicePollInterrupt() {
1807 DVLOG(3) << "SetDevicePollInterrupt()";
1808 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1809
1810 const uint64 buf = 1;
1811 if (write(device_poll_interrupt_fd_, &buf, sizeof(buf)) == -1) {
piman 2013/01/12 03:24:58 HANDLE_EINTR
sheu 2013/01/14 23:49:49 Done.
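For reference, a sketch of the suggested change (an assumption: the HANDLE_EINTR macro from base's eintr_wrapper header, which retries the wrapped call while it fails with EINTR):
  const uint64 buf = 1;
  // Retry a signal-interrupted write() instead of reporting a platform failure.
  if (HANDLE_EINTR(write(device_poll_interrupt_fd_, &buf, sizeof(buf))) == -1) {
    DPLOG(ERROR) << "SetDevicePollInterrupt(): write() failed";
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return false;
  }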
1812 DPLOG(ERROR) << "SetDevicePollInterrupt(): write() failed";
1813 NOTIFY_ERROR(PLATFORM_FAILURE);
1814 return false;
1815 }
1816 return true;
1817 }
1818
1819 bool ExynosVideoDecodeAccelerator::ClearDevicePollInterrupt() {
1820 DVLOG(3) << "ClearDevicePollInterrupt()";
1821 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1822
1823 uint64 buf;
1824 if (read(device_poll_interrupt_fd_, &buf, sizeof(buf)) == -1) {
piman 2013/01/12 03:24:58 HANDLE_EINTR
sheu 2013/01/14 23:49:49 Done.
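Likewise for the read(), a sketch under the same HANDLE_EINTR assumption; the EAGAIN case is folded into the condition but treated the same way as below:
  uint64 buf;
  // Retry on EINTR; EAGAIN (nothing to read on the nonblocking fd) still means
  // "no interrupt flag set" and is not an error.
  if (HANDLE_EINTR(read(device_poll_interrupt_fd_, &buf, sizeof(buf))) == -1 &&
      errno != EAGAIN) {
    DPLOG(ERROR) << "ClearDevicePollInterrupt(): read() failed";
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return false;
  }
  return true;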
1825 if (errno == EAGAIN) {
1826 // No interrupt flag set, and we're reading nonblocking. Not an error.
1827 return true;
1828 } else {
1829 DPLOG(ERROR) << "ClearDevicePollInterrupt(): read() failed";
1830 NOTIFY_ERROR(PLATFORM_FAILURE);
1831 return false;
1832 }
1833 }
1834 return true;
1835 }
1836
1837 void ExynosVideoDecodeAccelerator::DevicePollTask(unsigned int poll_fds) {
1838 DVLOG(3) << "DevicePollTask()";
1839 DCHECK_EQ(device_poll_thread_.message_loop(), MessageLoop::current());
1840 TRACE_EVENT0("Video Decoder", "EVDA::DevicePollTask");
1841
1842 // This routine just polls the set of device fds, and schedules a
1843 // ServiceDeviceTask() on decoder_thread_ when processing needs to occur.
1844 // Other threads may notify this task to return early by writing to
1845 // device_poll_interrupt_fd_.
1846 struct pollfd pollfds[3];
1847 nfds_t nfds;
1848
1849   // Add device_poll_interrupt_fd_.
1850 pollfds[0].fd = device_poll_interrupt_fd_;
1851 pollfds[0].events = POLLIN | POLLERR;
1852 nfds = 1;
1853
1854 if (poll_fds & kPollMfc) {
1855 DVLOG(3) << "DevicePollTask(): adding MFC to poll() set";
1856 pollfds[nfds].fd = mfc_fd_;
1857 pollfds[nfds].events = POLLIN | POLLOUT | POLLERR;
1858 nfds++;
1859 }
1860 // Add GSC fd, if we should poll on it.
1861 // GSC has to wait until both input and output buffers are queued.
1862 if (poll_fds & kPollGsc) {
1863 DVLOG(3) << "DevicePollTask(): adding GSC to poll() set";
1864 pollfds[nfds].fd = gsc_fd_;
1865 pollfds[nfds].events = POLLIN | POLLOUT | POLLERR;
1866 nfds++;
1867 }
1868
1869 // Poll it!
1870 int ret;
1871 do {
1872 ret = poll(pollfds, nfds, -1);
1873 } while (ret < 1 && errno == EINTR);
piman 2013/01/12 03:24:58 You can just use HANDLE_EINTR
sheu 2013/01/14 23:49:49 Done.
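For reference, a sketch of the poll() call with the suggested macro in place of the hand-rolled retry loop (same HANDLE_EINTR assumption as above):
  // HANDLE_EINTR re-issues poll() while it returns -1 with errno == EINTR, so
  // the explicit do/while becomes a single call.
  int ret = HANDLE_EINTR(poll(pollfds, nfds, -1));
  if (ret == -1) {
    DPLOG(ERROR) << "DevicePollTask(): poll() failed";
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return;
  }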
1874 if (ret == -1) {
1875 DPLOG(ERROR) << "DevicePollTask(): poll() failed";
1876 NOTIFY_ERROR(PLATFORM_FAILURE);
1877 return;
1878 }
1879
1880 // All processing should happen on ServiceDeviceTask(), since we shouldn't
1881 // touch decoder state from this thread.
1882 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
1883 &ExynosVideoDecodeAccelerator::ServiceDeviceTask,
1884 base::Unretained(this)));
1885 }
1886
1887 void ExynosVideoDecodeAccelerator::NotifyError(Error error) {
1888 DVLOG(2) << "NotifyError()";
1889
1890 if (!child_message_loop_proxy_->BelongsToCurrentThread()) {
1891 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
1892 &ExynosVideoDecodeAccelerator::NotifyError, weak_this_, error));
1893 return;
1894 }
1895
1896 if (client_) {
1897 client_->NotifyError(error);
1898 client_ptr_factory_.InvalidateWeakPtrs();
1899 }
1900 }
1901
1902 void ExynosVideoDecodeAccelerator::SetDecoderState(State state) {
1903   DVLOG(3) << "SetDecoderState(): state=" << state;
1904
1905 // We can touch decoder_state_ only if this is the decoder thread or the
1906 // decoder thread isn't running.
1907 if (decoder_thread_.message_loop() != NULL &&
1908 decoder_thread_.message_loop() != MessageLoop::current()) {
1909 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
1910 &ExynosVideoDecodeAccelerator::SetDecoderState,
1911 base::Unretained(this), state));
1912 } else {
1913 decoder_state_ = state;
1914 }
1915 }
1916
1917 bool ExynosVideoDecodeAccelerator::CreateMfcInputBuffers() {
1918 DVLOG(3) << "CreateMfcInputBuffers()";
1919 // We always run this as we prepare to initialize.
1920 DCHECK_EQ(decoder_state_, kUninitialized);
1921 DCHECK(!mfc_input_streamon_);
1922 DCHECK_EQ(mfc_input_buffer_count_, 0);
1923
1924 __u32 pixelformat = 0;
1925 if (video_profile_ >= media::H264PROFILE_MIN &&
1926 video_profile_ <= media::H264PROFILE_MAX) {
1927 pixelformat = V4L2_PIX_FMT_H264;
1928 } else if (video_profile_ >= media::VP8PROFILE_MIN &&
1929 video_profile_ <= media::VP8PROFILE_MAX) {
1930 pixelformat = V4L2_PIX_FMT_VP8;
1931 } else {
1932 NOTREACHED();
1933 }
1934
1935 struct v4l2_format format;
1936 memset(&format, 0, sizeof(format));
1937 format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1938 format.fmt.pix_mp.pixelformat = pixelformat;
1939 format.fmt.pix_mp.plane_fmt[0].sizeimage = kMfcInputBufferMaxSize;
1940 format.fmt.pix_mp.num_planes = 1;
1941 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_S_FMT, &format);
1942
1943 struct v4l2_requestbuffers reqbufs;
1944 memset(&reqbufs, 0, sizeof(reqbufs));
1945 reqbufs.count = kMfcInputBufferCount;
1946 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1947 reqbufs.memory = V4L2_MEMORY_MMAP;
1948 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_REQBUFS, &reqbufs);
1949 mfc_input_buffer_count_ = reqbufs.count;
1950 mfc_input_buffer_map_.resize(mfc_input_buffer_count_);
1951 for (int i = 0; i < mfc_input_buffer_count_; ++i) {
1952 mfc_free_input_buffers_.push_back(i);
1953
1954 // Query for the MEMORY_MMAP pointer.
1955 struct v4l2_plane planes[1];
1956 struct v4l2_buffer buffer;
1957 memset(&buffer, 0, sizeof(buffer));
1958 memset(planes, 0, sizeof(planes));
1959 buffer.index = i;
1960 buffer.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1961 buffer.memory = V4L2_MEMORY_MMAP;
1962 buffer.m.planes = planes;
1963 buffer.length = 1;
1964 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_QUERYBUF, &buffer);
1965 void* address = mmap(NULL, buffer.m.planes[0].length,
1966 PROT_READ | PROT_WRITE, MAP_SHARED, mfc_fd_,
1967 buffer.m.planes[0].m.mem_offset);
1968 if (address == MAP_FAILED) {
1969 DPLOG(ERROR) << "CreateMfcInputBuffers(): mmap() failed";
1970 return false;
1971 }
1972 mfc_input_buffer_map_[i].address = address;
1973 mfc_input_buffer_map_[i].length = buffer.m.planes[0].length;
1974 }
1975
1976 return true;
1977 }
1978
1979 bool ExynosVideoDecodeAccelerator::CreateMfcOutputBuffers() {
1980 DVLOG(3) << "CreateMfcOutputBuffers()";
1981 DCHECK_EQ(decoder_state_, kInitialized);
1982 DCHECK(!mfc_output_streamon_);
1983 DCHECK_EQ(mfc_output_buffer_count_, 0);
1984
1985 // Number of MFC output buffers we need.
1986 struct v4l2_control ctrl;
1987 memset(&ctrl, 0, sizeof(ctrl));
1988 ctrl.id = V4L2_CID_MIN_BUFFERS_FOR_CAPTURE;
1989 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_G_CTRL, &ctrl);
1990
1991 // Output format setup in Initialize().
1992
1993 // Allocate the output buffers.
1994 struct v4l2_requestbuffers reqbufs;
1995 memset(&reqbufs, 0, sizeof(reqbufs));
1996 reqbufs.count = ctrl.value + kMfcOutputBufferExtraCount;
1997 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1998 reqbufs.memory = V4L2_MEMORY_MMAP;
1999 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_REQBUFS, &reqbufs);
2000
2001   // Fill our free-buffers list, and mmap() the buffers for use as GSC input.
2002 mfc_output_buffer_count_ = reqbufs.count;
2003 mfc_output_buffer_map_.resize(mfc_output_buffer_count_);
2004 for (int i = 0; i < mfc_output_buffer_count_; ++i) {
2005 mfc_free_output_buffers_.push_back(i);
2006
2007 // Query for the MEMORY_MMAP pointer.
2008 struct v4l2_plane planes[2];
2009 struct v4l2_buffer buffer;
2010 memset(&buffer, 0, sizeof(buffer));
2011 memset(planes, 0, sizeof(planes));
2012 buffer.index = i;
2013 buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
2014 buffer.memory = V4L2_MEMORY_MMAP;
2015 buffer.m.planes = planes;
2016 buffer.length = 2;
2017 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_QUERYBUF, &buffer);
2018
2019 // Get their user memory for GSC input.
2020 for (int j = 0; j < 2; ++j) {
2021 void* address = mmap(NULL, buffer.m.planes[j].length,
2022 PROT_READ | PROT_WRITE, MAP_SHARED, mfc_fd_,
2023 buffer.m.planes[j].m.mem_offset);
2024 if (address == MAP_FAILED) {
2025       DPLOG(ERROR) << "CreateMfcOutputBuffers(): mmap() failed";
2026 return false;
2027 }
2028 mfc_output_buffer_map_[i].address[j] = address;
2029 mfc_output_buffer_map_[i].length[j] = buffer.m.planes[j].length;
2030 }
2031 }
2032
2033 return true;
2034 }
2035
2036 bool ExynosVideoDecodeAccelerator::CreateGscInputBuffers() {
2037 DVLOG(3) << "CreateGscInputBuffers()";
2038 DCHECK_EQ(decoder_state_, kInitialized);
2039 DCHECK(!gsc_input_streamon_);
2040 DCHECK_EQ(gsc_input_buffer_count_, 0);
2041
2042 struct v4l2_format format;
2043 memset(&format, 0, sizeof(format));
2044 format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
2045 format.fmt.pix_mp.width = frame_buffer_size_.width();
2046 format.fmt.pix_mp.height = frame_buffer_size_.height();
2047 format.fmt.pix_mp.pixelformat = mfc_output_buffer_pixelformat_;
2048 format.fmt.pix_mp.plane_fmt[0].sizeimage = mfc_output_buffer_size_[0];
2049 format.fmt.pix_mp.plane_fmt[1].sizeimage = mfc_output_buffer_size_[1];
2050   // NV12MT_16X16 is a tiled format, so bytesperline is not very meaningful for
2051   // it. The convention seems to be to assume 8bpp for these tiled formats.
2052 format.fmt.pix_mp.plane_fmt[0].bytesperline = frame_buffer_size_.width();
2053 format.fmt.pix_mp.plane_fmt[1].bytesperline = frame_buffer_size_.width();
2054 format.fmt.pix_mp.num_planes = 2;
2055 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_FMT, &format);
2056
2057 struct v4l2_control control;
2058 memset(&control, 0, sizeof(control));
2059 control.id = V4L2_CID_ROTATE;
2060 control.value = 0;
2061 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CTRL, &control);
2062
2063 memset(&control, 0, sizeof(control));
2064 control.id = V4L2_CID_HFLIP;
2065 control.value = 0;
2066 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CTRL, &control);
2067
2068 memset(&control, 0, sizeof(control));
2069 control.id = V4L2_CID_VFLIP;
2070 control.value = 0;
2071 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CTRL, &control);
2072
2073 memset(&control, 0, sizeof(control));
2074 control.id = V4L2_CID_GLOBAL_ALPHA;
2075 control.value = 255;
2076 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CTRL, &control);
2077
2078 struct v4l2_requestbuffers reqbufs;
2079 memset(&reqbufs, 0, sizeof(reqbufs));
2080 reqbufs.count = kGscInputBufferCount;
2081 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
2082 reqbufs.memory = V4L2_MEMORY_USERPTR;
2083 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_REQBUFS, &reqbufs);
2084
2085 gsc_input_buffer_count_ = reqbufs.count;
2086 gsc_input_buffer_map_.resize(gsc_input_buffer_count_);
2087 for (int i = 0; i < gsc_input_buffer_count_; ++i) {
2088 gsc_free_input_buffers_.push_back(i);
2089 gsc_input_buffer_map_[i].mfc_output = -1;
2090 }
2091
2092 return true;
2093 }
2094
2095 bool ExynosVideoDecodeAccelerator::CreateGscOutputBuffers() {
2096 DVLOG(3) << "CreateGscOutputBuffers()";
2097 DCHECK_EQ(decoder_state_, kInitialized);
2098 DCHECK(!gsc_output_streamon_);
2099 DCHECK_EQ(gsc_output_buffer_count_, 0);
2100
2101 // GSC outputs into the EGLImages we create from the textures we are
2102 // assigned. Assume RGBA8888 format.
2103 struct v4l2_format format;
2104 memset(&format, 0, sizeof(format));
2105 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
2106 format.fmt.pix_mp.width = frame_buffer_size_.width();
2107 format.fmt.pix_mp.height = frame_buffer_size_.height();
2108 format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_RGB32;
2109 format.fmt.pix_mp.plane_fmt[0].sizeimage =
2110 frame_buffer_size_.width() * frame_buffer_size_.height() * 4;
2111 format.fmt.pix_mp.plane_fmt[0].bytesperline = frame_buffer_size_.width() * 4;
2112 format.fmt.pix_mp.num_planes = 1;
2113 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_FMT, &format);
2114
2115 struct v4l2_requestbuffers reqbufs;
2116 memset(&reqbufs, 0, sizeof(reqbufs));
2117 reqbufs.count = kGscOutputBufferCount;
2118 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
2119 reqbufs.memory = V4L2_MEMORY_DMABUF;
2120 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_REQBUFS, &reqbufs);
2121
2122 // We don't actually fill in the freelist or the map here. That happens once
2123   // we have actual usable buffers, after AssignPictureBuffers().
2124 gsc_output_buffer_count_ = reqbufs.count;
2125 gsc_output_buffer_map_.resize(gsc_output_buffer_count_);
2126
2127 DVLOG(3) << "CreateGscOutputBuffers(): ProvidePictureBuffers(): "
2128 << "buffer_count=" << gsc_output_buffer_count_
2129 << ", width=" << frame_buffer_size_.width()
2130 << ", height=" << frame_buffer_size_.height();
2131 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
2132 &Client::ProvidePictureBuffers, client_, gsc_output_buffer_count_,
2133 gfx::Size(frame_buffer_size_.width(), frame_buffer_size_.height()),
2134 GL_TEXTURE_2D));
2135
2136 return true;
2137 }
2138
2139 void ExynosVideoDecodeAccelerator::DestroyMfcInputBuffers() {
2140 DVLOG(3) << "DestroyMfcInputBuffers()";
2141 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
2142 DCHECK(!mfc_input_streamon_);
2143
2144 for (size_t i = 0; i < mfc_input_buffer_map_.size(); ++i) {
2145 if (mfc_input_buffer_map_[i].address != NULL) {
2146 munmap(mfc_input_buffer_map_[i].address,
2147 mfc_input_buffer_map_[i].length);
2148 }
2149 }
2150
2151 struct v4l2_requestbuffers reqbufs;
2152 memset(&reqbufs, 0, sizeof(reqbufs));
2153 reqbufs.count = 0;
2154 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
2155 reqbufs.memory = V4L2_MEMORY_MMAP;
2156 if (ioctl(mfc_fd_, VIDIOC_REQBUFS, &reqbufs) != 0)
2157 DPLOG(ERROR) << "DestroyMfcInputBuffers(): ioctl() failed: VIDIOC_REQBUFS";
2158
2159 mfc_input_buffer_map_.clear();
2160 mfc_free_input_buffers_.clear();
2161 mfc_input_buffer_count_ = 0;
2162 }
2163
2164 void ExynosVideoDecodeAccelerator::DestroyMfcOutputBuffers() {
2165 DVLOG(3) << "DestroyMfcOutputBuffers()";
2166 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
2167 DCHECK(!mfc_output_streamon_);
2168
2169 for (size_t i = 0; i < mfc_output_buffer_map_.size(); ++i) {
2170 if (mfc_output_buffer_map_[i].address[0] != NULL)
2171 munmap(mfc_output_buffer_map_[i].address[0],
2172 mfc_output_buffer_map_[i].length[0]);
2173 if (mfc_output_buffer_map_[i].address[1] != NULL)
2174 munmap(mfc_output_buffer_map_[i].address[1],
2175 mfc_output_buffer_map_[i].length[1]);
2176 }
2177
2178 struct v4l2_requestbuffers reqbufs;
2179 memset(&reqbufs, 0, sizeof(reqbufs));
2180 reqbufs.count = 0;
2181 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
2182 reqbufs.memory = V4L2_MEMORY_MMAP;
2183 if (ioctl(mfc_fd_, VIDIOC_REQBUFS, &reqbufs) != 0)
2184     DPLOG(ERROR) << "DestroyMfcOutputBuffers(): ioctl() failed: VIDIOC_REQBUFS";
2185
2186 mfc_output_buffer_map_.clear();
2187 mfc_free_output_buffers_.clear();
2188 mfc_output_buffer_count_ = 0;
2189 }
2190
2191 void ExynosVideoDecodeAccelerator::DestroyGscInputBuffers() {
2192 DVLOG(3) << "DestroyGscInputBuffers()";
2193 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
2194 DCHECK(!gsc_input_streamon_);
2195
2196 struct v4l2_requestbuffers reqbufs;
2197 memset(&reqbufs, 0, sizeof(reqbufs));
2198 reqbufs.count = 0;
2199 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
2200 reqbufs.memory = V4L2_MEMORY_DMABUF;
2201 if (ioctl(gsc_fd_, VIDIOC_REQBUFS, &reqbufs) != 0)
2202 DPLOG(ERROR) << "DestroyGscInputBuffers(): ioctl() failed: VIDIOC_REQBUFS";
2203
2204 gsc_input_buffer_map_.clear();
2205 gsc_free_input_buffers_.clear();
2206 gsc_input_buffer_count_ = 0;
2207 }
2208
2209 void ExynosVideoDecodeAccelerator::DestroyGscOutputBuffers() {
2210 DVLOG(3) << "DestroyGscOutputBuffers()";
2211 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
2212 DCHECK(!gsc_output_streamon_);
2213
2214 if (gsc_output_buffer_map_.size() != 0) {
2215 if (!make_context_current_.Run())
2216 DLOG(ERROR) << "DestroyGscOutputBuffers(): "
2217 << "could not make context current";
2218
2219 size_t i = 0;
2220 do {
2221 GscOutputRecord& output_record = gsc_output_buffer_map_[i];
2222 if (output_record.fd != -1)
2223 close(output_record.fd);
piman 2013/01/12 03:24:58 HANDLE_EINTR
sheu 2013/01/14 23:49:49 Done.
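A sketch of the suggested wrapping for the close() call (same HANDLE_EINTR assumption); whether close() should be retried after EINTR is somewhat platform-dependent, so this simply follows the reviewer's suggestion:
      if (output_record.fd != -1)
        HANDLE_EINTR(close(output_record.fd));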
2224 if (output_record.egl_image != EGL_NO_IMAGE_KHR)
2225 egl_destroy_image_khr(egl_display_, output_record.egl_image);
2226 if (output_record.egl_sync != EGL_NO_SYNC_KHR)
2227 egl_destroy_sync_khr(egl_display_, output_record.egl_sync);
2228 if (client_)
2229 client_->DismissPictureBuffer(output_record.picture_id);
2230 ++i;
2231 } while (i < gsc_output_buffer_map_.size());
2232 }
2233
2234 struct v4l2_requestbuffers reqbufs;
2235 memset(&reqbufs, 0, sizeof(reqbufs));
2236 reqbufs.count = 0;
2237 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
2238 reqbufs.memory = V4L2_MEMORY_DMABUF;
2239 if (ioctl(gsc_fd_, VIDIOC_REQBUFS, &reqbufs) != 0)
2240 DPLOG(ERROR) << "DestroyGscOutputBuffers(): ioctl() failed: VIDIOC_REQBUFS";
2241
2242 gsc_output_buffer_map_.clear();
2243 gsc_free_output_buffers_.clear();
2244 gsc_output_buffer_count_ = 0;
2245 }
2246
2247 } // namespace content