Chromium Code Reviews

Side by Side Diff: content/common/gpu/media/exynos_video_decode_accelerator.cc

Issue 11198060: VDA implementation for Exynos, using V4L2 (Closed) Base URL: https://git.chromium.org/git/chromium/src@git-svn
Patch Set: content:: fixes from piman@. Created 7 years, 11 months ago
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include <dlfcn.h>
6 #include <errno.h>
7 #include <fcntl.h>
8 #include <linux/videodev2.h>
9 #include <poll.h>
10 #include <sys/eventfd.h>
11 #include <sys/ioctl.h>
12 #include <sys/mman.h>
13
14 #include "base/bind.h"
15 #include "base/debug/trace_event.h"
16 #include "base/message_loop.h"
17 #include "base/message_loop_proxy.h"
18 #include "base/posix/eintr_wrapper.h"
19 #include "base/shared_memory.h"
20 #include "content/common/gpu/gl_scoped_binders.h"
21 #include "content/common/gpu/media/exynos_video_decode_accelerator.h"
22 #include "content/common/gpu/media/h264_parser.h"
23 #include "third_party/angle/include/GLES2/gl2.h"
24
25 namespace content {
26
27 #define NOTIFY_ERROR(x) \
28 do { \
29 SetDecoderState(kError); \
30 DLOG(ERROR) << "calling NotifyError(): " << x; \
31 NotifyError(x); \
32 } while (0)
33
34 #define IOCTL_OR_ERROR_RETURN(fd, type, arg) \
35 do { \
36 if (HANDLE_EINTR(ioctl(fd, type, arg)) != 0) { \
37 DPLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \
38 NOTIFY_ERROR(PLATFORM_FAILURE); \
39 return; \
40 } \
41 } while (0)
42
43 #define IOCTL_OR_ERROR_RETURN_FALSE(fd, type, arg) \
44 do { \
45 if (HANDLE_EINTR(ioctl(fd, type, arg)) != 0) { \
46 DPLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \
47 NOTIFY_ERROR(PLATFORM_FAILURE); \
48 return false; \
49 } \
50 } while (0)
51
52 #define POSTSANDBOX_DLSYM(lib, func, type, name) \
53 func = reinterpret_cast<type>(dlsym(lib, name)); \
54 if (func == NULL) { \
55 DPLOG(ERROR) << "PostSandboxInitialization(): failed to dlsym() " \
56 << name << ": " << dlerror(); \
57 return false; \
58 }
59
60 namespace {
61
62 const char kExynosMfcDevice[] = "/dev/mfc-dec";
63 const char kExynosGscDevice[] = "/dev/gsc1";
64 const char kMaliDriver[] = "libmali.so";
65
66 // TODO(sheu): fix OpenGL ES header includes, remove unnecessary redefinitions.
67 // http://crbug.com/169433
68 typedef void* GLeglImageOES;
69 typedef EGLBoolean (*MaliEglImageGetBufferExtPhandleFunc)(EGLImageKHR, EGLint*,
70 void*);
71 typedef EGLImageKHR (*EglCreateImageKhrFunc)(EGLDisplay, EGLContext, EGLenum,
72 EGLClientBuffer, const EGLint*);
73 typedef EGLBoolean (*EglDestroyImageKhrFunc)(EGLDisplay, EGLImageKHR);
74 typedef EGLSyncKHR (*EglCreateSyncKhrFunc)(EGLDisplay, EGLenum, const EGLint*);
75 typedef EGLBoolean (*EglDestroySyncKhrFunc)(EGLDisplay, EGLSyncKHR);
76 typedef EGLint (*EglClientWaitSyncKhrFunc)(EGLDisplay, EGLSyncKHR, EGLint,
77 EGLTimeKHR);
78 typedef void (*GlEglImageTargetTexture2dOesFunc)(GLenum, GLeglImageOES);
79
80 void* libmali_handle = NULL;
81 MaliEglImageGetBufferExtPhandleFunc
82 mali_egl_image_get_buffer_ext_phandle = NULL;
83 EglCreateImageKhrFunc egl_create_image_khr = NULL;
84 EglDestroyImageKhrFunc egl_destroy_image_khr = NULL;
85 EglCreateSyncKhrFunc egl_create_sync_khr = NULL;
86 EglDestroySyncKhrFunc egl_destroy_sync_khr = NULL;
87 EglClientWaitSyncKhrFunc egl_client_wait_sync_khr = NULL;
88 GlEglImageTargetTexture2dOesFunc gl_egl_image_target_texture_2d_oes = NULL;
89
90 } // anonymous namespace
91
92 struct ExynosVideoDecodeAccelerator::BitstreamBufferRef {
93 BitstreamBufferRef(
94 base::WeakPtr<Client>& client,
95 scoped_refptr<base::MessageLoopProxy>& client_message_loop_proxy,
96 base::SharedMemory* shm,
97 size_t size,
98 int32 input_id);
99 ~BitstreamBufferRef();
100 const base::WeakPtr<Client> client;
101 const scoped_refptr<base::MessageLoopProxy> client_message_loop_proxy;
102 const scoped_ptr<base::SharedMemory> shm;
103 const size_t size;
104 off_t bytes_used;
105 const int32 input_id;
106 };
107
108 struct ExynosVideoDecodeAccelerator::PictureBufferArrayRef {
109 PictureBufferArrayRef(EGLDisplay egl_display, size_t count);
110 ~PictureBufferArrayRef();
111
112 struct PictureBufferRef {
113 EGLImageKHR egl_image;
114 int egl_image_fd;
115 int32 client_id;
116 };
117
118 EGLDisplay const egl_display;
119 std::vector<PictureBufferRef> picture_buffers;
120 };
121
122 struct ExynosVideoDecodeAccelerator::EGLSyncKHRRef {
123 EGLSyncKHRRef(EGLDisplay egl_display, EGLSyncKHR egl_sync);
124 ~EGLSyncKHRRef();
125 EGLDisplay const egl_display;
126 EGLSyncKHR egl_sync;
127 };
128
129 ExynosVideoDecodeAccelerator::BitstreamBufferRef::BitstreamBufferRef(
130 base::WeakPtr<Client>& client,
131 scoped_refptr<base::MessageLoopProxy>& client_message_loop_proxy,
132 base::SharedMemory* shm, size_t size, int32 input_id)
133 : client(client),
134 client_message_loop_proxy(client_message_loop_proxy),
135 shm(shm),
136 size(size),
137 bytes_used(0),
138 input_id(input_id) {
139 }
140
141 ExynosVideoDecodeAccelerator::BitstreamBufferRef::~BitstreamBufferRef() {
142 if (input_id >= 0) {
143 client_message_loop_proxy->PostTask(FROM_HERE, base::Bind(
144 &Client::NotifyEndOfBitstreamBuffer, client, input_id));
145 }
146 }
147
148 ExynosVideoDecodeAccelerator::PictureBufferArrayRef::PictureBufferArrayRef(
149 EGLDisplay egl_display, size_t count)
150 : egl_display(egl_display),
151 picture_buffers(count) {
152 for (size_t i = 0; i < picture_buffers.size(); ++i) {
153 PictureBufferRef& buffer = picture_buffers[i];
154 buffer.egl_image = EGL_NO_IMAGE_KHR;
155 buffer.egl_image_fd = -1;
156 buffer.client_id = -1;
157 }
158 }
159
160 ExynosVideoDecodeAccelerator::PictureBufferArrayRef::~PictureBufferArrayRef() {
161 for (size_t i = 0; i < picture_buffers.size(); ++i) {
162 PictureBufferRef& buffer = picture_buffers[i];
163 if (buffer.egl_image != EGL_NO_IMAGE_KHR)
164 egl_destroy_image_khr(egl_display, buffer.egl_image);
165 if (buffer.egl_image_fd != -1)
166 HANDLE_EINTR(close(buffer.egl_image_fd));
167 }
168 }
169
170 ExynosVideoDecodeAccelerator::EGLSyncKHRRef::EGLSyncKHRRef(
171 EGLDisplay egl_display, EGLSyncKHR egl_sync)
172 : egl_display(egl_display),
173 egl_sync(egl_sync) {
174 }
175
176 ExynosVideoDecodeAccelerator::EGLSyncKHRRef::~EGLSyncKHRRef() {
177 if (egl_sync != EGL_NO_SYNC_KHR)
178 egl_destroy_sync_khr(egl_display, egl_sync);
179 }
180
181 ExynosVideoDecodeAccelerator::MfcInputRecord::MfcInputRecord()
182 : at_device(false),
183 address(NULL),
184 length(0),
185 bytes_used(0),
186 input_id(-1) {
187 }
188
189 ExynosVideoDecodeAccelerator::MfcInputRecord::~MfcInputRecord() {
190 }
191
192 ExynosVideoDecodeAccelerator::MfcOutputRecord::MfcOutputRecord()
193 : at_device(false),
194 input_id(-1) {
195 bytes_used[0] = 0;
196 bytes_used[1] = 0;
197 address[0] = NULL;
198 address[1] = NULL;
199 length[0] = 0;
200 length[1] = 0;
piman 2013/01/15 01:36:51 nit: indentation was right before...
sheu 2013/01/15 05:05:43 The more things I touch, the more stuff breaks...
201 }
202
203 ExynosVideoDecodeAccelerator::MfcOutputRecord::~MfcOutputRecord() {
204 }
205
206 ExynosVideoDecodeAccelerator::GscInputRecord::GscInputRecord()
207 : at_device(false),
208 mfc_output(-1) {
209 }
210
211 ExynosVideoDecodeAccelerator::GscInputRecord::~GscInputRecord() {
212 }
213
214 ExynosVideoDecodeAccelerator::GscOutputRecord::GscOutputRecord()
215 : at_device(false),
216 at_client(false),
217 fd(-1),
218 egl_image(EGL_NO_IMAGE_KHR),
219 egl_sync(EGL_NO_SYNC_KHR),
220 picture_id(-1) {
221 }
222
223 ExynosVideoDecodeAccelerator::GscOutputRecord::~GscOutputRecord() {
224 }
225
226 ExynosVideoDecodeAccelerator::ExynosVideoDecodeAccelerator(
227 EGLDisplay egl_display,
228 EGLContext egl_context,
229 Client* client,
230 const base::Callback<bool(void)>& make_context_current)
231 : child_message_loop_proxy_(base::MessageLoopProxy::current()),
232 weak_this_(base::AsWeakPtr(this)),
233 client_ptr_factory_(client),
234 client_(client_ptr_factory_.GetWeakPtr()),
235 decoder_thread_("ExynosDecoderThread"),
236 decoder_state_(kUninitialized),
237 decoder_current_bitstream_buffer_(NULL),
238 decoder_delay_bitstream_buffer_id_(-1),
239 decoder_current_input_buffer_(-1),
240 decoder_decode_buffer_tasks_scheduled_(0),
241 decoder_frames_at_client_(0),
242 decoder_flushing_(false),
243 mfc_fd_(-1),
244 mfc_input_streamon_(false),
245 mfc_input_buffer_count_(0),
246 mfc_input_buffer_queued_count_(0),
247 mfc_output_streamon_(false),
248 mfc_output_buffer_count_(0),
249 mfc_output_buffer_queued_count_(0),
250 mfc_output_buffer_pixelformat_(0),
251 gsc_fd_(-1),
252 gsc_input_streamon_(false),
253 gsc_input_buffer_count_(0),
254 gsc_input_buffer_queued_count_(0),
255 gsc_output_streamon_(false),
256 gsc_output_buffer_count_(0),
257 gsc_output_buffer_queued_count_(0),
258 device_poll_thread_("ExynosDevicePollThread"),
259 device_poll_interrupt_fd_(-1),
260 make_context_current_(make_context_current),
261 egl_display_(egl_display),
262 egl_context_(egl_context),
263 video_profile_(media::VIDEO_CODEC_PROFILE_UNKNOWN) {
264 }
265
266 ExynosVideoDecodeAccelerator::~ExynosVideoDecodeAccelerator() {
267 DCHECK(!decoder_thread_.IsRunning());
268 DCHECK(!device_poll_thread_.IsRunning());
269 // Nuke the entire site from orbit -- it's the only way to be sure.
270 if (device_poll_interrupt_fd_ != -1) {
271 HANDLE_EINTR(close(device_poll_interrupt_fd_));
272 device_poll_interrupt_fd_ = -1;
273 }
274 if (gsc_fd_ != -1) {
275 DestroyGscInputBuffers();
276 DestroyGscOutputBuffers();
277 HANDLE_EINTR(close(gsc_fd_));
278 gsc_fd_ = -1;
279 }
280 if (mfc_fd_ != -1) {
281 DestroyMfcInputBuffers();
282 DestroyMfcOutputBuffers();
283 HANDLE_EINTR(close(mfc_fd_));
284 mfc_fd_ = -1;
285 }
286
287 // These maps have members that should be manually destroyed, e.g. file
288 // descriptors, mmap() segments, etc.
289 DCHECK(mfc_input_buffer_map_.empty());
290 DCHECK(mfc_output_buffer_map_.empty());
291 DCHECK(gsc_input_buffer_map_.empty());
292 DCHECK(gsc_output_buffer_map_.empty());
293 }
294
295 bool ExynosVideoDecodeAccelerator::Initialize(
296 media::VideoCodecProfile profile) {
297 DVLOG(3) << "Initialize()";
298 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
299 DCHECK_EQ(decoder_state_, kUninitialized);
300
301 switch (profile) {
302 case media::H264PROFILE_BASELINE:
303 DVLOG(2) << "Initialize(): profile H264PROFILE_BASELINE";
304 break;
305 case media::H264PROFILE_MAIN:
306 DVLOG(2) << "Initialize(): profile H264PROFILE_MAIN";
307 break;
308 case media::H264PROFILE_HIGH:
309 DVLOG(2) << "Initialize(): profile H264PROFILE_HIGH";
310 break;
311 case media::VP8PROFILE_MAIN:
312 DVLOG(2) << "Initialize(): profile VP8PROFILE_MAIN";
313 break;
314 default:
315 DLOG(ERROR) << "Initialize(): unsupported profile=" << profile;
316 return false;
317 };
318 video_profile_ = profile;
319
320 static bool sandbox_initialized = PostSandboxInitialization();
321 if (!sandbox_initialized) {
322 DLOG(ERROR) << "Initialize(): PostSandboxInitialization() failed";
323 NOTIFY_ERROR(PLATFORM_FAILURE);
324 return false;
325 }
326
327 if (egl_display_ == EGL_NO_DISPLAY) {
328 DLOG(ERROR) << "Initialize(): could not get EGLDisplay";
329 NOTIFY_ERROR(PLATFORM_FAILURE);
330 return false;
331 }
332
333 if (egl_context_ == EGL_NO_CONTEXT) {
334 DLOG(ERROR) << "Initialize(): could not get EGLContext";
335 NOTIFY_ERROR(PLATFORM_FAILURE);
336 return false;
337 }
338
339 // Open the video devices.
340 DVLOG(2) << "Initialize(): opening MFC device: " << kExynosMfcDevice;
341 mfc_fd_ = HANDLE_EINTR(open(kExynosMfcDevice,
342 O_RDWR | O_NONBLOCK | O_CLOEXEC));
343 if (mfc_fd_ == -1) {
344 DPLOG(ERROR) << "Initialize(): could not open MFC device: "
345 << kExynosMfcDevice;
346 NOTIFY_ERROR(PLATFORM_FAILURE);
347 return false;
348 }
349 DVLOG(2) << "Initialize(): opening GSC device: " << kExynosGscDevice;
350 gsc_fd_ = HANDLE_EINTR(open(kExynosGscDevice,
351 O_RDWR | O_NONBLOCK | O_CLOEXEC));
352 if (gsc_fd_ == -1) {
353 DPLOG(ERROR) << "Initialize(): could not open GSC device: "
354 << kExynosGscDevice;
355 NOTIFY_ERROR(PLATFORM_FAILURE);
356 return false;
357 }
358
359 // Create the interrupt fd.
360 DCHECK_EQ(device_poll_interrupt_fd_, -1);
361 device_poll_interrupt_fd_ = eventfd(0, EFD_NONBLOCK | EFD_CLOEXEC);
362 if (device_poll_interrupt_fd_ == -1) {
363 DPLOG(ERROR) << "Initialize(): eventfd() failed";
364 NOTIFY_ERROR(PLATFORM_FAILURE);
365 return false;
366 }
367
368 // Capabilities check.
369 struct v4l2_capability caps;
370 const __u32 kCapsRequired =
371 V4L2_CAP_VIDEO_CAPTURE_MPLANE |
372 V4L2_CAP_VIDEO_OUTPUT_MPLANE |
373 V4L2_CAP_STREAMING;
374 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_QUERYCAP, &caps);
375 if ((caps.capabilities & kCapsRequired) != kCapsRequired) {
376 DLOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP"
377 ", caps check failed: 0x" << std::hex << caps.capabilities;
378 NOTIFY_ERROR(PLATFORM_FAILURE);
379 return false;
380 }
381 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_QUERYCAP, &caps);
382 if ((caps.capabilities & kCapsRequired) != kCapsRequired) {
383 DLOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP"
384 ", caps check failed: 0x" << std::hex << caps.capabilities;
385 NOTIFY_ERROR(PLATFORM_FAILURE);
386 return false;
387 }
388
389 // Some random ioctls that Exynos requires.
390 struct v4l2_control control;
391 memset(&control, 0, sizeof(control));
392 control.id = V4L2_CID_MPEG_MFC51_VIDEO_DECODER_H264_DISPLAY_DELAY; // also VP8
393 control.value = 8; // Magic number from Samsung folks.
394 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_S_CTRL, &control);
395
396 if (!make_context_current_.Run()) {
397 DLOG(ERROR) << "Initialize(): could not make context current";
398 NOTIFY_ERROR(PLATFORM_FAILURE);
399 return false;
400 }
401
402 if (!CreateMfcInputBuffers())
403 return false;
404
405 // MFC output format has to be set up before streaming starts.
406 struct v4l2_format format;
407 memset(&format, 0, sizeof(format));
408 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
409 format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_NV12MT_16X16;
410 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_S_FMT, &format);
411
412 // Initialize format-specific bits.
413 if (video_profile_ >= media::H264PROFILE_MIN &&
414 video_profile_ <= media::H264PROFILE_MAX) {
415 decoder_h264_parser_.reset(new content::H264Parser());
416 }
417
418 if (!decoder_thread_.Start()) {
419 DLOG(ERROR) << "Initialize(): decoder thread failed to start";
420 NOTIFY_ERROR(PLATFORM_FAILURE);
421 return false;
422 }
423
424 SetDecoderState(kInitialized);
425
426 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
427 &Client::NotifyInitializeDone, client_));
428 return true;
429 }
430
431 void ExynosVideoDecodeAccelerator::Decode(
432 const media::BitstreamBuffer& bitstream_buffer) {
433 DVLOG(1) << "Decode(): input_id=" << bitstream_buffer.id()
434 << ", size=" << bitstream_buffer.size();
435 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
436
437 scoped_ptr<BitstreamBufferRef> bitstream_record(new BitstreamBufferRef(
438 client_, child_message_loop_proxy_,
439 new base::SharedMemory(bitstream_buffer.handle(), true),
440 bitstream_buffer.size(), bitstream_buffer.id()));
441 if (!bitstream_record->shm->Map(bitstream_buffer.size())) {
442 DLOG(ERROR) << "Decode(): could not map bitstream_buffer";
443 NOTIFY_ERROR(UNREADABLE_INPUT);
444 return;
445 }
446 DVLOG(3) << "Decode(): mapped to addr=" << bitstream_record->shm->memory();
447
448 // DecodeTask() will take care of running a DecodeBufferTask().
449 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
450 &ExynosVideoDecodeAccelerator::DecodeTask, base::Unretained(this),
451 base::Passed(&bitstream_record)));
452 }
453
454 void ExynosVideoDecodeAccelerator::AssignPictureBuffers(
455 const std::vector<media::PictureBuffer>& buffers) {
456 DVLOG(3) << "AssignPictureBuffers(): buffer_count=" << buffers.size();
457 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
458
459 if (static_cast<int>(buffers.size()) != gsc_output_buffer_count_) {
460 DLOG(ERROR) << "AssignPictureBuffers(): invalid buffer_count";
461 NOTIFY_ERROR(INVALID_ARGUMENT);
462 return;
463 }
464
465 if (!make_context_current_.Run()) {
466 DLOG(ERROR) << "AssignPictureBuffers(): could not make context current";
467 NOTIFY_ERROR(PLATFORM_FAILURE);
468 return;
469 }
470
471 scoped_ptr<PictureBufferArrayRef> pic_buffers_ref(
472 new PictureBufferArrayRef(egl_display_, buffers.size()));
473
474 const static EGLint kImageAttrs[] = {
475 EGL_IMAGE_PRESERVED_KHR, 0,
476 EGL_NONE,
477 };
478 Display* x_display = base::MessagePumpForUI::GetDefaultXDisplay();
479 ScopedTextureBinder bind_restore(0);
480 for (size_t i = 0; i < pic_buffers_ref->picture_buffers.size(); ++i) {
481 PictureBufferArrayRef::PictureBufferRef& buffer =
482 pic_buffers_ref->picture_buffers[i];
483 // Create the X pixmap and then create an EGLImageKHR from it, so we can
484 // get dma_buf backing.
485 Pixmap pixmap = XCreatePixmap(x_display, RootWindow(x_display, 0),
486 buffers[i].size().width(), buffers[i].size().height(), 32);
487 if (!pixmap) {
488 DLOG(ERROR) << "AssignPictureBuffers(): could not create X pixmap";
489 NOTIFY_ERROR(PLATFORM_FAILURE);
490 return;
491 }
492 glBindTexture(GL_TEXTURE_2D, buffers[i].texture_id());
493 EGLImageKHR egl_image = egl_create_image_khr(
494 egl_display_, EGL_NO_CONTEXT, EGL_NATIVE_PIXMAP_KHR,
495 (EGLClientBuffer)pixmap, kImageAttrs);
496 // We can free the X pixmap immediately -- according to the
497 // EGL_KHR_image_base spec, the backing storage does not go away until the
498 // last referencing EGLImage is destroyed.
499 XFreePixmap(x_display, pixmap);
500 if (egl_image == EGL_NO_IMAGE_KHR) {
501 DLOG(ERROR) << "AssignPictureBuffers(): could not create EGLImageKHR";
502 NOTIFY_ERROR(PLATFORM_FAILURE);
503 return;
504 }
505 buffer.egl_image = egl_image;
506 int fd;
507 if (!mali_egl_image_get_buffer_ext_phandle(buffer.egl_image, NULL, &fd)) {
508 DLOG(ERROR) << "AssignPictureBuffers(): "
509 << "could not get EGLImageKHR dmabuf fd";
510 NOTIFY_ERROR(PLATFORM_FAILURE);
511 return;
512 }
513 buffer.egl_image_fd = fd;
514 gl_egl_image_target_texture_2d_oes(GL_TEXTURE_2D, egl_image);
515 buffer.client_id = buffers[i].id();
516 }
517 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
518 &ExynosVideoDecodeAccelerator::AssignPictureBuffersTask,
519 base::Unretained(this), base::Passed(&pic_buffers_ref)));
520 }
521
522 void ExynosVideoDecodeAccelerator::ReusePictureBuffer(int32 picture_buffer_id) {
523 DVLOG(3) << "ReusePictureBuffer(): picture_buffer_id=" << picture_buffer_id;
524 // Must be run on child thread, as we'll insert a sync in the EGL context.
525 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
526
527 if (!make_context_current_.Run()) {
528 DLOG(ERROR) << "ReusePictureBuffer(): could not make context current";
529 NOTIFY_ERROR(PLATFORM_FAILURE);
530 return;
531 }
532
533 EGLSyncKHR egl_sync =
534 egl_create_sync_khr(egl_display_, EGL_SYNC_FENCE_KHR, NULL);
535 if (egl_sync == EGL_NO_SYNC_KHR) {
536 DLOG(ERROR) << "ReusePictureBuffer(): eglCreateSyncKHR() failed";
537 NOTIFY_ERROR(PLATFORM_FAILURE);
538 return;
539 }
540
541 scoped_ptr<EGLSyncKHRRef> egl_sync_ref(new EGLSyncKHRRef(
542 egl_display_, egl_sync));
543 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
544 &ExynosVideoDecodeAccelerator::ReusePictureBufferTask,
545 base::Unretained(this), picture_buffer_id, base::Passed(&egl_sync_ref)));
546 }
547
548 void ExynosVideoDecodeAccelerator::Flush() {
549 DVLOG(3) << "Flush()";
550 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
551 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
552 &ExynosVideoDecodeAccelerator::FlushTask, base::Unretained(this)));
553 }
554
555 void ExynosVideoDecodeAccelerator::Reset() {
556 DVLOG(3) << "Reset()";
557 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
558 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
559 &ExynosVideoDecodeAccelerator::ResetTask, base::Unretained(this)));
560 }
561
562 void ExynosVideoDecodeAccelerator::Destroy() {
563 DVLOG(3) << "Destroy()";
564 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
565
566 // We're destroying; cancel all callbacks.
567 client_ptr_factory_.InvalidateWeakPtrs();
568
569 // If the decoder thread is running, destroy using posted task.
570 if (decoder_thread_.IsRunning()) {
571 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
572 &ExynosVideoDecodeAccelerator::DestroyTask, base::Unretained(this)));
573 // DestroyTask() will cause the decoder_thread_ to flush all tasks.
574 decoder_thread_.Stop();
575 } else {
576 // Otherwise, call the destroy task directly.
577 DestroyTask();
578 }
579
580 // Set to kError state just in case.
581 SetDecoderState(kError);
582
583 delete this;
584 }
585
586 // static
587 void ExynosVideoDecodeAccelerator::PreSandboxInitialization() {
588 DVLOG(3) << "PreSandboxInitialization()";
589 dlerror();
590
591 libmali_handle = dlopen(kMaliDriver, RTLD_LAZY | RTLD_LOCAL);
592 if (libmali_handle == NULL) {
593 DPLOG(ERROR) << "failed to dlopen() " << kMaliDriver << ": " << dlerror();
594 }
595 }
596
597 // static
598 bool ExynosVideoDecodeAccelerator::PostSandboxInitialization() {
599 DVLOG(3) << "PostSandboxInitialization()";
600 if (libmali_handle == NULL) {
601 DLOG(ERROR) << "PostSandboxInitialization(): no " << kMaliDriver
602 << " driver handle";
603 return false;
604 }
605
606 dlerror();
607
608 POSTSANDBOX_DLSYM(libmali_handle,
609 mali_egl_image_get_buffer_ext_phandle,
610 MaliEglImageGetBufferExtPhandleFunc,
611 "mali_egl_image_get_buffer_ext_phandle");
612
613 POSTSANDBOX_DLSYM(libmali_handle,
614 egl_create_image_khr,
615 EglCreateImageKhrFunc,
616 "eglCreateImageKHR");
617
618 POSTSANDBOX_DLSYM(libmali_handle,
619 egl_destroy_image_khr,
620 EglDestroyImageKhrFunc,
621 "eglDestroyImageKHR");
622
623 POSTSANDBOX_DLSYM(libmali_handle,
624 egl_create_sync_khr,
625 EglCreateSyncKhrFunc,
626 "eglCreateSyncKHR");
627
628 POSTSANDBOX_DLSYM(libmali_handle,
629 egl_destroy_sync_khr,
630 EglDestroySyncKhrFunc,
631 "eglDestroySyncKHR");
632
633 POSTSANDBOX_DLSYM(libmali_handle,
634 egl_client_wait_sync_khr,
635 EglClientWaitSyncKhrFunc,
636 "eglClientWaitSyncKHR");
637
638 POSTSANDBOX_DLSYM(libmali_handle,
639 gl_egl_image_target_texture_2d_oes,
640 GlEglImageTargetTexture2dOesFunc,
641 "glEGLImageTargetTexture2DOES");
642
643 return true;
644 }
645
646 void ExynosVideoDecodeAccelerator::DecodeTask(
647 scoped_ptr<BitstreamBufferRef> bitstream_record) {
648 DVLOG(3) << "DecodeTask(): input_id=" << bitstream_record->input_id;
649 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
650 DCHECK_NE(decoder_state_, kUninitialized);
651 TRACE_EVENT1("Video Decoder", "EVDA::DecodeTask", "input_id",
652 bitstream_record->input_id);
653
654 if (decoder_state_ == kResetting || decoder_flushing_) {
655 // In the case that we're resetting or flushing, we need to delay decoding
656 // the BitstreamBuffers that come after the Reset() or Flush() call. When
657 // we're here, we know that this DecodeTask() was scheduled by a Decode()
658 // call that came after (in the client thread) the Reset() or Flush() call;
659 // thus set up the delay if necessary.
660 if (decoder_delay_bitstream_buffer_id_ == -1)
661 decoder_delay_bitstream_buffer_id_ = bitstream_record->input_id;
662 } else if (decoder_state_ == kError) {
663 DVLOG(2) << "DecodeTask(): early out: kError state";
664 return;
665 }
666
667 decoder_input_queue_.push_back(
668 linked_ptr<BitstreamBufferRef>(bitstream_record.release()));
669 decoder_decode_buffer_tasks_scheduled_++;
670 DecodeBufferTask();
671 }
672
673 void ExynosVideoDecodeAccelerator::DecodeBufferTask() {
674 DVLOG(3) << "DecodeBufferTask()";
675 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
676 DCHECK_NE(decoder_state_, kUninitialized);
677 TRACE_EVENT0("Video Decoder", "EVDA::DecodeBufferTask");
678
679 decoder_decode_buffer_tasks_scheduled_--;
680
681 if (decoder_state_ == kResetting) {
682 DVLOG(2) << "DecodeBufferTask(): early out: kResetting state";
683 return;
684 } else if (decoder_state_ == kError) {
685 DVLOG(2) << "DecodeBufferTask(): early out: kError state";
686 return;
687 }
688
689 if (decoder_current_bitstream_buffer_ == NULL) {
690 if (decoder_input_queue_.empty()) {
691 // We're waiting for a new buffer -- exit without scheduling a new task.
692 return;
693 }
694 linked_ptr<BitstreamBufferRef>& buffer_ref = decoder_input_queue_.front();
695 if (decoder_delay_bitstream_buffer_id_ == buffer_ref->input_id) {
696 // We're asked to delay decoding on this and subsequent buffers.
697 return;
698 }
699
700 // Set up to use the next buffer.
701 decoder_current_bitstream_buffer_.reset(buffer_ref.release());
702 decoder_input_queue_.pop_front();
703 DVLOG(3) << "DecodeBufferTask(): reading input_id="
704 << decoder_current_bitstream_buffer_->input_id
705 << ", addr=" << decoder_current_bitstream_buffer_->shm->memory()
706 << ", size=" << decoder_current_bitstream_buffer_->size;
707 }
708 bool schedule_task = false;
709 const size_t size = decoder_current_bitstream_buffer_->size;
710 size_t decoded_size = 0;
711 if (size == 0) {
712 const int32 input_id = decoder_current_bitstream_buffer_->input_id;
713 if (input_id >= 0) {
714 // This is a buffer queued from the client that has zero size. Skip.
715 schedule_task = true;
716 } else {
717 // This is a buffer of zero size, queued to flush the pipe. Flush.
718 DCHECK_EQ(decoder_current_bitstream_buffer_->shm.get(),
719 static_cast<base::SharedMemory*>(NULL));
720 // Enqueue a buffer guaranteed to be empty. To do that, we flush the
721 // current input, enqueue no data to the next frame, then flush that down.
722 schedule_task = true;
723 if (decoder_current_input_buffer_ != -1 &&
724 mfc_input_buffer_map_[decoder_current_input_buffer_].input_id !=
725 kFlushBufferId)
726 schedule_task = FlushInputFrame();
727
728 if (schedule_task && AppendToInputFrame(NULL, 0) && FlushInputFrame()) {
729 DVLOG(2) << "DecodeBufferTask(): enqueued flush buffer";
730 schedule_task = true;
piman 2013/01/15 01:36:51 nit: you can skip this since it's already true.
sheu 2013/01/15 05:05:43 I figure the compiler would take care of it, and I
731 } else {
732 // If we failed to enqueue the empty buffer (due to pipeline
733 // backpressure), don't advance the bitstream buffer queue, and don't
734 // schedule the next task. This bitstream buffer queue entry will get
735 // reprocessed when the pipeline frees up.
736 schedule_task = false;
737 }
738 }
739 } else {
740 // This is a buffer queued from the client, with actual contents. Decode.
741 const uint8* const data =
742 reinterpret_cast<const uint8*>(
743 decoder_current_bitstream_buffer_->shm->memory()) +
744 decoder_current_bitstream_buffer_->bytes_used;
745 const size_t data_size =
746 decoder_current_bitstream_buffer_->size -
747 decoder_current_bitstream_buffer_->bytes_used;
748 if (!FindFrameFragment(data, data_size, &decoded_size)) {
749 NOTIFY_ERROR(UNREADABLE_INPUT);
750 return;
751 }
752
753 switch (decoder_state_) {
754 case kInitialized:
755 case kAfterReset:
756 schedule_task = DecodeBufferInitial(data, decoded_size, &decoded_size);
757 break;
758 case kDecoding:
759 schedule_task = DecodeBufferContinue(data, decoded_size);
760 break;
761 default:
762 NOTIFY_ERROR(ILLEGAL_STATE);
763 return;
764 }
765 }
766 if (decoder_state_ == kError) {
767 // Failed during decode.
768 return;
769 }
770
771 if (schedule_task) {
772 decoder_current_bitstream_buffer_->bytes_used += decoded_size;
773 if (decoder_current_bitstream_buffer_->bytes_used ==
774 decoder_current_bitstream_buffer_->size) {
775 // Our current bitstream buffer is done; return it.
776 int32 input_id = decoder_current_bitstream_buffer_->input_id;
777 DVLOG(3) << "DecodeBufferTask(): finished input_id=" << input_id;
778 // BitstreamBufferRef destructor calls NotifyEndOfBitstreamBuffer().
779 decoder_current_bitstream_buffer_.reset();
780 }
781 ScheduleDecodeBufferTaskIfNeeded();
782 }
783 }
784
785 bool ExynosVideoDecodeAccelerator::FindFrameFragment(
786 const uint8* data,
787 size_t size,
788 size_t* endpos) {
789 if (video_profile_ >= media::H264PROFILE_MIN &&
790 video_profile_ <= media::H264PROFILE_MAX) {
791 // For H264, we need to feed HW one frame at a time. This is going to take
792 // some parsing of our input stream.
793 decoder_h264_parser_->SetStream(data, size);
794 content::H264NALU nalu;
795 content::H264Parser::Result result;
796
797 // Find the first NAL.
798 result = decoder_h264_parser_->AdvanceToNextNALU(&nalu);
799 if (result == content::H264Parser::kInvalidStream ||
800 result == content::H264Parser::kUnsupportedStream)
801 return false;
802 *endpos = (nalu.data + nalu.size) - data;
803 if (result == content::H264Parser::kEOStream)
804 return true;
805
806 // Keep on peeking the next NALs while they don't indicate a frame
807 // boundary.
808 for (;;) {
809 result = decoder_h264_parser_->AdvanceToNextNALU(&nalu);
810 if (result == content::H264Parser::kInvalidStream ||
811 result == content::H264Parser::kUnsupportedStream)
812 return false;
813 if (result == content::H264Parser::kEOStream)
814 return true;
815 switch (nalu.nal_unit_type) {
816 case content::H264NALU::kNonIDRSlice:
817 case content::H264NALU::kIDRSlice:
818 // For these two, if the "first_mb_in_slice" field is zero, start a
819 // new frame and return. This field is Exp-Golomb coded starting on
820 // the eighth data bit of the NAL; a zero value is encoded with a
821 // leading '1' bit in the byte, which we can detect as the byte being
822 // (unsigned) greater than or equal to 0x80.
823 if (nalu.data[1] >= 0x80)
824 return true;
825 break;
826 case content::H264NALU::kSPS:
827 case content::H264NALU::kPPS:
828 case content::H264NALU::kEOSeq:
829 case content::H264NALU::kEOStream:
830 // These unconditionally signal a frame boundary.
831 return true;
832 default:
833 // For all others, keep going.
834 break;
835 }
836 *endpos = (nalu.data + nalu.size) - reinterpret_cast<const uint8*>(data);
837 }
838 NOTREACHED();
839 return false;
840 } else {
841 DCHECK_GE(video_profile_, media::VP8PROFILE_MIN);
842 DCHECK_LE(video_profile_, media::VP8PROFILE_MAX);
843 // For VP8, we can just dump the entire buffer. No fragmentation needed.
844 *endpos = size;
845 return true;
846 }
847 }
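Illustrative sketch (not part of this patch): the slice-NAL check above relies on how Exp-Golomb ue(v) coding works. The values 0, 1, 2 are coded as the bit strings '1', '010', '011', so only first_mb_in_slice == 0 starts with a '1' bit; since that field begins at the first bit after the one-byte NAL header, a first RBSP byte >= 0x80 means the slice starts a new frame. Assuming this file's uint8 typedef, the heuristic could be factored out as:

static bool SliceStartsNewFrame(const uint8* nalu_data, size_t nalu_size) {
  // Exp-Golomb ue(v): the value 0 is the single bit '1', so a set MSB in the
  // first byte after the NAL header implies first_mb_in_slice == 0.
  return nalu_size >= 2 && nalu_data[1] >= 0x80;
}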
848
849 void ExynosVideoDecodeAccelerator::ScheduleDecodeBufferTaskIfNeeded() {
850 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
851
852 // If we're behind on tasks, schedule another one.
853 int buffers_to_decode = decoder_input_queue_.size();
854 if (decoder_current_bitstream_buffer_ != NULL)
855 buffers_to_decode++;
856 if (decoder_decode_buffer_tasks_scheduled_ < buffers_to_decode) {
857 decoder_decode_buffer_tasks_scheduled_++;
858 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
859 &ExynosVideoDecodeAccelerator::DecodeBufferTask,
860 base::Unretained(this)));
861 }
862 }
863
864 bool ExynosVideoDecodeAccelerator::DecodeBufferInitial(
865 const void* data, size_t size, size_t* endpos) {
866 DVLOG(3) << "DecodeBufferInitial(): data=" << data << ", size=" << size;
867 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
868 DCHECK_NE(decoder_state_, kUninitialized);
869 DCHECK_NE(decoder_state_, kDecoding);
870 DCHECK(!device_poll_thread_.IsRunning());
871 // Initial decode. We haven't been able to get output stream format info yet.
872 // Get it, and start decoding.
873
874 // Copy in and send to HW.
875 if (!AppendToInputFrame(data, size) || !FlushInputFrame())
876 return false;
877
878 // Recycle buffers.
879 DequeueMfc();
880
881 // Check and see if we have format info yet.
882 struct v4l2_format format;
883 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
884 if (ioctl(mfc_fd_, VIDIOC_G_FMT, &format) != 0) {
885 if (errno == EINVAL) {
886 // We will get EINVAL if we haven't seen sufficient stream to decode the
887 // format. Return true and schedule the next buffer.
888 *endpos = size;
889 return true;
890 } else {
891 DPLOG(ERROR) << "DecodeBufferInitial(): ioctl() failed: VIDIOC_G_FMT";
892 NOTIFY_ERROR(PLATFORM_FAILURE);
893 return false;
894 }
895 }
896
897 // Run this initialization only on first startup.
898 if (decoder_state_ == kInitialized) {
899 DVLOG(3) << "DecodeBufferInitial(): running one-time initialization";
900 // Success! Set up our parameters.
901 CHECK_EQ(format.fmt.pix_mp.num_planes, 2);
902 frame_buffer_size_.SetSize(
903 format.fmt.pix_mp.width, format.fmt.pix_mp.height);
904 mfc_output_buffer_size_[0] = format.fmt.pix_mp.plane_fmt[0].sizeimage;
905 mfc_output_buffer_size_[1] = format.fmt.pix_mp.plane_fmt[1].sizeimage;
906 mfc_output_buffer_pixelformat_ = format.fmt.pix_mp.pixelformat;
907 DCHECK_EQ(mfc_output_buffer_pixelformat_, V4L2_PIX_FMT_NV12MT_16X16);
908
909 // Create our other buffers.
910 if (!CreateMfcOutputBuffers() || !CreateGscInputBuffers() ||
911 !CreateGscOutputBuffers())
912 return false;
913
914 // MFC expects to process the initial buffer once during stream init to
915 // configure stream parameters, but will not consume the stream data on that
916 // iteration. Subsequent iterations (including after reset) do not require
917 // the stream init step.
918 *endpos = 0;
919 } else {
920 *endpos = size;
921 }
922
923 // StartDevicePoll will raise the error if there is one.
924 if (!StartDevicePoll())
925 return false;
926
927 decoder_state_ = kDecoding;
928 ScheduleDecodeBufferTaskIfNeeded();
929 return true;
930 }
931
932 bool ExynosVideoDecodeAccelerator::DecodeBufferContinue(
933 const void* data, size_t size) {
934 DVLOG(3) << "DecodeBufferContinue(): data=" << data << ", size=" << size;
935 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
936 DCHECK_EQ(decoder_state_, kDecoding);
937
938 // Both of these calls will set kError state if they fail.
939 return (AppendToInputFrame(data, size) && FlushInputFrame());
940 }
941
942 bool ExynosVideoDecodeAccelerator::AppendToInputFrame(
943 const void* data, size_t size) {
944 DVLOG(3) << "AppendToInputFrame()";
945 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
946 DCHECK_NE(decoder_state_, kUninitialized);
947 DCHECK_NE(decoder_state_, kResetting);
948 DCHECK_NE(decoder_state_, kError);
949 // This routine can handle data == NULL and size == 0, which occurs when
950 // we queue an empty buffer for the purposes of flushing the pipe.
951
952 // Flush if we're too big
953 if (decoder_current_input_buffer_ != -1) {
954 MfcInputRecord& input_record =
955 mfc_input_buffer_map_[decoder_current_input_buffer_];
956 if (input_record.bytes_used + size > input_record.length) {
957 if (!FlushInputFrame())
958 return false;
959 decoder_current_input_buffer_ = -1;
960 }
961 }
962
963 // Try to get an available input buffer
964 if (decoder_current_input_buffer_ == -1) {
965 if (mfc_free_input_buffers_.empty()) {
966 // See if we can get more free buffers from HW
967 DequeueMfc();
968 if (mfc_free_input_buffers_.empty()) {
969 // Nope!
970 DVLOG(2) << "AppendToInputFrame(): stalled for input buffers";
971 return false;
972 }
973 }
974 decoder_current_input_buffer_ = mfc_free_input_buffers_.back();
975 mfc_free_input_buffers_.pop_back();
976 MfcInputRecord& input_record =
977 mfc_input_buffer_map_[decoder_current_input_buffer_];
978 DCHECK_EQ(input_record.bytes_used, 0);
979 DCHECK_EQ(input_record.input_id, -1);
980 DCHECK(decoder_current_bitstream_buffer_ != NULL);
981 input_record.input_id = decoder_current_bitstream_buffer_->input_id;
982 }
983
984 DCHECK_EQ(data == NULL, size == 0);
985 if (size == 0) {
986 // If we asked for an empty buffer, return now. We return only after
987 // getting the next input buffer, since we might actually want an empty
988 // input buffer for flushing purposes.
989 return true;
990 }
991
992 // Copy in to the buffer.
993 MfcInputRecord& input_record =
994 mfc_input_buffer_map_[decoder_current_input_buffer_];
995 if (size > input_record.length - input_record.bytes_used) {
996 LOG(ERROR) << "AppendToInputFrame(): over-size frame, erroring";
997 NOTIFY_ERROR(UNREADABLE_INPUT);
998 return false;
999 }
1000 memcpy(
1001 reinterpret_cast<uint8*>(input_record.address) + input_record.bytes_used,
1002 data,
1003 size);
1004 input_record.bytes_used += size;
1005
1006 return true;
1007 }
1008
1009 bool ExynosVideoDecodeAccelerator::FlushInputFrame() {
1010 DVLOG(3) << "FlushInputFrame()";
1011 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1012 DCHECK_NE(decoder_state_, kUninitialized);
1013 DCHECK_NE(decoder_state_, kResetting);
1014 DCHECK_NE(decoder_state_, kError);
1015
1016 if (decoder_current_input_buffer_ == -1)
1017 return true;
1018
1019 MfcInputRecord& input_record =
1020 mfc_input_buffer_map_[decoder_current_input_buffer_];
1021 DCHECK_NE(input_record.input_id, -1);
1022 DCHECK_EQ(input_record.input_id == kFlushBufferId,
1023 input_record.bytes_used == 0);
1024 // * if input_id >= 0, this input buffer was prompted by a bitstream buffer we
1025 // got from the client. We can skip it if it is empty.
1026 // * if input_id < 0 (should be kFlushBufferId in this case), this input
1027 // buffer was prompted by a flush buffer, and should be queued even when
1028 // empty.
1029 if (input_record.input_id >= 0 && input_record.bytes_used == 0) {
1030 input_record.input_id = -1;
1031 mfc_free_input_buffers_.push_back(decoder_current_input_buffer_);
1032 decoder_current_input_buffer_ = -1;
1033 return true;
1034 }
1035
1036 // Queue it to MFC.
1037 mfc_input_ready_queue_.push_back(decoder_current_input_buffer_);
1038 decoder_current_input_buffer_ = -1;
1039 DVLOG(3) << "FlushInputFrame(): submitting input_id="
1040 << input_record.input_id;
1041 // Kick the MFC once since there's new available input for it.
1042 EnqueueMfc();
1043
1044 return (decoder_state_ != kError);
1045 }
1046
1047 void ExynosVideoDecodeAccelerator::AssignPictureBuffersTask(
1048 scoped_ptr<PictureBufferArrayRef> pic_buffers) {
1049 DVLOG(3) << "AssignPictureBuffersTask()";
1050 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1051 DCHECK_NE(decoder_state_, kUninitialized);
1052 TRACE_EVENT0("Video Decoder", "EVDA::AssignPictureBuffersTask");
1053
1054 // We run AssignPictureBuffersTask even if we're in kResetting.
1055 if (decoder_state_ == kError) {
1056 DVLOG(2) << "AssignPictureBuffersTask(): early out: kError state";
1057 return;
1058 }
1059
1060 DCHECK_EQ(pic_buffers->picture_buffers.size(), gsc_output_buffer_map_.size());
1061 for (size_t i = 0; i < gsc_output_buffer_map_.size(); ++i) {
1062 // We should be blank right now.
1063 GscOutputRecord& output_record = gsc_output_buffer_map_[i];
1064 DCHECK_EQ(output_record.fd, -1);
1065 DCHECK_EQ(output_record.egl_image, EGL_NO_IMAGE_KHR);
1066 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
1067 DCHECK_EQ(output_record.picture_id, -1);
1068 PictureBufferArrayRef::PictureBufferRef& buffer =
1069 pic_buffers->picture_buffers[i];
1070 output_record.fd = buffer.egl_image_fd;
1071 output_record.egl_image = buffer.egl_image;
1072 output_record.picture_id = buffer.client_id;
1073
1074 // Take ownership of the EGLImage and fd.
1075 buffer.egl_image = EGL_NO_IMAGE_KHR;
1076 buffer.egl_image_fd = -1;
1077 // And add this buffer to the free list.
1078 gsc_free_output_buffers_.push_back(i);
1079 }
1080
1081 // We got buffers! Kick the GSC.
1082 EnqueueGsc();
1083 }
1084
1085 void ExynosVideoDecodeAccelerator::ServiceDeviceTask() {
1086 DVLOG(3) << "ServiceDeviceTask()";
1087 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1088 DCHECK_NE(decoder_state_, kUninitialized);
1089 DCHECK_NE(decoder_state_, kInitialized);
1090 DCHECK_NE(decoder_state_, kAfterReset);
1091 TRACE_EVENT0("Video Decoder", "EVDA::ServiceDeviceTask");
1092
1093 if (decoder_state_ == kResetting) {
1094 DVLOG(2) << "ServiceDeviceTask(): early out: kResetting state";
1095 return;
1096 } else if (decoder_state_ == kError) {
1097 DVLOG(2) << "ServiceDeviceTask(): early out: kError state";
1098 return;
1099 }
1100
1101 DequeueMfc();
1102 DequeueGsc();
1103 EnqueueMfc();
1104 EnqueueGsc();
1105
1106 // Clear the interrupt fd.
1107 if (!ClearDevicePollInterrupt())
1108 return;
1109
1110 unsigned int poll_fds = 0;
1111 // Add MFC fd, if we should poll on it.
1112 // MFC can be polled as soon as either input or output buffers are queued.
1113 if (mfc_input_buffer_queued_count_ + mfc_output_buffer_queued_count_ > 0)
1114 poll_fds |= kPollMfc;
1115 // Add GSC fd, if we should poll on it.
1116 // GSC has to wait until both input and output buffers are queued.
1117 if (gsc_input_buffer_queued_count_ > 0 && gsc_output_buffer_queued_count_ > 0)
1118 poll_fds |= kPollGsc;
1119
1120 // ServiceDeviceTask() should only ever be scheduled from DevicePollTask(),
1121 // so either:
1122 // * device_poll_thread_ is running normally
1123 // * device_poll_thread_ scheduled us, but then a ResetTask() or DestroyTask()
1124 // shut it down, in which case we're either in kResetting or kError states
1125 // respectively, and we should have early-outed already.
1126 DCHECK(device_poll_thread_.message_loop());
1127 // Queue the DevicePollTask() now.
1128 device_poll_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
1129 &ExynosVideoDecodeAccelerator::DevicePollTask,
1130 base::Unretained(this),
1131 poll_fds));
1132
1133 DVLOG(1) << "ServiceDeviceTask(): buffer counts: DEC["
1134 << decoder_input_queue_.size() << "->"
1135 << mfc_input_ready_queue_.size() << "] => MFC["
1136 << mfc_free_input_buffers_.size() << "+"
1137 << mfc_input_buffer_queued_count_ << "/"
1138 << mfc_input_buffer_count_ << "->"
1139 << mfc_free_output_buffers_.size() << "+"
1140 << mfc_output_buffer_queued_count_ << "/"
1141 << mfc_output_buffer_count_ << "] => "
1142 << mfc_output_gsc_input_queue_.size() << " => GSC["
1143 << gsc_free_input_buffers_.size() << "+"
1144 << gsc_input_buffer_queued_count_ << "/"
1145 << gsc_input_buffer_count_ << "->"
1146 << gsc_free_output_buffers_.size() << "+"
1147 << gsc_output_buffer_queued_count_ << "/"
1148 << gsc_output_buffer_count_ << "] => VDA["
1149 << decoder_frames_at_client_ << "]";
1150
1151 ScheduleDecodeBufferTaskIfNeeded();
1152 }
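A rough sketch of what the DevicePollTask() scheduled above might look like (its definition falls outside the portion shown here, so the details below are assumptions, not the patch's code): the poll_fds bitmask selects which device fds join the poll() set, and the interrupt eventfd is always included so SetDevicePollInterrupt() can wake a pending poll() when queue state changes.

void ExynosVideoDecodeAccelerator::DevicePollTask(unsigned int poll_fds) {
  // Runs on device_poll_thread_. Build the poll() set from the bitmask.
  struct pollfd pollfds[3];
  nfds_t nfds = 0;
  pollfds[nfds].fd = device_poll_interrupt_fd_;  // always wakeable
  pollfds[nfds].events = POLLIN | POLLERR;
  ++nfds;
  if (poll_fds & kPollMfc) {
    pollfds[nfds].fd = mfc_fd_;
    pollfds[nfds].events = POLLIN | POLLOUT | POLLERR;
    ++nfds;
  }
  if (poll_fds & kPollGsc) {
    pollfds[nfds].fd = gsc_fd_;
    pollfds[nfds].events = POLLIN | POLLOUT | POLLERR;
    ++nfds;
  }
  if (HANDLE_EINTR(poll(pollfds, nfds, -1)) == -1) {
    DPLOG(ERROR) << "DevicePollTask(): poll() failed";
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return;
  }
  // All servicing happens back on the decoder thread.
  decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
      &ExynosVideoDecodeAccelerator::ServiceDeviceTask,
      base::Unretained(this)));
}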
1153
1154 void ExynosVideoDecodeAccelerator::EnqueueMfc() {
1155 DVLOG(3) << "EnqueueMfc()";
1156 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1157 DCHECK_NE(decoder_state_, kUninitialized);
1158 TRACE_EVENT0("Video Decoder", "EVDA::EnqueueMfc");
1159
1160 // Drain the pipe of completed decode buffers.
1161 const int old_mfc_inputs_queued = mfc_input_buffer_queued_count_;
1162 while (!mfc_input_ready_queue_.empty()) {
1163 if (!EnqueueMfcInputRecord())
1164 return;
1165 }
1166 if (old_mfc_inputs_queued == 0 && mfc_input_buffer_queued_count_ != 0) {
1167 // We just started up a previously empty queue.
1168 // Queue state changed; signal interrupt.
1169 if (!SetDevicePollInterrupt())
1170 return;
1171 // Start VIDIOC_STREAMON if we haven't yet.
1172 if (!mfc_input_streamon_) {
1173 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1174 IOCTL_OR_ERROR_RETURN(mfc_fd_, VIDIOC_STREAMON, &type);
1175 mfc_input_streamon_ = true;
1176 }
1177 }
1178
1179 // Enqueue all the MFC outputs we can.
1180 const int old_mfc_outputs_queued = mfc_output_buffer_queued_count_;
1181 while (!mfc_free_output_buffers_.empty()) {
1182 if (!EnqueueMfcOutputRecord())
1183 return;
1184 }
1185 if (old_mfc_outputs_queued == 0 && mfc_output_buffer_queued_count_ != 0) {
1186 // We just started up a previously empty queue.
1187 // Queue state changed; signal interrupt.
1188 if (!SetDevicePollInterrupt())
1189 return;
1190 // Start VIDIOC_STREAMON if we haven't yet.
1191 if (!mfc_output_streamon_) {
1192 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1193 IOCTL_OR_ERROR_RETURN(mfc_fd_, VIDIOC_STREAMON, &type);
1194 mfc_output_streamon_ = true;
1195 }
1196 }
1197 }
1198
1199 void ExynosVideoDecodeAccelerator::DequeueMfc() {
1200 DVLOG(3) << "DequeueMfc()";
1201 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1202 DCHECK_NE(decoder_state_, kUninitialized);
1203 TRACE_EVENT0("Video Decoder", "EVDA::DequeueMfc");
1204
1205 // Dequeue completed MFC input (VIDEO_OUTPUT) buffers, and recycle to the free
1206 // list.
1207 struct v4l2_buffer dqbuf;
1208 struct v4l2_plane planes[2];
1209 while (mfc_input_buffer_queued_count_ > 0) {
1210 DCHECK(mfc_input_streamon_);
1211 memset(&dqbuf, 0, sizeof(dqbuf));
1212 dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1213 dqbuf.memory = V4L2_MEMORY_MMAP;
1214 if (ioctl(mfc_fd_, VIDIOC_DQBUF, &dqbuf) != 0) {
1215 if (errno == EAGAIN) {
1216 // EAGAIN if we're just out of buffers to dequeue.
1217 break;
1218 }
1219 DPLOG(ERROR) << "DequeueMfc(): ioctl() failed: VIDIOC_DQBUF";
1220 NOTIFY_ERROR(PLATFORM_FAILURE);
1221 return;
1222 }
1223 MfcInputRecord& input_record = mfc_input_buffer_map_[dqbuf.index];
1224 DCHECK(input_record.at_device);
1225 mfc_free_input_buffers_.push_back(dqbuf.index);
1226 input_record.at_device = false;
1227 input_record.bytes_used = 0;
1228 input_record.input_id = -1;
1229 mfc_input_buffer_queued_count_--;
1230 }
1231
1232 // Dequeue completed MFC output (VIDEO_CAPTURE) buffers, and queue to the
1233 // completed queue.
1234 while (mfc_output_buffer_queued_count_ > 0) {
1235 DCHECK(mfc_output_streamon_);
1236 memset(&dqbuf, 0, sizeof(dqbuf));
1237 memset(planes, 0, sizeof(planes));
1238 dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1239 dqbuf.memory = V4L2_MEMORY_MMAP;
1240 dqbuf.m.planes = planes;
1241 dqbuf.length = 2;
1242 if (ioctl(mfc_fd_, VIDIOC_DQBUF, &dqbuf) != 0) {
1243 if (errno == EAGAIN) {
1244 // EAGAIN if we're just out of buffers to dequeue.
1245 break;
1246 }
1247 DPLOG(ERROR) << "DequeueMfc(): ioctl() failed: VIDIOC_DQBUF";
1248 NOTIFY_ERROR(PLATFORM_FAILURE);
1249 return;
1250 }
1251 MfcOutputRecord& output_record = mfc_output_buffer_map_[dqbuf.index];
1252 DCHECK(output_record.at_device);
1253 output_record.at_device = false;
1254 output_record.bytes_used[0] = dqbuf.m.planes[0].bytesused;
1255 output_record.bytes_used[1] = dqbuf.m.planes[1].bytesused;
1256 if (output_record.bytes_used[0] + output_record.bytes_used[1] == 0) {
1257 // This is an empty output buffer returned as part of a flush.
1258 mfc_free_output_buffers_.push_back(dqbuf.index);
1259 output_record.input_id = -1;
1260 } else {
1261 // This is an output buffer with contents to pass down the pipe.
1262 mfc_output_gsc_input_queue_.push_back(dqbuf.index);
1263 output_record.input_id = dqbuf.timestamp.tv_sec;
1264 DCHECK(output_record.input_id >= 0);
1265 DVLOG(3) << "DequeueMfc(): dequeued input_id=" << output_record.input_id;
1266 // We don't count this output buffer dequeued yet, or add it to the free
1267 // list, as it has data GSC needs to process.
1268
1269 // We have new frames in mfc_output_gsc_input_queue_. Kick the pipe.
1270 SetDevicePollInterrupt();
1271 }
1272 mfc_output_buffer_queued_count_--;
1273 }
1274
1275 NotifyFlushDoneIfNeeded();
1276 }
1277
1278 void ExynosVideoDecodeAccelerator::EnqueueGsc() {
1279 DVLOG(3) << "EnqueueGsc()";
1280 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1281 DCHECK_NE(decoder_state_, kUninitialized);
1282 DCHECK_NE(decoder_state_, kInitialized);
1283 TRACE_EVENT0("Video Decoder", "EVDA::EnqueueGsc");
1284
1285 // Drain the pipe of completed MFC output buffers.
1286 const int old_gsc_inputs_queued = gsc_input_buffer_queued_count_;
1287 while (!mfc_output_gsc_input_queue_.empty() &&
1288 !gsc_free_input_buffers_.empty()) {
1289 if (!EnqueueGscInputRecord())
1290 return;
1291 }
1292 if (old_gsc_inputs_queued == 0 && gsc_input_buffer_queued_count_ != 0) {
1293 // We just started up a previously empty queue.
1294 // Queue state changed; signal interrupt.
1295 if (!SetDevicePollInterrupt())
1296 return;
1297 // Start VIDIOC_STREAMON if we haven't yet.
1298 if (!gsc_input_streamon_) {
1299 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1300 IOCTL_OR_ERROR_RETURN(gsc_fd_, VIDIOC_STREAMON, &type);
1301 gsc_input_streamon_ = true;
1302 }
1303 }
1304
1305 // Enqueue a GSC output, only if we need one
1306 if (gsc_input_buffer_queued_count_ != 0 &&
1307 gsc_output_buffer_queued_count_ == 0 &&
1308 !gsc_free_output_buffers_.empty()) {
1309 const int old_gsc_outputs_queued = gsc_output_buffer_queued_count_;
1310 if (!EnqueueGscOutputRecord())
1311 return;
1312 if (old_gsc_outputs_queued == 0 && gsc_output_buffer_queued_count_ != 0) {
1313 // We just started up a previously empty queue.
1314 // Queue state changed; signal interrupt.
1315 if (!SetDevicePollInterrupt())
1316 return;
1317 // Start VIDIOC_STREAMON if we haven't yet.
1318 if (!gsc_output_streamon_) {
1319 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1320 IOCTL_OR_ERROR_RETURN(gsc_fd_, VIDIOC_STREAMON, &type);
1321 gsc_output_streamon_ = true;
1322 }
1323 }
1324 }
1325 // Bug check: GSC is liable to race conditions if more than one buffer is
1326 // simultaneously queued.
1327 DCHECK_GE(1, gsc_output_buffer_queued_count_);
1328 }
1329
1330 void ExynosVideoDecodeAccelerator::DequeueGsc() {
1331 DVLOG(3) << "DequeueGsc()";
1332 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1333 DCHECK_NE(decoder_state_, kUninitialized);
1334 DCHECK_NE(decoder_state_, kInitialized);
1335 DCHECK_NE(decoder_state_, kAfterReset);
1336 TRACE_EVENT0("Video Decoder", "EVDA::DequeueGsc");
1337
1338 // Dequeue completed GSC input (VIDEO_OUTPUT) buffers, and recycle to the free
1339 // list. Also recycle the corresponding MFC output buffers at this time.
1340 struct v4l2_buffer dqbuf;
1341 while (gsc_input_buffer_queued_count_ > 0) {
1342 DCHECK(gsc_input_streamon_);
1343 memset(&dqbuf, 0, sizeof(dqbuf));
1344 dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1345 dqbuf.memory = V4L2_MEMORY_DMABUF;
1346 if (ioctl(gsc_fd_, VIDIOC_DQBUF, &dqbuf) != 0) {
1347 if (errno == EAGAIN) {
1348 // EAGAIN if we're just out of buffers to dequeue.
1349 break;
1350 }
1351 DPLOG(ERROR) << "DequeueGsc(): ioctl() failed: VIDIOC_DQBUF";
1352 NOTIFY_ERROR(PLATFORM_FAILURE);
1353 return;
1354 }
1355 GscInputRecord& input_record = gsc_input_buffer_map_[dqbuf.index];
1356 MfcOutputRecord& output_record =
1357 mfc_output_buffer_map_[input_record.mfc_output];
1358 DCHECK(input_record.at_device);
1359 gsc_free_input_buffers_.push_back(dqbuf.index);
1360 mfc_free_output_buffers_.push_back(input_record.mfc_output);
1361 input_record.at_device = false;
1362 input_record.mfc_output = -1;
1363 output_record.input_id = -1;
1364 gsc_input_buffer_queued_count_--;
1365 }
1366
1367 // Dequeue completed GSC output (VIDEO_CAPTURE) buffers, and send them off to
1368 // the client. Don't recycle to its free list yet -- we can't do that until
1369 // ReusePictureBuffer() returns it to us.
1370 while (gsc_output_buffer_queued_count_ > 0) {
1371 DCHECK(gsc_output_streamon_);
1372 memset(&dqbuf, 0, sizeof(dqbuf));
1373 dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1374 dqbuf.memory = V4L2_MEMORY_DMABUF;
1375 if (ioctl(gsc_fd_, VIDIOC_DQBUF, &dqbuf) != 0) {
1376 if (errno == EAGAIN) {
1377 // EAGAIN if we're just out of buffers to dequeue.
1378 break;
1379 }
1380 DPLOG(ERROR) << "DequeueGsc(): ioctl() failed: VIDIOC_DQBUF";
1381 NOTIFY_ERROR(PLATFORM_FAILURE);
1382 return;
1383 }
1384 GscOutputRecord& output_record = gsc_output_buffer_map_[dqbuf.index];
1385 DCHECK(output_record.at_device);
1386 DCHECK(!output_record.at_client);
1387 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
1388 output_record.at_device = false;
1389 output_record.at_client = true;
1390 gsc_output_buffer_queued_count_--;
1391 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
1392 &Client::PictureReady, client_, media::Picture(
1393 output_record.picture_id, dqbuf.timestamp.tv_sec)));
1394 decoder_frames_at_client_++;
1395 }
1396
1397 NotifyFlushDoneIfNeeded();
1398 }
1399
1400 bool ExynosVideoDecodeAccelerator::EnqueueMfcInputRecord() {
1401 DVLOG(3) << "EnqueueMfcInputRecord()";
1402 DCHECK(!mfc_input_ready_queue_.empty());
1403
1404 // Enqueue a MFC input (VIDEO_OUTPUT) buffer.
1405 const int buffer = mfc_input_ready_queue_.back();
1406 MfcInputRecord& input_record = mfc_input_buffer_map_[buffer];
1407 DCHECK(!input_record.at_device);
1408 struct v4l2_buffer qbuf;
1409 struct v4l2_plane qbuf_plane;
1410 memset(&qbuf, 0, sizeof(qbuf));
1411 memset(&qbuf_plane, 0, sizeof(qbuf_plane));
1412 qbuf.index = buffer;
1413 qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1414 qbuf.timestamp.tv_sec = input_record.input_id;
1415 qbuf.memory = V4L2_MEMORY_MMAP;
1416 qbuf.m.planes = &qbuf_plane;
1417 qbuf.m.planes[0].bytesused = input_record.bytes_used;
1418 qbuf.length = 1;
1419 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_QBUF, &qbuf);
1420 mfc_input_ready_queue_.pop_back();
1421 input_record.at_device = true;
1422 mfc_input_buffer_queued_count_++;
1423 DVLOG(3) << "EnqueueMfcInputRecord(): enqueued input_id="
1424 << input_record.input_id;
1425 return true;
1426 }
1427
1428 bool ExynosVideoDecodeAccelerator::EnqueueMfcOutputRecord() {
1429 DVLOG(3) << "EnqueueMfcOutputRecord()";
1430 DCHECK(!mfc_free_output_buffers_.empty());
1431
1432 // Enqueue a MFC output (VIDEO_CAPTURE) buffer.
1433 const int buffer = mfc_free_output_buffers_.back();
1434 MfcOutputRecord& output_record = mfc_output_buffer_map_[buffer];
1435 DCHECK(!output_record.at_device);
1436 DCHECK_EQ(output_record.input_id, -1);
1437 struct v4l2_buffer qbuf;
1438 struct v4l2_plane qbuf_planes[2];
1439 memset(&qbuf, 0, sizeof(qbuf));
1440 memset(qbuf_planes, 0, sizeof(qbuf_planes));
1441 qbuf.index = buffer;
1442 qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1443 qbuf.memory = V4L2_MEMORY_MMAP;
1444 qbuf.m.planes = qbuf_planes;
1445 qbuf.length = 2;
1446 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_QBUF, &qbuf);
1447 mfc_free_output_buffers_.pop_back();
1448 output_record.at_device = true;
1449 mfc_output_buffer_queued_count_++;
1450 return true;
1451 }
1452
1453 bool ExynosVideoDecodeAccelerator::EnqueueGscInputRecord() {
1454 DVLOG(3) << "EnqueueGscInputRecord()";
1455 DCHECK(!gsc_free_input_buffers_.empty());
1456
1457 // Enqueue a GSC input (VIDEO_OUTPUT) buffer for a complete MFC output
1458 // (VIDEO_CAPTURE) buffer.
1459 const int mfc_buffer = mfc_output_gsc_input_queue_.front();
1460 const int gsc_buffer = gsc_free_input_buffers_.back();
1461 MfcOutputRecord& output_record = mfc_output_buffer_map_[mfc_buffer];
1462 DCHECK(!output_record.at_device);
1463 GscInputRecord& input_record = gsc_input_buffer_map_[gsc_buffer];
1464 DCHECK(!input_record.at_device);
1465 DCHECK_EQ(input_record.mfc_output, -1);
1466 struct v4l2_buffer qbuf;
1467 struct v4l2_plane qbuf_planes[2];
1468 memset(&qbuf, 0, sizeof(qbuf));
1469 memset(qbuf_planes, 0, sizeof(qbuf_planes));
1470 qbuf.index = gsc_buffer;
1471 qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1472 qbuf.timestamp.tv_sec = output_record.input_id;
1473 qbuf.memory = V4L2_MEMORY_USERPTR;
1474 qbuf.m.planes = qbuf_planes;
1475 qbuf.m.planes[0].bytesused = output_record.bytes_used[0];
1476 qbuf.m.planes[0].length = mfc_output_buffer_size_[0];
1477 qbuf.m.planes[0].m.userptr = (unsigned long)output_record.address[0];
1478 qbuf.m.planes[1].bytesused = output_record.bytes_used[1];
1479 qbuf.m.planes[1].length = mfc_output_buffer_size_[1];
1480 qbuf.m.planes[1].m.userptr = (unsigned long)output_record.address[1];
1481 qbuf.length = 2;
1482 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_QBUF, &qbuf);
1483 mfc_output_gsc_input_queue_.pop_front();
1484 gsc_free_input_buffers_.pop_back();
1485 input_record.at_device = true;
1486 input_record.mfc_output = mfc_buffer;
1487 output_record.bytes_used[0] = 0;
1488 output_record.bytes_used[1] = 0;
1489 gsc_input_buffer_queued_count_++;
1490 DVLOG(3) << "EnqueueGscInputRecord(): enqueued input_id="
1491 << output_record.input_id;
1492 return true;
1493 }
1494
1495 bool ExynosVideoDecodeAccelerator::EnqueueGscOutputRecord() {
1496 DVLOG(3) << "EnqueueGscOutputRecord()";
1497 DCHECK(!gsc_free_output_buffers_.empty());
1498
1499 // Enqueue a GSC output (VIDEO_CAPTURE) buffer.
1500 const int buffer = gsc_free_output_buffers_.front();
1501 GscOutputRecord& output_record = gsc_output_buffer_map_[buffer];
1502 DCHECK(!output_record.at_device);
1503 DCHECK(!output_record.at_client);
1504 if (output_record.egl_sync != EGL_NO_SYNC_KHR) {
1505 TRACE_EVENT0(
1506 "Video Decoder",
1507 "EVDA::EnqueueGscOutputRecord: eglClientWaitSyncKHR");
1508 // If we have to wait for completion, wait. Note that
1509 // gsc_free_output_buffers_ is a FIFO queue, so we always wait on the
1510 // buffer that has been in the queue the longest.
1511 egl_client_wait_sync_khr(egl_display_, output_record.egl_sync, 0,
1512 EGL_FOREVER_KHR);
1513 egl_destroy_sync_khr(egl_display_, output_record.egl_sync);
1514 output_record.egl_sync = EGL_NO_SYNC_KHR;
1515 }
1516 struct v4l2_buffer qbuf;
1517 struct v4l2_plane qbuf_plane;
1518 memset(&qbuf, 0, sizeof(qbuf));
1519 memset(&qbuf_plane, 0, sizeof(qbuf_plane));
1520 qbuf.index = buffer;
1521 qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1522 qbuf.memory = V4L2_MEMORY_DMABUF;
1523 qbuf.m.planes = &qbuf_plane;
1524 qbuf.m.planes[0].m.fd = output_record.fd;
1525 qbuf.length = 1;
1526 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_QBUF, &qbuf);
1527 gsc_free_output_buffers_.pop_front();
1528 output_record.at_device = true;
1529 gsc_output_buffer_queued_count_++;
1530 return true;
1531 }
1532
1533 void ExynosVideoDecodeAccelerator::ReusePictureBufferTask(
1534 int32 picture_buffer_id, scoped_ptr<EGLSyncKHRRef> egl_sync_ref) {
1535 DVLOG(3) << "ReusePictureBufferTask(): picture_buffer_id="
1536 << picture_buffer_id;
1537 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1538 TRACE_EVENT0("Video Decoder", "EVDA::ReusePictureBufferTask");
1539
1540 // We run ReusePictureBufferTask even if we're in kResetting.
1541 if (decoder_state_ == kError) {
1542 DVLOG(2) << "ReusePictureBufferTask(): early out: kError state";
1543 return;
1544 }
1545
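  // Find the GSC output buffer matching picture_buffer_id.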
1546 size_t index;
1547 for (index = 0; index < gsc_output_buffer_map_.size(); ++index)
1548 if (gsc_output_buffer_map_[index].picture_id == picture_buffer_id)
1549 break;
1550
1551 if (index >= gsc_output_buffer_map_.size()) {
1552 DLOG(ERROR) << "ReusePictureBufferTask(): picture_buffer_id not found";
1553 NOTIFY_ERROR(INVALID_ARGUMENT);
1554 return;
1555 }
1556
1557 GscOutputRecord& output_record = gsc_output_buffer_map_[index];
1558 if (output_record.at_device || !output_record.at_client) {
1559 DLOG(ERROR) << "ReusePictureBufferTask(): picture_buffer_id not reusable";
1560 NOTIFY_ERROR(INVALID_ARGUMENT);
1561 return;
1562 }
1563
1564 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
1565 output_record.at_client = false;
1566 output_record.egl_sync = egl_sync_ref->egl_sync;
1567 gsc_free_output_buffers_.push_back(index);
1568 decoder_frames_at_client_--;
1569 // Take ownership of the EGLSync.
1570 egl_sync_ref->egl_sync = EGL_NO_SYNC_KHR;
1571 // We got a buffer back, so kick the GSC.
1572 EnqueueGsc();
1573 }
1574
1575 void ExynosVideoDecodeAccelerator::FlushTask() {
1576 DVLOG(3) << "FlushTask()";
1577 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1578 TRACE_EVENT0("Video Decoder", "EVDA::FlushTask");
1579
1580 // Flush outstanding buffers.
1581 if (decoder_state_ == kInitialized || decoder_state_ == kAfterReset) {
1582 // There's nothing in the pipe, so return done immediately.
1583 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
1584 &Client::NotifyFlushDone, client_));
1585 return;
1586 } else if (decoder_state_ == kError) {
1587 DVLOG(2) << "FlushTask(): early out: kError state";
1588 return;
1589 }
1590
1591 // We don't support stacked flushing.
1592 DCHECK(!decoder_flushing_);
1593
1594 // Queue up an empty buffer -- this triggers the flush.
1595 decoder_input_queue_.push_back(linked_ptr<BitstreamBufferRef>(
1596 new BitstreamBufferRef(client_, child_message_loop_proxy_, NULL, 0,
1597 kFlushBufferId)));
1598 decoder_flushing_ = true;
1599
1600 ScheduleDecodeBufferTaskIfNeeded();
1601 }
1602
1603 void ExynosVideoDecodeAccelerator::NotifyFlushDoneIfNeeded() {
1604 if (!decoder_flushing_)
1605 return;
1606
1607 // Pipeline is empty when:
1608 // * Decoder input queue is empty of non-delayed buffers.
1609 // * There is no currently filling input buffer.
1610 // * MFC input holding queue is empty.
1611 // * All MFC input (VIDEO_OUTPUT) buffers are returned.
1612 // * MFC -> GSC holding queue is empty.
1613 //  * All GSC input (VIDEO_OUTPUT) and output (VIDEO_CAPTURE) buffers are returned.
1614 if (!decoder_input_queue_.empty()) {
1615 if (decoder_input_queue_.front()->input_id !=
1616 decoder_delay_bitstream_buffer_id_)
1617 return;
1618 }
1619 if (decoder_current_input_buffer_ != -1)
1620 return;
1621 if ((mfc_input_ready_queue_.size() +
1622 mfc_input_buffer_queued_count_ + mfc_output_gsc_input_queue_.size() +
1623 gsc_input_buffer_queued_count_ + gsc_output_buffer_queued_count_) != 0)
1624 return;
1625
1626 decoder_delay_bitstream_buffer_id_ = -1;
1627 decoder_flushing_ = false;
1628 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
1629 &Client::NotifyFlushDone, client_));
1630
1631 // While we were flushing, we early-outed DecodeBufferTask()s.
1632 ScheduleDecodeBufferTaskIfNeeded();
1633 }
1634
1635 void ExynosVideoDecodeAccelerator::ResetTask() {
1636 DVLOG(3) << "ResetTask()";
1637 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1638 TRACE_EVENT0("Video Decoder", "EVDA::ResetTask");
1639
1640 if (decoder_state_ == kError) {
1641 DVLOG(2) << "ResetTask(): early out: kError state";
1642 return;
1643 }
1644
1645 // We stop streaming, but we _don't_ destroy our buffers.
1646 if (!StopDevicePoll())
1647 return;
1648
1649 decoder_current_bitstream_buffer_.reset();
1650 decoder_input_queue_.clear();
1651
1652 decoder_current_input_buffer_ = -1;
1653
1654 // If we were flushing, we'll never return any more BitstreamBuffers or
1655 // PictureBuffers; they have all been dropped and returned by now.
1656 NotifyFlushDoneIfNeeded();
1657
1658 // Mark that we're resetting, then enqueue a ResetDoneTask(). All intervening
1659 // jobs will early-out in the kResetting state.
1660 decoder_state_ = kResetting;
1661 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
1662 &ExynosVideoDecodeAccelerator::ResetDoneTask, base::Unretained(this)));
1663 }
1664
1665 void ExynosVideoDecodeAccelerator::ResetDoneTask() {
1666 DVLOG(3) << "ResetDoneTask()";
1667 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1668 TRACE_EVENT0("Video Decoder", "EVDA::ResetDoneTask");
1669
1670 if (decoder_state_ == kError) {
1671 DVLOG(2) << "ResetDoneTask(): early out: kError state";
1672 return;
1673 }
1674
1675 // Reset format-specific bits.
1676 if (video_profile_ >= media::H264PROFILE_MIN &&
1677 video_profile_ <= media::H264PROFILE_MAX) {
1678 decoder_h264_parser_.reset(new content::H264Parser());
1679 }
1680
1681 // Jobs drained, we're finished resetting.
1682 DCHECK_EQ(decoder_state_, kResetting);
1683 decoder_state_ = kAfterReset;
1684 decoder_delay_bitstream_buffer_id_ = -1;
1685 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
1686 &Client::NotifyResetDone, client_));
1687
1688 // While we were resetting, we early-outed DecodeBufferTask()s.
1689 ScheduleDecodeBufferTaskIfNeeded();
1690 }
1691
1692 void ExynosVideoDecodeAccelerator::DestroyTask() {
1693 DVLOG(3) << "DestroyTask()";
1694 TRACE_EVENT0("Video Decoder", "EVDA::DestroyTask");
1695
1696 // DestroyTask() should run regardless of decoder_state_.
1697
1698 // Stop streaming and the device_poll_thread_.
1699 StopDevicePoll();
1700
1701 decoder_current_bitstream_buffer_.reset();
1702 decoder_current_input_buffer_ = -1;
1703 decoder_decode_buffer_tasks_scheduled_ = 0;
1704 decoder_frames_at_client_ = 0;
1705 decoder_input_queue_.clear();
1706 decoder_flushing_ = false;
1707
1708 // Set our state to kError, so that any tasks still pending will early-out.
1709 decoder_state_ = kError;
1710 }
1711
1712 bool ExynosVideoDecodeAccelerator::StartDevicePoll() {
1713 DVLOG(3) << "StartDevicePoll()";
1714 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1715 DCHECK(!device_poll_thread_.IsRunning());
1716
1717 // Start up the device poll thread and schedule its first DevicePollTask().
1718 if (!device_poll_thread_.Start()) {
1719 DLOG(ERROR) << "StartDevicePoll(): Device thread failed to start";
1720 NOTIFY_ERROR(PLATFORM_FAILURE);
1721 return false;
1722 }
1723 device_poll_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
1724 &ExynosVideoDecodeAccelerator::DevicePollTask,
1725 base::Unretained(this),
1726 0));
1727
1728 return true;
1729 }
1730
1731 bool ExynosVideoDecodeAccelerator::StopDevicePoll() {
1732 DVLOG(3) << "StopDevicePoll()";
1733 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1734
1735 // Signal the DevicePollTask() to stop, and stop the device poll thread.
1736 if (!SetDevicePollInterrupt())
1737 return false;
1738 device_poll_thread_.Stop();
1739 // Clear the interrupt now, to be sure.
1740 if (!ClearDevicePollInterrupt())
1741 return false;
1742
1743 // Stop streaming.
1744 if (mfc_input_streamon_) {
1745 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1746 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_STREAMOFF, &type);
1747 }
1748 mfc_input_streamon_ = false;
1749 if (mfc_output_streamon_) {
1750 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1751 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_STREAMOFF, &type);
1752 }
1753 mfc_output_streamon_ = false;
1754 if (gsc_input_streamon_) {
1755 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1756 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_STREAMOFF, &type);
1757 }
1758 gsc_input_streamon_ = false;
1759 if (gsc_output_streamon_) {
1760 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1761 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_STREAMOFF, &type);
1762 }
1763 gsc_output_streamon_ = false;
1764
1765 // Reset all our accounting info.
1766 mfc_input_ready_queue_.clear();
1767 mfc_free_input_buffers_.clear();
1768 DCHECK_EQ(mfc_input_buffer_count_,
1769 static_cast<int>(mfc_input_buffer_map_.size()));
1770 for (size_t i = 0; i < mfc_input_buffer_map_.size(); ++i) {
1771 mfc_free_input_buffers_.push_back(i);
1772 mfc_input_buffer_map_[i].at_device = false;
1773 mfc_input_buffer_map_[i].bytes_used = 0;
1774 mfc_input_buffer_map_[i].input_id = -1;
1775 }
1776 mfc_input_buffer_queued_count_ = 0;
1777 mfc_free_output_buffers_.clear();
1778 DCHECK_EQ(mfc_output_buffer_count_,
1779 static_cast<int>(mfc_output_buffer_map_.size()));
1780 for (size_t i = 0; i < mfc_output_buffer_map_.size(); ++i) {
1781 mfc_free_output_buffers_.push_back(i);
1782 mfc_output_buffer_map_[i].at_device = false;
1783 mfc_output_buffer_map_[i].input_id = -1;
1784 }
1785 mfc_output_buffer_queued_count_ = 0;
1786 mfc_output_gsc_input_queue_.clear();
1787 gsc_free_input_buffers_.clear();
1788 DCHECK_EQ(gsc_input_buffer_count_,
1789 static_cast<int>(gsc_input_buffer_map_.size()));
1790 for (size_t i = 0; i < gsc_input_buffer_map_.size(); ++i) {
1791 gsc_free_input_buffers_.push_back(i);
1792 gsc_input_buffer_map_[i].at_device = false;
1793 gsc_input_buffer_map_[i].mfc_output = -1;
1794 }
1795 gsc_input_buffer_queued_count_ = 0;
1796 gsc_free_output_buffers_.clear();
1797 DCHECK_EQ(gsc_output_buffer_count_,
1798 static_cast<int>(gsc_output_buffer_map_.size()));
1799 for (size_t i = 0; i < gsc_output_buffer_map_.size(); ++i) {
1800 // Only mark as free those buffers that aren't still held by the client.
1801 if (!gsc_output_buffer_map_[i].at_client) {
1802 gsc_free_output_buffers_.push_back(i);
1803 gsc_output_buffer_map_[i].at_device = false;
1804 }
1805 }
1806 gsc_output_buffer_queued_count_ = 0;
1807
1808 DVLOG(3) << "StopDevicePoll(): device poll stopped";
1809 return true;
1810 }
1811
1812 bool ExynosVideoDecodeAccelerator::SetDevicePollInterrupt() {
1813 DVLOG(3) << "SetDevicePollInterrupt()";
1814 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1815
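  // Make device_poll_interrupt_fd_ readable, so the poll() in DevicePollTask()
  // returns.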
1816 const uint64 buf = 1;
1817 if (HANDLE_EINTR(write(device_poll_interrupt_fd_, &buf, sizeof(buf))) == -1) {
1818 DPLOG(ERROR) << "SetDevicePollInterrupt(): write() failed";
1819 NOTIFY_ERROR(PLATFORM_FAILURE);
1820 return false;
1821 }
1822 return true;
1823 }
1824
1825 bool ExynosVideoDecodeAccelerator::ClearDevicePollInterrupt() {
1826 DVLOG(3) << "ClearDevicePollInterrupt()";
1827 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1828
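  // Drain the interrupt fd, so the next DevicePollTask() poll() blocks again.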
1829 uint64 buf;
1830 if (HANDLE_EINTR(read(device_poll_interrupt_fd_, &buf, sizeof(buf))) == -1) {
1831 if (errno == EAGAIN) {
1832 // No interrupt flag set, and we're reading nonblocking. Not an error.
1833 return true;
1834 } else {
1835 DPLOG(ERROR) << "ClearDevicePollInterrupt(): read() failed";
1836 NOTIFY_ERROR(PLATFORM_FAILURE);
1837 return false;
1838 }
1839 }
1840 return true;
1841 }
1842
1843 void ExynosVideoDecodeAccelerator::DevicePollTask(unsigned int poll_fds) {
1844 DVLOG(3) << "DevicePollTask()";
1845 DCHECK_EQ(device_poll_thread_.message_loop(), MessageLoop::current());
1846 TRACE_EVENT0("Video Decoder", "EVDA::DevicePollTask");
1847
1848 // This routine just polls the set of device fds, and schedules a
1849 // ServiceDeviceTask() on decoder_thread_ when processing needs to occur.
1850 // Other threads may notify this task to return early by writing to
1851 // device_poll_interrupt_fd_.
1852 struct pollfd pollfds[3];
1853 nfds_t nfds;
1854
1855 // Add device_poll_interrupt_fd_.
1856 pollfds[0].fd = device_poll_interrupt_fd_;
1857 pollfds[0].events = POLLIN | POLLERR;
1858 nfds = 1;
1859
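  // Add MFC fd, if we should poll on it.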
1860 if (poll_fds & kPollMfc) {
1861 DVLOG(3) << "DevicePollTask(): adding MFC to poll() set";
1862 pollfds[nfds].fd = mfc_fd_;
1863 pollfds[nfds].events = POLLIN | POLLOUT | POLLERR;
1864 nfds++;
1865 }
1866 // Add GSC fd, if we should poll on it.
1867 // GSC has to wait until both input and output buffers are queued.
1868 if (poll_fds & kPollGsc) {
1869 DVLOG(3) << "DevicePollTask(): adding GSC to poll() set";
1870 pollfds[nfds].fd = gsc_fd_;
1871 pollfds[nfds].events = POLLIN | POLLOUT | POLLERR;
1872 nfds++;
1873 }
1874
1875 // Poll it!
1876 if (HANDLE_EINTR(poll(pollfds, nfds, -1)) == -1) {
1877 DPLOG(ERROR) << "DevicePollTask(): poll() failed";
1878 NOTIFY_ERROR(PLATFORM_FAILURE);
1879 return;
1880 }
1881
1882 // All processing should happen on ServiceDeviceTask(), since we shouldn't
1883 // touch decoder state from this thread.
1884 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
1885 &ExynosVideoDecodeAccelerator::ServiceDeviceTask,
1886 base::Unretained(this)));
1887 }
1888
1889 void ExynosVideoDecodeAccelerator::NotifyError(Error error) {
1890 DVLOG(2) << "NotifyError()";
1891
1892 if (!child_message_loop_proxy_->BelongsToCurrentThread()) {
1893 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
1894 &ExynosVideoDecodeAccelerator::NotifyError, weak_this_, error));
1895 return;
1896 }
1897
1898 if (client_) {
1899 client_->NotifyError(error);
1900 client_ptr_factory_.InvalidateWeakPtrs();
1901 }
1902 }
1903
1904 void ExynosVideoDecodeAccelerator::SetDecoderState(State state) {
1905 DVLOG(3) << "SetDecoderState(): state=" << state;
1906
1907 // We can touch decoder_state_ only if this is the decoder thread or the
1908 // decoder thread isn't running.
1909 if (decoder_thread_.message_loop() != NULL &&
1910 decoder_thread_.message_loop() != MessageLoop::current()) {
1911 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
1912 &ExynosVideoDecodeAccelerator::SetDecoderState,
1913 base::Unretained(this), state));
1914 } else {
1915 decoder_state_ = state;
1916 }
1917 }
1918
1919 bool ExynosVideoDecodeAccelerator::CreateMfcInputBuffers() {
1920 DVLOG(3) << "CreateMfcInputBuffers()";
1921 // We always run this as we prepare to initialize.
1922 DCHECK_EQ(decoder_state_, kUninitialized);
1923 DCHECK(!mfc_input_streamon_);
1924 DCHECK_EQ(mfc_input_buffer_count_, 0);
1925
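  // Pick the coded-stream pixelformat based on the profile we were given.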
1926 __u32 pixelformat = 0;
1927 if (video_profile_ >= media::H264PROFILE_MIN &&
1928 video_profile_ <= media::H264PROFILE_MAX) {
1929 pixelformat = V4L2_PIX_FMT_H264;
1930 } else if (video_profile_ >= media::VP8PROFILE_MIN &&
1931 video_profile_ <= media::VP8PROFILE_MAX) {
1932 pixelformat = V4L2_PIX_FMT_VP8;
1933 } else {
1934 NOTREACHED();
1935 }
1936
1937 struct v4l2_format format;
1938 memset(&format, 0, sizeof(format));
1939 format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1940 format.fmt.pix_mp.pixelformat = pixelformat;
1941 format.fmt.pix_mp.plane_fmt[0].sizeimage = kMfcInputBufferMaxSize;
1942 format.fmt.pix_mp.num_planes = 1;
1943 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_S_FMT, &format);
1944
1945 struct v4l2_requestbuffers reqbufs;
1946 memset(&reqbufs, 0, sizeof(reqbufs));
1947 reqbufs.count = kMfcInputBufferCount;
1948 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1949 reqbufs.memory = V4L2_MEMORY_MMAP;
1950 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_REQBUFS, &reqbufs);
1951 mfc_input_buffer_count_ = reqbufs.count;
1952 mfc_input_buffer_map_.resize(mfc_input_buffer_count_);
1953 for (int i = 0; i < mfc_input_buffer_count_; ++i) {
1954 mfc_free_input_buffers_.push_back(i);
1955
1956 // Query for the MEMORY_MMAP pointer.
1957 struct v4l2_plane planes[1];
1958 struct v4l2_buffer buffer;
1959 memset(&buffer, 0, sizeof(buffer));
1960 memset(planes, 0, sizeof(planes));
1961 buffer.index = i;
1962 buffer.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1963 buffer.memory = V4L2_MEMORY_MMAP;
1964 buffer.m.planes = planes;
1965 buffer.length = 1;
1966 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_QUERYBUF, &buffer);
1967 void* address = mmap(NULL, buffer.m.planes[0].length,
1968 PROT_READ | PROT_WRITE, MAP_SHARED, mfc_fd_,
1969 buffer.m.planes[0].m.mem_offset);
1970 if (address == MAP_FAILED) {
1971 DPLOG(ERROR) << "CreateMfcInputBuffers(): mmap() failed";
1972 return false;
1973 }
1974 mfc_input_buffer_map_[i].address = address;
1975 mfc_input_buffer_map_[i].length = buffer.m.planes[0].length;
1976 }
1977
1978 return true;
1979 }
1980
1981 bool ExynosVideoDecodeAccelerator::CreateMfcOutputBuffers() {
1982 DVLOG(3) << "CreateMfcOutputBuffers()";
1983 DCHECK_EQ(decoder_state_, kInitialized);
1984 DCHECK(!mfc_output_streamon_);
1985 DCHECK_EQ(mfc_output_buffer_count_, 0);
1986
1987 // Number of MFC output buffers we need.
1988 struct v4l2_control ctrl;
1989 memset(&ctrl, 0, sizeof(ctrl));
1990 ctrl.id = V4L2_CID_MIN_BUFFERS_FOR_CAPTURE;
1991 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_G_CTRL, &ctrl);
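  // ctrl.value now holds the minimum number of CAPTURE buffers the codec
  // requires; kMfcOutputBufferExtraCount more are allocated on top of that.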
1992
1993 // Output format was set up in Initialize().
1994
1995 // Allocate the output buffers.
1996 struct v4l2_requestbuffers reqbufs;
1997 memset(&reqbufs, 0, sizeof(reqbufs));
1998 reqbufs.count = ctrl.value + kMfcOutputBufferExtraCount;
1999 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
2000 reqbufs.memory = V4L2_MEMORY_MMAP;
2001 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_REQBUFS, &reqbufs);
2002
2003 // Fill our free-buffers list, and mmap() the buffer planes for GSC input.
2004 mfc_output_buffer_count_ = reqbufs.count;
2005 mfc_output_buffer_map_.resize(mfc_output_buffer_count_);
2006 for (int i = 0; i < mfc_output_buffer_count_; ++i) {
2007 mfc_free_output_buffers_.push_back(i);
2008
2009 // Query for the MEMORY_MMAP pointer.
2010 struct v4l2_plane planes[2];
2011 struct v4l2_buffer buffer;
2012 memset(&buffer, 0, sizeof(buffer));
2013 memset(planes, 0, sizeof(planes));
2014 buffer.index = i;
2015 buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
2016 buffer.memory = V4L2_MEMORY_MMAP;
2017 buffer.m.planes = planes;
2018 buffer.length = 2;
2019 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_QUERYBUF, &buffer);
2020
2021 // Get their user memory for GSC input.
2022 for (int j = 0; j < 2; ++j) {
2023 void* address = mmap(NULL, buffer.m.planes[j].length,
2024 PROT_READ | PROT_WRITE, MAP_SHARED, mfc_fd_,
2025 buffer.m.planes[j].m.mem_offset);
2026 if (address == MAP_FAILED) {
2027 DPLOG(ERROR) << "CreateMfcOutputBuffers(): mmap() failed";
2028 return false;
2029 }
2030 mfc_output_buffer_map_[i].address[j] = address;
2031 mfc_output_buffer_map_[i].length[j] = buffer.m.planes[j].length;
2032 }
2033 }
2034
2035 return true;
2036 }
2037
2038 bool ExynosVideoDecodeAccelerator::CreateGscInputBuffers() {
2039 DVLOG(3) << "CreateGscInputBuffers()";
2040 DCHECK_EQ(decoder_state_, kInitialized);
2041 DCHECK(!gsc_input_streamon_);
2042 DCHECK_EQ(gsc_input_buffer_count_, 0);
2043
2044 struct v4l2_format format;
2045 memset(&format, 0, sizeof(format));
2046 format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
2047 format.fmt.pix_mp.width = frame_buffer_size_.width();
2048 format.fmt.pix_mp.height = frame_buffer_size_.height();
2049 format.fmt.pix_mp.pixelformat = mfc_output_buffer_pixelformat_;
2050 format.fmt.pix_mp.plane_fmt[0].sizeimage = mfc_output_buffer_size_[0];
2051 format.fmt.pix_mp.plane_fmt[1].sizeimage = mfc_output_buffer_size_[1];
2052 // NV12MT_16X16 is a tiled format, for which bytesperline isn't meaningful.
2053 // Convention seems to be to assume 8bpp for these tiled formats.
2054 format.fmt.pix_mp.plane_fmt[0].bytesperline = frame_buffer_size_.width();
2055 format.fmt.pix_mp.plane_fmt[1].bytesperline = frame_buffer_size_.width();
2056 format.fmt.pix_mp.num_planes = 2;
2057 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_FMT, &format);
2058
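  // No rotation or mirroring, and fully opaque output.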
2059 struct v4l2_control control;
2060 memset(&control, 0, sizeof(control));
2061 control.id = V4L2_CID_ROTATE;
2062 control.value = 0;
2063 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CTRL, &control);
2064
2065 memset(&control, 0, sizeof(control));
2066 control.id = V4L2_CID_HFLIP;
2067 control.value = 0;
2068 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CTRL, &control);
2069
2070 memset(&control, 0, sizeof(control));
2071 control.id = V4L2_CID_VFLIP;
2072 control.value = 0;
2073 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CTRL, &control);
2074
2075 memset(&control, 0, sizeof(control));
2076 control.id = V4L2_CID_GLOBAL_ALPHA;
2077 control.value = 255;
2078 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CTRL, &control);
2079
2080 struct v4l2_requestbuffers reqbufs;
2081 memset(&reqbufs, 0, sizeof(reqbufs));
2082 reqbufs.count = kGscInputBufferCount;
2083 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
2084 reqbufs.memory = V4L2_MEMORY_USERPTR;
2085 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_REQBUFS, &reqbufs);
2086
2087 gsc_input_buffer_count_ = reqbufs.count;
2088 gsc_input_buffer_map_.resize(gsc_input_buffer_count_);
2089 for (int i = 0; i < gsc_input_buffer_count_; ++i) {
2090 gsc_free_input_buffers_.push_back(i);
2091 gsc_input_buffer_map_[i].mfc_output = -1;
2092 }
2093
2094 return true;
2095 }
2096
2097 bool ExynosVideoDecodeAccelerator::CreateGscOutputBuffers() {
2098 DVLOG(3) << "CreateGscOutputBuffers()";
2099 DCHECK_EQ(decoder_state_, kInitialized);
2100 DCHECK(!gsc_output_streamon_);
2101 DCHECK_EQ(gsc_output_buffer_count_, 0);
2102
2103 // GSC outputs into the EGLImages we create from the textures we are
2104 // assigned. Assume RGBA8888 format.
2105 struct v4l2_format format;
2106 memset(&format, 0, sizeof(format));
2107 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
2108 format.fmt.pix_mp.width = frame_buffer_size_.width();
2109 format.fmt.pix_mp.height = frame_buffer_size_.height();
2110 format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_RGB32;
2111 format.fmt.pix_mp.plane_fmt[0].sizeimage =
2112 frame_buffer_size_.width() * frame_buffer_size_.height() * 4;
2113 format.fmt.pix_mp.plane_fmt[0].bytesperline = frame_buffer_size_.width() * 4;
2114 format.fmt.pix_mp.num_planes = 1;
2115 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_FMT, &format);
2116
2117 struct v4l2_requestbuffers reqbufs;
2118 memset(&reqbufs, 0, sizeof(reqbufs));
2119 reqbufs.count = kGscOutputBufferCount;
2120 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
2121 reqbufs.memory = V4L2_MEMORY_DMABUF;
2122 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_REQBUFS, &reqbufs);
2123
2124 // We don't fill in the freelist or the map here. That happens once we have
2125 // usable buffers, after AssignPictureBuffers().
2126 gsc_output_buffer_count_ = reqbufs.count;
2127 gsc_output_buffer_map_.resize(gsc_output_buffer_count_);
2128
2129 DVLOG(3) << "CreateGscOutputBuffers(): ProvidePictureBuffers(): "
2130 << "buffer_count=" << gsc_output_buffer_count_
2131 << ", width=" << frame_buffer_size_.width()
2132 << ", height=" << frame_buffer_size_.height();
2133 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
2134 &Client::ProvidePictureBuffers, client_, gsc_output_buffer_count_,
2135 gfx::Size(frame_buffer_size_.width(), frame_buffer_size_.height()),
2136 GL_TEXTURE_2D));
2137
2138 return true;
2139 }
2140
2141 void ExynosVideoDecodeAccelerator::DestroyMfcInputBuffers() {
2142 DVLOG(3) << "DestroyMfcInputBuffers()";
2143 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
2144 DCHECK(!mfc_input_streamon_);
2145
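  // Unmap the buffers before freeing them.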
2146 for (size_t i = 0; i < mfc_input_buffer_map_.size(); ++i) {
2147 if (mfc_input_buffer_map_[i].address != NULL) {
2148 munmap(mfc_input_buffer_map_[i].address,
2149 mfc_input_buffer_map_[i].length);
2150 }
2151 }
2152
2153 struct v4l2_requestbuffers reqbufs;
2154 memset(&reqbufs, 0, sizeof(reqbufs));
2155 reqbufs.count = 0;
2156 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
2157 reqbufs.memory = V4L2_MEMORY_MMAP;
2158 if (ioctl(mfc_fd_, VIDIOC_REQBUFS, &reqbufs) != 0)
2159 DPLOG(ERROR) << "DestroyMfcInputBuffers(): ioctl() failed: VIDIOC_REQBUFS";
2160
2161 mfc_input_buffer_map_.clear();
2162 mfc_free_input_buffers_.clear();
2163 mfc_input_buffer_count_ = 0;
2164 }
2165
2166 void ExynosVideoDecodeAccelerator::DestroyMfcOutputBuffers() {
2167 DVLOG(3) << "DestroyMfcOutputBuffers()";
2168 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
2169 DCHECK(!mfc_output_streamon_);
2170
2171 for (size_t i = 0; i < mfc_output_buffer_map_.size(); ++i) {
2172 if (mfc_output_buffer_map_[i].address[0] != NULL)
2173 munmap(mfc_output_buffer_map_[i].address[0],
2174 mfc_output_buffer_map_[i].length[0]);
2175 if (mfc_output_buffer_map_[i].address[1] != NULL)
2176 munmap(mfc_output_buffer_map_[i].address[1],
2177 mfc_output_buffer_map_[i].length[1]);
2178 }
2179
2180 struct v4l2_requestbuffers reqbufs;
2181 memset(&reqbufs, 0, sizeof(reqbufs));
2182 reqbufs.count = 0;
2183 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
2184 reqbufs.memory = V4L2_MEMORY_MMAP;
2185 if (ioctl(mfc_fd_, VIDIOC_REQBUFS, &reqbufs) != 0)
2186 DPLOG(ERROR) << "DestroyMfcOutputBuffers() ioctl() failed: VIDIOC_REQBUFS";
2187
2188 mfc_output_buffer_map_.clear();
2189 mfc_free_output_buffers_.clear();
2190 mfc_output_buffer_count_ = 0;
2191 }
2192
2193 void ExynosVideoDecodeAccelerator::DestroyGscInputBuffers() {
2194 DVLOG(3) << "DestroyGscInputBuffers()";
2195 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
2196 DCHECK(!gsc_input_streamon_);
2197
2198 struct v4l2_requestbuffers reqbufs;
2199 memset(&reqbufs, 0, sizeof(reqbufs));
2200 reqbufs.count = 0;
2201 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
2202 reqbufs.memory = V4L2_MEMORY_USERPTR;
2203 if (ioctl(gsc_fd_, VIDIOC_REQBUFS, &reqbufs) != 0)
2204 DPLOG(ERROR) << "DestroyGscInputBuffers(): ioctl() failed: VIDIOC_REQBUFS";
2205
2206 gsc_input_buffer_map_.clear();
2207 gsc_free_input_buffers_.clear();
2208 gsc_input_buffer_count_ = 0;
2209 }
2210
2211 void ExynosVideoDecodeAccelerator::DestroyGscOutputBuffers() {
2212 DVLOG(3) << "DestroyGscOutputBuffers()";
2213 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
2214 DCHECK(!gsc_output_streamon_);
2215
2216 if (gsc_output_buffer_map_.size() != 0) {
2217 if (!make_context_current_.Run())
2218 DLOG(ERROR) << "DestroyGscOutputBuffers(): "
2219 << "could not make context current";
2220
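    // Close the buffer fds, destroy the EGL images and syncs, and dismiss the
    // pictures.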
2221 size_t i = 0;
2222 do {
2223 GscOutputRecord& output_record = gsc_output_buffer_map_[i];
2224 if (output_record.fd != -1)
2225 HANDLE_EINTR(close(output_record.fd));
2226 if (output_record.egl_image != EGL_NO_IMAGE_KHR)
2227 egl_destroy_image_khr(egl_display_, output_record.egl_image);
2228 if (output_record.egl_sync != EGL_NO_SYNC_KHR)
2229 egl_destroy_sync_khr(egl_display_, output_record.egl_sync);
2230 if (client_)
2231 client_->DismissPictureBuffer(output_record.picture_id);
2232 ++i;
2233 } while (i < gsc_output_buffer_map_.size());
2234 }
2235
2236 struct v4l2_requestbuffers reqbufs;
2237 memset(&reqbufs, 0, sizeof(reqbufs));
2238 reqbufs.count = 0;
2239 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
2240 reqbufs.memory = V4L2_MEMORY_DMABUF;
2241 if (ioctl(gsc_fd_, VIDIOC_REQBUFS, &reqbufs) != 0)
2242 DPLOG(ERROR) << "DestroyGscOutputBuffers(): ioctl() failed: VIDIOC_REQBUFS";
2243
2244 gsc_output_buffer_map_.clear();
2245 gsc_free_output_buffers_.clear();
2246 gsc_output_buffer_count_ = 0;
2247 }
2248
2249 } // namespace content