Chromium Code Reviews

Side by Side Diff: content/common/gpu/media/exynos_video_decode_accelerator.cc

Issue 11198060: VDA implementation for Exynos, using V4L2 (Closed) Base URL: https://git.chromium.org/git/chromium/src@git-svn
Patch Set: Comment removal. Created 7 years, 11 months ago
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include <dlfcn.h>
6 #include <errno.h>
7 #include <fcntl.h>
8 #include <linux/videodev2.h>
9 #include <poll.h>
10 #include <sys/eventfd.h>
11 #include <sys/ioctl.h>
12 #include <sys/mman.h>
13
14 #include "base/bind.h"
15 #include "base/debug/trace_event.h"
16 #include "base/message_loop.h"
17 #include "base/message_loop_proxy.h"
18 #include "base/posix/eintr_wrapper.h"
19 #include "base/shared_memory.h"
20 #include "content/common/gpu/gl_scoped_binders.h"
21 #include "content/common/gpu/media/exynos_video_decode_accelerator.h"
22 #include "content/common/gpu/media/h264_parser.h"
23 #include "third_party/angle/include/GLES2/gl2.h"
24
25 namespace content {
26
27 #define NOTIFY_ERROR(x) \
28 do { \
29 SetDecoderState(kError); \
30 DLOG(ERROR) << "calling NotifyError(): " << x; \
31 NotifyError(x); \
32 } while (0)
33
34 #define IOCTL_OR_ERROR_RETURN(fd, type, arg) \
35 do { \
36 if (HANDLE_EINTR(ioctl(fd, type, arg)) != 0) { \
37 DPLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \
38 NOTIFY_ERROR(PLATFORM_FAILURE); \
39 return; \
40 } \
41 } while (0)
42
43 #define IOCTL_OR_ERROR_RETURN_FALSE(fd, type, arg) \
44 do { \
45 if (HANDLE_EINTR(ioctl(fd, type, arg)) != 0) { \
46 DPLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \
47 NOTIFY_ERROR(PLATFORM_FAILURE); \
48 return false; \
49 } \
50 } while (0)
51
52 #define POSTSANDBOX_DLSYM(lib, func, type, name) \
53 func = reinterpret_cast<type>(dlsym(lib, name)); \
54 if (func == NULL) { \
55 DPLOG(ERROR) << "PostSandboxInitialization(): failed to dlsym() " \
56 << name << ": " << dlerror(); \
57 return false; \
58 }
59
60 namespace {
61
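// Decode pipeline: the MFC (Multi Format Codec) block decodes the compressed
// bitstream into tiled NV12 frames (V4L2_PIX_FMT_NV12MT_16X16); the GSC
// (GScaler) block then converts those frames into dmabuf-backed buffers bound
// to the client's textures through EGLImages, using the Mali entry points
// resolved below. In V4L2 memory-to-memory terms, OUTPUT queues carry data
// into a device and CAPTURE queues carry data out of it.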
62 const char kExynosMfcDevice[] = "/dev/mfc-dec";
63 const char kExynosGscDevice[] = "/dev/gsc1";
64 const char kMaliDriver[] = "libmali.so";
65
66 // TODO(sheu): fix OpenGL ES header includes, remove unnecessary redefinitions.
67 // http://crbug.com/169433
68 typedef void* GLeglImageOES;
69 typedef EGLBoolean (*MaliEglImageGetBufferExtPhandleFunc)(EGLImageKHR, EGLint*,
70 void*);
71 typedef EGLImageKHR (*EglCreateImageKhrFunc)(EGLDisplay, EGLContext, EGLenum,
72 EGLClientBuffer, const EGLint*);
73 typedef EGLBoolean (*EglDestroyImageKhrFunc)(EGLDisplay, EGLImageKHR);
74 typedef EGLSyncKHR (*EglCreateSyncKhrFunc)(EGLDisplay, EGLenum, const EGLint*);
75 typedef EGLBoolean (*EglDestroySyncKhrFunc)(EGLDisplay, EGLSyncKHR);
76 typedef EGLint (*EglClientWaitSyncKhrFunc)(EGLDisplay, EGLSyncKHR, EGLint,
77 EGLTimeKHR);
78 typedef void (*GlEglImageTargetTexture2dOesFunc)(GLenum, GLeglImageOES);
79
80 void* libmali_handle = NULL;
81 MaliEglImageGetBufferExtPhandleFunc
82 mali_egl_image_get_buffer_ext_phandle = NULL;
83 EglCreateImageKhrFunc egl_create_image_khr = NULL;
84 EglDestroyImageKhrFunc egl_destroy_image_khr = NULL;
85 EglCreateSyncKhrFunc egl_create_sync_khr = NULL;
86 EglDestroySyncKhrFunc egl_destroy_sync_khr = NULL;
87 EglClientWaitSyncKhrFunc egl_client_wait_sync_khr = NULL;
88 GlEglImageTargetTexture2dOesFunc gl_egl_image_target_texture_2d_oes = NULL;
89
90 } // anonymous namespace
91
92 struct ExynosVideoDecodeAccelerator::BitstreamBufferRef {
93 BitstreamBufferRef(
94 base::WeakPtr<Client>& client,
95 scoped_refptr<base::MessageLoopProxy>& client_message_loop_proxy,
96 base::SharedMemory* shm,
97 size_t size,
98 int32 input_id);
99 ~BitstreamBufferRef();
100 const base::WeakPtr<Client> client;
101 const scoped_refptr<base::MessageLoopProxy> client_message_loop_proxy;
102 const scoped_ptr<base::SharedMemory> shm;
103 const size_t size;
104 off_t bytes_used;
105 const int32 input_id;
106 };
107
108 struct ExynosVideoDecodeAccelerator::PictureBufferArrayRef {
109 PictureBufferArrayRef(EGLDisplay egl_display, size_t count);
110 ~PictureBufferArrayRef();
111
112 struct PictureBufferRef {
113 EGLImageKHR egl_image;
114 int egl_image_fd;
115 int32 client_id;
116 };
117
118 EGLDisplay const egl_display;
119 std::vector<PictureBufferRef> picture_buffers;
120 };
121
122 struct ExynosVideoDecodeAccelerator::EGLSyncKHRRef {
123 EGLSyncKHRRef(EGLDisplay egl_display, EGLSyncKHR egl_sync);
124 ~EGLSyncKHRRef();
125 EGLDisplay const egl_display;
126 EGLSyncKHR egl_sync;
127 };
128
129 ExynosVideoDecodeAccelerator::BitstreamBufferRef::BitstreamBufferRef(
130 base::WeakPtr<Client>& client,
131 scoped_refptr<base::MessageLoopProxy>& client_message_loop_proxy,
132 base::SharedMemory* shm, size_t size, int32 input_id)
133 : client(client),
134 client_message_loop_proxy(client_message_loop_proxy),
135 shm(shm),
136 size(size),
137 bytes_used(0),
138 input_id(input_id) {
139 }
140
141 ExynosVideoDecodeAccelerator::BitstreamBufferRef::~BitstreamBufferRef() {
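  // Only buffers that originated from the client (input_id >= 0) are reported
  // back; the internal flush buffer uses a negative id (kFlushBufferId).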
142 if (input_id >= 0) {
143 client_message_loop_proxy->PostTask(FROM_HERE, base::Bind(
144 &Client::NotifyEndOfBitstreamBuffer, client, input_id));
145 }
146 }
147
148 ExynosVideoDecodeAccelerator::PictureBufferArrayRef::PictureBufferArrayRef(
149 EGLDisplay egl_display, size_t count)
150 : egl_display(egl_display),
151 picture_buffers(count) {
152 for (size_t i = 0; i < picture_buffers.size(); ++i) {
153 PictureBufferRef& buffer = picture_buffers[i];
154 buffer.egl_image = EGL_NO_IMAGE_KHR;
155 buffer.egl_image_fd = -1;
156 buffer.client_id = -1;
157 }
158 }
159
160 ExynosVideoDecodeAccelerator::PictureBufferArrayRef::~PictureBufferArrayRef() {
161 for (size_t i = 0; i < picture_buffers.size(); ++i) {
162 PictureBufferRef& buffer = picture_buffers[i];
163 if (buffer.egl_image != EGL_NO_IMAGE_KHR)
164 egl_destroy_image_khr(egl_display, buffer.egl_image);
165 if (buffer.egl_image_fd != -1)
166 HANDLE_EINTR(close(buffer.egl_image_fd));
167 }
168 }
169
170 ExynosVideoDecodeAccelerator::EGLSyncKHRRef::EGLSyncKHRRef(
171 EGLDisplay egl_display, EGLSyncKHR egl_sync)
172 : egl_display(egl_display),
173 egl_sync(egl_sync) {
174 }
175
176 ExynosVideoDecodeAccelerator::EGLSyncKHRRef::~EGLSyncKHRRef() {
177 if (egl_sync != EGL_NO_SYNC_KHR)
178 egl_destroy_sync_khr(egl_display, egl_sync);
179 }
180
181 ExynosVideoDecodeAccelerator::MfcInputRecord::MfcInputRecord()
182 : at_device(false),
183 address(NULL),
184 length(0),
185 bytes_used(0),
186 input_id(-1) {
187 }
188
189 ExynosVideoDecodeAccelerator::MfcInputRecord::~MfcInputRecord() {
190 }
191
192 ExynosVideoDecodeAccelerator::MfcOutputRecord::MfcOutputRecord()
193 : at_device(false),
194 input_id(-1) {
195 bytes_used[0] = 0;
196 bytes_used[1] = 0;
197 address[0] = NULL;
198 address[1] = NULL;
199 length[0] = 0;
200 length[1] = 0;
201 }
202
203 ExynosVideoDecodeAccelerator::MfcOutputRecord::~MfcOutputRecord() {
204 }
205
206 ExynosVideoDecodeAccelerator::GscInputRecord::GscInputRecord()
207 : at_device(false),
208 mfc_output(-1) {
209 }
210
211 ExynosVideoDecodeAccelerator::GscInputRecord::~GscInputRecord() {
212 }
213
214 ExynosVideoDecodeAccelerator::GscOutputRecord::GscOutputRecord()
215 : at_device(false),
216 at_client(false),
217 fd(-1),
218 egl_image(EGL_NO_IMAGE_KHR),
219 egl_sync(EGL_NO_SYNC_KHR),
220 picture_id(-1) {
221 }
222
223 ExynosVideoDecodeAccelerator::GscOutputRecord::~GscOutputRecord() {
224 }
225
226 ExynosVideoDecodeAccelerator::ExynosVideoDecodeAccelerator(
227 EGLDisplay egl_display,
228 EGLContext egl_context,
229 Client* client,
230 const base::Callback<bool(void)>& make_context_current)
231 : child_message_loop_proxy_(base::MessageLoopProxy::current()),
232 weak_this_(base::AsWeakPtr(this)),
233 client_ptr_factory_(client),
234 client_(client_ptr_factory_.GetWeakPtr()),
235 decoder_thread_("ExynosDecoderThread"),
236 decoder_state_(kUninitialized),
237 decoder_current_bitstream_buffer_(NULL),
238 decoder_delay_bitstream_buffer_id_(-1),
239 decoder_current_input_buffer_(-1),
240 decoder_decode_buffer_tasks_scheduled_(0),
241 decoder_frames_at_client_(0),
242 decoder_flushing_(false),
243 mfc_fd_(-1),
244 mfc_input_streamon_(false),
245 mfc_input_buffer_count_(0),
246 mfc_input_buffer_queued_count_(0),
247 mfc_output_streamon_(false),
248 mfc_output_buffer_count_(0),
249 mfc_output_buffer_queued_count_(0),
250 mfc_output_buffer_pixelformat_(0),
251 gsc_fd_(-1),
252 gsc_input_streamon_(false),
253 gsc_input_buffer_count_(0),
254 gsc_input_buffer_queued_count_(0),
255 gsc_output_streamon_(false),
256 gsc_output_buffer_count_(0),
257 gsc_output_buffer_queued_count_(0),
258 device_poll_thread_("ExynosDevicePollThread"),
259 device_poll_interrupt_fd_(-1),
260 make_context_current_(make_context_current),
261 egl_display_(egl_display),
262 egl_context_(egl_context),
263 video_profile_(media::VIDEO_CODEC_PROFILE_UNKNOWN) {
264 }
265
266 ExynosVideoDecodeAccelerator::~ExynosVideoDecodeAccelerator() {
267 DCHECK(!decoder_thread_.IsRunning());
268 DCHECK(!device_poll_thread_.IsRunning());
269
270 if (device_poll_interrupt_fd_ != -1) {
271 HANDLE_EINTR(close(device_poll_interrupt_fd_));
272 device_poll_interrupt_fd_ = -1;
273 }
274 if (gsc_fd_ != -1) {
275 DestroyGscInputBuffers();
276 DestroyGscOutputBuffers();
277 HANDLE_EINTR(close(gsc_fd_));
278 gsc_fd_ = -1;
279 }
280 if (mfc_fd_ != -1) {
281 DestroyMfcInputBuffers();
282 DestroyMfcOutputBuffers();
283 HANDLE_EINTR(close(mfc_fd_));
284 mfc_fd_ = -1;
285 }
286
287 // These maps have members that should be manually destroyed, e.g. file
288 // descriptors, mmap() segments, etc.
289 DCHECK(mfc_input_buffer_map_.empty());
290 DCHECK(mfc_output_buffer_map_.empty());
291 DCHECK(gsc_input_buffer_map_.empty());
292 DCHECK(gsc_output_buffer_map_.empty());
293 }
294
295 bool ExynosVideoDecodeAccelerator::Initialize(
296 media::VideoCodecProfile profile) {
297 DVLOG(3) << "Initialize()";
298 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
299 DCHECK_EQ(decoder_state_, kUninitialized);
300
301 switch (profile) {
302 case media::H264PROFILE_BASELINE:
303 DVLOG(2) << "Initialize(): profile H264PROFILE_BASELINE";
304 break;
305 case media::H264PROFILE_MAIN:
306 DVLOG(2) << "Initialize(): profile H264PROFILE_MAIN";
307 break;
308 case media::H264PROFILE_HIGH:
309 DVLOG(2) << "Initialize(): profile H264PROFILE_HIGH";
310 break;
311 case media::VP8PROFILE_MAIN:
312 DVLOG(2) << "Initialize(): profile VP8PROFILE_MAIN";
313 break;
314 default:
315 DLOG(ERROR) << "Initialize(): unsupported profile=" << profile;
316 return false;
317 };
318 video_profile_ = profile;
319
320 static bool sandbox_initialized = PostSandboxInitialization();
321 if (!sandbox_initialized) {
322 DLOG(ERROR) << "Initialize(): PostSandboxInitialization() failed";
323 NOTIFY_ERROR(PLATFORM_FAILURE);
324 return false;
325 }
326
327 if (egl_display_ == EGL_NO_DISPLAY) {
328 DLOG(ERROR) << "Initialize(): could not get EGLDisplay";
329 NOTIFY_ERROR(PLATFORM_FAILURE);
330 return false;
331 }
332
333 if (egl_context_ == EGL_NO_CONTEXT) {
334 DLOG(ERROR) << "Initialize(): could not get EGLContext";
335 NOTIFY_ERROR(PLATFORM_FAILURE);
336 return false;
337 }
338
339 // Open the video devices.
340 DVLOG(2) << "Initialize(): opening MFC device: " << kExynosMfcDevice;
341 mfc_fd_ = HANDLE_EINTR(open(kExynosMfcDevice,
342 O_RDWR | O_NONBLOCK | O_CLOEXEC));
343 if (mfc_fd_ == -1) {
344 DPLOG(ERROR) << "Initialize(): could not open MFC device: "
345 << kExynosMfcDevice;
346 NOTIFY_ERROR(PLATFORM_FAILURE);
347 return false;
348 }
349 DVLOG(2) << "Initialize(): opening GSC device: " << kExynosGscDevice;
350 gsc_fd_ = HANDLE_EINTR(open(kExynosGscDevice,
351 O_RDWR | O_NONBLOCK | O_CLOEXEC));
352 if (gsc_fd_ == -1) {
353 DPLOG(ERROR) << "Initialize(): could not open GSC device: "
354 << kExynosGscDevice;
355 NOTIFY_ERROR(PLATFORM_FAILURE);
356 return false;
357 }
358
359 // Create the interrupt fd.
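// This is an eventfd that the device poll thread polls alongside the MFC and
// GSC fds, so the decoder thread can wake it out of poll() when buffer queue
// state changes.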
360 DCHECK_EQ(device_poll_interrupt_fd_, -1);
361 device_poll_interrupt_fd_ = eventfd(0, EFD_NONBLOCK | EFD_CLOEXEC);
362 if (device_poll_interrupt_fd_ == -1) {
363 DPLOG(ERROR) << "Initialize(): eventfd() failed";
364 NOTIFY_ERROR(PLATFORM_FAILURE);
365 return false;
366 }
367
368 // Capabilities check.
369 struct v4l2_capability caps;
370 const __u32 kCapsRequired =
371 V4L2_CAP_VIDEO_CAPTURE_MPLANE |
372 V4L2_CAP_VIDEO_OUTPUT_MPLANE |
373 V4L2_CAP_STREAMING;
374 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_QUERYCAP, &caps);
375 if ((caps.capabilities & kCapsRequired) != kCapsRequired) {
376 DLOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP"
377 ", caps check failed: 0x" << std::hex << caps.capabilities;
378 NOTIFY_ERROR(PLATFORM_FAILURE);
379 return false;
380 }
381 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_QUERYCAP, &caps);
382 if ((caps.capabilities & kCapsRequired) != kCapsRequired) {
383 DLOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP"
384 ", caps check failed: 0x" << std::hex << caps.capabilities;
385 NOTIFY_ERROR(PLATFORM_FAILURE);
386 return false;
387 }
388
389 // Some random ioctls that Exynos requires.
390 struct v4l2_control control;
391 memset(&control, 0, sizeof(control));
392 control.id = V4L2_CID_MPEG_MFC51_VIDEO_DECODER_H264_DISPLAY_DELAY; // also VP8
393 control.value = 8; // Magic number from Samsung folks.
394 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_S_CTRL, &control);
395
396 if (!make_context_current_.Run()) {
397 DLOG(ERROR) << "Initialize(): could not make context current";
398 NOTIFY_ERROR(PLATFORM_FAILURE);
399 return false;
400 }
401
402 if (!CreateMfcInputBuffers())
403 return false;
404
405 // MFC output format has to be setup before streaming starts.
406 struct v4l2_format format;
407 memset(&format, 0, sizeof(format));
408 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
409 format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_NV12MT_16X16;
410 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_S_FMT, &format);
411
412 // Initialize format-specific bits.
413 if (video_profile_ >= media::H264PROFILE_MIN &&
414 video_profile_ <= media::H264PROFILE_MAX) {
415 decoder_h264_parser_.reset(new content::H264Parser());
416 }
417
418 if (!decoder_thread_.Start()) {
419 DLOG(ERROR) << "Initialize(): decoder thread failed to start";
420 NOTIFY_ERROR(PLATFORM_FAILURE);
421 return false;
422 }
423
424 SetDecoderState(kInitialized);
425
426 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
427 &Client::NotifyInitializeDone, client_));
428 return true;
429 }
430
431 void ExynosVideoDecodeAccelerator::Decode(
432 const media::BitstreamBuffer& bitstream_buffer) {
433 DVLOG(1) << "Decode(): input_id=" << bitstream_buffer.id()
434 << ", size=" << bitstream_buffer.size();
435 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
436
437 scoped_ptr<BitstreamBufferRef> bitstream_record(new BitstreamBufferRef(
438 client_, child_message_loop_proxy_,
439 new base::SharedMemory(bitstream_buffer.handle(), true),
440 bitstream_buffer.size(), bitstream_buffer.id()));
441 if (!bitstream_record->shm->Map(bitstream_buffer.size())) {
442 DLOG(ERROR) << "Decode(): could not map bitstream_buffer";
443 NOTIFY_ERROR(UNREADABLE_INPUT);
444 return;
445 }
446 DVLOG(3) << "Decode(): mapped to addr=" << bitstream_record->shm->memory();
447
448 // DecodeTask() will take care of running a DecodeBufferTask().
449 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
450 &ExynosVideoDecodeAccelerator::DecodeTask, base::Unretained(this),
451 base::Passed(&bitstream_record)));
452 }
453
454 void ExynosVideoDecodeAccelerator::AssignPictureBuffers(
455 const std::vector<media::PictureBuffer>& buffers) {
456 DVLOG(3) << "AssignPictureBuffers(): buffer_count=" << buffers.size();
457 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
458
459 if (static_cast<int>(buffers.size()) != gsc_output_buffer_count_) {
460 DLOG(ERROR) << "AssignPictureBuffers(): invalid buffer_count";
461 NOTIFY_ERROR(INVALID_ARGUMENT);
462 return;
463 }
464
465 if (!make_context_current_.Run()) {
466 DLOG(ERROR) << "AssignPictureBuffers(): could not make context current";
467 NOTIFY_ERROR(PLATFORM_FAILURE);
468 return;
469 }
470
471 scoped_ptr<PictureBufferArrayRef> pic_buffers_ref(
472 new PictureBufferArrayRef(egl_display_, buffers.size()));
473
474 const static EGLint kImageAttrs[] = {
475 EGL_IMAGE_PRESERVED_KHR, 0,
476 EGL_NONE,
477 };
478 Display* x_display = base::MessagePumpForUI::GetDefaultXDisplay();
479 ScopedTextureBinder bind_restore(0);
480 for (size_t i = 0; i < pic_buffers_ref->picture_buffers.size(); ++i) {
481 PictureBufferArrayRef::PictureBufferRef& buffer =
482 pic_buffers_ref->picture_buffers[i];
483 // Create the X pixmap and then create an EGLImageKHR from it, so we can
484 // get dma_buf backing.
485 Pixmap pixmap = XCreatePixmap(x_display, RootWindow(x_display, 0),
486 buffers[i].size().width(), buffers[i].size().height(), 32);
487 if (!pixmap) {
488 DLOG(ERROR) << "AssignPictureBuffers(): could not create X pixmap";
489 NOTIFY_ERROR(PLATFORM_FAILURE);
490 return;
491 }
492 glBindTexture(GL_TEXTURE_2D, buffers[i].texture_id());
493 EGLImageKHR egl_image = egl_create_image_khr(
494 egl_display_, EGL_NO_CONTEXT, EGL_NATIVE_PIXMAP_KHR,
495 (EGLClientBuffer)pixmap, kImageAttrs);
496 // We can free the X pixmap immediately -- according to the
497 // EGL_KHR_image_base spec, the backing storage does not go away until the
498 // last referencing EGLImage is destroyed.
499 XFreePixmap(x_display, pixmap);
500 if (egl_image == EGL_NO_IMAGE_KHR) {
501 DLOG(ERROR) << "AssignPictureBuffers(): could not create EGLImageKHR";
502 NOTIFY_ERROR(PLATFORM_FAILURE);
503 return;
504 }
505 buffer.egl_image = egl_image;
506 int fd;
507 if (!mali_egl_image_get_buffer_ext_phandle(buffer.egl_image, NULL, &fd)) {
508 DLOG(ERROR) << "AssignPictureBuffers(): "
509 << "could not get EGLImageKHR dmabuf fd";
510 NOTIFY_ERROR(PLATFORM_FAILURE);
511 return;
512 }
513 buffer.egl_image_fd = fd;
514 gl_egl_image_target_texture_2d_oes(GL_TEXTURE_2D, egl_image);
515 buffer.client_id = buffers[i].id();
516 }
517 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
518 &ExynosVideoDecodeAccelerator::AssignPictureBuffersTask,
519 base::Unretained(this), base::Passed(&pic_buffers_ref)));
520 }
521
522 void ExynosVideoDecodeAccelerator::ReusePictureBuffer(int32 picture_buffer_id) {
523 DVLOG(3) << "ReusePictureBuffer(): picture_buffer_id=" << picture_buffer_id;
524 // Must be run on child thread, as we'll insert a sync in the EGL context.
525 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
526
527 if (!make_context_current_.Run()) {
528 DLOG(ERROR) << "ReusePictureBuffer(): could not make context current";
529 NOTIFY_ERROR(PLATFORM_FAILURE);
530 return;
531 }
532
533 EGLSyncKHR egl_sync =
534 egl_create_sync_khr(egl_display_, EGL_SYNC_FENCE_KHR, NULL);
535 if (egl_sync == EGL_NO_SYNC_KHR) {
536 DLOG(ERROR) << "ReusePictureBuffer(): eglCreateSyncKHR() failed";
537 NOTIFY_ERROR(PLATFORM_FAILURE);
538 return;
539 }
540
541 scoped_ptr<EGLSyncKHRRef> egl_sync_ref(new EGLSyncKHRRef(
542 egl_display_, egl_sync));
543 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
544 &ExynosVideoDecodeAccelerator::ReusePictureBufferTask,
545 base::Unretained(this), picture_buffer_id, base::Passed(&egl_sync_ref)));
546 }
547
548 void ExynosVideoDecodeAccelerator::Flush() {
549 DVLOG(3) << "Flush()";
550 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
551 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
552 &ExynosVideoDecodeAccelerator::FlushTask, base::Unretained(this)));
553 }
554
555 void ExynosVideoDecodeAccelerator::Reset() {
556 DVLOG(3) << "Reset()";
557 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
558 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
559 &ExynosVideoDecodeAccelerator::ResetTask, base::Unretained(this)));
560 }
561
562 void ExynosVideoDecodeAccelerator::Destroy() {
563 DVLOG(3) << "Destroy()";
564 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
565
566 // We're destroying; cancel all callbacks.
567 client_ptr_factory_.InvalidateWeakPtrs();
568
569 // If the decoder thread is running, destroy using posted task.
570 if (decoder_thread_.IsRunning()) {
571 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
572 &ExynosVideoDecodeAccelerator::DestroyTask, base::Unretained(this)));
573 // DestroyTask() will cause the decoder_thread_ to flush all tasks.
574 decoder_thread_.Stop();
575 } else {
576 // Otherwise, call the destroy task directly.
577 DestroyTask();
578 }
579
580 // Set to kError state just in case.
581 SetDecoderState(kError);
582
583 delete this;
584 }
585
586 // static
587 void ExynosVideoDecodeAccelerator::PreSandboxInitialization() {
588 DVLOG(3) << "PreSandboxInitialization()";
589 dlerror();
590
591 libmali_handle = dlopen(kMaliDriver, RTLD_LAZY | RTLD_LOCAL);
592 if (libmali_handle == NULL) {
593 DPLOG(ERROR) << "failed to dlopen() " << kMaliDriver << ": " << dlerror();
594 }
595 }
596
597 // static
598 bool ExynosVideoDecodeAccelerator::PostSandboxInitialization() {
599 DVLOG(3) << "PostSandboxInitialization()";
600 if (libmali_handle == NULL) {
601 DLOG(ERROR) << "PostSandboxInitialization(): no " << kMaliDriver
602 << " driver handle";
603 return false;
604 }
605
606 dlerror();
607
608 POSTSANDBOX_DLSYM(libmali_handle,
609 mali_egl_image_get_buffer_ext_phandle,
610 MaliEglImageGetBufferExtPhandleFunc,
611 "mali_egl_image_get_buffer_ext_phandle");
612
613 POSTSANDBOX_DLSYM(libmali_handle,
614 egl_create_image_khr,
615 EglCreateImageKhrFunc,
616 "eglCreateImageKHR");
617
618 POSTSANDBOX_DLSYM(libmali_handle,
619 egl_destroy_image_khr,
620 EglDestroyImageKhrFunc,
621 "eglDestroyImageKHR");
622
623 POSTSANDBOX_DLSYM(libmali_handle,
624 egl_create_sync_khr,
625 EglCreateSyncKhrFunc,
626 "eglCreateSyncKHR");
627
628 POSTSANDBOX_DLSYM(libmali_handle,
629 egl_destroy_sync_khr,
630 EglDestroySyncKhrFunc,
631 "eglDestroySyncKHR");
632
633 POSTSANDBOX_DLSYM(libmali_handle,
634 egl_client_wait_sync_khr,
635 EglClientWaitSyncKhrFunc,
636 "eglClientWaitSyncKHR");
637
638 POSTSANDBOX_DLSYM(libmali_handle,
639 gl_egl_image_target_texture_2d_oes,
640 GlEglImageTargetTexture2dOesFunc,
641 "glEGLImageTargetTexture2DOES");
642
643 return true;
644 }
645
646 void ExynosVideoDecodeAccelerator::DecodeTask(
647 scoped_ptr<BitstreamBufferRef> bitstream_record) {
648 DVLOG(3) << "DecodeTask(): input_id=" << bitstream_record->input_id;
649 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
650 DCHECK_NE(decoder_state_, kUninitialized);
651 TRACE_EVENT1("Video Decoder", "EVDA::DecodeTask", "input_id",
652 bitstream_record->input_id);
653
654 if (decoder_state_ == kResetting || decoder_flushing_) {
655 // In the case that we're resetting or flushing, we need to delay decoding
656 // the BitstreamBuffers that come after the Reset() or Flush() call. When
657 // we're here, we know that this DecodeTask() was scheduled by a Decode()
658 // call that came after (in the client thread) the Reset() or Flush() call;
659 // thus set up the delay if necessary.
660 if (decoder_delay_bitstream_buffer_id_ == -1)
661 decoder_delay_bitstream_buffer_id_ = bitstream_record->input_id;
662 } else if (decoder_state_ == kError) {
663 DVLOG(2) << "DecodeTask(): early out: kError state";
664 return;
665 }
666
667 decoder_input_queue_.push_back(
668 linked_ptr<BitstreamBufferRef>(bitstream_record.release()));
669 decoder_decode_buffer_tasks_scheduled_++;
670 DecodeBufferTask();
671 }
672
673 void ExynosVideoDecodeAccelerator::DecodeBufferTask() {
674 DVLOG(3) << "DecodeBufferTask()";
675 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
676 DCHECK_NE(decoder_state_, kUninitialized);
677 TRACE_EVENT0("Video Decoder", "EVDA::DecodeBufferTask");
678
679 decoder_decode_buffer_tasks_scheduled_--;
680
681 if (decoder_state_ == kResetting) {
682 DVLOG(2) << "DecodeBufferTask(): early out: kResetting state";
683 return;
684 } else if (decoder_state_ == kError) {
685 DVLOG(2) << "DecodeBufferTask(): early out: kError state";
686 return;
687 }
688
689 if (decoder_current_bitstream_buffer_ == NULL) {
690 if (decoder_input_queue_.empty()) {
691 // We're waiting for a new buffer -- exit without scheduling a new task.
692 return;
693 }
694 linked_ptr<BitstreamBufferRef>& buffer_ref = decoder_input_queue_.front();
695 if (decoder_delay_bitstream_buffer_id_ == buffer_ref->input_id) {
696 // We're asked to delay decoding on this and subsequent buffers.
697 return;
698 }
699
700 // Setup to use the next buffer.
701 decoder_current_bitstream_buffer_.reset(buffer_ref.release());
702 decoder_input_queue_.pop_front();
703 DVLOG(3) << "DecodeBufferTask(): reading input_id="
704 << decoder_current_bitstream_buffer_->input_id
705 << ", addr=" << decoder_current_bitstream_buffer_->shm->memory()
706 << ", size=" << decoder_current_bitstream_buffer_->size;
707 }
708 bool schedule_task = false;
709 const size_t size = decoder_current_bitstream_buffer_->size;
710 size_t decoded_size = 0;
711 if (size == 0) {
712 const int32 input_id = decoder_current_bitstream_buffer_->input_id;
713 if (input_id >= 0) {
714 // This is a buffer queued from the client that has zero size. Skip.
715 schedule_task = true;
716 } else {
717 // This is a buffer of zero size, queued to flush the pipe. Flush.
718 DCHECK_EQ(decoder_current_bitstream_buffer_->shm.get(),
719 static_cast<base::SharedMemory*>(NULL));
720 // Enqueue a buffer guaranteed to be empty. To do that, we flush the
721 // current input, enqueue no data to the next frame, then flush that down.
722 schedule_task = true;
723 if (decoder_current_input_buffer_ != -1 &&
724 mfc_input_buffer_map_[decoder_current_input_buffer_].input_id !=
725 kFlushBufferId)
726 schedule_task = FlushInputFrame();
727
728 if (schedule_task && AppendToInputFrame(NULL, 0) && FlushInputFrame()) {
729 DVLOG(2) << "DecodeBufferTask(): enqueued flush buffer";
730 schedule_task = true;
731 } else {
732 // If we failed to enqueue the empty buffer (due to pipeline
733 // backpressure), don't advance the bitstream buffer queue, and don't
734 // schedule the next task. This bitstream buffer queue entry will get
735 // reprocessed when the pipeline frees up.
736 schedule_task = false;
737 }
738 }
739 } else {
740 // This is a buffer queued from the client, with actual contents. Decode.
741 const uint8* const data =
742 reinterpret_cast<const uint8*>(
743 decoder_current_bitstream_buffer_->shm->memory()) +
744 decoder_current_bitstream_buffer_->bytes_used;
745 const size_t data_size =
746 decoder_current_bitstream_buffer_->size -
747 decoder_current_bitstream_buffer_->bytes_used;
748 if (!FindFrameFragment(data, data_size, &decoded_size)) {
749 NOTIFY_ERROR(UNREADABLE_INPUT);
750 return;
751 }
752 DCHECK_LE(decoded_size, data_size);
piman 2013/01/15 05:11:29 nit: make it a real CHECK and I'll take it.
sheu 2013/01/15 05:26:31 Done.
753
754 switch (decoder_state_) {
755 case kInitialized:
756 case kAfterReset:
757 schedule_task = DecodeBufferInitial(data, decoded_size, &decoded_size);
758 break;
759 case kDecoding:
760 schedule_task = DecodeBufferContinue(data, decoded_size);
761 break;
762 default:
763 NOTIFY_ERROR(ILLEGAL_STATE);
764 return;
765 }
766 }
767 if (decoder_state_ == kError) {
768 // Failed during decode.
769 return;
770 }
771
772 if (schedule_task) {
773 decoder_current_bitstream_buffer_->bytes_used += decoded_size;
774 if (decoder_current_bitstream_buffer_->bytes_used ==
775 decoder_current_bitstream_buffer_->size) {
776 // Our current bitstream buffer is done; return it.
777 int32 input_id = decoder_current_bitstream_buffer_->input_id;
778 DVLOG(3) << "DecodeBufferTask(): finished input_id=" << input_id;
779 // BitstreamBufferRef destructor calls NotifyEndOfBitstreamBuffer().
780 decoder_current_bitstream_buffer_.reset();
781 }
782 ScheduleDecodeBufferTaskIfNeeded();
783 }
784 }
785
786 bool ExynosVideoDecodeAccelerator::FindFrameFragment(
787 const uint8* data,
788 size_t size,
789 size_t* endpos) {
790 if (video_profile_ >= media::H264PROFILE_MIN &&
791 video_profile_ <= media::H264PROFILE_MAX) {
792 // For H264, we need to feed HW one frame at a time. This is going to take
793 // some parsing of our input stream.
794 decoder_h264_parser_->SetStream(data, size);
795 content::H264NALU nalu;
796 content::H264Parser::Result result;
797
798 // Find the first NAL.
799 result = decoder_h264_parser_->AdvanceToNextNALU(&nalu);
800 if (result == content::H264Parser::kInvalidStream ||
801 result == content::H264Parser::kUnsupportedStream)
802 return false;
803 *endpos = (nalu.data + nalu.size) - data;
804 if (result == content::H264Parser::kEOStream)
805 return true;
806
807 // Keep on peeking the next NALs while they don't indicate a frame
808 // boundary.
809 for (;;) {
810 result = decoder_h264_parser_->AdvanceToNextNALU(&nalu);
811 if (result == content::H264Parser::kInvalidStream ||
812 result == content::H264Parser::kUnsupportedStream)
813 return false;
814 if (result == content::H264Parser::kEOStream)
815 return true;
816 switch (nalu.nal_unit_type) {
817 case content::H264NALU::kNonIDRSlice:
818 case content::H264NALU::kIDRSlice:
819 if (nalu.size < 1)
820 return false;
821 // For these two, if the "first_mb_in_slice" field is zero, start a
822 // new frame and return. This field is Exp-Golomb coded starting on
823 // the eighth data bit of the NAL; a zero value is encoded with a
824 // leading '1' bit in the byte, which we can detect as the byte being
825 // (unsigned) greater than or equal to 0x80.
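// For example, an IDR NAL that begins 0x65 0x88 ... has data[1] == 0x88
// (binary 10001000): the leading '1' bit decodes as first_mb_in_slice == 0,
// so this slice starts a new frame.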
826 if (nalu.data[1] >= 0x80)
827 return true;
828 break;
829 case content::H264NALU::kSPS:
830 case content::H264NALU::kPPS:
831 case content::H264NALU::kEOSeq:
832 case content::H264NALU::kEOStream:
833 // These unconditionally signal a frame boundary.
834 return true;
835 default:
836 // For all others, keep going.
837 break;
838 }
839 *endpos = (nalu.data + nalu.size) - reinterpret_cast<const uint8*>(data);
piman 2013/01/15 05:11:29 nit: actually data is a const uint8* already so no cast is needed.
sheu 2013/01/15 05:26:31 Done.
840 }
841 NOTREACHED();
842 return false;
843 } else {
844 DCHECK_GE(video_profile_, media::VP8PROFILE_MIN);
845 DCHECK_LE(video_profile_, media::VP8PROFILE_MAX);
846 // For VP8, we can just dump the entire buffer. No fragmentation needed.
847 *endpos = size;
848 return true;
849 }
850 }
851
852 void ExynosVideoDecodeAccelerator::ScheduleDecodeBufferTaskIfNeeded() {
853 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
854
855 // If we're behind on tasks, schedule another one.
856 int buffers_to_decode = decoder_input_queue_.size();
857 if (decoder_current_bitstream_buffer_ != NULL)
858 buffers_to_decode++;
859 if (decoder_decode_buffer_tasks_scheduled_ < buffers_to_decode) {
860 decoder_decode_buffer_tasks_scheduled_++;
861 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
862 &ExynosVideoDecodeAccelerator::DecodeBufferTask,
863 base::Unretained(this)));
864 }
865 }
866
867 bool ExynosVideoDecodeAccelerator::DecodeBufferInitial(
868 const void* data, size_t size, size_t* endpos) {
869 DVLOG(3) << "DecodeBufferInitial(): data=" << data << ", size=" << size;
870 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
871 DCHECK_NE(decoder_state_, kUninitialized);
872 DCHECK_NE(decoder_state_, kDecoding);
873 DCHECK(!device_poll_thread_.IsRunning());
874 // Initial decode. We haven't been able to get output stream format info yet.
875 // Get it, and start decoding.
876
877 // Copy in and send to HW.
878 if (!AppendToInputFrame(data, size) || !FlushInputFrame())
879 return false;
880
881 // Recycle buffers.
882 DequeueMfc();
883
884 // Check and see if we have format info yet.
885 struct v4l2_format format;
886 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
887 if (ioctl(mfc_fd_, VIDIOC_G_FMT, &format) != 0) {
888 if (errno == EINVAL) {
889 // We will get EINVAL if we haven't seen sufficient stream to decode the
890 // format. Return true and schedule the next buffer.
891 *endpos = size;
892 return true;
893 } else {
894 DPLOG(ERROR) << "DecodeBufferInitial(): ioctl() failed: VIDIOC_G_FMT";
895 NOTIFY_ERROR(PLATFORM_FAILURE);
896 return false;
897 }
898 }
899
900 // Run this initialization only on first startup.
901 if (decoder_state_ == kInitialized) {
902 DVLOG(3) << "DecodeBufferInitial(): running one-time initialization";
903 // Success! Setup our parameters.
904 CHECK_EQ(format.fmt.pix_mp.num_planes, 2);
905 frame_buffer_size_.SetSize(
906 format.fmt.pix_mp.width, format.fmt.pix_mp.height);
907 mfc_output_buffer_size_[0] = format.fmt.pix_mp.plane_fmt[0].sizeimage;
908 mfc_output_buffer_size_[1] = format.fmt.pix_mp.plane_fmt[1].sizeimage;
909 mfc_output_buffer_pixelformat_ = format.fmt.pix_mp.pixelformat;
910 DCHECK_EQ(mfc_output_buffer_pixelformat_, V4L2_PIX_FMT_NV12MT_16X16);
911
912 // Create our other buffers.
913 if (!CreateMfcOutputBuffers() || !CreateGscInputBuffers() ||
914 !CreateGscOutputBuffers())
915 return false;
916
917 // MFC expects to process the initial buffer once during stream init to
918 // configure stream parameters, but will not consume the stream data on that
919 // iteration. Subsequent iterations (including after reset) do not require
920 // the stream init step.
921 *endpos = 0;
922 } else {
923 *endpos = size;
924 }
925
926 // StartDevicePoll will raise the error if there is one.
927 if (!StartDevicePoll())
928 return false;
929
930 decoder_state_ = kDecoding;
931 ScheduleDecodeBufferTaskIfNeeded();
932 return true;
933 }
934
935 bool ExynosVideoDecodeAccelerator::DecodeBufferContinue(
936 const void* data, size_t size) {
937 DVLOG(3) << "DecodeBufferContinue(): data=" << data << ", size=" << size;
938 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
939 DCHECK_EQ(decoder_state_, kDecoding);
940
941 // Both of these calls will set kError state if they fail.
942 return (AppendToInputFrame(data, size) && FlushInputFrame());
943 }
944
945 bool ExynosVideoDecodeAccelerator::AppendToInputFrame(
946 const void* data, size_t size) {
947 DVLOG(3) << "AppendToInputFrame()";
948 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
949 DCHECK_NE(decoder_state_, kUninitialized);
950 DCHECK_NE(decoder_state_, kResetting);
951 DCHECK_NE(decoder_state_, kError);
952 // This routine can handle data == NULL and size == 0, which occurs when
953 // we queue an empty buffer for the purposes of flushing the pipe.
954
955 // Flush if we're too big
956 if (decoder_current_input_buffer_ != -1) {
957 MfcInputRecord& input_record =
958 mfc_input_buffer_map_[decoder_current_input_buffer_];
959 if (input_record.bytes_used + size > input_record.length) {
960 if (!FlushInputFrame())
961 return false;
962 decoder_current_input_buffer_ = -1;
963 }
964 }
965
966 // Try to get an available input buffer
967 if (decoder_current_input_buffer_ == -1) {
968 if (mfc_free_input_buffers_.empty()) {
969 // See if we can get more free buffers from HW
970 DequeueMfc();
971 if (mfc_free_input_buffers_.empty()) {
972 // Nope!
973 DVLOG(2) << "AppendToInputFrame(): stalled for input buffers";
974 return false;
975 }
976 }
977 decoder_current_input_buffer_ = mfc_free_input_buffers_.back();
978 mfc_free_input_buffers_.pop_back();
979 MfcInputRecord& input_record =
980 mfc_input_buffer_map_[decoder_current_input_buffer_];
981 DCHECK_EQ(input_record.bytes_used, 0);
982 DCHECK_EQ(input_record.input_id, -1);
983 DCHECK(decoder_current_bitstream_buffer_ != NULL);
984 input_record.input_id = decoder_current_bitstream_buffer_->input_id;
985 }
986
987 DCHECK_EQ(data == NULL, size == 0);
988 if (size == 0) {
989 // If we asked for an empty buffer, return now. We return only after
990 // getting the next input buffer, since we might actually want an empty
991 // input buffer for flushing purposes.
992 return true;
993 }
994
995 // Copy in to the buffer.
996 MfcInputRecord& input_record =
997 mfc_input_buffer_map_[decoder_current_input_buffer_];
998 if (size > input_record.length - input_record.bytes_used) {
999 LOG(ERROR) << "AppendToInputFrame(): over-size frame, erroring";
1000 NOTIFY_ERROR(UNREADABLE_INPUT);
1001 return false;
1002 }
1003 memcpy(
1004 reinterpret_cast<uint8*>(input_record.address) + input_record.bytes_used,
1005 data,
1006 size);
1007 input_record.bytes_used += size;
1008
1009 return true;
1010 }
1011
1012 bool ExynosVideoDecodeAccelerator::FlushInputFrame() {
1013 DVLOG(3) << "FlushInputFrame()";
1014 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1015 DCHECK_NE(decoder_state_, kUninitialized);
1016 DCHECK_NE(decoder_state_, kResetting);
1017 DCHECK_NE(decoder_state_, kError);
1018
1019 if (decoder_current_input_buffer_ == -1)
1020 return true;
1021
1022 MfcInputRecord& input_record =
1023 mfc_input_buffer_map_[decoder_current_input_buffer_];
1024 DCHECK_NE(input_record.input_id, -1);
1025 DCHECK_EQ(input_record.input_id == kFlushBufferId,
1026 input_record.bytes_used == 0);
1027 // * if input_id >= 0, this input buffer was prompted by a bitstream buffer we
1028 // got from the client. We can skip it if it is empty.
1029 // * if input_id < 0 (should be kFlushBufferId in this case), this input
1030 // buffer was prompted by a flush buffer, and should be queued even when
1031 // empty.
1032 if (input_record.input_id >= 0 && input_record.bytes_used == 0) {
1033 input_record.input_id = -1;
1034 mfc_free_input_buffers_.push_back(decoder_current_input_buffer_);
1035 decoder_current_input_buffer_ = -1;
1036 return true;
1037 }
1038
1039 // Queue it to MFC.
1040 mfc_input_ready_queue_.push_back(decoder_current_input_buffer_);
1041 decoder_current_input_buffer_ = -1;
1042 DVLOG(3) << "FlushInputFrame(): submitting input_id="
1043 << input_record.input_id;
1044 // Kick the MFC once since there's new available input for it.
1045 EnqueueMfc();
1046
1047 return (decoder_state_ != kError);
1048 }
1049
1050 void ExynosVideoDecodeAccelerator::AssignPictureBuffersTask(
1051 scoped_ptr<PictureBufferArrayRef> pic_buffers) {
1052 DVLOG(3) << "AssignPictureBuffersTask()";
1053 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1054 DCHECK_NE(decoder_state_, kUninitialized);
1055 TRACE_EVENT0("Video Decoder", "EVDA::AssignPictureBuffersTask");
1056
1057 // We run AssignPictureBuffersTask even if we're in kResetting.
1058 if (decoder_state_ == kError) {
1059 DVLOG(2) << "AssignPictureBuffersTask(): early out: kError state";
1060 return;
1061 }
1062
1063 DCHECK_EQ(pic_buffers->picture_buffers.size(), gsc_output_buffer_map_.size());
1064 for (size_t i = 0; i < gsc_output_buffer_map_.size(); ++i) {
1065 // We should be blank right now.
1066 GscOutputRecord& output_record = gsc_output_buffer_map_[i];
1067 DCHECK_EQ(output_record.fd, -1);
1068 DCHECK_EQ(output_record.egl_image, EGL_NO_IMAGE_KHR);
1069 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
1070 DCHECK_EQ(output_record.picture_id, -1);
1071 PictureBufferArrayRef::PictureBufferRef& buffer =
1072 pic_buffers->picture_buffers[i];
1073 output_record.fd = buffer.egl_image_fd;
1074 output_record.egl_image = buffer.egl_image;
1075 output_record.picture_id = buffer.client_id;
1076
1077 // Take ownership of the EGLImage and fd.
1078 buffer.egl_image = EGL_NO_IMAGE_KHR;
1079 buffer.egl_image_fd = -1;
1080 // And add this buffer to the free list.
1081 gsc_free_output_buffers_.push_back(i);
1082 }
1083
1084 // We got buffers! Kick the GSC.
1085 EnqueueGsc();
1086 }
1087
1088 void ExynosVideoDecodeAccelerator::ServiceDeviceTask() {
1089 DVLOG(3) << "ServiceDeviceTask()";
1090 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1091 DCHECK_NE(decoder_state_, kUninitialized);
1092 DCHECK_NE(decoder_state_, kInitialized);
1093 DCHECK_NE(decoder_state_, kAfterReset);
1094 TRACE_EVENT0("Video Decoder", "EVDA::ServiceDeviceTask");
1095
1096 if (decoder_state_ == kResetting) {
1097 DVLOG(2) << "ServiceDeviceTask(): early out: kResetting state";
1098 return;
1099 } else if (decoder_state_ == kError) {
1100 DVLOG(2) << "ServiceDeviceTask(): early out: kError state";
1101 return;
1102 }
1103
1104 DequeueMfc();
1105 DequeueGsc();
1106 EnqueueMfc();
1107 EnqueueGsc();
1108
1109 // Clear the interrupt fd.
1110 if (!ClearDevicePollInterrupt())
1111 return;
1112
1113 unsigned int poll_fds = 0;
1114 // Add MFC fd, if we should poll on it.
1115 // MFC can be polled as soon as either input or output buffers are queued.
1116 if (mfc_input_buffer_queued_count_ + mfc_output_buffer_queued_count_ > 0)
1117 poll_fds |= kPollMfc;
1118 // Add GSC fd, if we should poll on it.
1119 // GSC has to wait until both input and output buffers are queued.
1120 if (gsc_input_buffer_queued_count_ > 0 && gsc_output_buffer_queued_count_ > 0)
1121 poll_fds |= kPollGsc;
1122
1123 // ServiceDeviceTask() should only ever be scheduled from DevicePollTask(),
1124 // so either:
1125 // * device_poll_thread_ is running normally
1126 // * device_poll_thread_ scheduled us, but then a ResetTask() or DestroyTask()
1127 // shut it down, in which case we're either in kResetting or kError states
1128 // respectively, and we should have early-outed already.
1129 DCHECK(device_poll_thread_.message_loop());
1130 // Queue the DevicePollTask() now.
1131 device_poll_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
1132 &ExynosVideoDecodeAccelerator::DevicePollTask,
1133 base::Unretained(this),
1134 poll_fds));
1135
1136 DVLOG(1) << "ServiceDeviceTask(): buffer counts: DEC["
1137 << decoder_input_queue_.size() << "->"
1138 << mfc_input_ready_queue_.size() << "] => MFC["
1139 << mfc_free_input_buffers_.size() << "+"
1140 << mfc_input_buffer_queued_count_ << "/"
1141 << mfc_input_buffer_count_ << "->"
1142 << mfc_free_output_buffers_.size() << "+"
1143 << mfc_output_buffer_queued_count_ << "/"
1144 << mfc_output_buffer_count_ << "] => "
1145 << mfc_output_gsc_input_queue_.size() << " => GSC["
1146 << gsc_free_input_buffers_.size() << "+"
1147 << gsc_input_buffer_queued_count_ << "/"
1148 << gsc_input_buffer_count_ << "->"
1149 << gsc_free_output_buffers_.size() << "+"
1150 << gsc_output_buffer_queued_count_ << "/"
1151 << gsc_output_buffer_count_ << "] => VDA["
1152 << decoder_frames_at_client_ << "]";
1153
1154 ScheduleDecodeBufferTaskIfNeeded();
1155 }
1156
1157 void ExynosVideoDecodeAccelerator::EnqueueMfc() {
1158 DVLOG(3) << "EnqueueMfc()";
1159 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1160 DCHECK_NE(decoder_state_, kUninitialized);
1161 TRACE_EVENT0("Video Decoder", "EVDA::EnqueueMfc");
1162
1163 // Drain the pipe of completed decode buffers.
1164 const int old_mfc_inputs_queued = mfc_input_buffer_queued_count_;
1165 while (!mfc_input_ready_queue_.empty()) {
1166 if (!EnqueueMfcInputRecord())
1167 return;
1168 }
1169 if (old_mfc_inputs_queued == 0 && mfc_input_buffer_queued_count_ != 0) {
1170 // We just started up a previously empty queue.
1171 // Queue state changed; signal interrupt.
1172 if (!SetDevicePollInterrupt())
1173 return;
1174 // Start VIDIOC_STREAMON if we haven't yet.
1175 if (!mfc_input_streamon_) {
1176 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1177 IOCTL_OR_ERROR_RETURN(mfc_fd_, VIDIOC_STREAMON, &type);
1178 mfc_input_streamon_ = true;
1179 }
1180 }
1181
1182 // Enqueue all the MFC outputs we can.
1183 const int old_mfc_outputs_queued = mfc_output_buffer_queued_count_;
1184 while (!mfc_free_output_buffers_.empty()) {
1185 if (!EnqueueMfcOutputRecord())
1186 return;
1187 }
1188 if (old_mfc_outputs_queued == 0 && mfc_output_buffer_queued_count_ != 0) {
1189 // We just started up a previously empty queue.
1190 // Queue state changed; signal interrupt.
1191 if (!SetDevicePollInterrupt())
1192 return;
1193 // Start VIDIOC_STREAMON if we haven't yet.
1194 if (!mfc_output_streamon_) {
1195 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1196 IOCTL_OR_ERROR_RETURN(mfc_fd_, VIDIOC_STREAMON, &type);
1197 mfc_output_streamon_ = true;
1198 }
1199 }
1200 }
1201
1202 void ExynosVideoDecodeAccelerator::DequeueMfc() {
1203 DVLOG(3) << "DequeueMfc()";
1204 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1205 DCHECK_NE(decoder_state_, kUninitialized);
1206 TRACE_EVENT0("Video Decoder", "EVDA::DequeueMfc");
1207
1208 // Dequeue completed MFC input (VIDEO_OUTPUT) buffers, and recycle to the free
1209 // list.
1210 struct v4l2_buffer dqbuf;
1211 struct v4l2_plane planes[2];
1212 while (mfc_input_buffer_queued_count_ > 0) {
1213 DCHECK(mfc_input_streamon_);
1214 memset(&dqbuf, 0, sizeof(dqbuf));
1215 dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1216 dqbuf.memory = V4L2_MEMORY_MMAP;
1217 if (ioctl(mfc_fd_, VIDIOC_DQBUF, &dqbuf) != 0) {
1218 if (errno == EAGAIN) {
1219 // EAGAIN if we're just out of buffers to dequeue.
1220 break;
1221 }
1222 DPLOG(ERROR) << "DequeueMfc(): ioctl() failed: VIDIOC_DQBUF";
1223 NOTIFY_ERROR(PLATFORM_FAILURE);
1224 return;
1225 }
1226 MfcInputRecord& input_record = mfc_input_buffer_map_[dqbuf.index];
1227 DCHECK(input_record.at_device);
1228 mfc_free_input_buffers_.push_back(dqbuf.index);
1229 input_record.at_device = false;
1230 input_record.bytes_used = 0;
1231 input_record.input_id = -1;
1232 mfc_input_buffer_queued_count_--;
1233 }
1234
1235 // Dequeue completed MFC output (VIDEO_CAPTURE) buffers, and queue to the
1236 // completed queue.
1237 while (mfc_output_buffer_queued_count_ > 0) {
1238 DCHECK(mfc_output_streamon_);
1239 memset(&dqbuf, 0, sizeof(dqbuf));
1240 memset(planes, 0, sizeof(planes));
1241 dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1242 dqbuf.memory = V4L2_MEMORY_MMAP;
1243 dqbuf.m.planes = planes;
1244 dqbuf.length = 2;
1245 if (ioctl(mfc_fd_, VIDIOC_DQBUF, &dqbuf) != 0) {
1246 if (errno == EAGAIN) {
1247 // EAGAIN if we're just out of buffers to dequeue.
1248 break;
1249 }
1250 DPLOG(ERROR) << "DequeueMfc(): ioctl() failed: VIDIOC_DQBUF";
1251 NOTIFY_ERROR(PLATFORM_FAILURE);
1252 return;
1253 }
1254 MfcOutputRecord& output_record = mfc_output_buffer_map_[dqbuf.index];
1255 DCHECK(output_record.at_device);
1256 output_record.at_device = false;
1257 output_record.bytes_used[0] = dqbuf.m.planes[0].bytesused;
1258 output_record.bytes_used[1] = dqbuf.m.planes[1].bytesused;
1259 if (output_record.bytes_used[0] + output_record.bytes_used[1] == 0) {
1260 // This is an empty output buffer returned as part of a flush.
1261 mfc_free_output_buffers_.push_back(dqbuf.index);
1262 output_record.input_id = -1;
1263 } else {
1264 // This is an output buffer with contents to pass down the pipe.
1265 mfc_output_gsc_input_queue_.push_back(dqbuf.index);
1266 output_record.input_id = dqbuf.timestamp.tv_sec;
1267 DCHECK(output_record.input_id >= 0);
1268 DVLOG(3) << "DequeueMfc(): dequeued input_id=" << output_record.input_id;
1269 // We don't count this output buffer dequeued yet, or add it to the free
1270 // list, as it has data GSC needs to process.
1271
1272 // We have new frames in mfc_output_gsc_input_queue_. Kick the pipe.
1273 SetDevicePollInterrupt();
1274 }
1275 mfc_output_buffer_queued_count_--;
1276 }
1277
1278 NotifyFlushDoneIfNeeded();
1279 }
1280
1281 void ExynosVideoDecodeAccelerator::EnqueueGsc() {
1282 DVLOG(3) << "EnqueueGsc()";
1283 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1284 DCHECK_NE(decoder_state_, kUninitialized);
1285 DCHECK_NE(decoder_state_, kInitialized);
1286 TRACE_EVENT0("Video Decoder", "EVDA::EnqueueGsc");
1287
1288 // Drain the pipe of completed MFC output buffers.
1289 const int old_gsc_inputs_queued = gsc_input_buffer_queued_count_;
1290 while (!mfc_output_gsc_input_queue_.empty() &&
1291 !gsc_free_input_buffers_.empty()) {
1292 if (!EnqueueGscInputRecord())
1293 return;
1294 }
1295 if (old_gsc_inputs_queued == 0 && gsc_input_buffer_queued_count_ != 0) {
1296 // We just started up a previously empty queue.
1297 // Queue state changed; signal interrupt.
1298 if (!SetDevicePollInterrupt())
1299 return;
1300 // Start VIDIOC_STREAMON if we haven't yet.
1301 if (!gsc_input_streamon_) {
1302 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1303 IOCTL_OR_ERROR_RETURN(gsc_fd_, VIDIOC_STREAMON, &type);
1304 gsc_input_streamon_ = true;
1305 }
1306 }
1307
1308 // Enqueue a GSC output, only if we need one
1309 if (gsc_input_buffer_queued_count_ != 0 &&
1310 gsc_output_buffer_queued_count_ == 0 &&
1311 !gsc_free_output_buffers_.empty()) {
1312 const int old_gsc_outputs_queued = gsc_output_buffer_queued_count_;
1313 if (!EnqueueGscOutputRecord())
1314 return;
1315 if (old_gsc_outputs_queued == 0 && gsc_output_buffer_queued_count_ != 0) {
1316 // We just started up a previously empty queue.
1317 // Queue state changed; signal interrupt.
1318 if (!SetDevicePollInterrupt())
1319 return;
1320 // Start VIDIOC_STREAMON if we haven't yet.
1321 if (!gsc_output_streamon_) {
1322 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1323 IOCTL_OR_ERROR_RETURN(gsc_fd_, VIDIOC_STREAMON, &type);
1324 gsc_output_streamon_ = true;
1325 }
1326 }
1327 }
1328 // Bug check: GSC is liable to race conditions if more than one buffer is
1329 // simultaneously queued.
1330 DCHECK_GE(1, gsc_output_buffer_queued_count_);
1331 }
1332
1333 void ExynosVideoDecodeAccelerator::DequeueGsc() {
1334 DVLOG(3) << "DequeueGsc()";
1335 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1336 DCHECK_NE(decoder_state_, kUninitialized);
1337 DCHECK_NE(decoder_state_, kInitialized);
1338 DCHECK_NE(decoder_state_, kAfterReset);
1339 TRACE_EVENT0("Video Decoder", "EVDA::DequeueGsc");
1340
1341 // Dequeue completed GSC input (VIDEO_OUTPUT) buffers, and recycle to the free
1342 // list. Also recycle the corresponding MFC output buffers at this time.
1343 struct v4l2_buffer dqbuf;
1344 while (gsc_input_buffer_queued_count_ > 0) {
1345 DCHECK(gsc_input_streamon_);
1346 memset(&dqbuf, 0, sizeof(dqbuf));
1347 dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1348 dqbuf.memory = V4L2_MEMORY_DMABUF;
1349 if (ioctl(gsc_fd_, VIDIOC_DQBUF, &dqbuf) != 0) {
1350 if (errno == EAGAIN) {
1351 // EAGAIN if we're just out of buffers to dequeue.
1352 break;
1353 }
1354 DPLOG(ERROR) << "DequeueGsc(): ioctl() failed: VIDIOC_DQBUF";
1355 NOTIFY_ERROR(PLATFORM_FAILURE);
1356 return;
1357 }
1358 GscInputRecord& input_record = gsc_input_buffer_map_[dqbuf.index];
1359 MfcOutputRecord& output_record =
1360 mfc_output_buffer_map_[input_record.mfc_output];
1361 DCHECK(input_record.at_device);
1362 gsc_free_input_buffers_.push_back(dqbuf.index);
1363 mfc_free_output_buffers_.push_back(input_record.mfc_output);
1364 input_record.at_device = false;
1365 input_record.mfc_output = -1;
1366 output_record.input_id = -1;
1367 gsc_input_buffer_queued_count_--;
1368 }
1369
1370 // Dequeue completed GSC output (VIDEO_CAPTURE) buffers, and send them off to
1371 // the client. Don't recycle to its free list yet -- we can't do that until
1372 // ReusePictureBuffer() returns it to us.
1373 while (gsc_output_buffer_queued_count_ > 0) {
1374 DCHECK(gsc_output_streamon_);
1375 memset(&dqbuf, 0, sizeof(dqbuf));
1376 dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1377 dqbuf.memory = V4L2_MEMORY_DMABUF;
1378 if (ioctl(gsc_fd_, VIDIOC_DQBUF, &dqbuf) != 0) {
1379 if (errno == EAGAIN) {
1380 // EAGAIN if we're just out of buffers to dequeue.
1381 break;
1382 }
1383 DPLOG(ERROR) << "DequeueGsc(): ioctl() failed: VIDIOC_DQBUF";
1384 NOTIFY_ERROR(PLATFORM_FAILURE);
1385 return;
1386 }
1387 GscOutputRecord& output_record = gsc_output_buffer_map_[dqbuf.index];
1388 DCHECK(output_record.at_device);
1389 DCHECK(!output_record.at_client);
1390 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
1391 output_record.at_device = false;
1392 output_record.at_client = true;
1393 gsc_output_buffer_queued_count_--;
1394 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
1395 &Client::PictureReady, client_, media::Picture(
1396 output_record.picture_id, dqbuf.timestamp.tv_sec)));
1397 decoder_frames_at_client_++;
1398 }
1399
1400 NotifyFlushDoneIfNeeded();
1401 }
1402
1403 bool ExynosVideoDecodeAccelerator::EnqueueMfcInputRecord() {
1404 DVLOG(3) << "EnqueueMfcInputRecord()";
1405 DCHECK(!mfc_input_ready_queue_.empty());
1406
1407 // Enqueue an MFC input (VIDEO_OUTPUT) buffer.
1408 const int buffer = mfc_input_ready_queue_.back();
1409 MfcInputRecord& input_record = mfc_input_buffer_map_[buffer];
1410 DCHECK(!input_record.at_device);
1411 struct v4l2_buffer qbuf;
1412 struct v4l2_plane qbuf_plane;
1413 memset(&qbuf, 0, sizeof(qbuf));
1414 memset(&qbuf_plane, 0, sizeof(qbuf_plane));
1415 qbuf.index = buffer;
1416 qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
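// Stash the bitstream id in the v4l2 timestamp; it travels through MFC with
// the buffer (and is forwarded to GSC in EnqueueGscInputRecord()), so the
// dequeue paths can match decoded frames back to their input_id.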
1417 qbuf.timestamp.tv_sec = input_record.input_id;
1418 qbuf.memory = V4L2_MEMORY_MMAP;
1419 qbuf.m.planes = &qbuf_plane;
1420 qbuf.m.planes[0].bytesused = input_record.bytes_used;
1421 qbuf.length = 1;
1422 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_QBUF, &qbuf);
1423 mfc_input_ready_queue_.pop_back();
1424 input_record.at_device = true;
1425 mfc_input_buffer_queued_count_++;
1426 DVLOG(3) << "EnqueueMfcInputRecord(): enqueued input_id="
1427 << input_record.input_id;
1428 return true;
1429 }
1430
1431 bool ExynosVideoDecodeAccelerator::EnqueueMfcOutputRecord() {
1432 DVLOG(3) << "EnqueueMfcOutputRecord()";
1433 DCHECK(!mfc_free_output_buffers_.empty());
1434
1435 // Enqueue an MFC output (VIDEO_CAPTURE) buffer.
1436 const int buffer = mfc_free_output_buffers_.back();
1437 MfcOutputRecord& output_record = mfc_output_buffer_map_[buffer];
1438 DCHECK(!output_record.at_device);
1439 DCHECK_EQ(output_record.input_id, -1);
1440 struct v4l2_buffer qbuf;
1441 struct v4l2_plane qbuf_planes[2];
1442 memset(&qbuf, 0, sizeof(qbuf));
1443 memset(qbuf_planes, 0, sizeof(qbuf_planes));
1444 qbuf.index = buffer;
1445 qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1446 qbuf.memory = V4L2_MEMORY_MMAP;
1447 qbuf.m.planes = qbuf_planes;
1448 qbuf.length = 2;
1449 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_QBUF, &qbuf);
1450 mfc_free_output_buffers_.pop_back();
1451 output_record.at_device = true;
1452 mfc_output_buffer_queued_count_++;
1453 return true;
1454 }
1455
1456 bool ExynosVideoDecodeAccelerator::EnqueueGscInputRecord() {
1457 DVLOG(3) << "EnqueueGscInputRecord()";
1458 DCHECK(!gsc_free_input_buffers_.empty());
1459
1460 // Enqueue a GSC input (VIDEO_OUTPUT) buffer for a complete MFC output
1461 // (VIDEO_CAPTURE) buffer.
1462 const int mfc_buffer = mfc_output_gsc_input_queue_.front();
1463 const int gsc_buffer = gsc_free_input_buffers_.back();
1464 MfcOutputRecord& output_record = mfc_output_buffer_map_[mfc_buffer];
1465 DCHECK(!output_record.at_device);
1466 GscInputRecord& input_record = gsc_input_buffer_map_[gsc_buffer];
1467 DCHECK(!input_record.at_device);
1468 DCHECK_EQ(input_record.mfc_output, -1);
1469 struct v4l2_buffer qbuf;
1470 struct v4l2_plane qbuf_planes[2];
1471 memset(&qbuf, 0, sizeof(qbuf));
1472 memset(qbuf_planes, 0, sizeof(qbuf_planes));
1473 qbuf.index = gsc_buffer;
1474 qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1475 qbuf.timestamp.tv_sec = output_record.input_id;
1476 qbuf.memory = V4L2_MEMORY_USERPTR;
1477 qbuf.m.planes = qbuf_planes;
1478 qbuf.m.planes[0].bytesused = output_record.bytes_used[0];
1479 qbuf.m.planes[0].length = mfc_output_buffer_size_[0];
1480 qbuf.m.planes[0].m.userptr = (unsigned long)output_record.address[0];
1481 qbuf.m.planes[1].bytesused = output_record.bytes_used[1];
1482 qbuf.m.planes[1].length = mfc_output_buffer_size_[1];
1483 qbuf.m.planes[1].m.userptr = (unsigned long)output_record.address[1];
1484 qbuf.length = 2;
1485 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_QBUF, &qbuf);
1486 mfc_output_gsc_input_queue_.pop_front();
1487 gsc_free_input_buffers_.pop_back();
1488 input_record.at_device = true;
1489 input_record.mfc_output = mfc_buffer;
1490 output_record.bytes_used[0] = 0;
1491 output_record.bytes_used[1] = 0;
1492 gsc_input_buffer_queued_count_++;
1493 DVLOG(3) << "EnqueueGscInputRecord(): enqueued input_id="
1494 << output_record.input_id;
1495 return true;
1496 }
1497
1498 bool ExynosVideoDecodeAccelerator::EnqueueGscOutputRecord() {
1499 DVLOG(3) << "EnqueueGscOutputRecord()";
1500 DCHECK(!gsc_free_output_buffers_.empty());
1501
1502 // Enqueue a GSC output (VIDEO_CAPTURE) buffer.
1503 const int buffer = gsc_free_output_buffers_.front();
1504 GscOutputRecord& output_record = gsc_output_buffer_map_[buffer];
1505 DCHECK(!output_record.at_device);
1506 DCHECK(!output_record.at_client);
1507 if (output_record.egl_sync != EGL_NO_SYNC_KHR) {
1508 TRACE_EVENT0(
1509 "Video Decoder",
1510 "EVDA::EnqueueGscOutputRecord: eglClientWaitSyncKHR");
1511 // If we have to wait for completion, wait. Note that
1512 // gsc_free_output_buffers_ is a FIFO queue, so we always wait on the
1513 // buffer that has been in the queue the longest.
1514 egl_client_wait_sync_khr(egl_display_, output_record.egl_sync, 0,
1515 EGL_FOREVER_KHR);
1516 egl_destroy_sync_khr(egl_display_, output_record.egl_sync);
1517 output_record.egl_sync = EGL_NO_SYNC_KHR;
1518 }
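  // The CAPTURE buffer is queued by DMABUF fd, so GSC writes the converted
  // RGBA frame straight into the buffer backing the client's texture
  // (presumably via the fd exported from the EGLImage bound to that texture);
  // no extra copy to the GPU is needed.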
1519 struct v4l2_buffer qbuf;
1520 struct v4l2_plane qbuf_plane;
1521 memset(&qbuf, 0, sizeof(qbuf));
1522 memset(&qbuf_plane, 0, sizeof(qbuf_plane));
1523 qbuf.index = buffer;
1524 qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1525 qbuf.memory = V4L2_MEMORY_DMABUF;
1526 qbuf.m.planes = &qbuf_plane;
1527 qbuf.m.planes[0].m.fd = output_record.fd;
1528 qbuf.length = 1;
1529 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_QBUF, &qbuf);
1530 gsc_free_output_buffers_.pop_front();
1531 output_record.at_device = true;
1532 gsc_output_buffer_queued_count_++;
1533 return true;
1534 }
1535
1536 void ExynosVideoDecodeAccelerator::ReusePictureBufferTask(
1537 int32 picture_buffer_id, scoped_ptr<EGLSyncKHRRef> egl_sync_ref) {
1538 DVLOG(3) << "ReusePictureBufferTask(): picture_buffer_id="
1539 << picture_buffer_id;
1540 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1541 TRACE_EVENT0("Video Decoder", "EVDA::ReusePictureBufferTask");
1542
1543 // We run ReusePictureBufferTask even if we're in kResetting.
1544 if (decoder_state_ == kError) {
1545 DVLOG(2) << "ReusePictureBufferTask(): early out: kError state";
1546 return;
1547 }
1548
1549 size_t index;
1550 for (index = 0; index < gsc_output_buffer_map_.size(); ++index)
1551 if (gsc_output_buffer_map_[index].picture_id == picture_buffer_id)
1552 break;
1553
1554 if (index >= gsc_output_buffer_map_.size()) {
1555 DLOG(ERROR) << "ReusePictureBufferTask(): picture_buffer_id not found";
1556 NOTIFY_ERROR(INVALID_ARGUMENT);
1557 return;
1558 }
1559
1560 GscOutputRecord& output_record = gsc_output_buffer_map_[index];
1561 if (output_record.at_device || !output_record.at_client) {
1562 DLOG(ERROR) << "ReusePictureBufferTask(): picture_buffer_id not reusable";
1563 NOTIFY_ERROR(INVALID_ARGUMENT);
1564 return;
1565 }
1566
1567 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
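  // Hold on to the client's EGLSync: EnqueueGscOutputRecord() waits on it
  // before re-queueing this buffer, presumably so GSC never overwrites a
  // texture the GPU may still be sampling from.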
1568 output_record.at_client = false;
1569 output_record.egl_sync = egl_sync_ref->egl_sync;
1570 gsc_free_output_buffers_.push_back(index);
1571 decoder_frames_at_client_--;
1572 // Take ownership of the EGLSync.
1573 egl_sync_ref->egl_sync = EGL_NO_SYNC_KHR;
1574 // We got a buffer back, so kick the GSC.
1575 EnqueueGsc();
1576 }
1577
1578 void ExynosVideoDecodeAccelerator::FlushTask() {
1579 DVLOG(3) << "FlushTask()";
1580 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1581 TRACE_EVENT0("Video Decoder", "EVDA::FlushTask");
1582
1583 // Flush outstanding buffers.
1584 if (decoder_state_ == kInitialized || decoder_state_ == kAfterReset) {
1585 // There's nothing in the pipe, so return done immediately.
1586 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
1587 &Client::NotifyFlushDone, client_));
1588 return;
1589 } else if (decoder_state_ == kError) {
1590 DVLOG(2) << "FlushTask(): early out: kError state";
1591 return;
1592 }
1593
1594 // We don't support stacked flushing.
1595 DCHECK(!decoder_flushing_);
1596
1597 // Queue up an empty buffer -- this triggers the flush.
1598 decoder_input_queue_.push_back(linked_ptr<BitstreamBufferRef>(
1599 new BitstreamBufferRef(client_, child_message_loop_proxy_, NULL, 0,
1600 kFlushBufferId)));
1601 decoder_flushing_ = true;
1602
1603 ScheduleDecodeBufferTaskIfNeeded();
1604 }
1605
1606 void ExynosVideoDecodeAccelerator::NotifyFlushDoneIfNeeded() {
1607 if (!decoder_flushing_)
1608 return;
1609
1610 // Pipeline is empty when:
1611 // * Decoder input queue is empty of non-delayed buffers.
1612 // * There is no currently filling input buffer.
1613 // * MFC input holding queue is empty.
1614 // * All MFC input (VIDEO_OUTPUT) buffers are returned.
1615 // * MFC -> GSC holding queue is empty.
1616 // * All GSC input (VIDEO_OUTPUT) buffers are returned.
1617 if (!decoder_input_queue_.empty()) {
1618 if (decoder_input_queue_.front()->input_id !=
1619 decoder_delay_bitstream_buffer_id_)
1620 return;
1621 }
1622 if (decoder_current_input_buffer_ != -1)
1623 return;
1624 if ((mfc_input_ready_queue_.size() +
1625 mfc_input_buffer_queued_count_ + mfc_output_gsc_input_queue_.size() +
1626 gsc_input_buffer_queued_count_ + gsc_output_buffer_queued_count_) != 0)
1627 return;
1628
1629 decoder_delay_bitstream_buffer_id_ = -1;
1630 decoder_flushing_ = false;
1631 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
1632 &Client::NotifyFlushDone, client_));
1633
1634 // While we were flushing, we early-outed DecodeBufferTask()s.
1635 ScheduleDecodeBufferTaskIfNeeded();
1636 }
1637
1638 void ExynosVideoDecodeAccelerator::ResetTask() {
1639 DVLOG(3) << "ResetTask()";
1640 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1641 TRACE_EVENT0("Video Decoder", "EVDA::ResetTask");
1642
1643 if (decoder_state_ == kError) {
1644 DVLOG(2) << "ResetTask(): early out: kError state";
1645 return;
1646 }
1647
1648 // We stop streaming, but we _don't_ destroy our buffers.
1649 if (!StopDevicePoll())
1650 return;
1651
1652 decoder_current_bitstream_buffer_.reset();
1653 decoder_input_queue_.clear();
1654
1655 decoder_current_input_buffer_ = -1;
1656
1657 // If we were flushing, we'll never return any more BitstreamBuffers or
1658 // PictureBuffers; they have all been dropped and returned by now.
1659 NotifyFlushDoneIfNeeded();
1660
1661 // Mark that we're resetting, then enqueue a ResetDoneTask(). All intervening
1662 // jobs will early-out in the kResetting state.
1663 decoder_state_ = kResetting;
1664 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
1665 &ExynosVideoDecodeAccelerator::ResetDoneTask, base::Unretained(this)));
1666 }
1667
1668 void ExynosVideoDecodeAccelerator::ResetDoneTask() {
1669 DVLOG(3) << "ResetDoneTask()";
1670 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1671 TRACE_EVENT0("Video Decoder", "EVDA::ResetDoneTask");
1672
1673 if (decoder_state_ == kError) {
1674 DVLOG(2) << "ResetDoneTask(): early out: kError state";
1675 return;
1676 }
1677
1678 // Reset format-specific bits.
1679 if (video_profile_ >= media::H264PROFILE_MIN &&
1680 video_profile_ <= media::H264PROFILE_MAX) {
1681 decoder_h264_parser_.reset(new content::H264Parser());
1682 }
1683
1684 // Jobs drained, we're finished resetting.
1685 DCHECK_EQ(decoder_state_, kResetting);
1686 decoder_state_ = kAfterReset;
1687 decoder_delay_bitstream_buffer_id_ = -1;
1688 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
1689 &Client::NotifyResetDone, client_));
1690
1691 // While we were resetting, we early-outed DecodeBufferTask()s.
1692 ScheduleDecodeBufferTaskIfNeeded();
1693 }
1694
1695 void ExynosVideoDecodeAccelerator::DestroyTask() {
1696 DVLOG(3) << "DestroyTask()";
1697 TRACE_EVENT0("Video Decoder", "EVDA::DestroyTask");
1698
1699 // DestroyTask() should run regardless of decoder_state_.
1700
1701 // Stop streaming and the device_poll_thread_.
1702 StopDevicePoll();
1703
1704 decoder_current_bitstream_buffer_.reset();
1705 decoder_current_input_buffer_ = -1;
1706 decoder_decode_buffer_tasks_scheduled_ = 0;
1707 decoder_frames_at_client_ = 0;
1708 decoder_input_queue_.clear();
1709 decoder_flushing_ = false;
1710
1711 // Set our state to kError. Just in case.
1712 decoder_state_ = kError;
1713 }
1714
1715 bool ExynosVideoDecodeAccelerator::StartDevicePoll() {
1716 DVLOG(3) << "StartDevicePoll()";
1717 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1718 DCHECK(!device_poll_thread_.IsRunning());
1719
1720 // Start up the device poll thread and schedule its first DevicePollTask().
1721 if (!device_poll_thread_.Start()) {
1722 DLOG(ERROR) << "StartDevicePoll(): Device thread failed to start";
1723 NOTIFY_ERROR(PLATFORM_FAILURE);
1724 return false;
1725 }
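  // The initial DevicePollTask() runs with poll_fds == 0, so it polls only
  // device_poll_interrupt_fd_; it is presumably rescheduled with kPollMfc and
  // kPollGsc set once there are queued buffers to wait on.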
1726 device_poll_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
1727 &ExynosVideoDecodeAccelerator::DevicePollTask,
1728 base::Unretained(this),
1729 0));
1730
1731 return true;
1732 }
1733
1734 bool ExynosVideoDecodeAccelerator::StopDevicePoll() {
1735 DVLOG(3) << "StopDevicePoll()";
1736 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1737
1738 // Signal the DevicePollTask() to stop, and stop the device poll thread.
1739 if (!SetDevicePollInterrupt())
1740 return false;
1741 device_poll_thread_.Stop();
1742 // Clear the interrupt now, to be sure.
1743 if (!ClearDevicePollInterrupt())
1744 return false;
1745
1746 // Stop streaming.
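  // VIDIOC_STREAMOFF also returns every buffer still owned by the driver on
  // that queue, which is why all the queue accounting below can be reset.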
1747 if (mfc_input_streamon_) {
1748 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1749 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_STREAMOFF, &type);
1750 }
1751 mfc_input_streamon_ = false;
1752 if (mfc_output_streamon_) {
1753 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1754 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_STREAMOFF, &type);
1755 }
1756 mfc_output_streamon_ = false;
1757 if (gsc_input_streamon_) {
1758 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1759 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_STREAMOFF, &type);
1760 }
1761 gsc_input_streamon_ = false;
1762 if (gsc_output_streamon_) {
1763 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1764 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_STREAMOFF, &type);
1765 }
1766 gsc_output_streamon_ = false;
1767
1768 // Reset all our accounting info.
1769 mfc_input_ready_queue_.clear();
1770 mfc_free_input_buffers_.clear();
1771 DCHECK_EQ(mfc_input_buffer_count_,
1772 static_cast<int>(mfc_input_buffer_map_.size()));
1773 for (size_t i = 0; i < mfc_input_buffer_map_.size(); ++i) {
1774 mfc_free_input_buffers_.push_back(i);
1775 mfc_input_buffer_map_[i].at_device = false;
1776 mfc_input_buffer_map_[i].bytes_used = 0;
1777 mfc_input_buffer_map_[i].input_id = -1;
1778 }
1779 mfc_input_buffer_queued_count_ = 0;
1780 mfc_free_output_buffers_.clear();
1781 DCHECK_EQ(mfc_output_buffer_count_,
1782 static_cast<int>(mfc_output_buffer_map_.size()));
1783 for (size_t i = 0; i < mfc_output_buffer_map_.size(); ++i) {
1784 mfc_free_output_buffers_.push_back(i);
1785 mfc_output_buffer_map_[i].at_device = false;
1786 mfc_output_buffer_map_[i].input_id = -1;
1787 }
1788 mfc_output_buffer_queued_count_ = 0;
1789 mfc_output_gsc_input_queue_.clear();
1790 gsc_free_input_buffers_.clear();
1791 DCHECK_EQ(gsc_input_buffer_count_,
1792 static_cast<int>(gsc_input_buffer_map_.size()));
1793 for (size_t i = 0; i < gsc_input_buffer_map_.size(); ++i) {
1794 gsc_free_input_buffers_.push_back(i);
1795 gsc_input_buffer_map_[i].at_device = false;
1796 gsc_input_buffer_map_[i].mfc_output = -1;
1797 }
1798 gsc_input_buffer_queued_count_ = 0;
1799 gsc_free_output_buffers_.clear();
1800 DCHECK_EQ(gsc_output_buffer_count_,
1801 static_cast<int>(gsc_output_buffer_map_.size()));
1802 for (size_t i = 0; i < gsc_output_buffer_map_.size(); ++i) {
1803 // Only mark those free that aren't being held by the VDA.
1804 if (!gsc_output_buffer_map_[i].at_client) {
1805 gsc_free_output_buffers_.push_back(i);
1806 gsc_output_buffer_map_[i].at_device = false;
1807 }
1808 }
1809 gsc_output_buffer_queued_count_ = 0;
1810
1811 DVLOG(3) << "StopDevicePoll(): device poll stopped";
1812 return true;
1813 }
1814
1815 bool ExynosVideoDecodeAccelerator::SetDevicePollInterrupt() {
1816 DVLOG(3) << "SetDevicePollInterrupt()";
1817 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1818
1819 const uint64 buf = 1;
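  // device_poll_interrupt_fd_ is presumably an eventfd: writing a nonzero
  // 8-byte value makes it readable and wakes the poll() in DevicePollTask().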
1820 if (HANDLE_EINTR(write(device_poll_interrupt_fd_, &buf, sizeof(buf))) == -1) {
1821 DPLOG(ERROR) << "SetDevicePollInterrupt(): write() failed";
1822 NOTIFY_ERROR(PLATFORM_FAILURE);
1823 return false;
1824 }
1825 return true;
1826 }
1827
1828 bool ExynosVideoDecodeAccelerator::ClearDevicePollInterrupt() {
1829 DVLOG(3) << "ClearDevicePollInterrupt()";
1830 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1831
1832 uint64 buf;
1833 if (HANDLE_EINTR(read(device_poll_interrupt_fd_, &buf, sizeof(buf))) == -1) {
1834 if (errno == EAGAIN) {
1835 // No interrupt flag set, and we're reading nonblocking. Not an error.
1836 return true;
1837 } else {
1838 DPLOG(ERROR) << "ClearDevicePollInterrupt(): read() failed";
1839 NOTIFY_ERROR(PLATFORM_FAILURE);
1840 return false;
1841 }
1842 }
1843 return true;
1844 }
1845
1846 void ExynosVideoDecodeAccelerator::DevicePollTask(unsigned int poll_fds) {
1847 DVLOG(3) << "DevicePollTask()";
1848 DCHECK_EQ(device_poll_thread_.message_loop(), MessageLoop::current());
1849 TRACE_EVENT0("Video Decoder", "EVDA::DevicePollTask");
1850
1851 // This routine just polls the set of device fds, and schedules a
1852 // ServiceDeviceTask() on decoder_thread_ when processing needs to occur.
1853 // Other threads may notify this task to return early by writing to
1854 // device_poll_interrupt_fd_.
1855 struct pollfd pollfds[3];
1856 nfds_t nfds;
1857
1858 // Add device_poll_interrupt_fd_.
1859 pollfds[0].fd = device_poll_interrupt_fd_;
1860 pollfds[0].events = POLLIN | POLLERR;
1861 nfds = 1;
1862
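  // Add MFC fd, if we should poll on it.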
1863 if (poll_fds & kPollMfc) {
1864 DVLOG(3) << "DevicePollTask(): adding MFC to poll() set";
1865 pollfds[nfds].fd = mfc_fd_;
1866 pollfds[nfds].events = POLLIN | POLLOUT | POLLERR;
1867 nfds++;
1868 }
1869 // Add GSC fd, if we should poll on it.
1870 // GSC has to wait until both input and output buffers are queued.
1871 if (poll_fds & kPollGsc) {
1872 DVLOG(3) << "DevicePollTask(): adding GSC to poll() set";
1873 pollfds[nfds].fd = gsc_fd_;
1874 pollfds[nfds].events = POLLIN | POLLOUT | POLLERR;
1875 nfds++;
1876 }
1877
1878 // Poll it!
1879 if (HANDLE_EINTR(poll(pollfds, nfds, -1)) == -1) {
1880 DPLOG(ERROR) << "DevicePollTask(): poll() failed";
1881 NOTIFY_ERROR(PLATFORM_FAILURE);
1882 return;
1883 }
1884
1885 // All processing should happen on ServiceDeviceTask(), since we shouldn't
1886 // touch decoder state from this thread.
1887 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
1888 &ExynosVideoDecodeAccelerator::ServiceDeviceTask,
1889 base::Unretained(this)));
1890 }
1891
1892 void ExynosVideoDecodeAccelerator::NotifyError(Error error) {
1893 DVLOG(2) << "NotifyError()";
1894
1895 if (!child_message_loop_proxy_->BelongsToCurrentThread()) {
1896 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
1897 &ExynosVideoDecodeAccelerator::NotifyError, weak_this_, error));
1898 return;
1899 }
1900
1901 if (client_) {
1902 client_->NotifyError(error);
1903 client_ptr_factory_.InvalidateWeakPtrs();
1904 }
1905 }
1906
1907 void ExynosVideoDecodeAccelerator::SetDecoderState(State state) {
1908 DVLOG(3) << "SetDecoderState(): state=" << state;
1909
1910 // We can touch decoder_state_ only if this is the decoder thread or the
1911 // decoder thread isn't running.
1912 if (decoder_thread_.message_loop() != NULL &&
1913 decoder_thread_.message_loop() != MessageLoop::current()) {
1914 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
1915 &ExynosVideoDecodeAccelerator::SetDecoderState,
1916 base::Unretained(this), state));
1917 } else {
1918 decoder_state_ = state;
1919 }
1920 }
1921
1922 bool ExynosVideoDecodeAccelerator::CreateMfcInputBuffers() {
1923 DVLOG(3) << "CreateMfcInputBuffers()";
1924 // We always run this as we prepare to initialize.
1925 DCHECK_EQ(decoder_state_, kUninitialized);
1926 DCHECK(!mfc_input_streamon_);
1927 DCHECK_EQ(mfc_input_buffer_count_, 0);
1928
1929 __u32 pixelformat = 0;
1930 if (video_profile_ >= media::H264PROFILE_MIN &&
1931 video_profile_ <= media::H264PROFILE_MAX) {
1932 pixelformat = V4L2_PIX_FMT_H264;
1933 } else if (video_profile_ >= media::VP8PROFILE_MIN &&
1934 video_profile_ <= media::VP8PROFILE_MAX) {
1935 pixelformat = V4L2_PIX_FMT_VP8;
1936 } else {
1937 NOTREACHED();
1938 }
1939
1940 struct v4l2_format format;
1941 memset(&format, 0, sizeof(format));
1942 format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1943 format.fmt.pix_mp.pixelformat = pixelformat;
1944 format.fmt.pix_mp.plane_fmt[0].sizeimage = kMfcInputBufferMaxSize;
1945 format.fmt.pix_mp.num_planes = 1;
1946 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_S_FMT, &format);
1947
1948 struct v4l2_requestbuffers reqbufs;
1949 memset(&reqbufs, 0, sizeof(reqbufs));
1950 reqbufs.count = kMfcInputBufferCount;
1951 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1952 reqbufs.memory = V4L2_MEMORY_MMAP;
1953 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_REQBUFS, &reqbufs);
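  // The driver may grant more or fewer buffers than requested, so record the
  // count it actually allocated.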
1954 mfc_input_buffer_count_ = reqbufs.count;
1955 mfc_input_buffer_map_.resize(mfc_input_buffer_count_);
1956 for (int i = 0; i < mfc_input_buffer_count_; ++i) {
1957 mfc_free_input_buffers_.push_back(i);
1958
1959 // Query for the MEMORY_MMAP pointer.
1960 struct v4l2_plane planes[1];
1961 struct v4l2_buffer buffer;
1962 memset(&buffer, 0, sizeof(buffer));
1963 memset(planes, 0, sizeof(planes));
1964 buffer.index = i;
1965 buffer.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1966 buffer.memory = V4L2_MEMORY_MMAP;
1967 buffer.m.planes = planes;
1968 buffer.length = 1;
1969 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_QUERYBUF, &buffer);
1970 void* address = mmap(NULL, buffer.m.planes[0].length,
1971 PROT_READ | PROT_WRITE, MAP_SHARED, mfc_fd_,
1972 buffer.m.planes[0].m.mem_offset);
1973 if (address == MAP_FAILED) {
1974 DPLOG(ERROR) << "CreateMfcInputBuffers(): mmap() failed";
1975 return false;
1976 }
1977 mfc_input_buffer_map_[i].address = address;
1978 mfc_input_buffer_map_[i].length = buffer.m.planes[0].length;
1979 }
1980
1981 return true;
1982 }
1983
1984 bool ExynosVideoDecodeAccelerator::CreateMfcOutputBuffers() {
1985 DVLOG(3) << "CreateMfcOutputBuffers()";
1986 DCHECK_EQ(decoder_state_, kInitialized);
1987 DCHECK(!mfc_output_streamon_);
1988 DCHECK_EQ(mfc_output_buffer_count_, 0);
1989
1990 // Number of MFC output buffers we need.
1991 struct v4l2_control ctrl;
1992 memset(&ctrl, 0, sizeof(ctrl));
1993 ctrl.id = V4L2_CID_MIN_BUFFERS_FOR_CAPTURE;
1994 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_G_CTRL, &ctrl);
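  // V4L2_CID_MIN_BUFFERS_FOR_CAPTURE reports how many CAPTURE buffers the
  // decoder itself requires (e.g. for reference frames); we request
  // kMfcOutputBufferExtraCount more below to keep the pipeline busy.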
1995
1996 // The output format was already set up in Initialize().
1997
1998 // Allocate the output buffers.
1999 struct v4l2_requestbuffers reqbufs;
2000 memset(&reqbufs, 0, sizeof(reqbufs));
2001 reqbufs.count = ctrl.value + kMfcOutputBufferExtraCount;
2002 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
2003 reqbufs.memory = V4L2_MEMORY_MMAP;
2004 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_REQBUFS, &reqbufs);
2005
2006 // Fill our free-buffers list, and create DMABUFs from them.
2007 mfc_output_buffer_count_ = reqbufs.count;
2008 mfc_output_buffer_map_.resize(mfc_output_buffer_count_);
2009 for (int i = 0; i < mfc_output_buffer_count_; ++i) {
2010 mfc_free_output_buffers_.push_back(i);
2011
2012 // Query for the MEMORY_MMAP pointer.
2013 struct v4l2_plane planes[2];
2014 struct v4l2_buffer buffer;
2015 memset(&buffer, 0, sizeof(buffer));
2016 memset(planes, 0, sizeof(planes));
2017 buffer.index = i;
2018 buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
2019 buffer.memory = V4L2_MEMORY_MMAP;
2020 buffer.m.planes = planes;
2021 buffer.length = 2;
2022 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_QUERYBUF, &buffer);
2023
2024 // Get their user memory for GSC input.
2025 for (int j = 0; j < 2; ++j) {
2026 void* address = mmap(NULL, buffer.m.planes[j].length,
2027 PROT_READ | PROT_WRITE, MAP_SHARED, mfc_fd_,
2028 buffer.m.planes[j].m.mem_offset);
2029 if (address == MAP_FAILED) {
2030 DPLOG(ERROR) << "CreateMfcOutputBuffers(): mmap() failed";
2031 return false;
2032 }
2033 mfc_output_buffer_map_[i].address[j] = address;
2034 mfc_output_buffer_map_[i].length[j] = buffer.m.planes[j].length;
2035 }
2036 }
2037
2038 return true;
2039 }
2040
2041 bool ExynosVideoDecodeAccelerator::CreateGscInputBuffers() {
2042 DVLOG(3) << "CreateGscInputBuffers()";
2043 DCHECK_EQ(decoder_state_, kInitialized);
2044 DCHECK(!gsc_input_streamon_);
2045 DCHECK_EQ(gsc_input_buffer_count_, 0);
2046
2047 struct v4l2_format format;
2048 memset(&format, 0, sizeof(format));
2049 format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
2050 format.fmt.pix_mp.width = frame_buffer_size_.width();
2051 format.fmt.pix_mp.height = frame_buffer_size_.height();
2052 format.fmt.pix_mp.pixelformat = mfc_output_buffer_pixelformat_;
2053 format.fmt.pix_mp.plane_fmt[0].sizeimage = mfc_output_buffer_size_[0];
2054 format.fmt.pix_mp.plane_fmt[1].sizeimage = mfc_output_buffer_size_[1];
2055 // NV12MT_16X16 is a tiled format for which bytesperline is not meaningful.
2056 // Convention seems to be to assume 8bpp for these tiled formats.
2057 format.fmt.pix_mp.plane_fmt[0].bytesperline = frame_buffer_size_.width();
2058 format.fmt.pix_mp.plane_fmt[1].bytesperline = frame_buffer_size_.width();
2059 format.fmt.pix_mp.num_planes = 2;
2060 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_FMT, &format);
2061
2062 struct v4l2_control control;
2063 memset(&control, 0, sizeof(control));
2064 control.id = V4L2_CID_ROTATE;
2065 control.value = 0;
2066 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CTRL, &control);
2067
2068 memset(&control, 0, sizeof(control));
2069 control.id = V4L2_CID_HFLIP;
2070 control.value = 0;
2071 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CTRL, &control);
2072
2073 memset(&control, 0, sizeof(control));
2074 control.id = V4L2_CID_VFLIP;
2075 control.value = 0;
2076 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CTRL, &control);
2077
2078 memset(&control, 0, sizeof(control));
2079 control.id = V4L2_CID_GLOBAL_ALPHA;
2080 control.value = 255;
2081 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CTRL, &control);
2082
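  // GSC input buffers are V4L2_MEMORY_USERPTR: at QBUF time (see
  // EnqueueGscInputRecord()) the mmap()ed MFC output planes are handed to GSC
  // directly, so decoded frames are never copied through userspace.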
2083 struct v4l2_requestbuffers reqbufs;
2084 memset(&reqbufs, 0, sizeof(reqbufs));
2085 reqbufs.count = kGscInputBufferCount;
2086 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
2087 reqbufs.memory = V4L2_MEMORY_USERPTR;
2088 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_REQBUFS, &reqbufs);
2089
2090 gsc_input_buffer_count_ = reqbufs.count;
2091 gsc_input_buffer_map_.resize(gsc_input_buffer_count_);
2092 for (int i = 0; i < gsc_input_buffer_count_; ++i) {
2093 gsc_free_input_buffers_.push_back(i);
2094 gsc_input_buffer_map_[i].mfc_output = -1;
2095 }
2096
2097 return true;
2098 }
2099
2100 bool ExynosVideoDecodeAccelerator::CreateGscOutputBuffers() {
2101 DVLOG(3) << "CreateGscOutputBuffers()";
2102 DCHECK_EQ(decoder_state_, kInitialized);
2103 DCHECK(!gsc_output_streamon_);
2104 DCHECK_EQ(gsc_output_buffer_count_, 0);
2105
2106 // GSC outputs into the EGLImages we create from the textures we are
2107 // assigned. Assume RGBA8888 format.
2108 struct v4l2_format format;
2109 memset(&format, 0, sizeof(format));
2110 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
2111 format.fmt.pix_mp.width = frame_buffer_size_.width();
2112 format.fmt.pix_mp.height = frame_buffer_size_.height();
2113 format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_RGB32;
2114 format.fmt.pix_mp.plane_fmt[0].sizeimage =
2115 frame_buffer_size_.width() * frame_buffer_size_.height() * 4;
2116 format.fmt.pix_mp.plane_fmt[0].bytesperline = frame_buffer_size_.width() * 4;
2117 format.fmt.pix_mp.num_planes = 1;
2118 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_FMT, &format);
2119
2120 struct v4l2_requestbuffers reqbufs;
2121 memset(&reqbufs, 0, sizeof(reqbufs));
2122 reqbufs.count = kGscOutputBufferCount;
2123 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
2124 reqbufs.memory = V4L2_MEMORY_DMABUF;
2125 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_REQBUFS, &reqbufs);
2126
2127 // We don't actually fill in the freelist or the map here. That happens once
2128 // we have actual usable buffers, after AssignPictureBuffers().
2129 gsc_output_buffer_count_ = reqbufs.count;
2130 gsc_output_buffer_map_.resize(gsc_output_buffer_count_);
2131
2132 DVLOG(3) << "CreateGscOutputBuffers(): ProvidePictureBuffers(): "
2133 << "buffer_count=" << gsc_output_buffer_count_
2134 << ", width=" << frame_buffer_size_.width()
2135 << ", height=" << frame_buffer_size_.height();
2136 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
2137 &Client::ProvidePictureBuffers, client_, gsc_output_buffer_count_,
2138 gfx::Size(frame_buffer_size_.width(), frame_buffer_size_.height()),
2139 GL_TEXTURE_2D));
2140
2141 return true;
2142 }
2143
2144 void ExynosVideoDecodeAccelerator::DestroyMfcInputBuffers() {
2145 DVLOG(3) << "DestroyMfcInputBuffers()";
2146 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
2147 DCHECK(!mfc_input_streamon_);
2148
2149 for (size_t i = 0; i < mfc_input_buffer_map_.size(); ++i) {
2150 if (mfc_input_buffer_map_[i].address != NULL) {
2151 munmap(mfc_input_buffer_map_[i].address,
2152 mfc_input_buffer_map_[i].length);
2153 }
2154 }
2155
2156 struct v4l2_requestbuffers reqbufs;
2157 memset(&reqbufs, 0, sizeof(reqbufs));
2158 reqbufs.count = 0;
2159 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
2160 reqbufs.memory = V4L2_MEMORY_MMAP;
2161 if (ioctl(mfc_fd_, VIDIOC_REQBUFS, &reqbufs) != 0)
2162 DPLOG(ERROR) << "DestroyMfcInputBuffers(): ioctl() failed: VIDIOC_REQBUFS";
2163
2164 mfc_input_buffer_map_.clear();
2165 mfc_free_input_buffers_.clear();
2166 mfc_input_buffer_count_ = 0;
2167 }
2168
2169 void ExynosVideoDecodeAccelerator::DestroyMfcOutputBuffers() {
2170 DVLOG(3) << "DestroyMfcOutputBuffers()";
2171 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
2172 DCHECK(!mfc_output_streamon_);
2173
2174 for (size_t i = 0; i < mfc_output_buffer_map_.size(); ++i) {
2175 if (mfc_output_buffer_map_[i].address[0] != NULL)
2176 munmap(mfc_output_buffer_map_[i].address[0],
2177 mfc_output_buffer_map_[i].length[0]);
2178 if (mfc_output_buffer_map_[i].address[1] != NULL)
2179 munmap(mfc_output_buffer_map_[i].address[1],
2180 mfc_output_buffer_map_[i].length[1]);
2181 }
2182
2183 struct v4l2_requestbuffers reqbufs;
2184 memset(&reqbufs, 0, sizeof(reqbufs));
2185 reqbufs.count = 0;
2186 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
2187 reqbufs.memory = V4L2_MEMORY_MMAP;
2188 if (ioctl(mfc_fd_, VIDIOC_REQBUFS, &reqbufs) != 0)
2189 DPLOG(ERROR) << "DestroyMfcOutputBuffers(): ioctl() failed: VIDIOC_REQBUFS";
2190
2191 mfc_output_buffer_map_.clear();
2192 mfc_free_output_buffers_.clear();
2193 mfc_output_buffer_count_ = 0;
2194 }
2195
2196 void ExynosVideoDecodeAccelerator::DestroyGscInputBuffers() {
2197 DVLOG(3) << "DestroyGscInputBuffers()";
2198 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
2199 DCHECK(!gsc_input_streamon_);
2200
2201 struct v4l2_requestbuffers reqbufs;
2202 memset(&reqbufs, 0, sizeof(reqbufs));
2203 reqbufs.count = 0;
2204 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
2205 reqbufs.memory = V4L2_MEMORY_USERPTR;
2206 if (ioctl(gsc_fd_, VIDIOC_REQBUFS, &reqbufs) != 0)
2207 DPLOG(ERROR) << "DestroyGscInputBuffers(): ioctl() failed: VIDIOC_REQBUFS";
2208
2209 gsc_input_buffer_map_.clear();
2210 gsc_free_input_buffers_.clear();
2211 gsc_input_buffer_count_ = 0;
2212 }
2213
2214 void ExynosVideoDecodeAccelerator::DestroyGscOutputBuffers() {
2215 DVLOG(3) << "DestroyGscOutputBuffers()";
2216 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
2217 DCHECK(!gsc_output_streamon_);
2218
2219 if (gsc_output_buffer_map_.size() != 0) {
2220 if (!make_context_current_.Run())
2221 DLOG(ERROR) << "DestroyGscOutputBuffers(): "
2222 << "could not make context current";
2223
2224 size_t i = 0;
2225 do {
2226 GscOutputRecord& output_record = gsc_output_buffer_map_[i];
2227 if (output_record.fd != -1)
2228 HANDLE_EINTR(close(output_record.fd));
2229 if (output_record.egl_image != EGL_NO_IMAGE_KHR)
2230 egl_destroy_image_khr(egl_display_, output_record.egl_image);
2231 if (output_record.egl_sync != EGL_NO_SYNC_KHR)
2232 egl_destroy_sync_khr(egl_display_, output_record.egl_sync);
2233 if (client_)
2234 client_->DismissPictureBuffer(output_record.picture_id);
2235 ++i;
2236 } while (i < gsc_output_buffer_map_.size());
2237 }
2238
2239 struct v4l2_requestbuffers reqbufs;
2240 memset(&reqbufs, 0, sizeof(reqbufs));
2241 reqbufs.count = 0;
2242 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
2243 reqbufs.memory = V4L2_MEMORY_DMABUF;
2244 if (ioctl(gsc_fd_, VIDIOC_REQBUFS, &reqbufs) != 0)
2245 DPLOG(ERROR) << "DestroyGscOutputBuffers(): ioctl() failed: VIDIOC_REQBUFS";
2246
2247 gsc_output_buffer_map_.clear();
2248 gsc_free_output_buffers_.clear();
2249 gsc_output_buffer_count_ = 0;
2250 }
2251
2252 } // namespace content