Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(285)

Side by Side Diff: content/common/gpu/media/exynos_video_decode_accelerator.cc

Issue 11198060: VDA implementation for Exynos, using V4L2 (Closed) Base URL: https://git.chromium.org/git/chromium/src@git-svn
Patch Set: Flush() path rework. Created 7 years, 11 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
(Empty)
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include <dlfcn.h>
6 #include <errno.h>
7 #include <fcntl.h>
8 #include <linux/videodev2.h>
9 #include <poll.h>
10 #include <sys/eventfd.h>
11 #include <sys/ioctl.h>
12 #include <sys/mman.h>
13
14 #include "base/bind.h"
15 #include "base/debug/trace_event.h"
16 #include "base/message_loop.h"
17 #include "base/message_loop_proxy.h"
18 #include "base/shared_memory.h"
19 #include "content/common/gpu/media/exynos_video_decode_accelerator.h"
20 #include "content/common/gpu/media/h264_parser.h"
21 #include "third_party/angle/include/GLES2/gl2.h"
22
23 namespace content {
24 
// Device nodes and driver library used by this decoder:
// - MFC: the hardware decode device (opened in Initialize(); all decode
//   ioctls go through mfc_fd_).
// - GSC: a second V4L2 device fed from MFC output (the gsc_* buffer queues
//   below); presumably the post-processing/conversion engine -- confirm.
// - libmali.so: dlopen()ed to resolve EGL/GL extension entry points (see
//   Pre/PostSandboxInitialization()).
25 #define EXYNOS_MFC_DEVICE "/dev/mfc-dec"
26 #define EXYNOS_GSC_DEVICE "/dev/gsc1"
27 #define EXYNOS_MALI_DRIVER "libmali.so"
28
// Put the decoder into the error state and report error code |x| to the
// client via NotifyError(). Expands to a single statement (do/while(0)).
29 #define NOTIFY_ERROR(x) \
30 do { \
31 SetDecoderState(kError); \
32 LOG(ERROR) << "calling NotifyError(): " << x; \
33 NotifyError(x); \
34 } while (0)
35
// Issue ioctl |type| with argument |arg| on |fd|; on failure, report
// PLATFORM_FAILURE and return from the enclosing (void) function.
36 #define IOCTL_OR_ERROR_RETURN(fd, type, arg) \
37 do { \
38 if (ioctl(fd, type, arg) != 0) { \
39 DPLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \
40 NOTIFY_ERROR(PLATFORM_FAILURE); \
41 return; \
42 } \
43 } while (0)
44
// As IOCTL_OR_ERROR_RETURN, but for bool-returning functions: on failure,
// report PLATFORM_FAILURE and return false from the enclosing function.
45 #define IOCTL_OR_ERROR_RETURN_FALSE(fd, type, arg) \
46 do { \
47 if (ioctl(fd, type, arg) != 0) { \
48 DPLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \
49 NOTIFY_ERROR(PLATFORM_FAILURE); \
50 return false; \
51 } \
52 } while (0)
53
// Resolve symbol |name| from dlopen() handle |lib| into |func| (cast to
// function-pointer type |type|); on failure, log the dlerror() and return
// false from the enclosing function.
//
// Wrapped in do { } while (0) -- consistent with NOTIFY_ERROR and the
// IOCTL_* macros above -- so the macro expands to exactly one statement.
// The previous form (bare assignment followed by an if) was unsafe under an
// unbraced `if (cond) POSTSANDBOX_DLSYM(...);`: only the assignment would be
// conditional, and the NULL-check would execute unconditionally.
#define POSTSANDBOX_DLSYM(lib, func, type, name)                         \
  do {                                                                   \
    func = reinterpret_cast<type>(dlsym(lib, name));                     \
    if (func == NULL) {                                                  \
      DPLOG(ERROR) << "PostSandboxInitialization(): failed to dlsym() "  \
                   << name << ": " << dlerror();                         \
      return false;                                                      \
    }                                                                    \
  } while (0)
61
// Holder for one client bitstream buffer while it is owned by the decoder.
// Destroying the ref "returns" the buffer: the destructor posts
// NotifyEndOfBitstreamBuffer() back to the client message loop.
62 struct ExynosVideoDecodeAccelerator::BitstreamBufferRef {
63 BitstreamBufferRef(
64 base::WeakPtr<Client>& client,
65 scoped_refptr<base::MessageLoopProxy>& client_message_loop_proxy,
66 base::SharedMemory* shm,
67 size_t size,
68 int32 input_id);
69 ~BitstreamBufferRef();
70 const base::WeakPtr<Client> client;
71 const scoped_refptr<base::MessageLoopProxy> client_message_loop_proxy;
// Mapped shared memory holding the bitstream data; NULL for internally
// generated flush buffers (see DecodeBufferTask()).
72 const scoped_ptr<base::SharedMemory> shm;
// Total size of the buffer contents in bytes.
73 const size_t size;
// Bytes of this buffer consumed by the decoder so far.
74 off_t bytes_used;
// Client-assigned id; negative for internally generated buffers (the
// destructor only notifies the client for ids >= 0).
75 const int32 input_id;
76 };
77
// Owns parallel arrays of EGLImages, their dma_buf fds, and client picture
// buffer ids built in AssignPictureBuffers(). The destructor (below)
// destroys any EGLImage != EGL_NO_IMAGE_KHR and closes any fd != -1 still
// held, so partially initialized arrays are cleaned up on early exit.
78 struct ExynosVideoDecodeAccelerator::PictureBufferArrayRef {
79 PictureBufferArrayRef(EGLDisplay egl_display, EGLImageKHR egl_images[],
80 int egl_image_fds[], int32 client_ids[],
81 int egl_images_count);
82 ~PictureBufferArrayRef();
83 EGLDisplay const egl_display;
84 const scoped_ptr<EGLImageKHR[]> egl_images;
85 const scoped_ptr<int[]> egl_image_fds;
86 const scoped_ptr<int32[]> client_ids;
87 const int egl_images_count;
Ami GONE FROM CHROMIUM 2013/01/11 00:46:33 My point about vector<> was that if you used it yo
sheu 2013/01/11 10:41:42 Alright, I'll just fold it into an array-of-struct
88 };
89
// Holder for an EGLSyncKHR fence object; the destructor (below) destroys
// |egl_sync| if it is still set to something other than EGL_NO_SYNC_KHR.
90 struct ExynosVideoDecodeAccelerator::EGLSyncKHRRef {
91 EGLSyncKHRRef(EGLDisplay egl_display, EGLSyncKHR egl_sync);
92 ~EGLSyncKHRRef();
93 EGLDisplay const egl_display;
94 EGLSyncKHR egl_sync;
95 };
96
// Function-pointer types for the EGL/GL extension entry points that are
// resolved out of the Mali driver in PostSandboxInitialization().
97 typedef void* GLeglImageOES;
Ami GONE FROM CHROMIUM 2013/01/11 00:46:33 What I meant was: are you sure it's legit / standa
sheu 2013/01/11 10:41:42 Done.
98 typedef EGLBoolean (*MaliEglImageGetBufferExtPhandleFunc)(EGLImageKHR, EGLint*,
99 void*);
100 typedef EGLImageKHR (*EglCreateImageKhrFunc)(EGLDisplay, EGLContext, EGLenum,
101 EGLClientBuffer, const EGLint*);
102 typedef EGLBoolean (*EglDestroyImageKhrFunc)(EGLDisplay, EGLImageKHR);
103 typedef EGLSyncKHR (*EglCreateSyncKhrFunc)(EGLDisplay, EGLenum, const EGLint*);
104 typedef EGLBoolean (*EglDestroySyncKhrFunc)(EGLDisplay, EGLSyncKHR);
105 typedef EGLint (*EglClientWaitSyncKhrFunc)(EGLDisplay, EGLSyncKHR, EGLint,
106 EGLTimeKHR);
107 typedef void (*GlEglImageTargetTexture2dOesFunc)(GLenum, GLeglImageOES);
108 
// dlopen() handle for EXYNOS_MALI_DRIVER, acquired before the sandbox is
// engaged (PreSandboxInitialization()).
109 static void* libmali_handle = NULL;
// Entry points resolved via POSTSANDBOX_DLSYM(); all remain NULL until
// PostSandboxInitialization() has succeeded.
110 static MaliEglImageGetBufferExtPhandleFunc
111 mali_egl_image_get_buffer_ext_phandle = NULL;
112 static EglCreateImageKhrFunc egl_create_image_khr = NULL;
113 static EglDestroyImageKhrFunc egl_destroy_image_khr = NULL;
114 static EglCreateSyncKhrFunc egl_create_sync_khr = NULL;
115 static EglDestroySyncKhrFunc egl_destroy_sync_khr = NULL;
116 static EglClientWaitSyncKhrFunc egl_client_wait_sync_khr = NULL;
117 static GlEglImageTargetTexture2dOesFunc
118 gl_egl_image_target_texture_2d_oes = NULL;
119
// Takes ownership of |shm| (may be NULL for internally generated flush
// buffers, which carry a negative |input_id|).
120 ExynosVideoDecodeAccelerator::BitstreamBufferRef::BitstreamBufferRef(
121 base::WeakPtr<Client>& client,
122 scoped_refptr<base::MessageLoopProxy>& client_message_loop_proxy,
123 base::SharedMemory* shm, size_t size, int32 input_id)
124 : client(client),
125 client_message_loop_proxy(client_message_loop_proxy),
126 shm(shm),
127 size(size),
128 bytes_used(0),
129 input_id(input_id) {
130 }
131 
// Return the buffer to the client -- but only for client-originated buffers
// (input_id >= 0); the notification must run on the client's message loop.
132 ExynosVideoDecodeAccelerator::BitstreamBufferRef::~BitstreamBufferRef() {
133 if (input_id >= 0)
134 client_message_loop_proxy->PostTask(FROM_HERE, base::Bind(
135 &Client::NotifyEndOfBitstreamBuffer, client, input_id));
136 }
137
// Takes ownership of the three heap arrays (stored in scoped_ptr<T[]>).
138 ExynosVideoDecodeAccelerator::PictureBufferArrayRef::PictureBufferArrayRef(
139 EGLDisplay egl_display, EGLImageKHR egl_images[], int egl_image_fds[],
140 int32 client_ids[], int egl_images_count)
141 : egl_display(egl_display),
142 egl_images(egl_images),
143 egl_image_fds(egl_image_fds),
144 client_ids(client_ids),
145 egl_images_count(egl_images_count) {
146 }
147 
// Destroy any EGLImage still held and close any dma_buf fd still open;
// entries already released by a consumer are marked EGL_NO_IMAGE_KHR / -1.
148 ExynosVideoDecodeAccelerator::PictureBufferArrayRef::~PictureBufferArrayRef() {
149 DCHECK_EQ(egl_images != NULL, egl_image_fds != NULL);
150 if (egl_images == NULL)
151 return;
152 
153 for (int i = 0; i < egl_images_count; ++i) {
154 if (egl_images[i] != EGL_NO_IMAGE_KHR)
155 egl_destroy_image_khr(egl_display, egl_images[i]);
156 if (egl_image_fds[i] != -1)
157 close(egl_image_fds[i]);
158 }
159 }
160
161 ExynosVideoDecodeAccelerator::EGLSyncKHRRef::EGLSyncKHRRef(
162 EGLDisplay egl_display, EGLSyncKHR egl_sync)
163 : egl_display(egl_display),
164 egl_sync(egl_sync) {
165 }
166 
// Destroy the fence if it has not been taken over (set to EGL_NO_SYNC_KHR).
167 ExynosVideoDecodeAccelerator::EGLSyncKHRRef::~EGLSyncKHRRef() {
168 if (egl_sync != EGL_NO_SYNC_KHR)
169 egl_destroy_sync_khr(egl_display, egl_sync);
170 }
171
// MFC input (bitstream) buffer record. Sentinels: NULL address = not
// mmap()ed; input_id = -1 = no bitstream buffer associated.
172 ExynosVideoDecodeAccelerator::MfcInputRecord::MfcInputRecord()
173 : at_device(false),
174 address(NULL),
175 length(0),
176 bytes_used(0),
177 input_id(-1) {
178 }
179 
180 ExynosVideoDecodeAccelerator::MfcInputRecord::~MfcInputRecord() {
181 }
182
// MFC output (decoded frame) buffer record; two planes per buffer (the
// two-element arrays match the 2-plane format checked in
// DecodeBufferInitial()).
183 ExynosVideoDecodeAccelerator::MfcOutputRecord::MfcOutputRecord()
184 : at_device(false),
185 input_id(-1) {
186 bytes_used[0] = 0;
187 bytes_used[1] = 0;
188 address[0] = NULL;
189 address[1] = NULL;
190 length[0] = 0;
191 length[1] = 0;
192 }
193 
194 ExynosVideoDecodeAccelerator::MfcOutputRecord::~MfcOutputRecord() {
195 }
196
// GSC input buffer record; mfc_output = -1 means no MFC output buffer is
// currently linked to this GSC input.
197 ExynosVideoDecodeAccelerator::GscInputRecord::GscInputRecord()
198 : at_device(false),
199 mfc_output(-1) {
200 }
201 
202 ExynosVideoDecodeAccelerator::GscInputRecord::~GscInputRecord() {
203 }
204
// GSC output buffer record: tracks whether the buffer is queued at the
// device or handed to the client, plus its dma_buf fd, EGLImage, pending
// EGL fence, and the client picture buffer id it is bound to.
205 ExynosVideoDecodeAccelerator::GscOutputRecord::GscOutputRecord()
206 : at_device(false),
207 at_client(false),
208 fd(-1),
209 egl_image(EGL_NO_IMAGE_KHR),
210 egl_sync(EGL_NO_SYNC_KHR),
211 picture_id(-1) {
212 }
213 
214 ExynosVideoDecodeAccelerator::GscOutputRecord::~GscOutputRecord() {
215 }
216
// Constructed on the child (client) thread; captures that thread's message
// loop proxy and a weak pointer to |client| so later notifications can be
// cancelled when the client goes away. All fds start at -1 (closed) and all
// buffer/queue counters at zero; real setup happens in Initialize().
217 ExynosVideoDecodeAccelerator::ExynosVideoDecodeAccelerator(
218 EGLDisplay egl_display,
219 EGLContext egl_context,
220 Client* client,
221 const base::Callback<bool(void)>& make_context_current)
222 : child_message_loop_proxy_(base::MessageLoopProxy::current()),
223 weak_this_(base::AsWeakPtr(this)),
224 client_ptr_factory_(client),
225 client_(client_ptr_factory_.GetWeakPtr()),
226 decoder_thread_("ExynosDecoderThread"),
227 decoder_state_(kUninitialized),
228 decoder_current_bitstream_buffer_(NULL),
229 decoder_delay_bitstream_buffer_id_(-1),
230 decoder_current_input_buffer_(-1),
231 decoder_decode_buffer_tasks_scheduled_(0),
232 decoder_frames_at_client_(0),
233 decoder_flushing_(false),
234 mfc_fd_(-1),
235 mfc_input_streamon_(false),
236 mfc_input_buffer_count_(0),
237 mfc_input_buffer_queued_count_(0),
238 mfc_output_streamon_(false),
239 mfc_output_buffer_count_(0),
240 mfc_output_buffer_queued_count_(0),
241 mfc_output_buffer_pixelformat_(0),
242 gsc_fd_(-1),
243 gsc_input_streamon_(false),
244 gsc_input_buffer_count_(0),
245 gsc_input_buffer_queued_count_(0),
246 gsc_output_streamon_(false),
247 gsc_output_buffer_count_(0),
248 gsc_output_buffer_queued_count_(0),
249 device_poll_thread_("ExynosDevicePollThread"),
250 device_poll_interrupt_fd_(-1),
251 make_context_current_(make_context_current),
252 egl_display_(egl_display),
253 egl_context_(egl_context),
254 video_profile_(media::VIDEO_CODEC_PROFILE_UNKNOWN) {
255 }
256
// Both worker threads must already be stopped (Destroy() stops them before
// deleting |this|). Releases all OS resources: the poll interrupt eventfd,
// the GSC and MFC device fds, and their associated buffers.
257 ExynosVideoDecodeAccelerator::~ExynosVideoDecodeAccelerator() {
258 DCHECK(!decoder_thread_.message_loop());
Ami GONE FROM CHROMIUM 2013/01/11 00:46:33 s/message_loop/IsRunning/ here and in the next lin
sheu 2013/01/11 10:41:42 Done.
259 DCHECK(!device_poll_thread_.message_loop());
260 // Nuke the entire site from orbit -- it's the only way to be sure.
261 if (device_poll_interrupt_fd_ != -1) {
262 close(device_poll_interrupt_fd_);
263 device_poll_interrupt_fd_ = -1;
264 }
265 if (gsc_fd_ != -1) {
266 DestroyGscInputBuffers();
267 DestroyGscOutputBuffers();
268 close(gsc_fd_);
269 gsc_fd_ = -1;
270 }
271 if (mfc_fd_ != -1) {
272 DestroyMfcInputBuffers();
273 DestroyMfcOutputBuffers();
274 close(mfc_fd_);
275 mfc_fd_ = -1;
276 }
277 
278 // These maps have members that should be manually destroyed, e.g. file
279 // descriptors, mmap() segments, etc.
280 DCHECK(mfc_input_buffer_map_.empty());
281 DCHECK(mfc_output_buffer_map_.empty());
282 DCHECK(gsc_input_buffer_map_.empty());
283 DCHECK(gsc_output_buffer_map_.empty());
284 }
285
// Client-thread entry point. Validates |profile| (H.264 baseline/main/high
// and VP8 only), resolves the Mali driver symbols, opens the MFC and GSC
// V4L2 devices, verifies their capabilities, performs Exynos-specific
// configuration, creates MFC input buffers, sets the MFC output format, and
// starts the decoder thread. Returns false on any failure (reporting
// PLATFORM_FAILURE to the client where applicable); on success posts
// NotifyInitializeDone() and moves to kInitialized.
286 bool ExynosVideoDecodeAccelerator::Initialize(
287 media::VideoCodecProfile profile) {
288 DVLOG(3) << "Initialize()";
289 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
290 DCHECK_EQ(decoder_state_, kUninitialized);
291 
292 switch (profile) {
293 case media::H264PROFILE_BASELINE:
294 DVLOG(2) << "Initialize(): profile H264PROFILE_BASELINE";
295 break;
296 case media::H264PROFILE_MAIN:
297 DVLOG(2) << "Initialize(): profile H264PROFILE_MAIN";
298 break;
299 case media::H264PROFILE_HIGH:
300 DVLOG(2) << "Initialize(): profile H264PROFILE_HIGH";
301 break;
302 case media::VP8PROFILE_MAIN:
303 DVLOG(2) << "Initialize(): profile VP8PROFILE_MAIN";
304 break;
305 default:
306 DLOG(ERROR) << "Initialize(): unsupported profile=" << profile;
307 return false;
308 };
309 video_profile_ = profile;
310 
// dlsym() resolution is attempted only once per process; the result is
// cached in this function-local static.
311 static bool sandbox_initialized = PostSandboxInitialization();
312 if (!sandbox_initialized) {
313 DLOG(ERROR) << "Initialize(): PostSandboxInitialization() failed";
314 NOTIFY_ERROR(PLATFORM_FAILURE);
315 return false;
316 }
317 
318 if (egl_display_ == EGL_NO_DISPLAY) {
319 DLOG(ERROR) << "Initialize(): could not get EGLDisplay";
320 NOTIFY_ERROR(PLATFORM_FAILURE);
321 return false;
322 }
323 
324 if (egl_context_ == EGL_NO_CONTEXT) {
325 DLOG(ERROR) << "Initialize(): could not get EGLContext";
326 NOTIFY_ERROR(PLATFORM_FAILURE);
327 return false;
328 }
329 
330 // Open the video devices.
331 DVLOG(2) << "Initialize(): opening MFC device: " << EXYNOS_MFC_DEVICE;
332 mfc_fd_ = open(EXYNOS_MFC_DEVICE, O_RDWR | O_NONBLOCK | O_CLOEXEC);
333 if (mfc_fd_ == -1) {
334 DPLOG(ERROR) << "Initialize(): could not open MFC device: "
335 << EXYNOS_MFC_DEVICE;
336 NOTIFY_ERROR(PLATFORM_FAILURE);
337 return false;
338 }
339 DVLOG(2) << "Initialize(): opening GSC device: " << EXYNOS_GSC_DEVICE;
340 gsc_fd_ = open(EXYNOS_GSC_DEVICE, O_RDWR | O_NONBLOCK | O_CLOEXEC);
341 if (gsc_fd_ == -1) {
342 DPLOG(ERROR) << "Initialize(): could not open GSC device: "
343 << EXYNOS_GSC_DEVICE;
344 NOTIFY_ERROR(PLATFORM_FAILURE);
345 return false;
346 }
347 
// eventfd used to wake the device poll thread out of its poll() when the
// decoder wants its attention.
348 // Create the interrupt fd.
349 DCHECK_EQ(device_poll_interrupt_fd_, -1);
350 device_poll_interrupt_fd_ = eventfd(0, EFD_NONBLOCK | EFD_CLOEXEC);
351 if (device_poll_interrupt_fd_ == -1) {
352 DPLOG(ERROR) << "Initialize(): eventfd() failed";
353 NOTIFY_ERROR(PLATFORM_FAILURE);
354 return false;
355 }
356 
// Both devices must support multi-planar capture/output and streaming I/O.
357 // Capabilities check.
358 struct v4l2_capability caps;
359 const __u32 kCapsRequired =
360 V4L2_CAP_VIDEO_CAPTURE_MPLANE |
361 V4L2_CAP_VIDEO_OUTPUT_MPLANE |
362 V4L2_CAP_STREAMING;
363 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_QUERYCAP, &caps);
364 if ((caps.capabilities & kCapsRequired) != kCapsRequired) {
365 DLOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP"
366 ", caps check failed: 0x" << std::hex << caps.capabilities;
367 NOTIFY_ERROR(PLATFORM_FAILURE);
368 return false;
369 }
370 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_QUERYCAP, &caps);
371 if ((caps.capabilities & kCapsRequired) != kCapsRequired) {
372 DLOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP"
373 ", caps check failed: 0x" << std::hex << caps.capabilities;
374 NOTIFY_ERROR(PLATFORM_FAILURE);
375 return false;
376 }
377 
378 // Some random ioctls that Exynos requires.
379 struct v4l2_control control;
380 memset(&control, 0, sizeof(control));
381 control.id = V4L2_CID_MPEG_MFC51_VIDEO_DECODER_H264_DISPLAY_DELAY; // also VP8
382 control.value = 8; // Magic number from Samsung folks.
383 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_S_CTRL, &control);
384 
385 if (!make_context_current_.Run()) {
386 DLOG(ERROR) << "Initialize(): could not make context current";
387 NOTIFY_ERROR(PLATFORM_FAILURE);
388 return false;
389 }
390 
391 if (!CreateMfcInputBuffers())
392 return false;
393 
394 // MFC output format has to be setup before streaming starts.
395 struct v4l2_format format;
396 memset(&format, 0, sizeof(format));
397 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
398 format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_NV12MT_16X16;
399 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_S_FMT, &format);
400 
// H.264 input must be split into single-frame fragments by
// FindFrameFragment(), which needs a NALU parser.
401 // Initialize format-specific bits.
402 if (video_profile_ >= media::H264PROFILE_MIN &&
403 video_profile_ <= media::H264PROFILE_MAX) {
404 decoder_h264_parser_.reset(new content::H264Parser());
405 }
406 
407 if (!decoder_thread_.Start()) {
408 DLOG(ERROR) << "Initialize(): decoder thread failed to start";
409 NOTIFY_ERROR(PLATFORM_FAILURE);
410 return false;
411 }
412 
413 SetDecoderState(kInitialized);
414 
415 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
416 &Client::NotifyInitializeDone, client_));
417 return true;
418 }
419
// Client-thread entry point: wrap the client's bitstream buffer in a
// BitstreamBufferRef (mapping its shared memory) and hand it off to the
// decoder thread. Reports UNREADABLE_INPUT if the mapping fails.
420 void ExynosVideoDecodeAccelerator::Decode(
421 const media::BitstreamBuffer& bitstream_buffer) {
422 DVLOG(1) << "Decode(): input_id=" << bitstream_buffer.id()
423 << ", size=" << bitstream_buffer.size();
424 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
425 
426 scoped_ptr<BitstreamBufferRef> bitstream_record(new BitstreamBufferRef(
427 client_, child_message_loop_proxy_,
428 new base::SharedMemory(bitstream_buffer.handle(), true),
429 bitstream_buffer.size(), bitstream_buffer.id()));
430 if (!bitstream_record->shm->Map(bitstream_buffer.size())) {
431 DLOG(ERROR) << "Decode(): could not map bitstream_buffer";
432 NOTIFY_ERROR(UNREADABLE_INPUT);
433 return;
434 }
435 DVLOG(3) << "Decode(): mapped to addr=" << bitstream_record->shm->memory();
436 
437 // DecodeTask() will take care of running a DecodeBufferTask().
438 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
439 &ExynosVideoDecodeAccelerator::DecodeTask, base::Unretained(this),
440 base::Passed(&bitstream_record)));
441 }
442
// Client-thread entry point: for each client picture buffer, create an X
// pixmap of the buffer's size, wrap it in an EGLImage (to obtain dma_buf
// backing), extract the dma_buf fd via the Mali extension, and bind the
// image to the client's texture. The resulting arrays are owned by a
// PictureBufferArrayRef so that any early error return cleans up images/fds
// created so far; on success the ref is passed to the decoder thread.
443 void ExynosVideoDecodeAccelerator::AssignPictureBuffers(
444 const std::vector<media::PictureBuffer>& buffers) {
445 DVLOG(3) << "AssignPictureBuffers(): buffer_count=" << buffers.size();
446 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
447 
448 if (static_cast<int>(buffers.size()) != gsc_output_buffer_count_) {
449 DLOG(ERROR) << "AssignPictureBuffers(): invalid buffer_count";
450 NOTIFY_ERROR(INVALID_ARGUMENT);
451 return;
452 }
453 
454 if (!make_context_current_.Run()) {
455 DLOG(ERROR) << "AssignPictureBuffers(): could not make context current";
456 NOTIFY_ERROR(PLATFORM_FAILURE);
457 return;
458 }
459 
460 scoped_ptr<PictureBufferArrayRef> pic_buffers_ref(
461 new PictureBufferArrayRef(
462 egl_display_, new EGLImageKHR[buffers.size()],
463 new int[buffers.size()], new int32[buffers.size()], buffers.size()));
// Pre-fill with "empty" sentinels so the ref's destructor skips entries we
// never got to.
464 for (int i = 0; i < pic_buffers_ref->egl_images_count; ++i) {
465 pic_buffers_ref->egl_images[i] = EGL_NO_IMAGE_KHR;
466 pic_buffers_ref->egl_image_fds[i] = -1;
467 pic_buffers_ref->client_ids[i] = -1;
468 }
469 
470 const static EGLint kImageAttrs[] = {
471 EGL_IMAGE_PRESERVED_KHR, 0,
472 EGL_NONE,
473 };
474 Display* x_display = base::MessagePumpForUI::GetDefaultXDisplay();
475 glActiveTexture(GL_TEXTURE0);
476 for (int i = 0; i < pic_buffers_ref->egl_images_count; ++i) {
477 // Create the X pixmap and then create an EGLImageKHR from it, so we can
478 // get dma_buf backing.
479 Pixmap pixmap = XCreatePixmap(x_display, RootWindow(x_display, 0),
480 buffers[i].size().width(), buffers[i].size().height(), 32);
481 if (!pixmap) {
482 DLOG(ERROR) << "AssignPictureBuffers(): could not create X pixmap";
483 NOTIFY_ERROR(PLATFORM_FAILURE);
484 return;
485 }
486 glBindTexture(GL_TEXTURE_2D, buffers[i].texture_id());
487 EGLImageKHR egl_image;
488 egl_image = egl_create_image_khr(
489 egl_display_, EGL_NO_CONTEXT, EGL_NATIVE_PIXMAP_KHR,
490 (EGLClientBuffer)pixmap, kImageAttrs);
491 // We can free the X pixmap immediately -- according to the
492 // EGL_KHR_image_base spec, the backing storage does not go away until the
493 // last referencing EGLImage is destroyed.
494 XFreePixmap(x_display, pixmap);
495 if (egl_image == EGL_NO_IMAGE_KHR) {
496 DLOG(ERROR) << "AssignPictureBuffers(): could not create EGLImageKHR";
497 NOTIFY_ERROR(PLATFORM_FAILURE);
498 return;
499 }
500 pic_buffers_ref->egl_images[i] = egl_image;
501 int fd;
// Mali-specific extension: retrieve the dma_buf fd backing the EGLImage.
502 if (!mali_egl_image_get_buffer_ext_phandle(
503 pic_buffers_ref->egl_images[i], NULL, &fd)) {
504 DLOG(ERROR) << "AssignPictureBuffers(): "
505 << "could not get EGLImageKHR dmabuf fd";
506 NOTIFY_ERROR(PLATFORM_FAILURE);
507 return;
508 }
509 pic_buffers_ref->egl_image_fds[i] = fd;
510 gl_egl_image_target_texture_2d_oes(GL_TEXTURE_2D, egl_image);
511 pic_buffers_ref->client_ids[i] = buffers[i].id();
512 }
513 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
514 &ExynosVideoDecodeAccelerator::AssignPictureBuffersTask,
515 base::Unretained(this), base::Passed(&pic_buffers_ref)));
516 }
517
// Client-thread entry point: insert an EGL fence into the client's context
// so the decoder can later wait for the client's reads of the picture to
// complete before reusing the buffer; then hand the fence (wrapped in an
// EGLSyncKHRRef for cleanup on failure) to the decoder thread.
518 void ExynosVideoDecodeAccelerator::ReusePictureBuffer(int32 picture_buffer_id) {
519 DVLOG(3) << "ReusePictureBuffer(): picture_buffer_id=" << picture_buffer_id;
520 // Must be run on child thread, as we'll insert a sync in the EGL context.
521 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
522 
523 if (!make_context_current_.Run()) {
524 DLOG(ERROR) << "ReusePictureBuffer(): could not make context current";
525 NOTIFY_ERROR(PLATFORM_FAILURE);
526 return;
527 }
528 
529 EGLSyncKHR egl_sync;
530 egl_sync = egl_create_sync_khr(egl_display_, EGL_SYNC_FENCE_KHR, NULL);
531 if (egl_sync == EGL_NO_SYNC_KHR) {
532 DLOG(ERROR) << "ReusePictureBuffer(): eglCreateSyncKHR() failed";
533 NOTIFY_ERROR(PLATFORM_FAILURE);
534 return;
535 }
536 
537 scoped_ptr<EGLSyncKHRRef> egl_sync_ref(new EGLSyncKHRRef(
538 egl_display_, egl_sync));
539 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
540 &ExynosVideoDecodeAccelerator::ReusePictureBufferTask,
541 base::Unretained(this), picture_buffer_id, base::Passed(&egl_sync_ref)));
542 }
543
// Client-thread entry point: delegate the flush to the decoder thread.
544 void ExynosVideoDecodeAccelerator::Flush() {
545 DVLOG(3) << "Flush()";
546 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
547 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
548 &ExynosVideoDecodeAccelerator::FlushTask, base::Unretained(this)));
549 }
550
// Client-thread entry point: delegate the reset to the decoder thread.
551 void ExynosVideoDecodeAccelerator::Reset() {
552 DVLOG(3) << "Reset()";
553 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
554 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
555 &ExynosVideoDecodeAccelerator::ResetTask, base::Unretained(this)));
556 }
557
// Client-thread entry point: cancel all pending client callbacks, run
// DestroyTask() (via the decoder thread if it is running, else inline),
// stop the decoder thread, and finally self-delete. No member may be
// touched after |delete this|.
558 void ExynosVideoDecodeAccelerator::Destroy() {
559 DVLOG(3) << "Destroy()";
560 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
561 
562 // We're destroying; cancel all callbacks.
563 client_ptr_factory_.InvalidateWeakPtrs();
564 
565 // If the decoder thread is running, destroy using posted task.
566 if (decoder_thread_.IsRunning()) {
567 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
568 &ExynosVideoDecodeAccelerator::DestroyTask, base::Unretained(this)));
569 // DestroyTask() will cause the decoder_thread_ to flush all tasks.
570 decoder_thread_.Stop();
571 } else {
572 // Otherwise, call the destroy task directly.
573 DestroyTask();
574 }
575 
576 // Set to kError state just in case.
577 SetDecoderState(kError);
578 
579 delete this;
580 }
581
582 // static
583 void ExynosVideoDecodeAccelerator::PreSandboxInitialization() {
584 DVLOG(3) << "PreSandboxInitialization()";
585 dlerror();
586
587 libmali_handle = dlopen(EXYNOS_MALI_DRIVER, RTLD_LAZY | RTLD_LOCAL);
588 if (libmali_handle == NULL) {
589 DPLOG(ERROR) << "failed to dlopen() " << EXYNOS_MALI_DRIVER
590 << ": " << dlerror();
591 }
592 }
593
594 // static
595 bool ExynosVideoDecodeAccelerator::PostSandboxInitialization() {
596 DVLOG(3) << "PostSandboxInitialization()";
597 if (libmali_handle == NULL) {
598 DLOG(ERROR) << "PostSandboxInitialization(): no " << EXYNOS_MALI_DRIVER
599 << " driver handle";
600 return false;
601 }
602
603 dlerror();
604
605 POSTSANDBOX_DLSYM(libmali_handle,
606 mali_egl_image_get_buffer_ext_phandle,
607 MaliEglImageGetBufferExtPhandleFunc,
608 "mali_egl_image_get_buffer_ext_phandle");
609
610 POSTSANDBOX_DLSYM(libmali_handle,
611 egl_create_image_khr,
612 EglCreateImageKhrFunc,
613 "eglCreateImageKHR");
614
615 POSTSANDBOX_DLSYM(libmali_handle,
616 egl_destroy_image_khr,
617 EglDestroyImageKhrFunc,
618 "eglDestroyImageKHR");
619
620 POSTSANDBOX_DLSYM(libmali_handle,
621 egl_create_sync_khr,
622 EglCreateSyncKhrFunc,
623 "eglCreateSyncKHR");
624
625 POSTSANDBOX_DLSYM(libmali_handle,
626 egl_destroy_sync_khr,
627 EglDestroySyncKhrFunc,
628 "eglDestroySyncKHR");
629
630 POSTSANDBOX_DLSYM(libmali_handle,
631 egl_client_wait_sync_khr,
632 EglClientWaitSyncKhrFunc,
633 "eglClientWaitSyncKHR");
634
635 POSTSANDBOX_DLSYM(libmali_handle,
636 gl_egl_image_target_texture_2d_oes,
637 GlEglImageTargetTexture2dOesFunc,
638 "glEGLImageTargetTexture2DOES");
639
640 return true;
641 }
642
// Decoder-thread: enqueue one bitstream buffer and kick DecodeBufferTask().
// If a reset/flush is in progress, remember the first post-reset/flush
// buffer id so decoding of it (and everything after) is delayed until the
// reset/flush completes.
643 void ExynosVideoDecodeAccelerator::DecodeTask(
644 scoped_ptr<BitstreamBufferRef> bitstream_record) {
645 DVLOG(3) << "DecodeTask(): input_id=" << bitstream_record->input_id;
646 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
647 DCHECK_NE(decoder_state_, kUninitialized);
648 TRACE_EVENT1("Video Decoder", "EVDA::DecodeTask", "input_id",
649 bitstream_record->input_id);
650 
651 if (decoder_state_ == kResetting || decoder_flushing_) {
652 // In the case that we're resetting or flushing, we need to delay decoding
653 // the BitstreamBuffers that come after the Reset() or Flush() call. When
654 // we're here, we know that this DecodeTask() was scheduled by a Decode()
655 // call that came after (in the client thread) the Reset() or Flush() call;
656 // thus set up the delay if necessary.
657 if (decoder_delay_bitstream_buffer_id_ == -1)
658 decoder_delay_bitstream_buffer_id_ = bitstream_record->input_id;
659 } else if (decoder_state_ == kError) {
660 DVLOG(2) << "DecodeTask(): early out: kError state";
661 return;
662 }
663 
// Note: the buffer is still queued in the resetting/flushing case above --
// only the kError case drops it.
664 decoder_input_queue_.push_back(
665 linked_ptr<BitstreamBufferRef>(bitstream_record.release()));
666 decoder_decode_buffer_tasks_scheduled_++;
667 DecodeBufferTask();
668 }
669
// Decoder-thread work loop step: make progress on the current bitstream
// buffer, or start on the next queued one. A zero-size buffer from the
// client (input_id >= 0) is skipped; a zero-size internal buffer
// (input_id < 0, NULL shm) is a flush marker and drains the pipe. Non-empty
// buffers are split into frame fragments (FindFrameFragment) and fed to
// DecodeBufferInitial()/DecodeBufferContinue() depending on state.
670 void ExynosVideoDecodeAccelerator::DecodeBufferTask() {
671 DVLOG(3) << "DecodeBufferTask()";
672 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
673 DCHECK_NE(decoder_state_, kUninitialized);
674 TRACE_EVENT0("Video Decoder", "EVDA::DecodeBufferTask");
675 
676 decoder_decode_buffer_tasks_scheduled_--;
677 
678 if (decoder_state_ == kResetting) {
679 DVLOG(2) << "DecodeBufferTask(): early out: kResetting state";
680 return;
681 } else if (decoder_state_ == kError) {
682 DVLOG(2) << "DecodeBufferTask(): early out: kError state";
683 return;
684 }
685 
686 if (decoder_current_bitstream_buffer_ == NULL) {
687 if (decoder_input_queue_.empty()) {
688 // We're waiting for a new buffer -- exit without scheduling a new task.
689 return;
690 }
691 linked_ptr<BitstreamBufferRef>& buffer_ref = decoder_input_queue_.front();
692 if (decoder_delay_bitstream_buffer_id_ == buffer_ref->input_id) {
693 // We're asked to delay decoding on this and subsequent buffers.
694 return;
695 }
696 
697 // Setup to use the next buffer.
698 decoder_current_bitstream_buffer_.reset(buffer_ref.release());
699 decoder_input_queue_.pop_front();
700 DVLOG(3) << "DecodeBufferTask(): reading input_id="
701 << decoder_current_bitstream_buffer_->input_id
702 << ", addr=" << decoder_current_bitstream_buffer_->shm->memory()
703 << ", size=" << decoder_current_bitstream_buffer_->size;
704 }
705 bool schedule_task = false;
706 const size_t size = decoder_current_bitstream_buffer_->size;
707 size_t decoded_size;
708 if (size == 0) {
709 const int32 input_id = decoder_current_bitstream_buffer_->input_id;
710 decoded_size = 0;
711 if (input_id >= 0) {
712 // This is a buffer queued from the client that has zero size. Skip.
713 schedule_task = true;
714 } else {
715 // This is a buffer of zero size, queued to flush the pipe. Flush.
716 DCHECK_EQ(decoder_current_bitstream_buffer_->shm.get(),
717 static_cast<base::SharedMemory*>(NULL));
718 // Enqueue a buffer guaranteed to be empty. To do that, we flush the
719 // current input, enqueue no data to the next frame, then flush that down.
Ami GONE FROM CHROMIUM 2013/01/11 00:46:33 typo: flame
sheu 2013/01/11 10:41:42 Done.
720 if (!FlushInputFrame() || !AppendToInputFrame(NULL, 0) ||
721 !FlushInputFrame()) {
722 // If we failed to enqueue the empty buffer (due to pipeline
723 // backpressure), don't advance the bitstream buffer queue, and don't
724 // schedule the next task. This bitstream buffer queue entry will get
725 // reprocessed when the pipeline frees up.
Ami GONE FROM CHROMIUM 2013/01/11 00:46:33 So it's ok that if the first FIF() and ATIF() retu
sheu 2013/01/11 10:41:42 We'll just end up with two flushing frame in the p
726 schedule_task = false;
727 } else {
728 DVLOG(2) << "DecodeBufferTask(): enqueued flush buffer";
729 schedule_task = true;
730 }
731 }
732 } else {
733 // This is a buffer queued from the client, with actual contents. Decode.
734 const void* const data =
735 reinterpret_cast<const uint8*>(
736 decoder_current_bitstream_buffer_->shm->memory()) +
737 decoder_current_bitstream_buffer_->bytes_used;
738 const size_t data_size =
739 decoder_current_bitstream_buffer_->size -
740 decoder_current_bitstream_buffer_->bytes_used;
741 if (!FindFrameFragment(reinterpret_cast<const uint8*>(data), data_size,
742 &decoded_size))
743 return;
Ami GONE FROM CHROMIUM 2013/01/11 00:46:33 Should this notify error? E.g. if the bitstream is
sheu 2013/01/11 10:41:42 I suppose we can error here, instead of trying to
744 switch (decoder_state_) {
745 case kInitialized:
746 case kAfterReset:
747 schedule_task = DecodeBufferInitial(data, decoded_size, &decoded_size);
748 break;
749 case kDecoding:
750 schedule_task = DecodeBufferContinue(data, decoded_size, &decoded_size);
751 break;
752 default:
753 NOTIFY_ERROR(ILLEGAL_STATE);
754 return;
755 }
756 }
757 if (decoder_state_ == kError) {
758 // Failed during decode.
759 return;
760 }
761 
762 if (schedule_task) {
763 decoder_current_bitstream_buffer_->bytes_used += decoded_size;
764 if (decoder_current_bitstream_buffer_->bytes_used ==
765 decoder_current_bitstream_buffer_->size) {
766 // Our current bitstream buffer is done; return it.
767 int32 input_id = decoder_current_bitstream_buffer_->input_id;
768 DVLOG(3) << "DecodeBufferTask(): finished input_id=" << input_id;
769 // BitstreamBufferRef destructor calls NotifyEndOfBitstreamBuffer().
770 decoder_current_bitstream_buffer_.reset();
771 }
772 ScheduleDecodeBufferTaskIfNeeded();
773 }
774 }
775
// Finds the extent of the next decodable fragment in [data, data + size).
// For H.264 this is a single frame, found by walking NALUs with the H264
// parser and stopping at a frame boundary; for VP8 the entire buffer is one
// fragment. On success, sets *endpos to the byte offset one past the end of
// the fragment and returns true; returns false on an invalid or unsupported
// stream (*endpos may have been partially updated in that case).
776 bool ExynosVideoDecodeAccelerator::FindFrameFragment(
777 const uint8* data,
778 size_t size,
779 size_t* endpos) {
780 if (video_profile_ >= media::H264PROFILE_MIN &&
781 video_profile_ <= media::H264PROFILE_MAX) {
782 // For H264, we need to feed HW one frame at a time. This is going to take
783 // some parsing of our input stream.
784 decoder_h264_parser_->SetStream(data, size);
785 content::H264NALU nalu;
786 content::H264Parser::Result result;
787 
// The first NAL is always part of the fragment; *endpos covers it.
788 // Find the first NAL.
789 result = decoder_h264_parser_->AdvanceToNextNALU(&nalu);
790 if (result == content::H264Parser::kInvalidStream ||
791 result == content::H264Parser::kUnsupportedStream)
792 return false;
793 *endpos = (nalu.data + nalu.size) - data;
794 if (result == content::H264Parser::kEOStream)
795 return true;
796 
// *endpos is extended at the bottom of the loop, i.e. only after a NAL has
// been seen NOT to start a new frame -- so a boundary NAL is excluded from
// the current fragment.
797 // Keep on peeking the next NALs while they don't indicate a frame
798 // boundary.
799 for (;;) {
800 result = decoder_h264_parser_->AdvanceToNextNALU(&nalu);
801 if (result == content::H264Parser::kInvalidStream ||
802 result == content::H264Parser::kUnsupportedStream)
803 return false;
804 if (result == content::H264Parser::kEOStream)
805 return true;
806 switch (nalu.nal_unit_type) {
807 case content::H264NALU::kNonIDRSlice:
808 case content::H264NALU::kIDRSlice:
809 // For these two, if the "first_mb_in_slice" field is zero, start a
810 // new frame and return. This field is Exp-Golomb coded starting on
811 // the eighth data bit of the NAL; a zero value is encoded with a
812 // leading '1' bit in the byte, which we can detect as the byte being
813 // (unsigned) greater than or equal to 0x80.
// NOTE(review): reads nalu.data[1] without checking nalu.size >= 2; a
// truncated 1-byte slice NALU would read out of bounds -- confirm the
// parser guarantees a minimum NALU size here.
814 if (nalu.data[1] >= 0x80)
815 return true;
816 break;
817 case content::H264NALU::kSPS:
818 case content::H264NALU::kPPS:
819 case content::H264NALU::kEOSeq:
820 case content::H264NALU::kEOStream:
821 // These unconditionally signal a frame boundary.
822 return true;
823 default:
824 // For all others, keep going.
825 break;
826 }
// (The reinterpret_cast is redundant -- |data| is already const uint8* --
// but harmless.)
827 *endpos = (nalu.data + nalu.size) - reinterpret_cast<const uint8*>(data);
828 }
829 NOTREACHED();
830 return false;
831 } else {
832 DCHECK_GE(video_profile_, media::VP8PROFILE_MIN);
833 DCHECK_LE(video_profile_, media::VP8PROFILE_MAX);
834 // For VP8, we can just dump the entire buffer. No fragmentation needed.
835 *endpos = size;
836 return true;
837 }
838 }
839
// Decoder-thread: keep exactly one pending DecodeBufferTask per outstanding
// buffer (queued plus the in-progress one); post another task only when the
// scheduled count falls behind.
840 void ExynosVideoDecodeAccelerator::ScheduleDecodeBufferTaskIfNeeded() {
841 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
842 
843 // If we're behind on tasks, schedule another one.
844 int buffers_to_decode = decoder_input_queue_.size();
845 if (decoder_current_bitstream_buffer_ != NULL)
846 buffers_to_decode++;
847 if (decoder_decode_buffer_tasks_scheduled_ < buffers_to_decode) {
848 decoder_decode_buffer_tasks_scheduled_++;
849 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
850 &ExynosVideoDecodeAccelerator::DecodeBufferTask,
851 base::Unretained(this)));
852 }
853 }
854
// Performs the initial decode iteration(s): feeds stream data to MFC until
// the hardware can report the coded frame format, then (on first startup)
// allocates the downstream buffers and transitions to kDecoding.  Returns
// false on error; on success sets |*endpos| to the number of bytes of |data|
// consumed.
bool ExynosVideoDecodeAccelerator::DecodeBufferInitial(
    const void* data, size_t size, size_t* endpos) {
  DVLOG(3) << "DecodeBufferInitial(): data=" << data << ", size=" << size;
  DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
  DCHECK_NE(decoder_state_, kUninitialized);
  DCHECK_NE(decoder_state_, kDecoding);
  DCHECK(!device_poll_thread_.IsRunning());
  // Initial decode.  We haven't been able to get output stream format info yet.
  // Get it, and start decoding.

  // Copy in and send to HW.
  if (!AppendToInputFrame(data, size) || !FlushInputFrame())
    return false;

  // Recycle buffers.
  DequeueMfc();

  // Check and see if we have format info yet.
  struct v4l2_format format;
  format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  if (ioctl(mfc_fd_, VIDIOC_G_FMT, &format) != 0) {
    if (errno == EINVAL) {
      // We will get EINVAL if we haven't seen sufficient stream to decode the
      // format.  Return true and schedule the next buffer.
      *endpos = size;
      return true;
    } else {
      DPLOG(ERROR) << "DecodeBufferInitial(): ioctl() failed: VIDIOC_G_FMT";
      NOTIFY_ERROR(PLATFORM_FAILURE);
      return false;
    }
  }

  // Run this initialization only on first startup.
  if (decoder_state_ == kInitialized) {
    DVLOG(3) << "DecodeBufferInitial(): running one-time initialization";
    // Success!  Setup our parameters.
    CHECK_EQ(format.fmt.pix_mp.num_planes, 2);
    frame_buffer_size_.SetSize(
        format.fmt.pix_mp.width, format.fmt.pix_mp.height);
    mfc_output_buffer_size_[0] = format.fmt.pix_mp.plane_fmt[0].sizeimage;
    mfc_output_buffer_size_[1] = format.fmt.pix_mp.plane_fmt[1].sizeimage;
    mfc_output_buffer_pixelformat_ = format.fmt.pix_mp.pixelformat;
    // The only supported output format from MFC is tiled NV12.
    DCHECK_EQ(mfc_output_buffer_pixelformat_, V4L2_PIX_FMT_NV12MT_16X16);

    // Create our other buffers.
    if (!CreateMfcOutputBuffers() || !CreateGscInputBuffers() ||
        !CreateGscOutputBuffers())
      return false;

    // MFC expects to process the initial buffer once during stream init to
    // configure stream parameters, but will not consume the steam data on that
    // iteration.  Subsequent iterations (including after reset) do not require
    // the stream init step.
    *endpos = 0;
  } else {
    *endpos = size;
  }

  // StartDevicePoll will raise the error if there is one.
  if (!StartDevicePoll())
    return false;

  decoder_state_ = kDecoding;
  ScheduleDecodeBufferTaskIfNeeded();
  return true;
}
922
923 bool ExynosVideoDecodeAccelerator::DecodeBufferContinue(
924 const void* data, size_t size, size_t* endpos) {
925 DVLOG(3) << "DecodeBufferContinue(): data=" << data << ", size=" << size;
926 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
927 DCHECK_EQ(decoder_state_, kDecoding);
928
929 // We've already setup our output stream parameters, so just keep on truckin'.
930 *endpos = size;
Ami GONE FROM CHROMIUM 2013/01/11 00:46:33 Symmetry is nice and all, but this is just a pure
sheu 2013/01/11 10:41:42 Fine :-P
931 // Both of these calls will set kError state if they fail.
932 return (AppendToInputFrame(data, size) && FlushInputFrame());
933 }
934
935 bool ExynosVideoDecodeAccelerator::AppendToInputFrame(
936 const void* data, size_t size) {
937 DVLOG(3) << "AppendToInputFrame()";
938 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
939 DCHECK_NE(decoder_state_, kUninitialized);
940 DCHECK_NE(decoder_state_, kResetting);
941 DCHECK_NE(decoder_state_, kError);
942 // This routine can handle data == NULL and size == 0, which occurs when
943 // we queue an empty buffer for the purposes of flushing the pipe.
944
945 // Flush if we're too big
946 if (decoder_current_input_buffer_ != -1) {
947 MfcInputRecord& input_record =
948 mfc_input_buffer_map_[decoder_current_input_buffer_];
949 if (input_record.bytes_used + size > input_record.length) {
950 if (!FlushInputFrame())
951 return false;
952 decoder_current_input_buffer_ = -1;
953 }
954 }
955
956 // Try to get an available input buffer
957 if (decoder_current_input_buffer_ == -1) {
958 if (mfc_free_input_buffers_.empty()) {
959 // See if we can get more free buffers from HW
960 DequeueMfc();
961 if (mfc_free_input_buffers_.empty()) {
962 // Nope!
963 DVLOG(2) << "AppendToInputFrame(): stalled for input buffers";
964 return false;
965 }
966 }
967 decoder_current_input_buffer_ = mfc_free_input_buffers_.back();
968 mfc_free_input_buffers_.pop_back();
969 MfcInputRecord& input_record =
970 mfc_input_buffer_map_[decoder_current_input_buffer_];
971 DCHECK_EQ(input_record.bytes_used, 0);
972 DCHECK_EQ(input_record.input_id, -1);
973 DCHECK(decoder_current_bitstream_buffer_ != NULL);
974 input_record.input_id = decoder_current_bitstream_buffer_->input_id;
975 }
976
977 DCHECK_EQ(data == NULL, size == 0);
978 if (size == 0) {
979 // If we asked for an empty buffer, return now. We return only after
980 // getting the next input buffer, since we might actually want an empty
981 // input buffer for flushing purposes.
982 return true;
983 }
984
985 // Copy in to the buffer.
986 MfcInputRecord& input_record =
987 mfc_input_buffer_map_[decoder_current_input_buffer_];
988 if (size > input_record.length - input_record.bytes_used) {
989 LOG(ERROR) << "AppendToInputFrame(): over-size frame, erroring";
990 NOTIFY_ERROR(UNREADABLE_INPUT);
991 return false;
992 }
993 memcpy((char*)input_record.address + input_record.bytes_used, data, size);
994 input_record.bytes_used += size;
995
996 return true;
997 }
998
// Submits the currently-filling MFC input buffer (if any) to the hardware
// via the ready queue, or recycles it if it is an empty client buffer.
// On return decoder_current_input_buffer_ is -1.  Returns false on error.
bool ExynosVideoDecodeAccelerator::FlushInputFrame() {
  DVLOG(3) << "FlushInputFrame()";
  DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
  DCHECK_NE(decoder_state_, kUninitialized);
  DCHECK_NE(decoder_state_, kResetting);
  DCHECK_NE(decoder_state_, kError);

  // Nothing currently filling; nothing to do.
  if (decoder_current_input_buffer_ == -1)
    return true;

  MfcInputRecord& input_record =
      mfc_input_buffer_map_[decoder_current_input_buffer_];
  // If input_id >= 0, this input buffer was prompted by a bitstream buffer we
  // got from the client; if it is empty it carries no stream data, so we can
  // recycle it without queueing.  An empty buffer with a negative input_id
  // (presumably the internal flush marker -- confirm against FlushTask())
  // still falls through and gets queued.
  if (input_record.input_id >= 0 && input_record.bytes_used == 0) {
    input_record.input_id = -1;
    mfc_free_input_buffers_.push_back(decoder_current_input_buffer_);
    decoder_current_input_buffer_ = -1;
    return true;
  }

  // Queue it to MFC.
  mfc_input_ready_queue_.push_back(decoder_current_input_buffer_);
  decoder_current_input_buffer_ = -1;
  DVLOG(3) << "FlushInputFrame(): submitting input_id="
           << input_record.input_id;
  // Kick the MFC once since there's new available input for it.
  EnqueueMfc();

  // EnqueueMfc() may have failed and put us into kError.
  return (decoder_state_ != kError);
}
1030
1031 void ExynosVideoDecodeAccelerator::AssignPictureBuffersTask(
1032 scoped_ptr<PictureBufferArrayRef> pic_buffers) {
1033 DVLOG(3) << "AssignPictureBuffersTask()";
1034 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1035 DCHECK_NE(decoder_state_, kUninitialized);
1036 TRACE_EVENT0("Video Decoder", "EVDA::AssignPictureBuffersTask");
1037
1038 // We run AssignPictureBuffersTask even if we're in kResetting.
1039 if (decoder_state_ == kError) {
1040 DVLOG(2) << "AssignPictureBuffersTask(): early out: kError state";
1041 return;
1042 }
1043
1044 DCHECK_EQ(pic_buffers->egl_images_count,
1045 static_cast<int>(gsc_output_buffer_map_.size()));
1046 for (size_t i = 0; i < gsc_output_buffer_map_.size(); ++i) {
1047 // We should be blank right now.
1048 GscOutputRecord& output_record = gsc_output_buffer_map_[i];
1049 DCHECK_EQ(output_record.fd, -1);
1050 DCHECK_EQ(output_record.egl_image, EGL_NO_IMAGE_KHR);
1051 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
1052 DCHECK_EQ(output_record.picture_id, -1);
1053 output_record.fd = pic_buffers->egl_image_fds[i];
1054 output_record.egl_image = pic_buffers->egl_images[i];
1055 output_record.picture_id = pic_buffers->client_ids[i];
1056
1057 // Take ownership of the EGLImage and fd.
1058 pic_buffers->egl_images[i] = EGL_NO_IMAGE_KHR;
1059 pic_buffers->egl_image_fds[i] = -1;
1060 // And add this buffer to the free list.
1061 gsc_free_output_buffers_.push_back(i);
1062 }
1063
1064 // We got buffers! Kick the GSC.
1065 EnqueueGsc();
1066 }
1067
// Runs one iteration of device servicing on the decoder thread: dequeues and
// enqueues buffers on both devices, then re-arms the device poll thread.
// Scheduled exclusively from DevicePollTask().
void ExynosVideoDecodeAccelerator::ServiceDeviceTask() {
  DVLOG(3) << "ServiceDeviceTask()";
  DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
  DCHECK_NE(decoder_state_, kUninitialized);
  DCHECK_NE(decoder_state_, kInitialized);
  DCHECK_NE(decoder_state_, kAfterReset);
  TRACE_EVENT0("Video Decoder", "EVDA::ServiceDeviceTask");

  if (decoder_state_ == kResetting) {
    DVLOG(2) << "ServiceDeviceTask(): early out: kResetting state";
    return;
  } else if (decoder_state_ == kError) {
    DVLOG(2) << "ServiceDeviceTask(): early out: kError state";
    return;
  }

  // Dequeue first to free up records, then enqueue any newly-available work.
  DequeueMfc();
  DequeueGsc();
  EnqueueMfc();
  EnqueueGsc();

  // Clear the interrupt fd.
  if (!ClearDevicePollInterrupt())
    return;

  // Decide which device fds the poll thread should wait on this round.
  unsigned int poll_fds = 0;
  // Add MFC fd, if we should poll on it.
  // MFC can be polled as soon as either input or output buffers are queued.
  if (mfc_input_buffer_queued_count_ + mfc_output_buffer_queued_count_ > 0)
    poll_fds |= kPollMfc;
  // Add GSC fd, if we should poll on it.
  // GSC has to wait until both input and output buffers are queued.
  if (gsc_input_buffer_queued_count_ > 0 && gsc_output_buffer_queued_count_ > 0)
    poll_fds |= kPollGsc;

  // ServiceDeviceTask() should only ever be scheduled from DevicePollTask(),
  // so either:
  // * device_poll_thread_ is running normally
  // * device_poll_thread_ scheduled us, but then a ResetTask() or DestroyTask()
  //   shut it down, in which case we're either in kResetting or kError states
  //   respectively, and we should have early-outed already.
  DCHECK(device_poll_thread_.message_loop());
  // Queue the DevicePollTask() now.
  device_poll_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
      &ExynosVideoDecodeAccelerator::DevicePollTask,
      base::Unretained(this),
      poll_fds));

  // Snapshot of buffer counts at every stage of the decode pipeline, for
  // debugging stalls: decoder -> MFC -> GSC -> client.
  DVLOG(1) << "ServiceDeviceTask(): buffer counts: DEC["
           << decoder_input_queue_.size() << "->"
           << mfc_input_ready_queue_.size() << "] => MFC["
           << mfc_free_input_buffers_.size() << "+"
           << mfc_input_buffer_queued_count_ << "/"
           << mfc_input_buffer_count_ << "->"
           << mfc_free_output_buffers_.size() << "+"
           << mfc_output_buffer_queued_count_ << "/"
           << mfc_output_buffer_count_ << "] => "
           << mfc_output_gsc_input_queue_.size() << " => GSC["
           << gsc_free_input_buffers_.size() << "+"
           << gsc_input_buffer_queued_count_ << "/"
           << gsc_input_buffer_count_ << "->"
           << gsc_free_output_buffers_.size() << "+"
           << gsc_output_buffer_queued_count_ << "/"
           << gsc_output_buffer_count_ << "] => VDA["
           << decoder_frames_at_client_ << "]";

  ScheduleDecodeBufferTaskIfNeeded();
}
1136
// Queues all ready MFC input buffers and all free MFC output buffers to the
// device, starting streaming and signaling the poll interrupt whenever a
// previously-empty queue becomes non-empty.  Errors are raised via
// NOTIFY_ERROR inside the helpers/macros.
void ExynosVideoDecodeAccelerator::EnqueueMfc() {
  DVLOG(3) << "EnqueueMfc()";
  DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
  DCHECK_NE(decoder_state_, kUninitialized);
  TRACE_EVENT0("Video Decoder", "EVDA::EnqueueMfc");

  // Enqueue all the MFC inputs (VIDEO_OUTPUT buffers) waiting in the ready
  // queue.
  const int old_mfc_inputs_queued = mfc_input_buffer_queued_count_;
  while (!mfc_input_ready_queue_.empty()) {
    if (!EnqueueMfcInputRecord())
      return;
  }
  if (old_mfc_inputs_queued == 0 && mfc_input_buffer_queued_count_ != 0) {
    // We just started up a previously empty queue.
    // Queue state changed; signal interrupt.
    if (!SetDevicePollInterrupt())
      return;
    // Start VIDIOC_STREAMON if we haven't yet.
    if (!mfc_input_streamon_) {
      __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
      IOCTL_OR_ERROR_RETURN(mfc_fd_, VIDIOC_STREAMON, &type);
      mfc_input_streamon_ = true;
    }
  }

  // Enqueue all the MFC outputs we can.
  const int old_mfc_outputs_queued = mfc_output_buffer_queued_count_;
  while (!mfc_free_output_buffers_.empty()) {
    if (!EnqueueMfcOutputRecord())
      return;
  }
  if (old_mfc_outputs_queued == 0 && mfc_output_buffer_queued_count_ != 0) {
    // We just started up a previously empty queue.
    // Queue state changed; signal interrupt.
    if (!SetDevicePollInterrupt())
      return;
    // Start VIDIOC_STREAMON if we haven't yet.
    if (!mfc_output_streamon_) {
      __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
      IOCTL_OR_ERROR_RETURN(mfc_fd_, VIDIOC_STREAMON, &type);
      mfc_output_streamon_ = true;
    }
  }
}
1181
1182 void ExynosVideoDecodeAccelerator::DequeueMfc() {
1183 DVLOG(3) << "DequeueMfc()";
1184 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1185 DCHECK_NE(decoder_state_, kUninitialized);
1186 TRACE_EVENT0("Video Decoder", "EVDA::DequeueMfc");
1187
1188 // Dequeue completed MFC input (VIDEO_OUTPUT) buffers, and recycle to the free
1189 // list.
1190 struct v4l2_buffer dqbuf;
1191 struct v4l2_plane planes[2];
1192 while (mfc_input_buffer_queued_count_ > 0) {
1193 DCHECK(mfc_input_streamon_);
1194 memset(&dqbuf, 0, sizeof(dqbuf));
1195 dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1196 dqbuf.memory = V4L2_MEMORY_MMAP;
1197 if (ioctl(mfc_fd_, VIDIOC_DQBUF, &dqbuf) != 0) {
1198 if (errno == EAGAIN) {
1199 // EAGAIN if we're just out of buffers to dequeue.
1200 break;
1201 }
1202 DPLOG(ERROR) << "DequeueMfc(): ioctl() failed: VIDIOC_DQBUF";
1203 NOTIFY_ERROR(PLATFORM_FAILURE);
1204 return;
1205 }
1206 MfcInputRecord& input_record = mfc_input_buffer_map_[dqbuf.index];
1207 DCHECK(input_record.at_device);
1208 mfc_free_input_buffers_.push_back(dqbuf.index);
1209 input_record.at_device = false;
1210 input_record.bytes_used = 0;
1211 input_record.input_id = -1;
1212 mfc_input_buffer_queued_count_--;
1213 }
1214
1215 // Dequeue completed MFC output (VIDEO_CAPTURE) buffers, and queue to the
1216 // completed queue.
1217 while (mfc_output_buffer_queued_count_ > 0) {
1218 DCHECK(mfc_output_streamon_);
1219 memset(&dqbuf, 0, sizeof(dqbuf));
1220 memset(planes, 0, sizeof(planes));
1221 dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1222 dqbuf.memory = V4L2_MEMORY_MMAP;
1223 dqbuf.m.planes = planes;
1224 dqbuf.length = 2;
1225 if (ioctl(mfc_fd_, VIDIOC_DQBUF, &dqbuf) != 0) {
1226 if (errno == EAGAIN) {
1227 // EAGAIN if we're just out of buffers to dequeue.
1228 break;
1229 }
1230 DPLOG(ERROR) << "DequeueMfc(): ioctl() failed: VIDIOC_DQBUF";
1231 NOTIFY_ERROR(PLATFORM_FAILURE);
1232 return;
1233 }
1234 MfcOutputRecord& output_record = mfc_output_buffer_map_[dqbuf.index];
1235 DCHECK(output_record.at_device);
1236 if (dqbuf.m.planes[0].bytesused + dqbuf.m.planes[1].bytesused == 0) {
1237 // This is an empty output buffer returned as part of a flush.
1238 mfc_free_output_buffers_.push_back(dqbuf.index);
1239 output_record.at_device = false;
1240 output_record.input_id = -1;
1241 output_record.bytes_used[0] = 0;
1242 output_record.bytes_used[1] = 0;
1243 } else {
1244 // This is an output buffer with contents to pass down the pipe.
1245 const long int input_id = dqbuf.timestamp.tv_sec;
1246 DCHECK(input_id >= 0);
1247 mfc_output_gsc_input_queue_.push_back(dqbuf.index);
1248 output_record.at_device = false;
1249 output_record.input_id = input_id;
1250 output_record.bytes_used[0] = dqbuf.m.planes[0].bytesused;
1251 output_record.bytes_used[1] = dqbuf.m.planes[1].bytesused;
Ami GONE FROM CHROMIUM 2013/01/11 00:46:33 I think you missed this comment: AFAICT l.1207-121
sheu 2013/01/11 10:41:42 Sounds good.
1252 DVLOG(3) << "DequeueMfc(): dequeued input_id=" << input_id;
1253 // We don't count this output buffer dequeued yet, or add it to the free
1254 // list, as it has data GSC needs to process.
1255
1256 // We have new frames in mfc_output_gsc_input_queue_. Kick the pipe.
1257 SetDevicePollInterrupt();
1258 }
1259 mfc_output_buffer_queued_count_--;
1260 }
1261
1262 NotifyFlushDoneIfNeeded();
1263 }
1264
1265 void ExynosVideoDecodeAccelerator::EnqueueGsc() {
1266 DVLOG(3) << "EnqueueGsc()";
1267 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1268 DCHECK_NE(decoder_state_, kUninitialized);
1269 DCHECK_NE(decoder_state_, kInitialized);
1270 TRACE_EVENT0("Video Decoder", "EVDA::EnqueueGsc");
1271
1272 // Drain the pipe of completed MFC output buffers.
1273 const int old_gsc_inputs_queued = gsc_input_buffer_queued_count_;
1274 while (!mfc_output_gsc_input_queue_.empty()) {
1275 if (gsc_free_input_buffers_.empty())
Ami GONE FROM CHROMIUM 2013/01/11 00:46:33 I think you missed the comment: while (foo()) {
sheu 2013/01/11 10:41:42 Done.
1276 break;
1277 if (!EnqueueGscInputRecord())
1278 return;
1279 }
1280 if (old_gsc_inputs_queued == 0 && gsc_input_buffer_queued_count_ != 0) {
1281 // We just started up a previously empty queue.
1282 // Queue state changed; signal interrupt.
1283 if (!SetDevicePollInterrupt())
1284 return;
1285 // Start VIDIOC_STREAMON if we haven't yet.
1286 if (!gsc_input_streamon_) {
1287 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1288 IOCTL_OR_ERROR_RETURN(gsc_fd_, VIDIOC_STREAMON, &type);
1289 gsc_input_streamon_ = true;
1290 }
1291 }
1292
1293 // Enqueue a GSC output, only if we need one
1294 if (gsc_input_buffer_queued_count_ != 0 &&
1295 gsc_output_buffer_queued_count_ == 0 &&
1296 !gsc_free_output_buffers_.empty()) {
1297 const int old_gsc_outputs_queued = gsc_output_buffer_queued_count_;
1298 if (!EnqueueGscOutputRecord())
1299 return;
1300 if (old_gsc_outputs_queued == 0 && gsc_output_buffer_queued_count_ != 0) {
1301 // We just started up a previously empty queue.
1302 // Queue state changed; signal interrupt.
1303 if (!SetDevicePollInterrupt())
1304 return;
1305 // Start VIDIOC_STREAMON if we haven't yet.
1306 if (!gsc_output_streamon_) {
1307 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1308 IOCTL_OR_ERROR_RETURN(gsc_fd_, VIDIOC_STREAMON, &type);
1309 gsc_output_streamon_ = true;
1310 }
1311 }
1312 }
1313 // Bug check: GSC is liable to race conditions if more than one buffer is
1314 // simultaneously queued.
1315 DCHECK_GE(1, gsc_output_buffer_queued_count_);
1316 }
1317
// Dequeues all completed buffers from GSC: input (VIDEO_OUTPUT) buffers are
// recycled (along with the MFC output buffers they were wrapping); output
// (VIDEO_CAPTURE) buffers become Pictures delivered to the client.  Finishes
// a pending Flush if the pipe drained.
void ExynosVideoDecodeAccelerator::DequeueGsc() {
  DVLOG(3) << "DequeueGsc()";
  DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
  DCHECK_NE(decoder_state_, kUninitialized);
  DCHECK_NE(decoder_state_, kInitialized);
  DCHECK_NE(decoder_state_, kAfterReset);
  TRACE_EVENT0("Video Decoder", "EVDA::DequeueGsc");

  // Dequeue completed GSC input (VIDEO_OUTPUT) buffers, and recycle to the
  // free list.  Also recycle the corresponding MFC output buffers at this
  // time.
  struct v4l2_buffer dqbuf;
  while (gsc_input_buffer_queued_count_ > 0) {
    DCHECK(gsc_input_streamon_);
    memset(&dqbuf, 0, sizeof(dqbuf));
    dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    dqbuf.memory = V4L2_MEMORY_DMABUF;
    if (ioctl(gsc_fd_, VIDIOC_DQBUF, &dqbuf) != 0) {
      if (errno == EAGAIN) {
        // EAGAIN if we're just out of buffers to dequeue.
        break;
      }
      DPLOG(ERROR) << "DequeueGsc(): ioctl() failed: VIDIOC_DQBUF";
      NOTIFY_ERROR(PLATFORM_FAILURE);
      return;
    }
    GscInputRecord& input_record = gsc_input_buffer_map_[dqbuf.index];
    MfcOutputRecord& output_record =
        mfc_output_buffer_map_[input_record.mfc_output];
    DCHECK(input_record.at_device);
    gsc_free_input_buffers_.push_back(dqbuf.index);
    mfc_free_output_buffers_.push_back(input_record.mfc_output);
    input_record.at_device = false;
    input_record.mfc_output = -1;
    output_record.input_id = -1;
    gsc_input_buffer_queued_count_--;
  }

  // Dequeue completed GSC output (VIDEO_CAPTURE) buffers, and send them off to
  // the client.  Don't recycle to its free list yet -- we can't do that until
  // ReusePictureBuffer() returns it to us.
  while (gsc_output_buffer_queued_count_ > 0) {
    DCHECK(gsc_output_streamon_);
    memset(&dqbuf, 0, sizeof(dqbuf));
    dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    dqbuf.memory = V4L2_MEMORY_DMABUF;
    if (ioctl(gsc_fd_, VIDIOC_DQBUF, &dqbuf) != 0) {
      if (errno == EAGAIN) {
        // EAGAIN if we're just out of buffers to dequeue.
        break;
      }
      DPLOG(ERROR) << "DequeueGsc(): ioctl() failed: VIDIOC_DQBUF";
      NOTIFY_ERROR(PLATFORM_FAILURE);
      return;
    }
    GscOutputRecord& output_record = gsc_output_buffer_map_[dqbuf.index];
    DCHECK(output_record.at_device);
    DCHECK(!output_record.at_client);
    DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
    output_record.at_device = false;
    output_record.at_client = true;
    gsc_output_buffer_queued_count_--;
    // The timestamp still carries the originating bitstream buffer id; pass
    // it to the client alongside the picture.
    child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
        &Client::PictureReady, client_, media::Picture(
            output_record.picture_id, dqbuf.timestamp.tv_sec)));
    decoder_frames_at_client_++;
  }

  NotifyFlushDoneIfNeeded();
}
1387
1388 void ExynosVideoDecodeAccelerator::NotifyFlushDoneIfNeeded() {
1389 if (!decoder_flushing_)
1390 return;
1391
1392 // Pipeline is empty when:
1393 // * There is no currently filling input buffer.
1394 // * MFC input holding queue is empty.
1395 // * All MFC input (VIDEO_OUTPUT) buffers are returned.
1396 // * MFC -> GSC holding queue is empty.
1397 // * All GSC input (VIDEO_OUTPUT) buffers are returned.
1398 if (decoder_current_input_buffer_ != -1)
1399 return;
1400 if ((mfc_input_ready_queue_.size() +
1401 mfc_input_buffer_queued_count_ + mfc_output_gsc_input_queue_.size() +
1402 gsc_input_buffer_queued_count_ + gsc_output_buffer_queued_count_ ) != 0)
1403 return;
1404
1405 decoder_delay_bitstream_buffer_id_ = -1;
1406 decoder_flushing_ = false;
1407 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
1408 &Client::NotifyFlushDone, client_));
1409
1410 // While we were flushing, we early-outed DecodeBufferTask()s.
1411 ScheduleDecodeBufferTaskIfNeeded();
1412 }
1413
// Queues one buffer from the MFC input ready queue to the device.  Returns
// false on ioctl failure (NOTIFY_ERROR is raised by the macro).
bool ExynosVideoDecodeAccelerator::EnqueueMfcInputRecord() {
  DVLOG(3) << "EnqueueMfcInputRecord()";
  DCHECK(!mfc_input_ready_queue_.empty());

  // Enqueue a MFC input (VIDEO_OUTPUT) buffer.
  const int buffer = mfc_input_ready_queue_.back();
  MfcInputRecord& input_record = mfc_input_buffer_map_[buffer];
  DCHECK(!input_record.at_device);
  struct v4l2_buffer qbuf;
  struct v4l2_plane qbuf_plane;
  memset(&qbuf, 0, sizeof(qbuf));
  memset(&qbuf_plane, 0, sizeof(qbuf_plane));
  qbuf.index = buffer;
  qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  // Smuggle the bitstream buffer id through the device in the timestamp
  // field, so it can be matched to the decoded frame on dequeue.
  qbuf.timestamp.tv_sec = input_record.input_id;
  qbuf.memory = V4L2_MEMORY_MMAP;
  qbuf.m.planes = &qbuf_plane;
  qbuf.m.planes[0].bytesused = input_record.bytes_used;
  qbuf.length = 1;
  IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_QBUF, &qbuf);
  // Only dequeue from the ready queue after the QBUF succeeded.
  mfc_input_ready_queue_.pop_back();
  input_record.at_device = true;
  mfc_input_buffer_queued_count_++;
  DVLOG(3) << "EnqueueMfcInputRecord(): enqueued input_id="
           << input_record.input_id;
  return true;
}
1441
// Queues one free MFC output (VIDEO_CAPTURE) buffer to the device so it can
// receive a decoded frame.  Returns false on ioctl failure (NOTIFY_ERROR is
// raised by the macro).
bool ExynosVideoDecodeAccelerator::EnqueueMfcOutputRecord() {
  DVLOG(3) << "EnqueueMfcOutputRecord()";
  DCHECK(!mfc_free_output_buffers_.empty());

  // Enqueue a MFC output (VIDEO_CAPTURE) buffer.
  const int buffer = mfc_free_output_buffers_.back();
  MfcOutputRecord& output_record = mfc_output_buffer_map_[buffer];
  DCHECK(!output_record.at_device);
  DCHECK_EQ(output_record.input_id, -1);
  struct v4l2_buffer qbuf;
  struct v4l2_plane qbuf_planes[2];
  memset(&qbuf, 0, sizeof(qbuf));
  memset(qbuf_planes, 0, sizeof(qbuf_planes));
  qbuf.index = buffer;
  qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  qbuf.memory = V4L2_MEMORY_MMAP;
  // Two planes: NV12M-style separate Y and CbCr.
  qbuf.m.planes = qbuf_planes;
  qbuf.length = 2;
  IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_QBUF, &qbuf);
  // Only remove from the free list after the QBUF succeeded.
  mfc_free_output_buffers_.pop_back();
  output_record.at_device = true;
  mfc_output_buffer_queued_count_++;
  return true;
}
1466
// Wraps the oldest completed MFC output frame in a free GSC input
// (VIDEO_OUTPUT) buffer and queues it to GSC via USERPTR memory.  Returns
// false on ioctl failure (NOTIFY_ERROR is raised by the macro).
bool ExynosVideoDecodeAccelerator::EnqueueGscInputRecord() {
  DVLOG(3) << "EnqueueGscInputRecord()";
  DCHECK(!gsc_free_input_buffers_.empty());

  // Enqueue a GSC input (VIDEO_OUTPUT) buffer for a complete MFC output
  // (VIDEO_CAPTURE) buffer.
  const int mfc_buffer = mfc_output_gsc_input_queue_.front();
  const int gsc_buffer = gsc_free_input_buffers_.back();
  MfcOutputRecord& output_record = mfc_output_buffer_map_[mfc_buffer];
  DCHECK(!output_record.at_device);
  GscInputRecord& input_record = gsc_input_buffer_map_[gsc_buffer];
  DCHECK(!input_record.at_device);
  DCHECK_EQ(input_record.mfc_output, -1);
  struct v4l2_buffer qbuf;
  struct v4l2_plane qbuf_planes[2];
  memset(&qbuf, 0, sizeof(qbuf));
  memset(qbuf_planes, 0, sizeof(qbuf_planes));
  qbuf.index = gsc_buffer;
  qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  // Keep propagating the bitstream buffer id via the timestamp field.
  qbuf.timestamp.tv_sec = output_record.input_id;
  // USERPTR: GSC reads directly out of the mmapped MFC output planes.
  qbuf.memory = V4L2_MEMORY_USERPTR;
  qbuf.m.planes = qbuf_planes;
  qbuf.m.planes[0].bytesused = output_record.bytes_used[0];
  qbuf.m.planes[0].length = mfc_output_buffer_size_[0];
  qbuf.m.planes[0].m.userptr = (unsigned long)output_record.address[0];
  qbuf.m.planes[1].bytesused = output_record.bytes_used[1];
  qbuf.m.planes[1].length = mfc_output_buffer_size_[1];
  qbuf.m.planes[1].m.userptr = (unsigned long)output_record.address[1];
  qbuf.length = 2;
  IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_QBUF, &qbuf);
  // Only update the queues after the QBUF succeeded.
  mfc_output_gsc_input_queue_.pop_front();
  gsc_free_input_buffers_.pop_back();
  input_record.at_device = true;
  // Remember which MFC buffer this GSC input wraps, so it can be recycled
  // when GSC is done with it.
  input_record.mfc_output = mfc_buffer;
  output_record.bytes_used[0] = 0;
  output_record.bytes_used[1] = 0;
  gsc_input_buffer_queued_count_++;
  DVLOG(3) << "EnqueueGscInputRecord(): enqueued input_id="
           << output_record.input_id;
  return true;
}
1508
// Queues the oldest free GSC output (VIDEO_CAPTURE) buffer to GSC as a
// DMABUF target, first waiting on (and destroying) any EGL sync object left
// by the client's previous use of the buffer.  Returns false on ioctl
// failure (NOTIFY_ERROR is raised by the macro).
bool ExynosVideoDecodeAccelerator::EnqueueGscOutputRecord() {
  DVLOG(3) << "EnqueueGscOutputRecord()";
  DCHECK(!gsc_free_output_buffers_.empty());

  // Enqueue a GSC output (VIDEO_CAPTURE) buffer.
  const int buffer = gsc_free_output_buffers_.front();
  GscOutputRecord& output_record = gsc_output_buffer_map_[buffer];
  DCHECK(!output_record.at_device);
  DCHECK(!output_record.at_client);
  if (output_record.egl_sync != EGL_NO_SYNC_KHR) {
    TRACE_EVENT0(
        "Video Decoder",
        "EVDA::EnqueueGscOutputRecord: eglClientWaitSyncKHR");
    // If we have to wait for completion, wait.  Note that
    // gsc_free_output_buffers_ is a FIFO queue, so we always wait on the
    // buffer that has been in the queue the longest.
    egl_client_wait_sync_khr(egl_display_, output_record.egl_sync, 0,
        EGL_FOREVER_KHR);
    egl_destroy_sync_khr(egl_display_, output_record.egl_sync);
    output_record.egl_sync = EGL_NO_SYNC_KHR;
  }
  struct v4l2_buffer qbuf;
  struct v4l2_plane qbuf_plane;
  memset(&qbuf, 0, sizeof(qbuf));
  memset(&qbuf_plane, 0, sizeof(qbuf_plane));
  qbuf.index = buffer;
  qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  // DMABUF: GSC writes directly into the dma-buf backing the EGLImage.
  qbuf.memory = V4L2_MEMORY_DMABUF;
  qbuf.m.planes = &qbuf_plane;
  qbuf.m.planes[0].m.fd = output_record.fd;
  qbuf.length = 1;
  IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_QBUF, &qbuf);
  // Only remove from the free list after the QBUF succeeded.
  gsc_free_output_buffers_.pop_front();
  output_record.at_device = true;
  gsc_output_buffer_queued_count_++;
  return true;
}
1546
1547 void ExynosVideoDecodeAccelerator::ReusePictureBufferTask(
1548 int32 picture_buffer_id, scoped_ptr<EGLSyncKHRRef> egl_sync_ref) {
1549 DVLOG(3) << "ReusePictureBufferTask(): picture_buffer_id="
1550 << picture_buffer_id;
1551 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1552 TRACE_EVENT0("Video Decoder", "EVDA::ReusePictureBufferTask");
1553
1554 // We run ReusePictureBufferTask even if we're in kResetting.
1555 if (decoder_state_ == kError) {
1556 DVLOG(2) << "ReusePictureBufferTask(): early out: kError state";
1557 return;
1558 }
1559
1560 size_t index;
1561 for (index = 0; index < gsc_output_buffer_map_.size(); ++index)
1562 if (gsc_output_buffer_map_[index].picture_id == picture_buffer_id)
1563 break;
1564
1565 if (index >= gsc_output_buffer_map_.size()) {
1566 DLOG(ERROR) << "ReusePictureBufferTask(): picture_buffer_id not found";
1567 NOTIFY_ERROR(INVALID_ARGUMENT);
1568 return;
1569 }
1570
1571 GscOutputRecord& output_record = gsc_output_buffer_map_[index];
1572 if (output_record.at_device || !output_record.at_client) {
1573 DLOG(ERROR) << "ReusePictureBufferTask(): picture_buffer_id not reusable";
1574 NOTIFY_ERROR(INVALID_ARGUMENT);
1575 return;
1576 }
1577
1578 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
1579 output_record.at_client = false;
1580 output_record.egl_sync = egl_sync_ref->egl_sync;
1581 gsc_free_output_buffers_.push_back(index);
1582 decoder_frames_at_client_--;
1583 // Take ownership of the EGLSync.
1584 egl_sync_ref->egl_sync = EGL_NO_SYNC_KHR;
1585 // We got a buffer back, so kick the GSC.
1586 EnqueueGsc();
1587 }
1588
// Handles a client Flush() on the decoder thread: either reports done
// immediately (when the pipe is known-empty) or queues an internal empty
// "flush" buffer whose drain completion triggers NotifyFlushDone.
void ExynosVideoDecodeAccelerator::FlushTask() {
  DVLOG(3) << "FlushTask()";
  DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
  TRACE_EVENT0("Video Decoder", "EVDA::FlushTask");

  // Flush outstanding buffers.
  if (decoder_state_ == kResetting || decoder_state_ == kInitialized ||
      decoder_state_ == kAfterReset) {
    // Nothing in the pipe; return done immediately.  (In kResetting,
    // ResetTask() has presumably already dropped everything in flight --
    // confirm against ResetTask() if this path changes.)
    child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
        &Client::NotifyFlushDone, client_));
    return;
  } else if (decoder_state_ == kError) {
    DVLOG(2) << "FlushTask(): early out: kError state";
    return;
  }

  // We don't support stacked flushing.
  DCHECK(!decoder_flushing_);

  // Queue up an empty buffer -- this triggers the flush.
  decoder_input_queue_.push_back(linked_ptr<BitstreamBufferRef>(
      new BitstreamBufferRef(client_, child_message_loop_proxy_, NULL, 0,
                             kFlushBufferId)));
  decoder_flushing_ = true;

  ScheduleDecodeBufferTaskIfNeeded();
}
1617
// Handles a client Reset() on the decoder thread: stops device polling,
// drops all queued-but-undelivered work, and schedules ResetDoneTask() to
// run after any in-flight decoder-thread tasks have drained.
void ExynosVideoDecodeAccelerator::ResetTask() {
  DVLOG(3) << "ResetTask()";
  DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
  TRACE_EVENT0("Video Decoder", "EVDA::ResetTask");

  if (decoder_state_ == kError) {
    DVLOG(2) << "ResetTask(): early out: kError state";
    return;
  }

  // We stop streaming, but we _don't_ destroy our buffers.
  if (!StopDevicePoll())
    return;

  // Drop all pending input.
  decoder_current_bitstream_buffer_.reset();
  decoder_input_queue_.clear();

  decoder_current_input_buffer_ = -1;
  decoder_decode_buffer_tasks_scheduled_ = 0;

  // If we were flushing, we'll never return any more BitstreamBuffers or
  // PictureBuffers; they have all been dropped and returned by now.
  NotifyFlushDoneIfNeeded();

  // Mark that we're resetting, then enqueue a ResetDoneTask().  All
  // intervening jobs will early-out in the kResetting state.
  decoder_state_ = kResetting;
  decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
      &ExynosVideoDecodeAccelerator::ResetDoneTask, base::Unretained(this)));
}
1648
// Completes a Reset(): runs after all tasks queued before the reset have
// drained from the decoder thread.  Re-initializes format-specific state and
// notifies the client.
void ExynosVideoDecodeAccelerator::ResetDoneTask() {
  DVLOG(3) << "ResetDoneTask()";
  DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
  TRACE_EVENT0("Video Decoder", "EVDA::ResetDoneTask");

  if (decoder_state_ == kError) {
    DVLOG(2) << "ResetDoneTask(): early out: kError state";
    return;
  }

  // Reset format-specific bits: the H.264 parser must not carry state across
  // the reset.
  if (video_profile_ >= media::H264PROFILE_MIN &&
      video_profile_ <= media::H264PROFILE_MAX) {
    decoder_h264_parser_.reset(new content::H264Parser());
  }

  // Jobs drained, we're finished resetting.
  DCHECK_EQ(decoder_state_, kResetting);
  decoder_state_ = kAfterReset;
  decoder_delay_bitstream_buffer_id_ = -1;
  child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
      &Client::NotifyResetDone, client_));

  // While we were resetting, we early-outed DecodeBufferTask()s.
  ScheduleDecodeBufferTaskIfNeeded();
}
1675
1676 void ExynosVideoDecodeAccelerator::DestroyTask() {
1677 DVLOG(3) << "DestroyTask()";
1678 TRACE_EVENT0("Video Decoder", "EVDA::DestroyTask");
1679
1680 // DestroyTask() should run regardless of decoder_state_.
1681
1682 // Stop streaming and the device_poll_thread_.
1683 StopDevicePoll();
1684
1685 decoder_current_bitstream_buffer_.reset();
1686 decoder_current_input_buffer_ = -1;
1687 decoder_decode_buffer_tasks_scheduled_ = 0;
1688 decoder_frames_at_client_ = 0;
1689 decoder_input_queue_.clear();
1690 decoder_flushing_ = false;
1691
1692 // Set our state to kError. Just in case.
1693 decoder_state_ = kError;
1694 }
1695
1696 bool ExynosVideoDecodeAccelerator::StartDevicePoll() {
1697 DVLOG(3) << "StartDevicePoll()";
1698 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1699 DCHECK(!device_poll_thread_.IsRunning());
1700
1701 // Start up the device poll thread and schedule its first DevicePollTask().
1702 if (!device_poll_thread_.Start()) {
1703 DLOG(ERROR) << "StartDevicePoll(): Device thread failed to start";
1704 NOTIFY_ERROR(PLATFORM_FAILURE);
1705 return false;
1706 }
1707 device_poll_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
1708 &ExynosVideoDecodeAccelerator::DevicePollTask,
1709 base::Unretained(this),
1710 0));
1711
1712 return true;
1713 }
1714
1715 bool ExynosVideoDecodeAccelerator::StopDevicePoll() {
1716 DVLOG(3) << "StopDevicePoll()";
1717 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1718
1719 // Signal the DevicePollTask() to stop, and stop the device poll thread.
1720 if (!SetDevicePollInterrupt())
1721 return false;
1722 device_poll_thread_.Stop();
1723 // Clear the interrupt now, to be sure.
1724 if (!ClearDevicePollInterrupt())
1725 return false;
1726
1727 // Stop streaming.
1728 if (mfc_input_streamon_) {
1729 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1730 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_STREAMOFF, &type);
1731 }
1732 mfc_input_streamon_ = false;
1733 if (mfc_output_streamon_) {
1734 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1735 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_STREAMOFF, &type);
1736 }
1737 mfc_output_streamon_ = false;
1738 if (gsc_input_streamon_) {
1739 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1740 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_STREAMOFF, &type);
1741 }
1742 gsc_input_streamon_ = false;
1743 if (gsc_output_streamon_) {
1744 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1745 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_STREAMOFF, &type);
1746 }
1747 gsc_output_streamon_ = false;
1748
1749 // Reset all our accounting info.
1750 mfc_input_ready_queue_.clear();
1751 mfc_free_input_buffers_.clear();
1752 DCHECK_EQ(mfc_input_buffer_count_,
1753 static_cast<int>(mfc_input_buffer_map_.size()));
1754 for (size_t i = 0; i < mfc_input_buffer_map_.size(); ++i) {
1755 mfc_free_input_buffers_.push_back(i);
1756 mfc_input_buffer_map_[i].at_device = false;
1757 mfc_input_buffer_map_[i].bytes_used = 0;
1758 mfc_input_buffer_map_[i].input_id = -1;
1759 }
1760 mfc_input_buffer_queued_count_ = 0;
1761 mfc_free_output_buffers_.clear();
1762 DCHECK_EQ(mfc_output_buffer_count_,
1763 static_cast<int>(mfc_output_buffer_map_.size()));
1764 for (size_t i = 0; i < mfc_output_buffer_map_.size(); ++i) {
1765 mfc_free_output_buffers_.push_back(i);
1766 mfc_output_buffer_map_[i].at_device = false;
1767 mfc_output_buffer_map_[i].input_id = -1;
1768 }
1769 mfc_output_buffer_queued_count_ = 0;
1770 mfc_output_gsc_input_queue_.clear();
1771 gsc_free_input_buffers_.clear();
1772 DCHECK_EQ(gsc_input_buffer_count_,
1773 static_cast<int>(gsc_input_buffer_map_.size()));
1774 for (size_t i = 0; i < gsc_input_buffer_map_.size(); ++i) {
1775 gsc_free_input_buffers_.push_back(i);
1776 gsc_input_buffer_map_[i].at_device = false;
1777 gsc_input_buffer_map_[i].mfc_output = -1;
1778 }
1779 gsc_input_buffer_queued_count_ = 0;
1780 gsc_free_output_buffers_.clear();
1781 DCHECK_EQ(gsc_output_buffer_count_,
1782 static_cast<int>(gsc_output_buffer_map_.size()));
1783 for (size_t i = 0; i < gsc_output_buffer_map_.size(); ++i) {
1784 // Only mark those free that aren't being held by the VDA.
1785 if (!gsc_output_buffer_map_[i].at_client) {
1786 gsc_free_output_buffers_.push_back(i);
1787 gsc_output_buffer_map_[i].at_device = false;
1788 }
1789 }
1790 gsc_output_buffer_queued_count_ = 0;
1791
1792 DVLOG(3) << "StopDevicePoll(): device poll stopped";
1793 return true;
1794 }
1795
1796 bool ExynosVideoDecodeAccelerator::SetDevicePollInterrupt() {
1797 DVLOG(3) << "SetDevicePollInterrupt()";
1798 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1799
1800 ssize_t ret;
1801 const uint64 buf = 1;
1802 ret = write(device_poll_interrupt_fd_, &buf, sizeof(buf));
1803 if (ret == -1) {
1804 DPLOG(ERROR) << "SetDevicePollInterrupt(): write() failed";
1805 NOTIFY_ERROR(PLATFORM_FAILURE);
1806 return false;
1807 }
1808 return true;
1809 }
1810
1811 bool ExynosVideoDecodeAccelerator::ClearDevicePollInterrupt() {
1812 DVLOG(3) << "ClearDevicePollInterrupt()";
1813 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current());
1814
1815 int ret;
1816 uint64 buf;
1817 ret = read(device_poll_interrupt_fd_, &buf, sizeof(buf));
Ami GONE FROM CHROMIUM 2013/01/11 00:46:33 buf is "used" before ret (param is eval'd pre-disp
sheu 2013/01/11 10:41:42 Alrighty then, fixed here and above.
1818 if (ret == -1) {
1819 if (errno == EAGAIN) {
1820 // No interrupt flag set, and we're reading nonblocking. Not an error.
1821 return true;
1822 } else {
1823 DPLOG(ERROR) << "ClearDevicePollInterrupt(): read() failed";
1824 NOTIFY_ERROR(PLATFORM_FAILURE);
1825 return false;
1826 }
1827 }
1828 return true;
1829 }
1830
1831 void ExynosVideoDecodeAccelerator::DevicePollTask(unsigned int poll_fds) {
1832 DVLOG(3) << "DevicePollTask()";
1833 DCHECK_EQ(device_poll_thread_.message_loop(), MessageLoop::current());
1834 TRACE_EVENT0("Video Decoder", "EVDA::DevicePollTask");
1835
1836 // This routine just polls the set of device fds, and schedules a
1837 // ServiceDeviceTask() on decoder_thread_ when processing needs to occur.
1838 // Other threads may notify this task to return early by writing to
1839 // device_poll_interrupt_fd_.
1840 struct pollfd pollfds[3];
1841 nfds_t nfds;
1842
1843 // Add device_poll_interrupt_fd_;
1844 pollfds[0].fd = device_poll_interrupt_fd_;
1845 pollfds[0].events = POLLIN | POLLERR;
1846 nfds = 1;
1847
1848 if (poll_fds & kPollMfc) {
1849 DVLOG(3) << "DevicePollTask(): adding MFC to poll() set";
1850 pollfds[nfds].fd = mfc_fd_;
1851 pollfds[nfds].events = POLLIN | POLLOUT | POLLERR;
1852 nfds++;
1853 }
1854 // Add GSC fd, if we should poll on it.
1855 // GSC has to wait until both input and output buffers are queued.
1856 if (poll_fds & kPollGsc) {
1857 DVLOG(3) << "DevicePollTask(): adding GSC to poll() set";
1858 pollfds[nfds].fd = gsc_fd_;
1859 pollfds[nfds].events = POLLIN | POLLOUT | POLLERR;
1860 nfds++;
1861 }
1862
1863 // Poll it!
1864 int ret;
1865 do {
1866 ret = poll(pollfds, nfds, -1);
1867 } while (ret < 1 && errno == EINTR);
1868 if (ret == -1) {
1869 DPLOG(ERROR) << "DevicePollTask(): poll() failed";
1870 NOTIFY_ERROR(PLATFORM_FAILURE);
1871 return;
1872 }
1873
1874 // All processing should happen on ServiceDeviceTask(), since we shouldn't
1875 // touch decoder state from this thread.
1876 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
1877 &ExynosVideoDecodeAccelerator::ServiceDeviceTask,
1878 base::Unretained(this)));
1879 }
1880
1881 void ExynosVideoDecodeAccelerator::NotifyError(Error error) {
1882 DVLOG(2) << "NotifyError()";
1883
1884 if (!child_message_loop_proxy_->BelongsToCurrentThread()) {
1885 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
1886 &ExynosVideoDecodeAccelerator::NotifyError, weak_this_, error));
1887 return;
1888 }
1889
1890 if (client_) {
1891 client_->NotifyError(error);
1892 client_ptr_factory_.InvalidateWeakPtrs();
1893 }
1894 }
1895
1896 void ExynosVideoDecodeAccelerator::SetDecoderState(State state) {
1897 DVLOG(3) << "SetDecoderState(): state=%d" << state;
1898
1899 // We can touch decoder_state_ only if this is the decoder thread or the
1900 // decoder thread isn't running.
1901 if (decoder_thread_.message_loop() != NULL &&
1902 decoder_thread_.message_loop() != MessageLoop::current()) {
1903 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
1904 &ExynosVideoDecodeAccelerator::SetDecoderState,
1905 base::Unretained(this), state));
1906 } else {
1907 decoder_state_ = state;
1908 }
1909 }
1910
1911 bool ExynosVideoDecodeAccelerator::CreateMfcInputBuffers() {
1912 DVLOG(3) << "CreateMfcInputBuffers()";
1913 // We always run this as we prepare to initialize.
1914 DCHECK_EQ(decoder_state_, kUninitialized);
1915 DCHECK(!mfc_input_streamon_);
1916 DCHECK_EQ(mfc_input_buffer_count_, 0);
1917
1918 __u32 pixelformat = 0;
1919 if (video_profile_ >= media::H264PROFILE_MIN &&
1920 video_profile_ <= media::H264PROFILE_MAX) {
1921 pixelformat = V4L2_PIX_FMT_H264;
1922 } else if (video_profile_ >= media::VP8PROFILE_MIN &&
1923 video_profile_ <= media::VP8PROFILE_MAX) {
1924 pixelformat = V4L2_PIX_FMT_VP8;
1925 } else {
1926 NOTREACHED();
1927 }
1928
1929 struct v4l2_format format;
1930 memset(&format, 0, sizeof(format));
1931 format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1932 format.fmt.pix_mp.pixelformat = pixelformat;
1933 format.fmt.pix_mp.plane_fmt[0].sizeimage = kMfcInputBufferMaxSize;
1934 format.fmt.pix_mp.num_planes = 1;
1935 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_S_FMT, &format);
1936
1937 struct v4l2_requestbuffers reqbufs;
1938 memset(&reqbufs, 0, sizeof(reqbufs));
1939 reqbufs.count = kMfcInputBufferCount;
1940 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1941 reqbufs.memory = V4L2_MEMORY_MMAP;
1942 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_REQBUFS, &reqbufs);
1943 mfc_input_buffer_count_ = reqbufs.count;
1944 mfc_input_buffer_map_.resize(mfc_input_buffer_count_);
1945 for (int i = 0; i < mfc_input_buffer_count_; ++i) {
1946 mfc_free_input_buffers_.push_back(i);
1947
1948 // Query for the MEMORY_MMAP pointer.
1949 struct v4l2_plane planes[1];
1950 struct v4l2_buffer buffer;
1951 memset(&buffer, 0, sizeof(buffer));
1952 memset(planes, 0, sizeof(planes));
1953 buffer.index = i;
1954 buffer.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1955 buffer.memory = V4L2_MEMORY_MMAP;
1956 buffer.m.planes = planes;
1957 buffer.length = 1;
1958 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_QUERYBUF, &buffer);
1959 void* address = mmap(NULL, buffer.m.planes[0].length,
1960 PROT_READ | PROT_WRITE, MAP_SHARED, mfc_fd_,
1961 buffer.m.planes[0].m.mem_offset);
1962 if (address == MAP_FAILED) {
1963 DPLOG(ERROR) << "CreateMfcInputBuffers(): mmap() failed";
1964 return false;
1965 }
1966 mfc_input_buffer_map_[i].address = address;
1967 mfc_input_buffer_map_[i].length = buffer.m.planes[0].length;
1968 }
1969
1970 return true;
1971 }
1972
1973 bool ExynosVideoDecodeAccelerator::CreateMfcOutputBuffers() {
1974 DVLOG(3) << "CreateMfcOutputBuffers()";
1975 DCHECK_EQ(decoder_state_, kInitialized);
1976 DCHECK(!mfc_output_streamon_);
1977 DCHECK_EQ(mfc_output_buffer_count_, 0);
1978
1979 // Number of MFC output buffers we need.
1980 struct v4l2_control ctrl;
1981 memset(&ctrl, 0, sizeof(ctrl));
1982 ctrl.id = V4L2_CID_MIN_BUFFERS_FOR_CAPTURE;
1983 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_G_CTRL, &ctrl);
1984
1985 // Output format setup in Initialize().
1986
1987 // Allocate the output buffers.
1988 struct v4l2_requestbuffers reqbufs;
1989 memset(&reqbufs, 0, sizeof(reqbufs));
1990 reqbufs.count = ctrl.value + kMfcOutputBufferExtraCount;
1991 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1992 reqbufs.memory = V4L2_MEMORY_MMAP;
1993 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_REQBUFS, &reqbufs);
1994
1995 // Fill our free-buffers list, and create DMABUFs from them.
1996 mfc_output_buffer_count_ = reqbufs.count;
1997 mfc_output_buffer_map_.resize(mfc_output_buffer_count_);
1998 for (int i = 0; i < mfc_output_buffer_count_; ++i) {
1999 mfc_free_output_buffers_.push_back(i);
2000
2001 // Query for the MEMORY_MMAP pointer.
2002 struct v4l2_plane planes[2];
2003 struct v4l2_buffer buffer;
2004 memset(&buffer, 0, sizeof(buffer));
2005 memset(planes, 0, sizeof(planes));
2006 buffer.index = i;
2007 buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
2008 buffer.memory = V4L2_MEMORY_MMAP;
2009 buffer.m.planes = planes;
2010 buffer.length = 2;
2011 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_QUERYBUF, &buffer);
2012
2013 // Get their user memory for GSC input.
2014 for (int j = 0; j < 2; ++j) {
2015 void* address = mmap(NULL, buffer.m.planes[j].length,
2016 PROT_READ | PROT_WRITE, MAP_SHARED, mfc_fd_,
2017 buffer.m.planes[j].m.mem_offset);
2018 if (address == MAP_FAILED) {
2019 DPLOG(ERROR) << "CreateMfcInputBuffers(): mmap() failed";
2020 return false;
2021 }
2022 mfc_output_buffer_map_[i].address[j] = address;
2023 mfc_output_buffer_map_[i].length[j] = buffer.m.planes[j].length;
2024 }
2025 }
2026
2027 return true;
2028 }
2029
2030 bool ExynosVideoDecodeAccelerator::CreateGscInputBuffers() {
2031 DVLOG(3) << "CreateGscInputBuffers()";
2032 DCHECK_EQ(decoder_state_, kInitialized);
2033 DCHECK(!gsc_input_streamon_);
2034 DCHECK_EQ(gsc_input_buffer_count_, 0);
2035
2036 struct v4l2_format format;
2037 memset(&format, 0, sizeof(format));
2038 format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
2039 format.fmt.pix_mp.width = frame_buffer_size_.width();
2040 format.fmt.pix_mp.height = frame_buffer_size_.height();
2041 format.fmt.pix_mp.pixelformat = mfc_output_buffer_pixelformat_;
2042 format.fmt.pix_mp.plane_fmt[0].sizeimage = mfc_output_buffer_size_[0];
2043 format.fmt.pix_mp.plane_fmt[1].sizeimage = mfc_output_buffer_size_[1];
2044 // NV12MT_16X16 is a tiled format for which bytesperline doesn't make too much
2045 // sense. Convention seems to be to assume 8bpp for these tiled formats.
2046 format.fmt.pix_mp.plane_fmt[0].bytesperline = frame_buffer_size_.width();
2047 format.fmt.pix_mp.plane_fmt[1].bytesperline = frame_buffer_size_.width();
2048 format.fmt.pix_mp.num_planes = 2;
2049 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_FMT, &format);
2050
2051 struct v4l2_control control;
2052 memset(&control, 0, sizeof(control));
2053 control.id = V4L2_CID_ROTATE;
2054 control.value = 0;
2055 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CTRL, &control);
2056
2057 memset(&control, 0, sizeof(control));
2058 control.id = V4L2_CID_HFLIP;
2059 control.value = 0;
2060 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CTRL, &control);
2061
2062 memset(&control, 0, sizeof(control));
2063 control.id = V4L2_CID_VFLIP;
2064 control.value = 0;
2065 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CTRL, &control);
2066
2067 memset(&control, 0, sizeof(control));
2068 control.id = V4L2_CID_GLOBAL_ALPHA;
2069 control.value = 255;
2070 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CTRL, &control);
2071
2072 struct v4l2_requestbuffers reqbufs;
2073 memset(&reqbufs, 0, sizeof(reqbufs));
2074 reqbufs.count = kGscInputBufferCount;
2075 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
2076 reqbufs.memory = V4L2_MEMORY_USERPTR;
2077 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_REQBUFS, &reqbufs);
2078
2079 gsc_input_buffer_count_ = reqbufs.count;
2080 gsc_input_buffer_map_.resize(gsc_input_buffer_count_);
2081 for (int i = 0; i < gsc_input_buffer_count_; ++i) {
2082 gsc_free_input_buffers_.push_back(i);
2083 gsc_input_buffer_map_[i].mfc_output = -1;
2084 }
2085
2086 return true;
2087 }
2088
2089 bool ExynosVideoDecodeAccelerator::CreateGscOutputBuffers() {
2090 DVLOG(3) << "CreateGscOutputBuffers()";
2091 DCHECK_EQ(decoder_state_, kInitialized);
2092 DCHECK(!gsc_output_streamon_);
2093 DCHECK_EQ(gsc_output_buffer_count_, 0);
2094
2095 // GSC outputs into the EGLImages we create from the textures we are
2096 // assigned. Assume RGBA8888 format.
2097 struct v4l2_format format;
2098 memset(&format, 0, sizeof(format));
2099 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
2100 format.fmt.pix_mp.width = frame_buffer_size_.width();
2101 format.fmt.pix_mp.height = frame_buffer_size_.height();
2102 format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_RGB32;
2103 format.fmt.pix_mp.plane_fmt[0].sizeimage =
2104 frame_buffer_size_.width() * frame_buffer_size_.height() * 4;
2105 format.fmt.pix_mp.plane_fmt[0].bytesperline = frame_buffer_size_.width() * 4;
2106 format.fmt.pix_mp.num_planes = 1;
2107 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_FMT, &format);
2108
2109 struct v4l2_requestbuffers reqbufs;
2110 memset(&reqbufs, 0, sizeof(reqbufs));
2111 reqbufs.count = kGscOutputBufferCount;
2112 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
2113 reqbufs.memory = V4L2_MEMORY_DMABUF;
2114 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_REQBUFS, &reqbufs);
2115
2116 // We don't actually fill in the freelist or the map here. That happens once
2117 // we have actual usable buffers, after AssignPictureBuffers();
2118 gsc_output_buffer_count_ = reqbufs.count;
2119 gsc_output_buffer_map_.resize(gsc_output_buffer_count_);
2120
2121 DVLOG(3) << "CreateGscOutputBuffers(): ProvidePictureBuffers(): "
2122 << "buffer_count=" << gsc_output_buffer_count_
2123 << ", width=" << frame_buffer_size_.width()
2124 << ", height=" << frame_buffer_size_.height();
2125 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
2126 &Client::ProvidePictureBuffers, client_, gsc_output_buffer_count_,
2127 gfx::Size(frame_buffer_size_.width(), frame_buffer_size_.height()),
2128 GL_TEXTURE_2D));
2129
2130 return true;
2131 }
2132
2133 void ExynosVideoDecodeAccelerator::DestroyMfcInputBuffers() {
2134 DVLOG(3) << "DestroyMfcInputBuffers()";
2135 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
2136 DCHECK(!mfc_input_streamon_);
2137
2138 for (size_t i = 0; i < mfc_input_buffer_map_.size(); ++i) {
2139 if (mfc_input_buffer_map_[i].address != NULL) {
2140 munmap(mfc_input_buffer_map_[i].address,
2141 mfc_input_buffer_map_[i].length);
2142 }
2143 }
2144
2145 struct v4l2_requestbuffers reqbufs;
2146 memset(&reqbufs, 0, sizeof(reqbufs));
2147 reqbufs.count = 0;
2148 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
2149 reqbufs.memory = V4L2_MEMORY_MMAP;
2150 if (ioctl(mfc_fd_, VIDIOC_REQBUFS, &reqbufs) != 0)
2151 DPLOG(ERROR) << "DestroyMfcInputBuffers(): ioctl() failed: VIDIOC_REQBUFS";
2152
2153 mfc_input_buffer_map_.clear();
2154 mfc_free_input_buffers_.clear();
2155 mfc_input_buffer_count_ = 0;
2156 }
2157
2158 void ExynosVideoDecodeAccelerator::DestroyMfcOutputBuffers() {
2159 DVLOG(3) << "DestroyMfcOutputBuffers()";
2160 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
2161 DCHECK(!mfc_output_streamon_);
2162
2163 for (size_t i = 0; i < mfc_output_buffer_map_.size(); ++i) {
2164 if (mfc_output_buffer_map_[i].address[0] != NULL)
2165 munmap(mfc_output_buffer_map_[i].address[0],
2166 mfc_output_buffer_map_[i].length[0]);
2167 if (mfc_output_buffer_map_[i].address[1] != NULL)
2168 munmap(mfc_output_buffer_map_[i].address[1],
2169 mfc_output_buffer_map_[i].length[1]);
2170 }
2171
2172 struct v4l2_requestbuffers reqbufs;
2173 memset(&reqbufs, 0, sizeof(reqbufs));
2174 reqbufs.count = 0;
2175 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
2176 reqbufs.memory = V4L2_MEMORY_MMAP;
2177 if (ioctl(mfc_fd_, VIDIOC_REQBUFS, &reqbufs) != 0)
2178 DPLOG(ERROR) << "DestroyMfcOutputBuffers() ioctl() failed: VIDIOC_REQBUFS";
2179
2180 mfc_output_buffer_map_.clear();
2181 mfc_free_output_buffers_.clear();
2182 mfc_output_buffer_count_ = 0;
2183 }
2184
2185 void ExynosVideoDecodeAccelerator::DestroyGscInputBuffers() {
2186 DVLOG(3) << "DestroyGscInputBuffers()";
2187 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
2188 DCHECK(!gsc_input_streamon_);
2189
2190 struct v4l2_requestbuffers reqbufs;
2191 memset(&reqbufs, 0, sizeof(reqbufs));
2192 reqbufs.count = 0;
2193 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
2194 reqbufs.memory = V4L2_MEMORY_DMABUF;
2195 if (ioctl(gsc_fd_, VIDIOC_REQBUFS, &reqbufs) != 0)
2196 DPLOG(ERROR) << "DestroyGscInputBuffers(): ioctl() failed: VIDIOC_REQBUFS";
2197
2198 gsc_input_buffer_map_.clear();
2199 gsc_free_input_buffers_.clear();
2200 gsc_input_buffer_count_ = 0;
2201 }
2202
2203 void ExynosVideoDecodeAccelerator::DestroyGscOutputBuffers() {
2204 DVLOG(3) << "DestroyGscOutputBuffers()";
2205 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
2206 DCHECK(!gsc_output_streamon_);
2207
2208 if (gsc_output_buffer_map_.size() != 0) {
2209 if (!make_context_current_.Run())
2210 DLOG(ERROR) << "DestroyGscOutputBuffers(): "
2211 << "could not make context current";
2212
2213 size_t i = 0;
2214 do {
2215 GscOutputRecord& output_record = gsc_output_buffer_map_[i];
2216 if (output_record.fd != -1)
2217 close(output_record.fd);
2218 if (output_record.egl_image != EGL_NO_IMAGE_KHR)
2219 egl_destroy_image_khr(egl_display_, output_record.egl_image);
2220 if (output_record.egl_sync != EGL_NO_SYNC_KHR)
2221 egl_destroy_sync_khr(egl_display_, output_record.egl_sync);
2222 if (client_)
2223 client_->DismissPictureBuffer(output_record.picture_id);
2224 ++i;
2225 } while (i < gsc_output_buffer_map_.size());
2226 }
2227
2228 struct v4l2_requestbuffers reqbufs;
2229 memset(&reqbufs, 0, sizeof(reqbufs));
2230 reqbufs.count = 0;
2231 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
2232 reqbufs.memory = V4L2_MEMORY_DMABUF;
2233 if (ioctl(gsc_fd_, VIDIOC_REQBUFS, &reqbufs) != 0)
2234 DPLOG(ERROR) << "DestroyGscOutputBuffers(): ioctl() failed: VIDIOC_REQBUFS";
2235
2236 gsc_output_buffer_map_.clear();
2237 gsc_free_output_buffers_.clear();
2238 gsc_output_buffer_count_ = 0;
2239 }
2240
2241 } // namespace content
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698