OLD | NEW |
(Empty) | |
| 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. |
| 4 |
| 5 #include <dlfcn.h> |
| 6 #include <errno.h> |
| 7 #include <fcntl.h> |
| 8 #include <linux/videodev2.h> |
| 9 #include <poll.h> |
| 10 #include <sys/eventfd.h> |
| 11 #include <sys/ioctl.h> |
| 12 #include <sys/mman.h> |
| 13 |
| 14 #include "base/bind.h" |
| 15 #include "base/debug/trace_event.h" |
| 16 #include "base/message_loop.h" |
| 17 #include "base/message_loop_proxy.h" |
| 18 #include "base/posix/eintr_wrapper.h" |
| 19 #include "base/shared_memory.h" |
| 20 #include "content/common/gpu/gl_scoped_binders.h" |
| 21 #include "content/common/gpu/media/exynos_video_decode_accelerator.h" |
| 22 #include "content/common/gpu/media/h264_parser.h" |
| 23 #include "third_party/angle/include/GLES2/gl2.h" |
| 24 |
| 25 namespace content { |
| 26 |
| 27 #define NOTIFY_ERROR(x) \ |
| 28 do { \ |
| 29 SetDecoderState(kError); \ |
| 30 DLOG(ERROR) << "calling NotifyError(): " << x; \ |
| 31 NotifyError(x); \ |
| 32 } while (0) |
| 33 |
| 34 #define IOCTL_OR_ERROR_RETURN(fd, type, arg) \ |
| 35 do { \ |
| 36 if (HANDLE_EINTR(ioctl(fd, type, arg)) != 0) { \
| 37 DPLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \ |
| 38 NOTIFY_ERROR(PLATFORM_FAILURE); \ |
| 39 return; \ |
| 40 } \ |
| 41 } while (0) |
| 42 |
| 43 #define IOCTL_OR_ERROR_RETURN_FALSE(fd, type, arg) \ |
| 44 do { \ |
| 45 if (HANDLE_EINTR(ioctl(fd, type, arg)) != 0) { \
| 46 DPLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \ |
| 47 NOTIFY_ERROR(PLATFORM_FAILURE); \ |
| 48 return false; \ |
| 49 } \ |
| 50 } while (0) |
| 51 |
| 52 #define POSTSANDBOX_DLSYM(lib, func, type, name) \ |
| 53 func = reinterpret_cast<type>(dlsym(lib, name)); \ |
| 54 if (func == NULL) { \ |
| 55 DLOG(ERROR) << "PostSandboxInitialization(): failed to dlsym() " \
| 56 << name << ": " << dlerror(); \ |
| 57 return false; \ |
| 58 } |
| 59 |
| 60 namespace { |
| 61 |
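| // MFC is the hardware video decoder block; GSC is the hardware scaler/converter
| // that turns MFC's tiled output into frames usable by the client's textures.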
| 62 const char kExynosMfcDevice[] = "/dev/mfc-dec"; |
| 63 const char kExynosGscDevice[] = "/dev/gsc1"; |
| 64 const char kMaliDriver[] = "libmali.so"; |
| 65 |
| 66 // TODO(sheu): fix OpenGL ES header includes, remove unnecessary redefinitions. |
| 67 // http://crbug.com/169433 |
| 68 typedef void* GLeglImageOES; |
| 69 typedef EGLBoolean (*MaliEglImageGetBufferExtPhandleFunc)(EGLImageKHR, EGLint*, |
| 70 void*); |
| 71 typedef EGLImageKHR (*EglCreateImageKhrFunc)(EGLDisplay, EGLContext, EGLenum, |
| 72 EGLClientBuffer, const EGLint*); |
| 73 typedef EGLBoolean (*EglDestroyImageKhrFunc)(EGLDisplay, EGLImageKHR); |
| 74 typedef EGLSyncKHR (*EglCreateSyncKhrFunc)(EGLDisplay, EGLenum, const EGLint*); |
| 75 typedef EGLBoolean (*EglDestroySyncKhrFunc)(EGLDisplay, EGLSyncKHR); |
| 76 typedef EGLint (*EglClientWaitSyncKhrFunc)(EGLDisplay, EGLSyncKHR, EGLint, |
| 77 EGLTimeKHR); |
| 78 typedef void (*GlEglImageTargetTexture2dOesFunc)(GLenum, GLeglImageOES); |
| 79 |
| 80 void* libmali_handle = NULL; |
| 81 MaliEglImageGetBufferExtPhandleFunc |
| 82 mali_egl_image_get_buffer_ext_phandle = NULL; |
| 83 EglCreateImageKhrFunc egl_create_image_khr = NULL; |
| 84 EglDestroyImageKhrFunc egl_destroy_image_khr = NULL; |
| 85 EglCreateSyncKhrFunc egl_create_sync_khr = NULL; |
| 86 EglDestroySyncKhrFunc egl_destroy_sync_khr = NULL; |
| 87 EglClientWaitSyncKhrFunc egl_client_wait_sync_khr = NULL; |
| 88 GlEglImageTargetTexture2dOesFunc gl_egl_image_target_texture_2d_oes = NULL; |
| 89 |
| 90 } // anonymous namespace |
| 91 |
| 92 struct ExynosVideoDecodeAccelerator::BitstreamBufferRef { |
| 93 BitstreamBufferRef( |
| 94 base::WeakPtr<Client>& client, |
| 95 scoped_refptr<base::MessageLoopProxy>& client_message_loop_proxy, |
| 96 base::SharedMemory* shm, |
| 97 size_t size, |
| 98 int32 input_id); |
| 99 ~BitstreamBufferRef(); |
| 100 const base::WeakPtr<Client> client; |
| 101 const scoped_refptr<base::MessageLoopProxy> client_message_loop_proxy; |
| 102 const scoped_ptr<base::SharedMemory> shm; |
| 103 const size_t size; |
| 104 off_t bytes_used; |
| 105 const int32 input_id; |
| 106 }; |
| 107 |
| 108 struct ExynosVideoDecodeAccelerator::PictureBufferArrayRef { |
| 109 PictureBufferArrayRef(EGLDisplay egl_display, size_t count); |
| 110 ~PictureBufferArrayRef(); |
| 111 |
| 112 struct PictureBufferRef { |
| 113 EGLImageKHR egl_image; |
| 114 int egl_image_fd; |
| 115 int32 client_id; |
| 116 }; |
| 117 |
| 118 EGLDisplay const egl_display; |
| 119 std::vector<PictureBufferRef> picture_buffers; |
| 120 }; |
| 121 |
| 122 struct ExynosVideoDecodeAccelerator::EGLSyncKHRRef { |
| 123 EGLSyncKHRRef(EGLDisplay egl_display, EGLSyncKHR egl_sync); |
| 124 ~EGLSyncKHRRef(); |
| 125 EGLDisplay const egl_display; |
| 126 EGLSyncKHR egl_sync; |
| 127 }; |
| 128 |
| 129 ExynosVideoDecodeAccelerator::BitstreamBufferRef::BitstreamBufferRef( |
| 130 base::WeakPtr<Client>& client, |
| 131 scoped_refptr<base::MessageLoopProxy>& client_message_loop_proxy, |
| 132 base::SharedMemory* shm, size_t size, int32 input_id) |
| 133 : client(client), |
| 134 client_message_loop_proxy(client_message_loop_proxy), |
| 135 shm(shm), |
| 136 size(size), |
| 137 bytes_used(0), |
| 138 input_id(input_id) { |
| 139 } |
| 140 |
| 141 ExynosVideoDecodeAccelerator::BitstreamBufferRef::~BitstreamBufferRef() { |
| 142 if (input_id >= 0) { |
| 143 client_message_loop_proxy->PostTask(FROM_HERE, base::Bind( |
| 144 &Client::NotifyEndOfBitstreamBuffer, client, input_id)); |
| 145 } |
| 146 } |
| 147 |
| 148 ExynosVideoDecodeAccelerator::PictureBufferArrayRef::PictureBufferArrayRef( |
| 149 EGLDisplay egl_display, size_t count) |
| 150 : egl_display(egl_display), |
| 151 picture_buffers(count) { |
| 152 for (size_t i = 0; i < picture_buffers.size(); ++i) { |
| 153 PictureBufferRef& buffer = picture_buffers[i]; |
| 154 buffer.egl_image = EGL_NO_IMAGE_KHR; |
| 155 buffer.egl_image_fd = -1; |
| 156 buffer.client_id = -1; |
| 157 } |
| 158 } |
| 159 |
| 160 ExynosVideoDecodeAccelerator::PictureBufferArrayRef::~PictureBufferArrayRef() { |
| 161 for (size_t i = 0; i < picture_buffers.size(); ++i) { |
| 162 PictureBufferRef& buffer = picture_buffers[i]; |
| 163 if (buffer.egl_image != EGL_NO_IMAGE_KHR) |
| 164 egl_destroy_image_khr(egl_display, buffer.egl_image); |
| 165 if (buffer.egl_image_fd != -1) |
| 166 HANDLE_EINTR(close(buffer.egl_image_fd)); |
| 167 } |
| 168 } |
| 169 |
| 170 ExynosVideoDecodeAccelerator::EGLSyncKHRRef::EGLSyncKHRRef( |
| 171 EGLDisplay egl_display, EGLSyncKHR egl_sync) |
| 172 : egl_display(egl_display), |
| 173 egl_sync(egl_sync) { |
| 174 } |
| 175 |
| 176 ExynosVideoDecodeAccelerator::EGLSyncKHRRef::~EGLSyncKHRRef() { |
| 177 if (egl_sync != EGL_NO_SYNC_KHR) |
| 178 egl_destroy_sync_khr(egl_display, egl_sync); |
| 179 } |
| 180 |
| 181 ExynosVideoDecodeAccelerator::MfcInputRecord::MfcInputRecord() |
| 182 : at_device(false), |
| 183 address(NULL), |
| 184 length(0), |
| 185 bytes_used(0), |
| 186 input_id(-1) { |
| 187 } |
| 188 |
| 189 ExynosVideoDecodeAccelerator::MfcInputRecord::~MfcInputRecord() { |
| 190 } |
| 191 |
| 192 ExynosVideoDecodeAccelerator::MfcOutputRecord::MfcOutputRecord() |
| 193 : at_device(false), |
| 194 input_id(-1) { |
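| // MFC output (CAPTURE) buffers carry two planes: Y and interleaved CbCr.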
| 195 bytes_used[0] = 0; |
| 196 bytes_used[1] = 0; |
| 197 address[0] = NULL; |
| 198 address[1] = NULL; |
| 199 length[0] = 0; |
| 200 length[1] = 0; |
| 201 } |
| 202 |
| 203 ExynosVideoDecodeAccelerator::MfcOutputRecord::~MfcOutputRecord() { |
| 204 } |
| 205 |
| 206 ExynosVideoDecodeAccelerator::GscInputRecord::GscInputRecord() |
| 207 : at_device(false), |
| 208 mfc_output(-1) { |
| 209 } |
| 210 |
| 211 ExynosVideoDecodeAccelerator::GscInputRecord::~GscInputRecord() { |
| 212 } |
| 213 |
| 214 ExynosVideoDecodeAccelerator::GscOutputRecord::GscOutputRecord() |
| 215 : at_device(false), |
| 216 at_client(false), |
| 217 fd(-1), |
| 218 egl_image(EGL_NO_IMAGE_KHR), |
| 219 egl_sync(EGL_NO_SYNC_KHR), |
| 220 picture_id(-1) { |
| 221 } |
| 222 |
| 223 ExynosVideoDecodeAccelerator::GscOutputRecord::~GscOutputRecord() { |
| 224 } |
| 225 |
| 226 ExynosVideoDecodeAccelerator::ExynosVideoDecodeAccelerator( |
| 227 EGLDisplay egl_display, |
| 228 EGLContext egl_context, |
| 229 Client* client, |
| 230 const base::Callback<bool(void)>& make_context_current) |
| 231 : child_message_loop_proxy_(base::MessageLoopProxy::current()), |
| 232 weak_this_(base::AsWeakPtr(this)), |
| 233 client_ptr_factory_(client), |
| 234 client_(client_ptr_factory_.GetWeakPtr()), |
| 235 decoder_thread_("ExynosDecoderThread"), |
| 236 decoder_state_(kUninitialized), |
| 237 decoder_current_bitstream_buffer_(NULL), |
| 238 decoder_delay_bitstream_buffer_id_(-1), |
| 239 decoder_current_input_buffer_(-1), |
| 240 decoder_decode_buffer_tasks_scheduled_(0), |
| 241 decoder_frames_at_client_(0), |
| 242 decoder_flushing_(false), |
| 243 mfc_fd_(-1), |
| 244 mfc_input_streamon_(false), |
| 245 mfc_input_buffer_count_(0), |
| 246 mfc_input_buffer_queued_count_(0), |
| 247 mfc_output_streamon_(false), |
| 248 mfc_output_buffer_count_(0), |
| 249 mfc_output_buffer_queued_count_(0), |
| 250 mfc_output_buffer_pixelformat_(0), |
| 251 gsc_fd_(-1), |
| 252 gsc_input_streamon_(false), |
| 253 gsc_input_buffer_count_(0), |
| 254 gsc_input_buffer_queued_count_(0), |
| 255 gsc_output_streamon_(false), |
| 256 gsc_output_buffer_count_(0), |
| 257 gsc_output_buffer_queued_count_(0), |
| 258 device_poll_thread_("ExynosDevicePollThread"), |
| 259 device_poll_interrupt_fd_(-1), |
| 260 make_context_current_(make_context_current), |
| 261 egl_display_(egl_display), |
| 262 egl_context_(egl_context), |
| 263 video_profile_(media::VIDEO_CODEC_PROFILE_UNKNOWN) { |
| 264 } |
| 265 |
| 266 ExynosVideoDecodeAccelerator::~ExynosVideoDecodeAccelerator() { |
| 267 DCHECK(!decoder_thread_.IsRunning()); |
| 268 DCHECK(!device_poll_thread_.IsRunning()); |
| 269 |
| 270 if (device_poll_interrupt_fd_ != -1) { |
| 271 HANDLE_EINTR(close(device_poll_interrupt_fd_)); |
| 272 device_poll_interrupt_fd_ = -1; |
| 273 } |
| 274 if (gsc_fd_ != -1) { |
| 275 DestroyGscInputBuffers(); |
| 276 DestroyGscOutputBuffers(); |
| 277 HANDLE_EINTR(close(gsc_fd_)); |
| 278 gsc_fd_ = -1; |
| 279 } |
| 280 if (mfc_fd_ != -1) { |
| 281 DestroyMfcInputBuffers(); |
| 282 DestroyMfcOutputBuffers(); |
| 283 HANDLE_EINTR(close(mfc_fd_)); |
| 284 mfc_fd_ = -1; |
| 285 } |
| 286 |
| 287 // These maps have members that should be manually destroyed, e.g. file |
| 288 // descriptors, mmap() segments, etc. |
| 289 DCHECK(mfc_input_buffer_map_.empty()); |
| 290 DCHECK(mfc_output_buffer_map_.empty()); |
| 291 DCHECK(gsc_input_buffer_map_.empty()); |
| 292 DCHECK(gsc_output_buffer_map_.empty()); |
| 293 } |
| 294 |
| 295 bool ExynosVideoDecodeAccelerator::Initialize( |
| 296 media::VideoCodecProfile profile) { |
| 297 DVLOG(3) << "Initialize()"; |
| 298 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
| 299 DCHECK_EQ(decoder_state_, kUninitialized); |
| 300 |
| 301 switch (profile) { |
| 302 case media::H264PROFILE_BASELINE: |
| 303 DVLOG(2) << "Initialize(): profile H264PROFILE_BASELINE"; |
| 304 break; |
| 305 case media::H264PROFILE_MAIN: |
| 306 DVLOG(2) << "Initialize(): profile H264PROFILE_MAIN"; |
| 307 break; |
| 308 case media::H264PROFILE_HIGH: |
| 309 DVLOG(2) << "Initialize(): profile H264PROFILE_HIGH"; |
| 310 break; |
| 311 case media::VP8PROFILE_MAIN: |
| 312 DVLOG(2) << "Initialize(): profile VP8PROFILE_MAIN"; |
| 313 break; |
| 314 default: |
| 315 DLOG(ERROR) << "Initialize(): unsupported profile=" << profile; |
| 316 return false; |
| 317 }
| 318 video_profile_ = profile; |
| 319 |
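| // The function-local static ensures the dlsym() lookups in
| // PostSandboxInitialization() run at most once per process.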
| 320 static bool sandbox_initialized = PostSandboxInitialization(); |
| 321 if (!sandbox_initialized) { |
| 322 DLOG(ERROR) << "Initialize(): PostSandboxInitialization() failed"; |
| 323 NOTIFY_ERROR(PLATFORM_FAILURE); |
| 324 return false; |
| 325 } |
| 326 |
| 327 if (egl_display_ == EGL_NO_DISPLAY) { |
| 328 DLOG(ERROR) << "Initialize(): could not get EGLDisplay"; |
| 329 NOTIFY_ERROR(PLATFORM_FAILURE); |
| 330 return false; |
| 331 } |
| 332 |
| 333 if (egl_context_ == EGL_NO_CONTEXT) { |
| 334 DLOG(ERROR) << "Initialize(): could not get EGLContext"; |
| 335 NOTIFY_ERROR(PLATFORM_FAILURE); |
| 336 return false; |
| 337 } |
| 338 |
| 339 // Open the video devices. |
| 340 DVLOG(2) << "Initialize(): opening MFC device: " << kExynosMfcDevice; |
| 341 mfc_fd_ = HANDLE_EINTR(open(kExynosMfcDevice, |
| 342 O_RDWR | O_NONBLOCK | O_CLOEXEC)); |
| 343 if (mfc_fd_ == -1) { |
| 344 DPLOG(ERROR) << "Initialize(): could not open MFC device: " |
| 345 << kExynosMfcDevice; |
| 346 NOTIFY_ERROR(PLATFORM_FAILURE); |
| 347 return false; |
| 348 } |
| 349 DVLOG(2) << "Initialize(): opening GSC device: " << kExynosGscDevice; |
| 350 gsc_fd_ = HANDLE_EINTR(open(kExynosGscDevice, |
| 351 O_RDWR | O_NONBLOCK | O_CLOEXEC)); |
| 352 if (gsc_fd_ == -1) { |
| 353 DPLOG(ERROR) << "Initialize(): could not open GSC device: " |
| 354 << kExynosGscDevice; |
| 355 NOTIFY_ERROR(PLATFORM_FAILURE); |
| 356 return false; |
| 357 } |
| 358 |
| 359 // Create the interrupt fd. |
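| // SetDevicePollInterrupt() writes to this eventfd to wake DevicePollTask()
| // out of its poll() whenever the decoder changes device queue state.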
| 360 DCHECK_EQ(device_poll_interrupt_fd_, -1); |
| 361 device_poll_interrupt_fd_ = eventfd(0, EFD_NONBLOCK | EFD_CLOEXEC); |
| 362 if (device_poll_interrupt_fd_ == -1) { |
| 363 DPLOG(ERROR) << "Initialize(): eventfd() failed"; |
| 364 NOTIFY_ERROR(PLATFORM_FAILURE); |
| 365 return false; |
| 366 } |
| 367 |
| 368 // Capabilities check. |
| 369 struct v4l2_capability caps; |
| 370 const __u32 kCapsRequired = |
| 371 V4L2_CAP_VIDEO_CAPTURE_MPLANE | |
| 372 V4L2_CAP_VIDEO_OUTPUT_MPLANE | |
| 373 V4L2_CAP_STREAMING; |
| 374 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_QUERYCAP, &caps); |
| 375 if ((caps.capabilities & kCapsRequired) != kCapsRequired) { |
| 376 DLOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP" |
| 377 ", caps check failed: 0x" << std::hex << caps.capabilities; |
| 378 NOTIFY_ERROR(PLATFORM_FAILURE); |
| 379 return false; |
| 380 } |
| 381 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_QUERYCAP, &caps); |
| 382 if ((caps.capabilities & kCapsRequired) != kCapsRequired) { |
| 383 DLOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP" |
| 384 ", caps check failed: 0x" << std::hex << caps.capabilities; |
| 385 NOTIFY_ERROR(PLATFORM_FAILURE); |
| 386 return false; |
| 387 } |
| 388 |
| 389 // Device-specific controls required by the Exynos MFC driver.
| 390 struct v4l2_control control; |
| 391 memset(&control, 0, sizeof(control)); |
| 392 control.id = V4L2_CID_MPEG_MFC51_VIDEO_DECODER_H264_DISPLAY_DELAY; // also VP8 |
| 393 control.value = 8; // Magic number from Samsung folks. |
| 394 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_S_CTRL, &control); |
| 395 |
| 396 if (!make_context_current_.Run()) { |
| 397 DLOG(ERROR) << "Initialize(): could not make context current"; |
| 398 NOTIFY_ERROR(PLATFORM_FAILURE); |
| 399 return false; |
| 400 } |
| 401 |
| 402 if (!CreateMfcInputBuffers()) |
| 403 return false; |
| 404 |
| 405 // MFC output format has to be set up before streaming starts.
| 406 struct v4l2_format format; |
| 407 memset(&format, 0, sizeof(format)); |
| 408 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
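| // V4L2_PIX_FMT_NV12MT_16X16 is NV12 tiled in 16x16 macroblocks; GSC consumes
| // these frames and converts them for the client's picture buffers.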
| 409 format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_NV12MT_16X16; |
| 410 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_S_FMT, &format); |
| 411 |
| 412 // Initialize format-specific bits. |
| 413 if (video_profile_ >= media::H264PROFILE_MIN && |
| 414 video_profile_ <= media::H264PROFILE_MAX) { |
| 415 decoder_h264_parser_.reset(new content::H264Parser()); |
| 416 } |
| 417 |
| 418 if (!decoder_thread_.Start()) { |
| 419 DLOG(ERROR) << "Initialize(): decoder thread failed to start"; |
| 420 NOTIFY_ERROR(PLATFORM_FAILURE); |
| 421 return false; |
| 422 } |
| 423 |
| 424 SetDecoderState(kInitialized); |
| 425 |
| 426 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind( |
| 427 &Client::NotifyInitializeDone, client_)); |
| 428 return true; |
| 429 } |
| 430 |
| 431 void ExynosVideoDecodeAccelerator::Decode( |
| 432 const media::BitstreamBuffer& bitstream_buffer) { |
| 433 DVLOG(1) << "Decode(): input_id=" << bitstream_buffer.id() |
| 434 << ", size=" << bitstream_buffer.size(); |
| 435 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
| 436 |
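| // Map the shared memory read-only (second constructor argument); the decoder
| // only reads the bitstream and copies it into MFC input buffers.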
| 437 scoped_ptr<BitstreamBufferRef> bitstream_record(new BitstreamBufferRef( |
| 438 client_, child_message_loop_proxy_, |
| 439 new base::SharedMemory(bitstream_buffer.handle(), true), |
| 440 bitstream_buffer.size(), bitstream_buffer.id())); |
| 441 if (!bitstream_record->shm->Map(bitstream_buffer.size())) { |
| 442 DLOG(ERROR) << "Decode(): could not map bitstream_buffer"; |
| 443 NOTIFY_ERROR(UNREADABLE_INPUT); |
| 444 return; |
| 445 } |
| 446 DVLOG(3) << "Decode(): mapped to addr=" << bitstream_record->shm->memory(); |
| 447 |
| 448 // DecodeTask() will take care of running a DecodeBufferTask(). |
| 449 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind( |
| 450 &ExynosVideoDecodeAccelerator::DecodeTask, base::Unretained(this), |
| 451 base::Passed(&bitstream_record))); |
| 452 } |
| 453 |
| 454 void ExynosVideoDecodeAccelerator::AssignPictureBuffers( |
| 455 const std::vector<media::PictureBuffer>& buffers) { |
| 456 DVLOG(3) << "AssignPictureBuffers(): buffer_count=" << buffers.size(); |
| 457 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
| 458 |
| 459 if (static_cast<int>(buffers.size()) != gsc_output_buffer_count_) { |
| 460 DLOG(ERROR) << "AssignPictureBuffers(): invalid buffer_count"; |
| 461 NOTIFY_ERROR(INVALID_ARGUMENT); |
| 462 return; |
| 463 } |
| 464 |
| 465 if (!make_context_current_.Run()) { |
| 466 DLOG(ERROR) << "AssignPictureBuffers(): could not make context current"; |
| 467 NOTIFY_ERROR(PLATFORM_FAILURE); |
| 468 return; |
| 469 } |
| 470 |
| 471 scoped_ptr<PictureBufferArrayRef> pic_buffers_ref( |
| 472 new PictureBufferArrayRef(egl_display_, buffers.size())); |
| 473 |
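| // EGL_IMAGE_PRESERVED_KHR is disabled (0): GSC rewrites the buffer contents
| // for every decoded frame, so nothing needs to be preserved.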
| 474 const static EGLint kImageAttrs[] = { |
| 475 EGL_IMAGE_PRESERVED_KHR, 0, |
| 476 EGL_NONE, |
| 477 }; |
| 478 Display* x_display = base::MessagePumpForUI::GetDefaultXDisplay(); |
| 479 ScopedTextureBinder bind_restore(0); |
| 480 for (size_t i = 0; i < pic_buffers_ref->picture_buffers.size(); ++i) { |
| 481 PictureBufferArrayRef::PictureBufferRef& buffer = |
| 482 pic_buffers_ref->picture_buffers[i]; |
| 483 // Create the X pixmap and then create an EGLImageKHR from it, so we can |
| 484 // get dma_buf backing. |
| 485 Pixmap pixmap = XCreatePixmap(x_display, RootWindow(x_display, 0), |
| 486 buffers[i].size().width(), buffers[i].size().height(), 32); |
| 487 if (!pixmap) { |
| 488 DLOG(ERROR) << "AssignPictureBuffers(): could not create X pixmap"; |
| 489 NOTIFY_ERROR(PLATFORM_FAILURE); |
| 490 return; |
| 491 } |
| 492 glBindTexture(GL_TEXTURE_2D, buffers[i].texture_id()); |
| 493 EGLImageKHR egl_image = egl_create_image_khr( |
| 494 egl_display_, EGL_NO_CONTEXT, EGL_NATIVE_PIXMAP_KHR, |
| 495 reinterpret_cast<EGLClientBuffer>(pixmap), kImageAttrs);
| 496 // We can free the X pixmap immediately -- according to the |
| 497 // EGL_KHR_image_base spec, the backing storage does not go away until the |
| 498 // last referencing EGLImage is destroyed. |
| 499 XFreePixmap(x_display, pixmap); |
| 500 if (egl_image == EGL_NO_IMAGE_KHR) { |
| 501 DLOG(ERROR) << "AssignPictureBuffers(): could not create EGLImageKHR"; |
| 502 NOTIFY_ERROR(PLATFORM_FAILURE); |
| 503 return; |
| 504 } |
| 505 buffer.egl_image = egl_image; |
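| // The Mali-specific extension exposes the dma_buf fd backing the EGLImage;
| // GSC output buffers are queued with this fd (V4L2_MEMORY_DMABUF).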
| 506 int fd; |
| 507 if (!mali_egl_image_get_buffer_ext_phandle(buffer.egl_image, NULL, &fd)) { |
| 508 DLOG(ERROR) << "AssignPictureBuffers(): " |
| 509 << "could not get EGLImageKHR dmabuf fd"; |
| 510 NOTIFY_ERROR(PLATFORM_FAILURE); |
| 511 return; |
| 512 } |
| 513 buffer.egl_image_fd = fd; |
| 514 gl_egl_image_target_texture_2d_oes(GL_TEXTURE_2D, egl_image); |
| 515 buffer.client_id = buffers[i].id(); |
| 516 } |
| 517 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind( |
| 518 &ExynosVideoDecodeAccelerator::AssignPictureBuffersTask, |
| 519 base::Unretained(this), base::Passed(&pic_buffers_ref))); |
| 520 } |
| 521 |
| 522 void ExynosVideoDecodeAccelerator::ReusePictureBuffer(int32 picture_buffer_id) { |
| 523 DVLOG(3) << "ReusePictureBuffer(): picture_buffer_id=" << picture_buffer_id; |
| 524 // Must be run on child thread, as we'll insert a sync in the EGL context. |
| 525 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
| 526 |
| 527 if (!make_context_current_.Run()) { |
| 528 DLOG(ERROR) << "ReusePictureBuffer(): could not make context current"; |
| 529 NOTIFY_ERROR(PLATFORM_FAILURE); |
| 530 return; |
| 531 } |
| 532 |
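| // Create a fence in the client's GL command stream so that the buffer is not
| // handed back to GSC until any commands still reading the picture complete.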
| 533 EGLSyncKHR egl_sync = |
| 534 egl_create_sync_khr(egl_display_, EGL_SYNC_FENCE_KHR, NULL); |
| 535 if (egl_sync == EGL_NO_SYNC_KHR) { |
| 536 DLOG(ERROR) << "ReusePictureBuffer(): eglCreateSyncKHR() failed"; |
| 537 NOTIFY_ERROR(PLATFORM_FAILURE); |
| 538 return; |
| 539 } |
| 540 |
| 541 scoped_ptr<EGLSyncKHRRef> egl_sync_ref(new EGLSyncKHRRef( |
| 542 egl_display_, egl_sync)); |
| 543 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind( |
| 544 &ExynosVideoDecodeAccelerator::ReusePictureBufferTask, |
| 545 base::Unretained(this), picture_buffer_id, base::Passed(&egl_sync_ref))); |
| 546 } |
| 547 |
| 548 void ExynosVideoDecodeAccelerator::Flush() { |
| 549 DVLOG(3) << "Flush()"; |
| 550 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
| 551 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind( |
| 552 &ExynosVideoDecodeAccelerator::FlushTask, base::Unretained(this))); |
| 553 } |
| 554 |
| 555 void ExynosVideoDecodeAccelerator::Reset() { |
| 556 DVLOG(3) << "Reset()"; |
| 557 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
| 558 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind( |
| 559 &ExynosVideoDecodeAccelerator::ResetTask, base::Unretained(this))); |
| 560 } |
| 561 |
| 562 void ExynosVideoDecodeAccelerator::Destroy() { |
| 563 DVLOG(3) << "Destroy()"; |
| 564 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
| 565 |
| 566 // We're destroying; cancel all callbacks. |
| 567 client_ptr_factory_.InvalidateWeakPtrs(); |
| 568 |
| 569 // If the decoder thread is running, destroy using posted task. |
| 570 if (decoder_thread_.IsRunning()) { |
| 571 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind( |
| 572 &ExynosVideoDecodeAccelerator::DestroyTask, base::Unretained(this))); |
| 573 // DestroyTask() will cause the decoder_thread_ to flush all tasks. |
| 574 decoder_thread_.Stop(); |
| 575 } else { |
| 576 // Otherwise, call the destroy task directly. |
| 577 DestroyTask(); |
| 578 } |
| 579 |
| 580 // Set to kError state just in case. |
| 581 SetDecoderState(kError); |
| 582 |
| 583 delete this; |
| 584 } |
| 585 |
| 586 // static |
| 587 void ExynosVideoDecodeAccelerator::PreSandboxInitialization() { |
| 588 DVLOG(3) << "PreSandboxInitialization()"; |
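| // dlerror() returns and clears any pending error state; clear it before
| // calling dlopen() so a later failure reports the right message.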
| 589 dlerror(); |
| 590 |
| 591 libmali_handle = dlopen(kMaliDriver, RTLD_LAZY | RTLD_LOCAL); |
| 592 if (libmali_handle == NULL) { |
| 593 DLOG(ERROR) << "failed to dlopen() " << kMaliDriver << ": " << dlerror();
| 594 } |
| 595 } |
| 596 |
| 597 // static |
| 598 bool ExynosVideoDecodeAccelerator::PostSandboxInitialization() { |
| 599 DVLOG(3) << "PostSandboxInitialization()"; |
| 600 if (libmali_handle == NULL) { |
| 601 DLOG(ERROR) << "PostSandboxInitialization(): no " << kMaliDriver |
| 602 << " driver handle"; |
| 603 return false; |
| 604 } |
| 605 |
| 606 dlerror(); |
| 607 |
| 608 POSTSANDBOX_DLSYM(libmali_handle, |
| 609 mali_egl_image_get_buffer_ext_phandle, |
| 610 MaliEglImageGetBufferExtPhandleFunc, |
| 611 "mali_egl_image_get_buffer_ext_phandle"); |
| 612 |
| 613 POSTSANDBOX_DLSYM(libmali_handle, |
| 614 egl_create_image_khr, |
| 615 EglCreateImageKhrFunc, |
| 616 "eglCreateImageKHR"); |
| 617 |
| 618 POSTSANDBOX_DLSYM(libmali_handle, |
| 619 egl_destroy_image_khr, |
| 620 EglDestroyImageKhrFunc, |
| 621 "eglDestroyImageKHR"); |
| 622 |
| 623 POSTSANDBOX_DLSYM(libmali_handle, |
| 624 egl_create_sync_khr, |
| 625 EglCreateSyncKhrFunc, |
| 626 "eglCreateSyncKHR"); |
| 627 |
| 628 POSTSANDBOX_DLSYM(libmali_handle, |
| 629 egl_destroy_sync_khr, |
| 630 EglDestroySyncKhrFunc, |
| 631 "eglDestroySyncKHR"); |
| 632 |
| 633 POSTSANDBOX_DLSYM(libmali_handle, |
| 634 egl_client_wait_sync_khr, |
| 635 EglClientWaitSyncKhrFunc, |
| 636 "eglClientWaitSyncKHR"); |
| 637 |
| 638 POSTSANDBOX_DLSYM(libmali_handle, |
| 639 gl_egl_image_target_texture_2d_oes, |
| 640 GlEglImageTargetTexture2dOesFunc, |
| 641 "glEGLImageTargetTexture2DOES"); |
| 642 |
| 643 return true; |
| 644 } |
| 645 |
| 646 void ExynosVideoDecodeAccelerator::DecodeTask( |
| 647 scoped_ptr<BitstreamBufferRef> bitstream_record) { |
| 648 DVLOG(3) << "DecodeTask(): input_id=" << bitstream_record->input_id; |
| 649 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current()); |
| 650 DCHECK_NE(decoder_state_, kUninitialized); |
| 651 TRACE_EVENT1("Video Decoder", "EVDA::DecodeTask", "input_id", |
| 652 bitstream_record->input_id); |
| 653 |
| 654 if (decoder_state_ == kResetting || decoder_flushing_) { |
| 655 // In the case that we're resetting or flushing, we need to delay decoding |
| 656 // the BitstreamBuffers that come after the Reset() or Flush() call. When |
| 657 // we're here, we know that this DecodeTask() was scheduled by a Decode() |
| 658 // call that came after (in the client thread) the Reset() or Flush() call; |
| 659 // thus set up the delay if necessary. |
| 660 if (decoder_delay_bitstream_buffer_id_ == -1) |
| 661 decoder_delay_bitstream_buffer_id_ = bitstream_record->input_id; |
| 662 } else if (decoder_state_ == kError) { |
| 663 DVLOG(2) << "DecodeTask(): early out: kError state"; |
| 664 return; |
| 665 } |
| 666 |
| 667 decoder_input_queue_.push_back( |
| 668 linked_ptr<BitstreamBufferRef>(bitstream_record.release())); |
| 669 decoder_decode_buffer_tasks_scheduled_++; |
| 670 DecodeBufferTask(); |
| 671 } |
| 672 |
| 673 void ExynosVideoDecodeAccelerator::DecodeBufferTask() { |
| 674 DVLOG(3) << "DecodeBufferTask()"; |
| 675 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current()); |
| 676 DCHECK_NE(decoder_state_, kUninitialized); |
| 677 TRACE_EVENT0("Video Decoder", "EVDA::DecodeBufferTask"); |
| 678 |
| 679 decoder_decode_buffer_tasks_scheduled_--; |
| 680 |
| 681 if (decoder_state_ == kResetting) { |
| 682 DVLOG(2) << "DecodeBufferTask(): early out: kResetting state"; |
| 683 return; |
| 684 } else if (decoder_state_ == kError) { |
| 685 DVLOG(2) << "DecodeBufferTask(): early out: kError state"; |
| 686 return; |
| 687 } |
| 688 |
| 689 if (decoder_current_bitstream_buffer_ == NULL) { |
| 690 if (decoder_input_queue_.empty()) { |
| 691 // We're waiting for a new buffer -- exit without scheduling a new task. |
| 692 return; |
| 693 } |
| 694 linked_ptr<BitstreamBufferRef>& buffer_ref = decoder_input_queue_.front(); |
| 695 if (decoder_delay_bitstream_buffer_id_ == buffer_ref->input_id) { |
| 696 // We're asked to delay decoding on this and subsequent buffers. |
| 697 return; |
| 698 } |
| 699 |
| 700 // Setup to use the next buffer. |
| 701 decoder_current_bitstream_buffer_.reset(buffer_ref.release()); |
| 702 decoder_input_queue_.pop_front(); |
| 703 DVLOG(3) << "DecodeBufferTask(): reading input_id=" |
| 704 << decoder_current_bitstream_buffer_->input_id |
| 705 << ", addr=" << decoder_current_bitstream_buffer_->shm->memory() |
| 706 << ", size=" << decoder_current_bitstream_buffer_->size; |
| 707 } |
| 708 bool schedule_task = false; |
| 709 const size_t size = decoder_current_bitstream_buffer_->size; |
| 710 size_t decoded_size = 0; |
| 711 if (size == 0) { |
| 712 const int32 input_id = decoder_current_bitstream_buffer_->input_id; |
| 713 if (input_id >= 0) { |
| 714 // This is a buffer queued from the client that has zero size. Skip. |
| 715 schedule_task = true; |
| 716 } else { |
| 717 // This is a buffer of zero size, queued to flush the pipe. Flush. |
| 718 DCHECK_EQ(decoder_current_bitstream_buffer_->shm.get(), |
| 719 static_cast<base::SharedMemory*>(NULL)); |
| 720 // Enqueue a buffer guaranteed to be empty. To do that, we flush the |
| 721 // current input, enqueue no data to the next frame, then flush that down. |
| 722 schedule_task = true; |
| 723 if (decoder_current_input_buffer_ != -1 && |
| 724 mfc_input_buffer_map_[decoder_current_input_buffer_].input_id != |
| 725 kFlushBufferId) |
| 726 schedule_task = FlushInputFrame(); |
| 727 |
| 728 if (schedule_task && AppendToInputFrame(NULL, 0) && FlushInputFrame()) { |
| 729 DVLOG(2) << "DecodeBufferTask(): enqueued flush buffer"; |
| 730 schedule_task = true; |
| 731 } else { |
| 732 // If we failed to enqueue the empty buffer (due to pipeline |
| 733 // backpressure), don't advance the bitstream buffer queue, and don't |
| 734 // schedule the next task. This bitstream buffer queue entry will get |
| 735 // reprocessed when the pipeline frees up. |
| 736 schedule_task = false; |
| 737 } |
| 738 } |
| 739 } else { |
| 740 // This is a buffer queued from the client, with actual contents. Decode. |
| 741 const uint8* const data = |
| 742 reinterpret_cast<const uint8*>( |
| 743 decoder_current_bitstream_buffer_->shm->memory()) + |
| 744 decoder_current_bitstream_buffer_->bytes_used; |
| 745 const size_t data_size = |
| 746 decoder_current_bitstream_buffer_->size - |
| 747 decoder_current_bitstream_buffer_->bytes_used; |
| 748 if (!FindFrameFragment(data, data_size, &decoded_size)) { |
| 749 NOTIFY_ERROR(UNREADABLE_INPUT); |
| 750 return; |
| 751 } |
| 752 // FindFrameFragment should not return a size larger than the buffer size, |
| 753 // even on invalid data. |
| 754 CHECK_LE(decoded_size, data_size); |
| 755 |
| 756 switch (decoder_state_) { |
| 757 case kInitialized: |
| 758 case kAfterReset: |
| 759 schedule_task = DecodeBufferInitial(data, decoded_size, &decoded_size); |
| 760 break; |
| 761 case kDecoding: |
| 762 schedule_task = DecodeBufferContinue(data, decoded_size); |
| 763 break; |
| 764 default: |
| 765 NOTIFY_ERROR(ILLEGAL_STATE); |
| 766 return; |
| 767 } |
| 768 } |
| 769 if (decoder_state_ == kError) { |
| 770 // Failed during decode. |
| 771 return; |
| 772 } |
| 773 |
| 774 if (schedule_task) { |
| 775 decoder_current_bitstream_buffer_->bytes_used += decoded_size; |
| 776 if (decoder_current_bitstream_buffer_->bytes_used == |
| 777 decoder_current_bitstream_buffer_->size) { |
| 778 // Our current bitstream buffer is done; return it. |
| 779 int32 input_id = decoder_current_bitstream_buffer_->input_id; |
| 780 DVLOG(3) << "DecodeBufferTask(): finished input_id=" << input_id; |
| 781 // BitstreamBufferRef destructor calls NotifyEndOfBitstreamBuffer(). |
| 782 decoder_current_bitstream_buffer_.reset(); |
| 783 } |
| 784 ScheduleDecodeBufferTaskIfNeeded(); |
| 785 } |
| 786 } |
| 787 |
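| // Finds how much of |data| makes up the next decodable unit. For H264 this is
| // a single frame; for VP8 it is the whole buffer. On success |*endpos| is set
| // to the number of bytes to consume; returns false on a parse error.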
| 788 bool ExynosVideoDecodeAccelerator::FindFrameFragment( |
| 789 const uint8* data, |
| 790 size_t size, |
| 791 size_t* endpos) { |
| 792 if (video_profile_ >= media::H264PROFILE_MIN && |
| 793 video_profile_ <= media::H264PROFILE_MAX) { |
| 794 // For H264, we need to feed HW one frame at a time. This is going to take |
| 795 // some parsing of our input stream. |
| 796 decoder_h264_parser_->SetStream(data, size); |
| 797 content::H264NALU nalu; |
| 798 content::H264Parser::Result result; |
| 799 |
| 800 // Find the first NAL. |
| 801 result = decoder_h264_parser_->AdvanceToNextNALU(&nalu); |
| 802 if (result == content::H264Parser::kInvalidStream || |
| 803 result == content::H264Parser::kUnsupportedStream) |
| 804 return false; |
| 805 *endpos = (nalu.data + nalu.size) - data; |
| 806 if (result == content::H264Parser::kEOStream) |
| 807 return true; |
| 808 |
| 809 // Keep on peeking the next NALs while they don't indicate a frame |
| 810 // boundary. |
| 811 for (;;) { |
| 812 result = decoder_h264_parser_->AdvanceToNextNALU(&nalu); |
| 813 if (result == content::H264Parser::kInvalidStream || |
| 814 result == content::H264Parser::kUnsupportedStream) |
| 815 return false; |
| 816 if (result == content::H264Parser::kEOStream) |
| 817 return true; |
| 818 switch (nalu.nal_unit_type) { |
| 819 case content::H264NALU::kNonIDRSlice: |
| 820 case content::H264NALU::kIDRSlice: |
| 821 if (nalu.size < 2)
| 822 return false; |
| 823 // For these two, if the "first_mb_in_slice" field is zero, start a |
| 824 // new frame and return. This field is Exp-Golomb coded starting on |
| 825 // the eighth data bit of the NAL; a zero value is encoded with a |
| 826 // leading '1' bit in the byte, which we can detect as the byte being |
| 827 // (unsigned) greater than or equal to 0x80. |
| 828 if (nalu.data[1] >= 0x80) |
| 829 return true; |
| 830 break; |
| 831 case content::H264NALU::kSPS: |
| 832 case content::H264NALU::kPPS: |
| 833 case content::H264NALU::kEOSeq: |
| 834 case content::H264NALU::kEOStream: |
| 835 // These unconditionally signal a frame boundary. |
| 836 return true; |
| 837 default: |
| 838 // For all others, keep going. |
| 839 break; |
| 840 } |
| 841 *endpos = (nalu.data + nalu.size) - data; |
| 842 } |
| 843 NOTREACHED(); |
| 844 return false; |
| 845 } else { |
| 846 DCHECK_GE(video_profile_, media::VP8PROFILE_MIN); |
| 847 DCHECK_LE(video_profile_, media::VP8PROFILE_MAX); |
| 848 // For VP8, we can just dump the entire buffer. No fragmentation needed. |
| 849 *endpos = size; |
| 850 return true; |
| 851 } |
| 852 } |
| 853 |
| 854 void ExynosVideoDecodeAccelerator::ScheduleDecodeBufferTaskIfNeeded() { |
| 855 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current()); |
| 856 |
| 857 // If we're behind on tasks, schedule another one. |
| 858 int buffers_to_decode = decoder_input_queue_.size(); |
| 859 if (decoder_current_bitstream_buffer_ != NULL) |
| 860 buffers_to_decode++; |
| 861 if (decoder_decode_buffer_tasks_scheduled_ < buffers_to_decode) { |
| 862 decoder_decode_buffer_tasks_scheduled_++; |
| 863 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind( |
| 864 &ExynosVideoDecodeAccelerator::DecodeBufferTask, |
| 865 base::Unretained(this))); |
| 866 } |
| 867 } |
| 868 |
| 869 bool ExynosVideoDecodeAccelerator::DecodeBufferInitial( |
| 870 const void* data, size_t size, size_t* endpos) { |
| 871 DVLOG(3) << "DecodeBufferInitial(): data=" << data << ", size=" << size; |
| 872 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current()); |
| 873 DCHECK_NE(decoder_state_, kUninitialized); |
| 874 DCHECK_NE(decoder_state_, kDecoding); |
| 875 DCHECK(!device_poll_thread_.IsRunning()); |
| 876 // Initial decode. We haven't been able to get output stream format info yet. |
| 877 // Get it, and start decoding. |
| 878 |
| 879 // Copy in and send to HW. |
| 880 if (!AppendToInputFrame(data, size) || !FlushInputFrame()) |
| 881 return false; |
| 882 |
| 883 // Recycle buffers. |
| 884 DequeueMfc(); |
| 885 |
| 886 // Check and see if we have format info yet. |
| 887 struct v4l2_format format;
| memset(&format, 0, sizeof(format));
| 888 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
| 889 if (HANDLE_EINTR(ioctl(mfc_fd_, VIDIOC_G_FMT, &format)) != 0) {
| 890 if (errno == EINVAL) { |
| 891 // We will get EINVAL if we haven't seen sufficient stream to decode the |
| 892 // format. Return true and schedule the next buffer. |
| 893 *endpos = size; |
| 894 return true; |
| 895 } else { |
| 896 DPLOG(ERROR) << "DecodeBufferInitial(): ioctl() failed: VIDIOC_G_FMT"; |
| 897 NOTIFY_ERROR(PLATFORM_FAILURE); |
| 898 return false; |
| 899 } |
| 900 } |
| 901 |
| 902 // Run this initialization only on first startup. |
| 903 if (decoder_state_ == kInitialized) { |
| 904 DVLOG(3) << "DecodeBufferInitial(): running one-time initialization"; |
| 905 // Success! Setup our parameters. |
| 906 CHECK_EQ(format.fmt.pix_mp.num_planes, 2); |
| 907 frame_buffer_size_.SetSize( |
| 908 format.fmt.pix_mp.width, format.fmt.pix_mp.height); |
| 909 mfc_output_buffer_size_[0] = format.fmt.pix_mp.plane_fmt[0].sizeimage; |
| 910 mfc_output_buffer_size_[1] = format.fmt.pix_mp.plane_fmt[1].sizeimage; |
| 911 mfc_output_buffer_pixelformat_ = format.fmt.pix_mp.pixelformat; |
| 912 DCHECK_EQ(mfc_output_buffer_pixelformat_, V4L2_PIX_FMT_NV12MT_16X16); |
| 913 |
| 914 // Create our other buffers. |
| 915 if (!CreateMfcOutputBuffers() || !CreateGscInputBuffers() || |
| 916 !CreateGscOutputBuffers()) |
| 917 return false; |
| 918 |
| 919 // MFC expects to process the initial buffer once during stream init to |
| 920 configure stream parameters, but will not consume the stream data on that
| 921 // iteration. Subsequent iterations (including after reset) do not require |
| 922 // the stream init step. |
| 923 *endpos = 0; |
| 924 } else { |
| 925 *endpos = size; |
| 926 } |
| 927 |
| 928 // StartDevicePoll will raise the error if there is one. |
| 929 if (!StartDevicePoll()) |
| 930 return false; |
| 931 |
| 932 decoder_state_ = kDecoding; |
| 933 ScheduleDecodeBufferTaskIfNeeded(); |
| 934 return true; |
| 935 } |
| 936 |
| 937 bool ExynosVideoDecodeAccelerator::DecodeBufferContinue( |
| 938 const void* data, size_t size) { |
| 939 DVLOG(3) << "DecodeBufferContinue(): data=" << data << ", size=" << size; |
| 940 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current()); |
| 941 DCHECK_EQ(decoder_state_, kDecoding); |
| 942 |
| 943 // Both of these calls will set kError state if they fail. |
| 944 return (AppendToInputFrame(data, size) && FlushInputFrame()); |
| 945 } |
| 946 |
| 947 bool ExynosVideoDecodeAccelerator::AppendToInputFrame( |
| 948 const void* data, size_t size) { |
| 949 DVLOG(3) << "AppendToInputFrame()"; |
| 950 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current()); |
| 951 DCHECK_NE(decoder_state_, kUninitialized); |
| 952 DCHECK_NE(decoder_state_, kResetting); |
| 953 DCHECK_NE(decoder_state_, kError); |
| 954 // This routine can handle data == NULL and size == 0, which occurs when |
| 955 // we queue an empty buffer for the purposes of flushing the pipe. |
| 956 |
| 957 // Flush the current input frame if the new data will not fit in it.
| 958 if (decoder_current_input_buffer_ != -1) { |
| 959 MfcInputRecord& input_record = |
| 960 mfc_input_buffer_map_[decoder_current_input_buffer_]; |
| 961 if (input_record.bytes_used + size > input_record.length) { |
| 962 if (!FlushInputFrame()) |
| 963 return false; |
| 964 decoder_current_input_buffer_ = -1; |
| 965 } |
| 966 } |
| 967 |
| 968 // Try to get an available input buffer |
| 969 if (decoder_current_input_buffer_ == -1) { |
| 970 if (mfc_free_input_buffers_.empty()) { |
| 971 // See if we can get more free buffers from HW |
| 972 DequeueMfc(); |
| 973 if (mfc_free_input_buffers_.empty()) { |
| 974 // Nope! |
| 975 DVLOG(2) << "AppendToInputFrame(): stalled for input buffers"; |
| 976 return false; |
| 977 } |
| 978 } |
| 979 decoder_current_input_buffer_ = mfc_free_input_buffers_.back(); |
| 980 mfc_free_input_buffers_.pop_back(); |
| 981 MfcInputRecord& input_record = |
| 982 mfc_input_buffer_map_[decoder_current_input_buffer_]; |
| 983 DCHECK_EQ(input_record.bytes_used, 0); |
| 984 DCHECK_EQ(input_record.input_id, -1); |
| 985 DCHECK(decoder_current_bitstream_buffer_ != NULL); |
| 986 input_record.input_id = decoder_current_bitstream_buffer_->input_id; |
| 987 } |
| 988 |
| 989 DCHECK_EQ(data == NULL, size == 0); |
| 990 if (size == 0) { |
| 991 // An empty append reserves an input buffer for flushing. The early return
| 992 // comes after the buffer acquisition above so the (empty) buffer is ready
| 993 // to be queued by FlushInputFrame().
| 994 return true; |
| 995 } |
| 996 |
| 997 // Copy in to the buffer. |
| 998 MfcInputRecord& input_record = |
| 999 mfc_input_buffer_map_[decoder_current_input_buffer_]; |
| 1000 if (size > input_record.length - input_record.bytes_used) { |
| 1001 LOG(ERROR) << "AppendToInputFrame(): over-size frame, erroring"; |
| 1002 NOTIFY_ERROR(UNREADABLE_INPUT); |
| 1003 return false; |
| 1004 } |
| 1005 memcpy( |
| 1006 reinterpret_cast<uint8*>(input_record.address) + input_record.bytes_used, |
| 1007 data, |
| 1008 size); |
| 1009 input_record.bytes_used += size; |
| 1010 |
| 1011 return true; |
| 1012 } |
| 1013 |
| 1014 bool ExynosVideoDecodeAccelerator::FlushInputFrame() { |
| 1015 DVLOG(3) << "FlushInputFrame()"; |
| 1016 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current()); |
| 1017 DCHECK_NE(decoder_state_, kUninitialized); |
| 1018 DCHECK_NE(decoder_state_, kResetting); |
| 1019 DCHECK_NE(decoder_state_, kError); |
| 1020 |
| 1021 if (decoder_current_input_buffer_ == -1) |
| 1022 return true; |
| 1023 |
| 1024 MfcInputRecord& input_record = |
| 1025 mfc_input_buffer_map_[decoder_current_input_buffer_]; |
| 1026 DCHECK_NE(input_record.input_id, -1); |
| 1027 DCHECK_EQ(input_record.input_id == kFlushBufferId, |
| 1028 input_record.bytes_used == 0); |
| 1029 // * if input_id >= 0, this input buffer was prompted by a bitstream buffer we |
| 1030 // got from the client. We can skip it if it is empty. |
| 1031 // * if input_id < 0 (should be kFlushBufferId in this case), this input |
| 1032 // buffer was prompted by a flush buffer, and should be queued even when |
| 1033 // empty. |
| 1034 if (input_record.input_id >= 0 && input_record.bytes_used == 0) { |
| 1035 input_record.input_id = -1; |
| 1036 mfc_free_input_buffers_.push_back(decoder_current_input_buffer_); |
| 1037 decoder_current_input_buffer_ = -1; |
| 1038 return true; |
| 1039 } |
| 1040 |
| 1041 // Queue it to MFC. |
| 1042 mfc_input_ready_queue_.push_back(decoder_current_input_buffer_); |
| 1043 decoder_current_input_buffer_ = -1; |
| 1044 DVLOG(3) << "FlushInputFrame(): submitting input_id=" |
| 1045 << input_record.input_id; |
| 1046 // Kick the MFC once since there's new available input for it. |
| 1047 EnqueueMfc(); |
| 1048 |
| 1049 return (decoder_state_ != kError); |
| 1050 } |
| 1051 |
| 1052 void ExynosVideoDecodeAccelerator::AssignPictureBuffersTask( |
| 1053 scoped_ptr<PictureBufferArrayRef> pic_buffers) { |
| 1054 DVLOG(3) << "AssignPictureBuffersTask()"; |
| 1055 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current()); |
| 1056 DCHECK_NE(decoder_state_, kUninitialized); |
| 1057 TRACE_EVENT0("Video Decoder", "EVDA::AssignPictureBuffersTask"); |
| 1058 |
| 1059 // We run AssignPictureBuffersTask even if we're in kResetting. |
| 1060 if (decoder_state_ == kError) { |
| 1061 DVLOG(2) << "AssignPictureBuffersTask(): early out: kError state"; |
| 1062 return; |
| 1063 } |
| 1064 |
| 1065 DCHECK_EQ(pic_buffers->picture_buffers.size(), gsc_output_buffer_map_.size()); |
| 1066 for (size_t i = 0; i < gsc_output_buffer_map_.size(); ++i) { |
| 1067 // We should be blank right now. |
| 1068 GscOutputRecord& output_record = gsc_output_buffer_map_[i]; |
| 1069 DCHECK_EQ(output_record.fd, -1); |
| 1070 DCHECK_EQ(output_record.egl_image, EGL_NO_IMAGE_KHR); |
| 1071 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR); |
| 1072 DCHECK_EQ(output_record.picture_id, -1); |
| 1073 PictureBufferArrayRef::PictureBufferRef& buffer = |
| 1074 pic_buffers->picture_buffers[i]; |
| 1075 output_record.fd = buffer.egl_image_fd; |
| 1076 output_record.egl_image = buffer.egl_image; |
| 1077 output_record.picture_id = buffer.client_id; |
| 1078 |
| 1079 // Take ownership of the EGLImage and fd. |
| 1080 buffer.egl_image = EGL_NO_IMAGE_KHR; |
| 1081 buffer.egl_image_fd = -1; |
| 1082 // And add this buffer to the free list. |
| 1083 gsc_free_output_buffers_.push_back(i); |
| 1084 } |
| 1085 |
| 1086 // We got buffers! Kick the GSC. |
| 1087 EnqueueGsc(); |
| 1088 } |
| 1089 |
| 1090 void ExynosVideoDecodeAccelerator::ServiceDeviceTask() { |
| 1091 DVLOG(3) << "ServiceDeviceTask()"; |
| 1092 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current()); |
| 1093 DCHECK_NE(decoder_state_, kUninitialized); |
| 1094 DCHECK_NE(decoder_state_, kInitialized); |
| 1095 DCHECK_NE(decoder_state_, kAfterReset); |
| 1096 TRACE_EVENT0("Video Decoder", "EVDA::ServiceDeviceTask"); |
| 1097 |
| 1098 if (decoder_state_ == kResetting) { |
| 1099 DVLOG(2) << "ServiceDeviceTask(): early out: kResetting state"; |
| 1100 return; |
| 1101 } else if (decoder_state_ == kError) { |
| 1102 DVLOG(2) << "ServiceDeviceTask(): early out: kError state"; |
| 1103 return; |
| 1104 } |
| 1105 |
| 1106 DequeueMfc(); |
| 1107 DequeueGsc(); |
| 1108 EnqueueMfc(); |
| 1109 EnqueueGsc(); |
| 1110 |
| 1111 // Clear the interrupt fd. |
| 1112 if (!ClearDevicePollInterrupt()) |
| 1113 return; |
| 1114 |
| 1115 unsigned int poll_fds = 0; |
| 1116 // Add MFC fd, if we should poll on it. |
| 1117 // MFC can be polled as soon as either input or output buffers are queued. |
| 1118 if (mfc_input_buffer_queued_count_ + mfc_output_buffer_queued_count_ > 0) |
| 1119 poll_fds |= kPollMfc; |
| 1120 // Add GSC fd, if we should poll on it. |
| 1121 // GSC has to wait until both input and output buffers are queued. |
| 1122 if (gsc_input_buffer_queued_count_ > 0 && gsc_output_buffer_queued_count_ > 0) |
| 1123 poll_fds |= kPollGsc; |
| 1124 |
| 1125 // ServiceDeviceTask() should only ever be scheduled from DevicePollTask(), |
| 1126 // so either: |
| 1127 // * device_poll_thread_ is running normally |
| 1128 // * device_poll_thread_ scheduled us, but then a ResetTask() or DestroyTask() |
| 1129 // shut it down, in which case we're either in kResetting or kError states |
| 1130 // respectively, and we should have early-outed already. |
| 1131 DCHECK(device_poll_thread_.message_loop()); |
| 1132 // Queue the DevicePollTask() now. |
| 1133 device_poll_thread_.message_loop()->PostTask(FROM_HERE, base::Bind( |
| 1134 &ExynosVideoDecodeAccelerator::DevicePollTask, |
| 1135 base::Unretained(this), |
| 1136 poll_fds)); |
| 1137 |
| 1138 DVLOG(1) << "ServiceDeviceTask(): buffer counts: DEC[" |
| 1139 << decoder_input_queue_.size() << "->" |
| 1140 << mfc_input_ready_queue_.size() << "] => MFC[" |
| 1141 << mfc_free_input_buffers_.size() << "+" |
| 1142 << mfc_input_buffer_queued_count_ << "/" |
| 1143 << mfc_input_buffer_count_ << "->" |
| 1144 << mfc_free_output_buffers_.size() << "+" |
| 1145 << mfc_output_buffer_queued_count_ << "/" |
| 1146 << mfc_output_buffer_count_ << "] => " |
| 1147 << mfc_output_gsc_input_queue_.size() << " => GSC[" |
| 1148 << gsc_free_input_buffers_.size() << "+" |
| 1149 << gsc_input_buffer_queued_count_ << "/" |
| 1150 << gsc_input_buffer_count_ << "->" |
| 1151 << gsc_free_output_buffers_.size() << "+" |
| 1152 << gsc_output_buffer_queued_count_ << "/" |
| 1153 << gsc_output_buffer_count_ << "] => VDA[" |
| 1154 << decoder_frames_at_client_ << "]"; |
| 1155 |
| 1156 ScheduleDecodeBufferTaskIfNeeded(); |
| 1157 } |
| 1158 |
| 1159 void ExynosVideoDecodeAccelerator::EnqueueMfc() { |
| 1160 DVLOG(3) << "EnqueueMfc()"; |
| 1161 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current()); |
| 1162 DCHECK_NE(decoder_state_, kUninitialized); |
| 1163 TRACE_EVENT0("Video Decoder", "EVDA::EnqueueMfc"); |
| 1164 |
| 1165 // Drain the pipe of completed decode buffers. |
| 1166 const int old_mfc_inputs_queued = mfc_input_buffer_queued_count_; |
| 1167 while (!mfc_input_ready_queue_.empty()) { |
| 1168 if (!EnqueueMfcInputRecord()) |
| 1169 return; |
| 1170 } |
| 1171 if (old_mfc_inputs_queued == 0 && mfc_input_buffer_queued_count_ != 0) { |
| 1172 // We just started up a previously empty queue. |
| 1173 // Queue state changed; signal interrupt. |
| 1174 if (!SetDevicePollInterrupt()) |
| 1175 return; |
| 1176 // Start VIDIOC_STREAMON if we haven't yet. |
| 1177 if (!mfc_input_streamon_) { |
| 1178 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 1179 IOCTL_OR_ERROR_RETURN(mfc_fd_, VIDIOC_STREAMON, &type); |
| 1180 mfc_input_streamon_ = true; |
| 1181 } |
| 1182 } |
| 1183 |
| 1184 // Enqueue all the MFC outputs we can. |
| 1185 const int old_mfc_outputs_queued = mfc_output_buffer_queued_count_; |
| 1186 while (!mfc_free_output_buffers_.empty()) { |
| 1187 if (!EnqueueMfcOutputRecord()) |
| 1188 return; |
| 1189 } |
| 1190 if (old_mfc_outputs_queued == 0 && mfc_output_buffer_queued_count_ != 0) { |
| 1191 // We just started up a previously empty queue. |
| 1192 // Queue state changed; signal interrupt. |
| 1193 if (!SetDevicePollInterrupt()) |
| 1194 return; |
| 1195 // Start VIDIOC_STREAMON if we haven't yet. |
| 1196 if (!mfc_output_streamon_) { |
| 1197 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 1198 IOCTL_OR_ERROR_RETURN(mfc_fd_, VIDIOC_STREAMON, &type); |
| 1199 mfc_output_streamon_ = true; |
| 1200 } |
| 1201 } |
| 1202 } |
| 1203 |
| 1204 void ExynosVideoDecodeAccelerator::DequeueMfc() { |
| 1205 DVLOG(3) << "DequeueMfc()"; |
| 1206 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current()); |
| 1207 DCHECK_NE(decoder_state_, kUninitialized); |
| 1208 TRACE_EVENT0("Video Decoder", "EVDA::DequeueMfc"); |
| 1209 |
| 1210 // Dequeue completed MFC input (VIDEO_OUTPUT) buffers, and recycle to the free |
| 1211 // list. |
| 1212 struct v4l2_buffer dqbuf; |
| 1213 struct v4l2_plane planes[2]; |
| 1214 while (mfc_input_buffer_queued_count_ > 0) { |
| 1215 DCHECK(mfc_input_streamon_); |
| 1216 memset(&dqbuf, 0, sizeof(dqbuf)); |
| 1217 dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 1218 dqbuf.memory = V4L2_MEMORY_MMAP; |
| 1219 if (HANDLE_EINTR(ioctl(mfc_fd_, VIDIOC_DQBUF, &dqbuf)) != 0) {
| 1220 if (errno == EAGAIN) { |
| 1221 // EAGAIN if we're just out of buffers to dequeue. |
| 1222 break; |
| 1223 } |
| 1224 DPLOG(ERROR) << "DequeueMfc(): ioctl() failed: VIDIOC_DQBUF"; |
| 1225 NOTIFY_ERROR(PLATFORM_FAILURE); |
| 1226 return; |
| 1227 } |
| 1228 MfcInputRecord& input_record = mfc_input_buffer_map_[dqbuf.index]; |
| 1229 DCHECK(input_record.at_device); |
| 1230 mfc_free_input_buffers_.push_back(dqbuf.index); |
| 1231 input_record.at_device = false; |
| 1232 input_record.bytes_used = 0; |
| 1233 input_record.input_id = -1; |
| 1234 mfc_input_buffer_queued_count_--; |
| 1235 } |
| 1236 |
| 1237 // Dequeue completed MFC output (VIDEO_CAPTURE) buffers, and queue to the |
| 1238 // completed queue. |
| 1239 while (mfc_output_buffer_queued_count_ > 0) { |
| 1240 DCHECK(mfc_output_streamon_); |
| 1241 memset(&dqbuf, 0, sizeof(dqbuf)); |
| 1242 memset(planes, 0, sizeof(planes)); |
| 1243 dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 1244 dqbuf.memory = V4L2_MEMORY_MMAP; |
| 1245 dqbuf.m.planes = planes; |
| 1246 dqbuf.length = 2; |
| 1247 if (HANDLE_EINTR(ioctl(mfc_fd_, VIDIOC_DQBUF, &dqbuf)) != 0) {
| 1248 if (errno == EAGAIN) { |
| 1249 // EAGAIN if we're just out of buffers to dequeue. |
| 1250 break; |
| 1251 } |
| 1252 DPLOG(ERROR) << "DequeueMfc(): ioctl() failed: VIDIOC_DQBUF"; |
| 1253 NOTIFY_ERROR(PLATFORM_FAILURE); |
| 1254 return; |
| 1255 } |
| 1256 MfcOutputRecord& output_record = mfc_output_buffer_map_[dqbuf.index]; |
| 1257 DCHECK(output_record.at_device); |
| 1258 output_record.at_device = false; |
| 1259 output_record.bytes_used[0] = dqbuf.m.planes[0].bytesused; |
| 1260 output_record.bytes_used[1] = dqbuf.m.planes[1].bytesused; |
| 1261 if (output_record.bytes_used[0] + output_record.bytes_used[1] == 0) { |
| 1262 // This is an empty output buffer returned as part of a flush. |
| 1263 mfc_free_output_buffers_.push_back(dqbuf.index); |
| 1264 output_record.input_id = -1; |
| 1265 } else { |
| 1266 // This is an output buffer with contents to pass down the pipe. |
| 1267 mfc_output_gsc_input_queue_.push_back(dqbuf.index); |
| 1268 output_record.input_id = dqbuf.timestamp.tv_sec; |
| 1269 DCHECK_GE(output_record.input_id, 0);
| 1270 DVLOG(3) << "DequeueMfc(): dequeued input_id=" << output_record.input_id; |
| 1271 // This buffer is not added to the free list yet; it still holds data
| 1272 // that GSC needs to process.
| 1273 |
| 1274 // We have new frames in mfc_output_gsc_input_queue_. Kick the pipe. |
| 1275 SetDevicePollInterrupt(); |
| 1276 } |
| 1277 mfc_output_buffer_queued_count_--; |
| 1278 } |
| 1279 |
| 1280 NotifyFlushDoneIfNeeded(); |
| 1281 } |
| 1282 |
| 1283 void ExynosVideoDecodeAccelerator::EnqueueGsc() { |
| 1284 DVLOG(3) << "EnqueueGsc()"; |
| 1285 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current()); |
| 1286 DCHECK_NE(decoder_state_, kUninitialized); |
| 1287 DCHECK_NE(decoder_state_, kInitialized); |
| 1288 TRACE_EVENT0("Video Decoder", "EVDA::EnqueueGsc"); |
| 1289 |
| 1290 // Drain the pipe of completed MFC output buffers. |
| 1291 const int old_gsc_inputs_queued = gsc_input_buffer_queued_count_; |
| 1292 while (!mfc_output_gsc_input_queue_.empty() && |
| 1293 !gsc_free_input_buffers_.empty()) { |
| 1294 if (!EnqueueGscInputRecord()) |
| 1295 return; |
| 1296 } |
| 1297 if (old_gsc_inputs_queued == 0 && gsc_input_buffer_queued_count_ != 0) { |
| 1298 // We just started up a previously empty queue. |
| 1299 // Queue state changed; signal interrupt. |
| 1300 if (!SetDevicePollInterrupt()) |
| 1301 return; |
| 1302 // Start VIDIOC_STREAMON if we haven't yet. |
| 1303 if (!gsc_input_streamon_) { |
| 1304 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 1305 IOCTL_OR_ERROR_RETURN(gsc_fd_, VIDIOC_STREAMON, &type); |
| 1306 gsc_input_streamon_ = true; |
| 1307 } |
| 1308 } |
| 1309 |
| 1310 // Enqueue a GSC output, only if we need one |
| 1311 if (gsc_input_buffer_queued_count_ != 0 && |
| 1312 gsc_output_buffer_queued_count_ == 0 && |
| 1313 !gsc_free_output_buffers_.empty()) { |
| 1314 const int old_gsc_outputs_queued = gsc_output_buffer_queued_count_; |
| 1315 if (!EnqueueGscOutputRecord()) |
| 1316 return; |
| 1317 if (old_gsc_outputs_queued == 0 && gsc_output_buffer_queued_count_ != 0) { |
| 1318 // We just started up a previously empty queue. |
| 1319 // Queue state changed; signal interrupt. |
| 1320 if (!SetDevicePollInterrupt()) |
| 1321 return; |
| 1322 // Start VIDIOC_STREAMON if we haven't yet. |
| 1323 if (!gsc_output_streamon_) { |
| 1324 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 1325 IOCTL_OR_ERROR_RETURN(gsc_fd_, VIDIOC_STREAMON, &type); |
| 1326 gsc_output_streamon_ = true; |
| 1327 } |
| 1328 } |
| 1329 } |
| 1330 // Bug check: GSC is liable to race conditions if more than one buffer is |
| 1331 // simultaneously queued. |
| 1332 DCHECK_LE(gsc_output_buffer_queued_count_, 1);
| 1333 } |
| 1334 |
| 1335 void ExynosVideoDecodeAccelerator::DequeueGsc() { |
| 1336 DVLOG(3) << "DequeueGsc()"; |
| 1337 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current()); |
| 1338 DCHECK_NE(decoder_state_, kUninitialized); |
| 1339 DCHECK_NE(decoder_state_, kInitialized); |
| 1340 DCHECK_NE(decoder_state_, kAfterReset); |
| 1341 TRACE_EVENT0("Video Decoder", "EVDA::DequeueGsc"); |
| 1342 |
| 1343 // Dequeue completed GSC input (VIDEO_OUTPUT) buffers, and recycle to the free |
| 1344 // list. Also recycle the corresponding MFC output buffers at this time. |
| 1345 struct v4l2_buffer dqbuf; |
| 1346 while (gsc_input_buffer_queued_count_ > 0) { |
| 1347 DCHECK(gsc_input_streamon_); |
| 1348 memset(&dqbuf, 0, sizeof(dqbuf)); |
| 1349 dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 1350 dqbuf.memory = V4L2_MEMORY_DMABUF; |
| 1351 if (HANDLE_EINTR(ioctl(gsc_fd_, VIDIOC_DQBUF, &dqbuf)) != 0) {
| 1352 if (errno == EAGAIN) { |
| 1353 // EAGAIN if we're just out of buffers to dequeue. |
| 1354 break; |
| 1355 } |
| 1356 DPLOG(ERROR) << "DequeueGsc(): ioctl() failed: VIDIOC_DQBUF"; |
| 1357 NOTIFY_ERROR(PLATFORM_FAILURE); |
| 1358 return; |
| 1359 } |
| 1360 GscInputRecord& input_record = gsc_input_buffer_map_[dqbuf.index]; |
| 1361 MfcOutputRecord& output_record = |
| 1362 mfc_output_buffer_map_[input_record.mfc_output]; |
| 1363 DCHECK(input_record.at_device); |
| 1364 gsc_free_input_buffers_.push_back(dqbuf.index); |
| 1365 mfc_free_output_buffers_.push_back(input_record.mfc_output); |
| 1366 input_record.at_device = false; |
| 1367 input_record.mfc_output = -1; |
| 1368 output_record.input_id = -1; |
| 1369 gsc_input_buffer_queued_count_--; |
| 1370 } |
| 1371 |
| 1372 // Dequeue completed GSC output (VIDEO_CAPTURE) buffers, and send them off to |
| 1373 // the client. Don't recycle to its free list yet -- we can't do that until |
| 1374 // ReusePictureBuffer() returns it to us. |
| 1375 while (gsc_output_buffer_queued_count_ > 0) { |
| 1376 DCHECK(gsc_output_streamon_); |
| 1377 memset(&dqbuf, 0, sizeof(dqbuf)); |
| 1378 dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 1379 dqbuf.memory = V4L2_MEMORY_DMABUF; |
| 1380 if (HANDLE_EINTR(ioctl(gsc_fd_, VIDIOC_DQBUF, &dqbuf)) != 0) {
| 1381 if (errno == EAGAIN) { |
| 1382 // EAGAIN if we're just out of buffers to dequeue. |
| 1383 break; |
| 1384 } |
| 1385 DPLOG(ERROR) << "DequeueGsc(): ioctl() failed: VIDIOC_DQBUF"; |
| 1386 NOTIFY_ERROR(PLATFORM_FAILURE); |
| 1387 return; |
| 1388 } |
| 1389 GscOutputRecord& output_record = gsc_output_buffer_map_[dqbuf.index]; |
| 1390 DCHECK(output_record.at_device); |
| 1391 DCHECK(!output_record.at_client); |
| 1392 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR); |
| 1393 output_record.at_device = false; |
| 1394 output_record.at_client = true; |
| 1395 gsc_output_buffer_queued_count_--; |
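| // The dequeued timestamp still carries the bitstream input_id, which becomes
| // the Picture's bitstream buffer id for the client.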
| 1396 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind( |
| 1397 &Client::PictureReady, client_, media::Picture( |
| 1398 output_record.picture_id, dqbuf.timestamp.tv_sec))); |
| 1399 decoder_frames_at_client_++; |
| 1400 } |
| 1401 |
| 1402 NotifyFlushDoneIfNeeded(); |
| 1403 } |
| 1404 |
| 1405 bool ExynosVideoDecodeAccelerator::EnqueueMfcInputRecord() { |
| 1406 DVLOG(3) << "EnqueueMfcInputRecord()"; |
| 1407 DCHECK(!mfc_input_ready_queue_.empty()); |
| 1408 |
| 1409 // Enqueue a MFC input (VIDEO_OUTPUT) buffer. |
| 1410 const int buffer = mfc_input_ready_queue_.back(); |
| 1411 MfcInputRecord& input_record = mfc_input_buffer_map_[buffer]; |
| 1412 DCHECK(!input_record.at_device); |
| 1413 struct v4l2_buffer qbuf; |
| 1414 struct v4l2_plane qbuf_plane; |
| 1415 memset(&qbuf, 0, sizeof(qbuf)); |
| 1416 memset(&qbuf_plane, 0, sizeof(qbuf_plane)); |
| 1417 qbuf.index = buffer; |
| 1418 qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
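| // Carry the bitstream input_id in the timestamp field so the decoded output can
| // be matched back to its bitstream buffer downstream.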
| 1419 qbuf.timestamp.tv_sec = input_record.input_id; |
| 1420 qbuf.memory = V4L2_MEMORY_MMAP; |
| 1421 qbuf.m.planes = &qbuf_plane; |
| 1422 qbuf.m.planes[0].bytesused = input_record.bytes_used; |
| 1423 qbuf.length = 1; |
| 1424 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_QBUF, &qbuf); |
| 1425 mfc_input_ready_queue_.pop_back(); |
| 1426 input_record.at_device = true; |
| 1427 mfc_input_buffer_queued_count_++; |
| 1428 DVLOG(3) << "EnqueueMfcInputRecord(): enqueued input_id=" |
| 1429 << input_record.input_id; |
| 1430 return true; |
| 1431 } |
| 1432 |
| 1433 bool ExynosVideoDecodeAccelerator::EnqueueMfcOutputRecord() { |
| 1434 DVLOG(3) << "EnqueueMfcOutputRecord()"; |
| 1435 DCHECK(!mfc_free_output_buffers_.empty()); |
| 1436 |
| 1437 // Enqueue a MFC output (VIDEO_CAPTURE) buffer. |
| 1438 const int buffer = mfc_free_output_buffers_.back(); |
| 1439 MfcOutputRecord& output_record = mfc_output_buffer_map_[buffer]; |
| 1440 DCHECK(!output_record.at_device); |
| 1441 DCHECK_EQ(output_record.input_id, -1); |
| 1442 struct v4l2_buffer qbuf; |
| 1443 struct v4l2_plane qbuf_planes[2]; |
| 1444 memset(&qbuf, 0, sizeof(qbuf)); |
| 1445 memset(qbuf_planes, 0, sizeof(qbuf_planes)); |
| 1446 qbuf.index = buffer; |
| 1447 qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 1448 qbuf.memory = V4L2_MEMORY_MMAP; |
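| // Two planes per MFC CAPTURE buffer (luma plus interleaved chroma; see the
| // NV12MT_16X16 note in CreateGscInputBuffers()).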
| 1449 qbuf.m.planes = qbuf_planes; |
| 1450 qbuf.length = 2; |
| 1451 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_QBUF, &qbuf); |
| 1452 mfc_free_output_buffers_.pop_back(); |
| 1453 output_record.at_device = true; |
| 1454 mfc_output_buffer_queued_count_++; |
| 1455 return true; |
| 1456 } |
| 1457 |
| 1458 bool ExynosVideoDecodeAccelerator::EnqueueGscInputRecord() { |
| 1459 DVLOG(3) << "EnqueueGscInputRecord()"; |
| 1460 DCHECK(!gsc_free_input_buffers_.empty()); |
| 1461 |
| 1462 // Enqueue a GSC input (VIDEO_OUTPUT) buffer for a complete MFC output |
| 1463 // (VIDEO_CAPTURE) buffer. |
| 1464 const int mfc_buffer = mfc_output_gsc_input_queue_.front(); |
| 1465 const int gsc_buffer = gsc_free_input_buffers_.back(); |
| 1466 MfcOutputRecord& output_record = mfc_output_buffer_map_[mfc_buffer]; |
| 1467 DCHECK(!output_record.at_device); |
| 1468 GscInputRecord& input_record = gsc_input_buffer_map_[gsc_buffer]; |
| 1469 DCHECK(!input_record.at_device); |
| 1470 DCHECK_EQ(input_record.mfc_output, -1); |
| 1471 struct v4l2_buffer qbuf; |
| 1472 struct v4l2_plane qbuf_planes[2]; |
| 1473 memset(&qbuf, 0, sizeof(qbuf)); |
| 1474 memset(qbuf_planes, 0, sizeof(qbuf_planes)); |
| 1475 qbuf.index = gsc_buffer; |
| 1476 qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
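| // Propagate the originating input_id via the timestamp field, and hand the
| // mmap()ed MFC output planes to the GSC as USERPTR memory.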
| 1477 qbuf.timestamp.tv_sec = output_record.input_id; |
| 1478 qbuf.memory = V4L2_MEMORY_USERPTR; |
| 1479 qbuf.m.planes = qbuf_planes; |
| 1480 qbuf.m.planes[0].bytesused = output_record.bytes_used[0]; |
| 1481 qbuf.m.planes[0].length = mfc_output_buffer_size_[0]; |
| 1482 qbuf.m.planes[0].m.userptr = (unsigned long)output_record.address[0]; |
| 1483 qbuf.m.planes[1].bytesused = output_record.bytes_used[1]; |
| 1484 qbuf.m.planes[1].length = mfc_output_buffer_size_[1]; |
| 1485 qbuf.m.planes[1].m.userptr = (unsigned long)output_record.address[1]; |
| 1486 qbuf.length = 2; |
| 1487 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_QBUF, &qbuf); |
| 1488 mfc_output_gsc_input_queue_.pop_front(); |
| 1489 gsc_free_input_buffers_.pop_back(); |
| 1490 input_record.at_device = true; |
| 1491 input_record.mfc_output = mfc_buffer; |
| 1492 output_record.bytes_used[0] = 0; |
| 1493 output_record.bytes_used[1] = 0; |
| 1494 gsc_input_buffer_queued_count_++; |
| 1495 DVLOG(3) << "EnqueueGscInputRecord(): enqueued input_id=" |
| 1496 << output_record.input_id; |
| 1497 return true; |
| 1498 } |
| 1499 |
| 1500 bool ExynosVideoDecodeAccelerator::EnqueueGscOutputRecord() { |
| 1501 DVLOG(3) << "EnqueueGscOutputRecord()"; |
| 1502 DCHECK(!gsc_free_output_buffers_.empty()); |
| 1503 |
| 1504 // Enqueue a GSC output (VIDEO_CAPTURE) buffer. |
| 1505 const int buffer = gsc_free_output_buffers_.front(); |
| 1506 GscOutputRecord& output_record = gsc_output_buffer_map_[buffer]; |
| 1507 DCHECK(!output_record.at_device); |
| 1508 DCHECK(!output_record.at_client); |
| 1509 if (output_record.egl_sync != EGL_NO_SYNC_KHR) { |
| 1510 TRACE_EVENT0( |
| 1511 "Video Decoder", |
| 1512 "EVDA::EnqueueGscOutputRecord: eglClientWaitSyncKHR"); |
| 1513 // If we have to wait for completion, wait. Note that |
| 1514 // gsc_free_output_buffers_ is a FIFO queue, so we always wait on the |
| 1515 // buffer that has been in the queue the longest. |
| 1516 egl_client_wait_sync_khr(egl_display_, output_record.egl_sync, 0, |
| 1517 EGL_FOREVER_KHR); |
| 1518 egl_destroy_sync_khr(egl_display_, output_record.egl_sync); |
| 1519 output_record.egl_sync = EGL_NO_SYNC_KHR; |
| 1520 } |
| 1521 struct v4l2_buffer qbuf; |
| 1522 struct v4l2_plane qbuf_plane; |
| 1523 memset(&qbuf, 0, sizeof(qbuf)); |
| 1524 memset(&qbuf_plane, 0, sizeof(qbuf_plane)); |
| 1525 qbuf.index = buffer; |
| 1526 qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 1527 qbuf.memory = V4L2_MEMORY_DMABUF; |
| 1528 qbuf.m.planes = &qbuf_plane; |
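| // Queue the dmabuf fd associated with this picture buffer as the GSC's output
| // target.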
| 1529 qbuf.m.planes[0].m.fd = output_record.fd; |
| 1530 qbuf.length = 1; |
| 1531 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_QBUF, &qbuf); |
| 1532 gsc_free_output_buffers_.pop_front(); |
| 1533 output_record.at_device = true; |
| 1534 gsc_output_buffer_queued_count_++; |
| 1535 return true; |
| 1536 } |
| 1537 |
| 1538 void ExynosVideoDecodeAccelerator::ReusePictureBufferTask( |
| 1539 int32 picture_buffer_id, scoped_ptr<EGLSyncKHRRef> egl_sync_ref) { |
| 1540 DVLOG(3) << "ReusePictureBufferTask(): picture_buffer_id=" |
| 1541 << picture_buffer_id; |
| 1542 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current()); |
| 1543 TRACE_EVENT0("Video Decoder", "EVDA::ReusePictureBufferTask"); |
| 1544 |
| 1545 // We run ReusePictureBufferTask even if we're in kResetting. |
| 1546 if (decoder_state_ == kError) { |
| 1547 DVLOG(2) << "ReusePictureBufferTask(): early out: kError state"; |
| 1548 return; |
| 1549 } |
| 1550 |
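| // Look up the GSC output record matching this picture_buffer_id.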
| 1551 size_t index; |
| 1552 for (index = 0; index < gsc_output_buffer_map_.size(); ++index) |
| 1553 if (gsc_output_buffer_map_[index].picture_id == picture_buffer_id) |
| 1554 break; |
| 1555 |
| 1556 if (index >= gsc_output_buffer_map_.size()) { |
| 1557 DLOG(ERROR) << "ReusePictureBufferTask(): picture_buffer_id not found"; |
| 1558 NOTIFY_ERROR(INVALID_ARGUMENT); |
| 1559 return; |
| 1560 } |
| 1561 |
| 1562 GscOutputRecord& output_record = gsc_output_buffer_map_[index]; |
| 1563 if (output_record.at_device || !output_record.at_client) { |
| 1564 DLOG(ERROR) << "ReusePictureBufferTask(): picture_buffer_id not reusable"; |
| 1565 NOTIFY_ERROR(INVALID_ARGUMENT); |
| 1566 return; |
| 1567 } |
| 1568 |
| 1569 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR); |
| 1570 output_record.at_client = false; |
| 1571 output_record.egl_sync = egl_sync_ref->egl_sync; |
| 1572 gsc_free_output_buffers_.push_back(index); |
| 1573 decoder_frames_at_client_--; |
| 1574 // Take ownership of the EGLSync. |
| 1575 egl_sync_ref->egl_sync = EGL_NO_SYNC_KHR; |
| 1576 // We got a buffer back, so kick the GSC. |
| 1577 EnqueueGsc(); |
| 1578 } |
| 1579 |
| 1580 void ExynosVideoDecodeAccelerator::FlushTask() { |
| 1581 DVLOG(3) << "FlushTask()"; |
| 1582 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current()); |
| 1583 TRACE_EVENT0("Video Decoder", "EVDA::FlushTask"); |
| 1584 |
| 1585 // Flush outstanding buffers. |
| 1586 if (decoder_state_ == kInitialized || decoder_state_ == kAfterReset) { |
| 1587 // There's nothing in the pipe, so return done immediately. |
| 1588 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind( |
| 1589 &Client::NotifyFlushDone, client_)); |
| 1590 return; |
| 1591 } else if (decoder_state_ == kError) { |
| 1592 DVLOG(2) << "FlushTask(): early out: kError state"; |
| 1593 return; |
| 1594 } |
| 1595 |
| 1596 // We don't support stacked flushing. |
| 1597 DCHECK(!decoder_flushing_); |
| 1598 |
| 1599 // Queue up an empty buffer -- this triggers the flush. |
| 1600 decoder_input_queue_.push_back(linked_ptr<BitstreamBufferRef>( |
| 1601 new BitstreamBufferRef(client_, child_message_loop_proxy_, NULL, 0, |
| 1602 kFlushBufferId))); |
| 1603 decoder_flushing_ = true; |
| 1604 |
| 1605 ScheduleDecodeBufferTaskIfNeeded(); |
| 1606 } |
| 1607 |
| 1608 void ExynosVideoDecodeAccelerator::NotifyFlushDoneIfNeeded() { |
| 1609 if (!decoder_flushing_) |
| 1610 return; |
| 1611 |
| 1612 // Pipeline is empty when: |
| 1613 // * Decoder input queue is empty of non-delayed buffers. |
| 1614 // * There is no currently filling input buffer. |
| 1615 // * MFC input holding queue is empty. |
| 1616 // * All MFC input (VIDEO_OUTPUT) buffers are returned. |
| 1617 // * MFC -> GSC holding queue is empty. |
| 1618 // * All GSC input (VIDEO_OUTPUT) buffers are returned.
| // * All GSC output (VIDEO_CAPTURE) buffers are returned.
| 1619 if (!decoder_input_queue_.empty()) { |
| 1620 if (decoder_input_queue_.front()->input_id != |
| 1621 decoder_delay_bitstream_buffer_id_) |
| 1622 return; |
| 1623 } |
| 1624 if (decoder_current_input_buffer_ != -1) |
| 1625 return; |
| 1626 if ((mfc_input_ready_queue_.size() + |
| 1627 mfc_input_buffer_queued_count_ + mfc_output_gsc_input_queue_.size() + |
| 1628 gsc_input_buffer_queued_count_ + gsc_output_buffer_queued_count_) != 0)
| 1629 return; |
| 1630 |
| 1631 decoder_delay_bitstream_buffer_id_ = -1; |
| 1632 decoder_flushing_ = false; |
| 1633 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind( |
| 1634 &Client::NotifyFlushDone, client_)); |
| 1635 |
| 1636 // While we were flushing, we early-outed DecodeBufferTask()s. |
| 1637 ScheduleDecodeBufferTaskIfNeeded(); |
| 1638 } |
| 1639 |
| 1640 void ExynosVideoDecodeAccelerator::ResetTask() { |
| 1641 DVLOG(3) << "ResetTask()"; |
| 1642 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current()); |
| 1643 TRACE_EVENT0("Video Decoder", "EVDA::ResetTask"); |
| 1644 |
| 1645 if (decoder_state_ == kError) { |
| 1646 DVLOG(2) << "ResetTask(): early out: kError state"; |
| 1647 return; |
| 1648 } |
| 1649 |
| 1650 // We stop streaming, but we _don't_ destroy our buffers. |
| 1651 if (!StopDevicePoll()) |
| 1652 return; |
| 1653 |
| 1654 decoder_current_bitstream_buffer_.reset(); |
| 1655 decoder_input_queue_.clear(); |
| 1656 |
| 1657 decoder_current_input_buffer_ = -1; |
| 1658 |
| 1659 // If we were flushing, we'll never return any more BitstreamBuffers or |
| 1660 // PictureBuffers; they have all been dropped and returned by now. |
| 1661 NotifyFlushDoneIfNeeded(); |
| 1662 |
| 1663 // Mark that we're resetting, then enqueue a ResetDoneTask(). All intervening |
| 1664 // jobs will early-out in the kResetting state. |
| 1665 decoder_state_ = kResetting; |
| 1666 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind( |
| 1667 &ExynosVideoDecodeAccelerator::ResetDoneTask, base::Unretained(this))); |
| 1668 } |
| 1669 |
| 1670 void ExynosVideoDecodeAccelerator::ResetDoneTask() { |
| 1671 DVLOG(3) << "ResetDoneTask()"; |
| 1672 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current()); |
| 1673 TRACE_EVENT0("Video Decoder", "EVDA::ResetDoneTask"); |
| 1674 |
| 1675 if (decoder_state_ == kError) { |
| 1676 DVLOG(2) << "ResetDoneTask(): early out: kError state"; |
| 1677 return; |
| 1678 } |
| 1679 |
| 1680 // Reset format-specific bits. |
| 1681 if (video_profile_ >= media::H264PROFILE_MIN && |
| 1682 video_profile_ <= media::H264PROFILE_MAX) { |
| 1683 decoder_h264_parser_.reset(new content::H264Parser()); |
| 1684 } |
| 1685 |
| 1686 // Jobs drained, we're finished resetting. |
| 1687 DCHECK_EQ(decoder_state_, kResetting); |
| 1688 decoder_state_ = kAfterReset; |
| 1689 decoder_delay_bitstream_buffer_id_ = -1; |
| 1690 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind( |
| 1691 &Client::NotifyResetDone, client_)); |
| 1692 |
| 1693 // While we were resetting, we early-outed DecodeBufferTask()s. |
| 1694 ScheduleDecodeBufferTaskIfNeeded(); |
| 1695 } |
| 1696 |
| 1697 void ExynosVideoDecodeAccelerator::DestroyTask() { |
| 1698 DVLOG(3) << "DestroyTask()"; |
| 1699 TRACE_EVENT0("Video Decoder", "EVDA::DestroyTask"); |
| 1700 |
| 1701 // DestroyTask() should run regardless of decoder_state_. |
| 1702 |
| 1703 // Stop streaming and the device_poll_thread_. |
| 1704 StopDevicePoll(); |
| 1705 |
| 1706 decoder_current_bitstream_buffer_.reset(); |
| 1707 decoder_current_input_buffer_ = -1; |
| 1708 decoder_decode_buffer_tasks_scheduled_ = 0; |
| 1709 decoder_frames_at_client_ = 0; |
| 1710 decoder_input_queue_.clear(); |
| 1711 decoder_flushing_ = false; |
| 1712 |
| 1713 // Set our state to kError. Just in case. |
| 1714 decoder_state_ = kError; |
| 1715 } |
| 1716 |
| 1717 bool ExynosVideoDecodeAccelerator::StartDevicePoll() { |
| 1718 DVLOG(3) << "StartDevicePoll()"; |
| 1719 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current()); |
| 1720 DCHECK(!device_poll_thread_.IsRunning()); |
| 1721 |
| 1722 // Start up the device poll thread and schedule its first DevicePollTask(). |
| 1723 if (!device_poll_thread_.Start()) { |
| 1724 DLOG(ERROR) << "StartDevicePoll(): Device thread failed to start"; |
| 1725 NOTIFY_ERROR(PLATFORM_FAILURE); |
| 1726 return false; |
| 1727 } |
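| // The first poll passes poll_fds == 0, so DevicePollTask() initially waits only
| // on device_poll_interrupt_fd_ (no device fds in its poll() set).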
| 1728 device_poll_thread_.message_loop()->PostTask(FROM_HERE, base::Bind( |
| 1729 &ExynosVideoDecodeAccelerator::DevicePollTask, |
| 1730 base::Unretained(this), |
| 1731 0)); |
| 1732 |
| 1733 return true; |
| 1734 } |
| 1735 |
| 1736 bool ExynosVideoDecodeAccelerator::StopDevicePoll() { |
| 1737 DVLOG(3) << "StopDevicePoll()"; |
| 1738 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current()); |
| 1739 |
| 1740 // Signal the DevicePollTask() to stop, and stop the device poll thread. |
| 1741 if (!SetDevicePollInterrupt()) |
| 1742 return false; |
| 1743 device_poll_thread_.Stop(); |
| 1744 // Clear the interrupt now, to be sure. |
| 1745 if (!ClearDevicePollInterrupt()) |
| 1746 return false; |
| 1747 |
| 1748 // Stop streaming. |
| 1749 if (mfc_input_streamon_) { |
| 1750 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 1751 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_STREAMOFF, &type); |
| 1752 } |
| 1753 mfc_input_streamon_ = false; |
| 1754 if (mfc_output_streamon_) { |
| 1755 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 1756 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_STREAMOFF, &type); |
| 1757 } |
| 1758 mfc_output_streamon_ = false; |
| 1759 if (gsc_input_streamon_) { |
| 1760 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 1761 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_STREAMOFF, &type); |
| 1762 } |
| 1763 gsc_input_streamon_ = false; |
| 1764 if (gsc_output_streamon_) { |
| 1765 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 1766 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_STREAMOFF, &type); |
| 1767 } |
| 1768 gsc_output_streamon_ = false; |
| 1769 |
| 1770 // Reset all our accounting info. |
| 1771 mfc_input_ready_queue_.clear(); |
| 1772 mfc_free_input_buffers_.clear(); |
| 1773 DCHECK_EQ(mfc_input_buffer_count_, |
| 1774 static_cast<int>(mfc_input_buffer_map_.size())); |
| 1775 for (size_t i = 0; i < mfc_input_buffer_map_.size(); ++i) { |
| 1776 mfc_free_input_buffers_.push_back(i); |
| 1777 mfc_input_buffer_map_[i].at_device = false; |
| 1778 mfc_input_buffer_map_[i].bytes_used = 0; |
| 1779 mfc_input_buffer_map_[i].input_id = -1; |
| 1780 } |
| 1781 mfc_input_buffer_queued_count_ = 0; |
| 1782 mfc_free_output_buffers_.clear(); |
| 1783 DCHECK_EQ(mfc_output_buffer_count_, |
| 1784 static_cast<int>(mfc_output_buffer_map_.size())); |
| 1785 for (size_t i = 0; i < mfc_output_buffer_map_.size(); ++i) { |
| 1786 mfc_free_output_buffers_.push_back(i); |
| 1787 mfc_output_buffer_map_[i].at_device = false; |
| 1788 mfc_output_buffer_map_[i].input_id = -1; |
| 1789 } |
| 1790 mfc_output_buffer_queued_count_ = 0; |
| 1791 mfc_output_gsc_input_queue_.clear(); |
| 1792 gsc_free_input_buffers_.clear(); |
| 1793 DCHECK_EQ(gsc_input_buffer_count_, |
| 1794 static_cast<int>(gsc_input_buffer_map_.size())); |
| 1795 for (size_t i = 0; i < gsc_input_buffer_map_.size(); ++i) { |
| 1796 gsc_free_input_buffers_.push_back(i); |
| 1797 gsc_input_buffer_map_[i].at_device = false; |
| 1798 gsc_input_buffer_map_[i].mfc_output = -1; |
| 1799 } |
| 1800 gsc_input_buffer_queued_count_ = 0; |
| 1801 gsc_free_output_buffers_.clear(); |
| 1802 DCHECK_EQ(gsc_output_buffer_count_, |
| 1803 static_cast<int>(gsc_output_buffer_map_.size())); |
| 1804 for (size_t i = 0; i < gsc_output_buffer_map_.size(); ++i) { |
| 1805 // Only mark as free those buffers that aren't being held by the client.
| 1806 if (!gsc_output_buffer_map_[i].at_client) { |
| 1807 gsc_free_output_buffers_.push_back(i); |
| 1808 gsc_output_buffer_map_[i].at_device = false; |
| 1809 } |
| 1810 } |
| 1811 gsc_output_buffer_queued_count_ = 0; |
| 1812 |
| 1813 DVLOG(3) << "StopDevicePoll(): device poll stopped"; |
| 1814 return true; |
| 1815 } |
| 1816 |
| 1817 bool ExynosVideoDecodeAccelerator::SetDevicePollInterrupt() { |
| 1818 DVLOG(3) << "SetDevicePollInterrupt()"; |
| 1819 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current()); |
| 1820 |
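| // Writing a nonzero 64-bit value to the interrupt eventfd makes it readable,
| // which wakes the poll() in DevicePollTask().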
| 1821 const uint64 buf = 1; |
| 1822 if (HANDLE_EINTR(write(device_poll_interrupt_fd_, &buf, sizeof(buf))) == -1) { |
| 1823 DPLOG(ERROR) << "SetDevicePollInterrupt(): write() failed"; |
| 1824 NOTIFY_ERROR(PLATFORM_FAILURE); |
| 1825 return false; |
| 1826 } |
| 1827 return true; |
| 1828 } |
| 1829 |
| 1830 bool ExynosVideoDecodeAccelerator::ClearDevicePollInterrupt() { |
| 1831 DVLOG(3) << "ClearDevicePollInterrupt()"; |
| 1832 DCHECK_EQ(decoder_thread_.message_loop(), MessageLoop::current()); |
| 1833 |
| 1834 uint64 buf; |
| 1835 if (HANDLE_EINTR(read(device_poll_interrupt_fd_, &buf, sizeof(buf))) == -1) { |
| 1836 if (errno == EAGAIN) { |
| 1837 // No interrupt flag set, and we're reading nonblocking. Not an error. |
| 1838 return true; |
| 1839 } else { |
| 1840 DPLOG(ERROR) << "ClearDevicePollInterrupt(): read() failed"; |
| 1841 NOTIFY_ERROR(PLATFORM_FAILURE); |
| 1842 return false; |
| 1843 } |
| 1844 } |
| 1845 return true; |
| 1846 } |
| 1847 |
| 1848 void ExynosVideoDecodeAccelerator::DevicePollTask(unsigned int poll_fds) { |
| 1849 DVLOG(3) << "DevicePollTask()"; |
| 1850 DCHECK_EQ(device_poll_thread_.message_loop(), MessageLoop::current()); |
| 1851 TRACE_EVENT0("Video Decoder", "EVDA::DevicePollTask"); |
| 1852 |
| 1853 // This routine just polls the set of device fds, and schedules a |
| 1854 // ServiceDeviceTask() on decoder_thread_ when processing needs to occur. |
| 1855 // Other threads may notify this task to return early by writing to |
| 1856 // device_poll_interrupt_fd_. |
| 1857 struct pollfd pollfds[3]; |
| 1858 nfds_t nfds; |
| 1859 |
| 1860 // Add device_poll_interrupt_fd_.
| 1861 pollfds[0].fd = device_poll_interrupt_fd_; |
| 1862 pollfds[0].events = POLLIN | POLLERR; |
| 1863 nfds = 1; |
| 1864 |
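| // Add MFC fd, if we should poll on it.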
| 1865 if (poll_fds & kPollMfc) { |
| 1866 DVLOG(3) << "DevicePollTask(): adding MFC to poll() set"; |
| 1867 pollfds[nfds].fd = mfc_fd_; |
| 1868 pollfds[nfds].events = POLLIN | POLLOUT | POLLERR; |
| 1869 nfds++; |
| 1870 } |
| 1871 // Add GSC fd, if we should poll on it. |
| 1872 // GSC has to wait until both input and output buffers are queued. |
| 1873 if (poll_fds & kPollGsc) { |
| 1874 DVLOG(3) << "DevicePollTask(): adding GSC to poll() set"; |
| 1875 pollfds[nfds].fd = gsc_fd_; |
| 1876 pollfds[nfds].events = POLLIN | POLLOUT | POLLERR; |
| 1877 nfds++; |
| 1878 } |
| 1879 |
| 1880 // Poll it! |
| 1881 if (HANDLE_EINTR(poll(pollfds, nfds, -1)) == -1) { |
| 1882 DPLOG(ERROR) << "DevicePollTask(): poll() failed"; |
| 1883 NOTIFY_ERROR(PLATFORM_FAILURE); |
| 1884 return; |
| 1885 } |
| 1886 |
| 1887 // All processing should happen on ServiceDeviceTask(), since we shouldn't |
| 1888 // touch decoder state from this thread. |
| 1889 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind( |
| 1890 &ExynosVideoDecodeAccelerator::ServiceDeviceTask, |
| 1891 base::Unretained(this))); |
| 1892 } |
| 1893 |
| 1894 void ExynosVideoDecodeAccelerator::NotifyError(Error error) { |
| 1895 DVLOG(2) << "NotifyError()"; |
| 1896 |
| 1897 if (!child_message_loop_proxy_->BelongsToCurrentThread()) { |
| 1898 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind( |
| 1899 &ExynosVideoDecodeAccelerator::NotifyError, weak_this_, error)); |
| 1900 return; |
| 1901 } |
| 1902 |
| 1903 if (client_) { |
| 1904 client_->NotifyError(error); |
| 1905 client_ptr_factory_.InvalidateWeakPtrs(); |
| 1906 } |
| 1907 } |
| 1908 |
| 1909 void ExynosVideoDecodeAccelerator::SetDecoderState(State state) { |
| 1910 DVLOG(3) << "SetDecoderState(): state=%d" << state; |
| 1911 |
| 1912 // We can touch decoder_state_ only if this is the decoder thread or the |
| 1913 // decoder thread isn't running. |
| 1914 if (decoder_thread_.message_loop() != NULL && |
| 1915 decoder_thread_.message_loop() != MessageLoop::current()) { |
| 1916 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind( |
| 1917 &ExynosVideoDecodeAccelerator::SetDecoderState, |
| 1918 base::Unretained(this), state)); |
| 1919 } else { |
| 1920 decoder_state_ = state; |
| 1921 } |
| 1922 } |
| 1923 |
| 1924 bool ExynosVideoDecodeAccelerator::CreateMfcInputBuffers() { |
| 1925 DVLOG(3) << "CreateMfcInputBuffers()"; |
| 1926 // We always run this as we prepare to initialize. |
| 1927 DCHECK_EQ(decoder_state_, kUninitialized); |
| 1928 DCHECK(!mfc_input_streamon_); |
| 1929 DCHECK_EQ(mfc_input_buffer_count_, 0); |
| 1930 |
| 1931 __u32 pixelformat = 0; |
| 1932 if (video_profile_ >= media::H264PROFILE_MIN && |
| 1933 video_profile_ <= media::H264PROFILE_MAX) { |
| 1934 pixelformat = V4L2_PIX_FMT_H264; |
| 1935 } else if (video_profile_ >= media::VP8PROFILE_MIN && |
| 1936 video_profile_ <= media::VP8PROFILE_MAX) { |
| 1937 pixelformat = V4L2_PIX_FMT_VP8; |
| 1938 } else { |
| 1939 NOTREACHED(); |
| 1940 } |
| 1941 |
| 1942 struct v4l2_format format; |
| 1943 memset(&format, 0, sizeof(format)); |
| 1944 format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 1945 format.fmt.pix_mp.pixelformat = pixelformat; |
| 1946 format.fmt.pix_mp.plane_fmt[0].sizeimage = kMfcInputBufferMaxSize; |
| 1947 format.fmt.pix_mp.num_planes = 1; |
| 1948 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_S_FMT, &format); |
| 1949 |
| 1950 struct v4l2_requestbuffers reqbufs; |
| 1951 memset(&reqbufs, 0, sizeof(reqbufs)); |
| 1952 reqbufs.count = kMfcInputBufferCount; |
| 1953 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 1954 reqbufs.memory = V4L2_MEMORY_MMAP; |
| 1955 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_REQBUFS, &reqbufs); |
| 1956 mfc_input_buffer_count_ = reqbufs.count; |
| 1957 mfc_input_buffer_map_.resize(mfc_input_buffer_count_); |
| 1958 for (int i = 0; i < mfc_input_buffer_count_; ++i) { |
| 1959 mfc_free_input_buffers_.push_back(i); |
| 1960 |
| 1961 // Query for the MEMORY_MMAP pointer. |
| 1962 struct v4l2_plane planes[1]; |
| 1963 struct v4l2_buffer buffer; |
| 1964 memset(&buffer, 0, sizeof(buffer)); |
| 1965 memset(planes, 0, sizeof(planes)); |
| 1966 buffer.index = i; |
| 1967 buffer.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 1968 buffer.memory = V4L2_MEMORY_MMAP; |
| 1969 buffer.m.planes = planes; |
| 1970 buffer.length = 1; |
| 1971 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_QUERYBUF, &buffer); |
| 1972 void* address = mmap(NULL, buffer.m.planes[0].length, |
| 1973 PROT_READ | PROT_WRITE, MAP_SHARED, mfc_fd_, |
| 1974 buffer.m.planes[0].m.mem_offset); |
| 1975 if (address == MAP_FAILED) { |
| 1976 DPLOG(ERROR) << "CreateMfcInputBuffers(): mmap() failed"; |
| 1977 return false; |
| 1978 } |
| 1979 mfc_input_buffer_map_[i].address = address; |
| 1980 mfc_input_buffer_map_[i].length = buffer.m.planes[0].length; |
| 1981 } |
| 1982 |
| 1983 return true; |
| 1984 } |
| 1985 |
| 1986 bool ExynosVideoDecodeAccelerator::CreateMfcOutputBuffers() { |
| 1987 DVLOG(3) << "CreateMfcOutputBuffers()"; |
| 1988 DCHECK_EQ(decoder_state_, kInitialized); |
| 1989 DCHECK(!mfc_output_streamon_); |
| 1990 DCHECK_EQ(mfc_output_buffer_count_, 0); |
| 1991 |
| 1992 // Number of MFC output buffers we need. |
| 1993 struct v4l2_control ctrl; |
| 1994 memset(&ctrl, 0, sizeof(ctrl)); |
| 1995 ctrl.id = V4L2_CID_MIN_BUFFERS_FOR_CAPTURE; |
| 1996 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_G_CTRL, &ctrl); |
| 1997 |
| 1998 // Output format setup in Initialize(). |
| 1999 |
| 2000 // Allocate the output buffers. |
| 2001 struct v4l2_requestbuffers reqbufs; |
| 2002 memset(&reqbufs, 0, sizeof(reqbufs)); |
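| // Request the driver-reported minimum (V4L2_CID_MIN_BUFFERS_FOR_CAPTURE) plus
| // kMfcOutputBufferExtraCount spare buffers.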
| 2003 reqbufs.count = ctrl.value + kMfcOutputBufferExtraCount; |
| 2004 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 2005 reqbufs.memory = V4L2_MEMORY_MMAP; |
| 2006 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_REQBUFS, &reqbufs); |
| 2007 |
| 2008 // Fill our free-buffers list, and mmap() the buffers so the GSC can read
| // them later as USERPTR input.
| 2009 mfc_output_buffer_count_ = reqbufs.count; |
| 2010 mfc_output_buffer_map_.resize(mfc_output_buffer_count_); |
| 2011 for (int i = 0; i < mfc_output_buffer_count_; ++i) { |
| 2012 mfc_free_output_buffers_.push_back(i); |
| 2013 |
| 2014 // Query for the MEMORY_MMAP pointer. |
| 2015 struct v4l2_plane planes[2]; |
| 2016 struct v4l2_buffer buffer; |
| 2017 memset(&buffer, 0, sizeof(buffer)); |
| 2018 memset(planes, 0, sizeof(planes)); |
| 2019 buffer.index = i; |
| 2020 buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 2021 buffer.memory = V4L2_MEMORY_MMAP; |
| 2022 buffer.m.planes = planes; |
| 2023 buffer.length = 2; |
| 2024 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_QUERYBUF, &buffer); |
| 2025 |
| 2026 // Get their user memory for GSC input. |
| 2027 for (int j = 0; j < 2; ++j) { |
| 2028 void* address = mmap(NULL, buffer.m.planes[j].length, |
| 2029 PROT_READ | PROT_WRITE, MAP_SHARED, mfc_fd_, |
| 2030 buffer.m.planes[j].m.mem_offset); |
| 2031 if (address == MAP_FAILED) { |
| 2032 DPLOG(ERROR) << "CreateMfcInputBuffers(): mmap() failed"; |
| 2033 return false; |
| 2034 } |
| 2035 mfc_output_buffer_map_[i].address[j] = address; |
| 2036 mfc_output_buffer_map_[i].length[j] = buffer.m.planes[j].length; |
| 2037 } |
| 2038 } |
| 2039 |
| 2040 return true; |
| 2041 } |
| 2042 |
| 2043 bool ExynosVideoDecodeAccelerator::CreateGscInputBuffers() { |
| 2044 DVLOG(3) << "CreateGscInputBuffers()"; |
| 2045 DCHECK_EQ(decoder_state_, kInitialized); |
| 2046 DCHECK(!gsc_input_streamon_); |
| 2047 DCHECK_EQ(gsc_input_buffer_count_, 0); |
| 2048 |
| 2049 struct v4l2_format format; |
| 2050 memset(&format, 0, sizeof(format)); |
| 2051 format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 2052 format.fmt.pix_mp.width = frame_buffer_size_.width(); |
| 2053 format.fmt.pix_mp.height = frame_buffer_size_.height(); |
| 2054 format.fmt.pix_mp.pixelformat = mfc_output_buffer_pixelformat_; |
| 2055 format.fmt.pix_mp.plane_fmt[0].sizeimage = mfc_output_buffer_size_[0]; |
| 2056 format.fmt.pix_mp.plane_fmt[1].sizeimage = mfc_output_buffer_size_[1]; |
| 2057 // NV12MT_16X16 is a tiled format for which bytesperline doesn't make too much |
| 2058 // sense. Convention seems to be to assume 8bpp for these tiled formats. |
| 2059 format.fmt.pix_mp.plane_fmt[0].bytesperline = frame_buffer_size_.width(); |
| 2060 format.fmt.pix_mp.plane_fmt[1].bytesperline = frame_buffer_size_.width(); |
| 2061 format.fmt.pix_mp.num_planes = 2; |
| 2062 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_FMT, &format); |
| 2063 |
| 2064 struct v4l2_control control; |
| 2065 memset(&control, 0, sizeof(control)); |
| 2066 control.id = V4L2_CID_ROTATE; |
| 2067 control.value = 0; |
| 2068 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CTRL, &control); |
| 2069 |
| 2070 memset(&control, 0, sizeof(control)); |
| 2071 control.id = V4L2_CID_HFLIP; |
| 2072 control.value = 0; |
| 2073 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CTRL, &control); |
| 2074 |
| 2075 memset(&control, 0, sizeof(control)); |
| 2076 control.id = V4L2_CID_VFLIP; |
| 2077 control.value = 0; |
| 2078 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CTRL, &control); |
| 2079 |
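| // Output should be fully opaque (global alpha = 255).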
| 2080 memset(&control, 0, sizeof(control)); |
| 2081 control.id = V4L2_CID_GLOBAL_ALPHA; |
| 2082 control.value = 255; |
| 2083 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CTRL, &control); |
| 2084 |
| 2085 struct v4l2_requestbuffers reqbufs; |
| 2086 memset(&reqbufs, 0, sizeof(reqbufs)); |
| 2087 reqbufs.count = kGscInputBufferCount; |
| 2088 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 2089 reqbufs.memory = V4L2_MEMORY_USERPTR; |
| 2090 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_REQBUFS, &reqbufs); |
| 2091 |
| 2092 gsc_input_buffer_count_ = reqbufs.count; |
| 2093 gsc_input_buffer_map_.resize(gsc_input_buffer_count_); |
| 2094 for (int i = 0; i < gsc_input_buffer_count_; ++i) { |
| 2095 gsc_free_input_buffers_.push_back(i); |
| 2096 gsc_input_buffer_map_[i].mfc_output = -1; |
| 2097 } |
| 2098 |
| 2099 return true; |
| 2100 } |
| 2101 |
| 2102 bool ExynosVideoDecodeAccelerator::CreateGscOutputBuffers() { |
| 2103 DVLOG(3) << "CreateGscOutputBuffers()"; |
| 2104 DCHECK_EQ(decoder_state_, kInitialized); |
| 2105 DCHECK(!gsc_output_streamon_); |
| 2106 DCHECK_EQ(gsc_output_buffer_count_, 0); |
| 2107 |
| 2108 // GSC outputs into the EGLImages we create from the textures we are |
| 2109 // assigned. Assume RGBA8888 format. |
| 2110 struct v4l2_format format; |
| 2111 memset(&format, 0, sizeof(format)); |
| 2112 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 2113 format.fmt.pix_mp.width = frame_buffer_size_.width(); |
| 2114 format.fmt.pix_mp.height = frame_buffer_size_.height(); |
| 2115 format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_RGB32; |
| 2116 format.fmt.pix_mp.plane_fmt[0].sizeimage = |
| 2117 frame_buffer_size_.width() * frame_buffer_size_.height() * 4; |
| 2118 format.fmt.pix_mp.plane_fmt[0].bytesperline = frame_buffer_size_.width() * 4; |
| 2119 format.fmt.pix_mp.num_planes = 1; |
| 2120 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_FMT, &format); |
| 2121 |
| 2122 struct v4l2_requestbuffers reqbufs; |
| 2123 memset(&reqbufs, 0, sizeof(reqbufs)); |
| 2124 reqbufs.count = kGscOutputBufferCount; |
| 2125 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 2126 reqbufs.memory = V4L2_MEMORY_DMABUF; |
| 2127 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_REQBUFS, &reqbufs); |
| 2128 |
| 2129 // We don't actually fill in the freelist or the map here. That happens once |
| 2130 // we have actual usable buffers, after AssignPictureBuffers().
| 2131 gsc_output_buffer_count_ = reqbufs.count; |
| 2132 gsc_output_buffer_map_.resize(gsc_output_buffer_count_); |
| 2133 |
| 2134 DVLOG(3) << "CreateGscOutputBuffers(): ProvidePictureBuffers(): " |
| 2135 << "buffer_count=" << gsc_output_buffer_count_ |
| 2136 << ", width=" << frame_buffer_size_.width() |
| 2137 << ", height=" << frame_buffer_size_.height(); |
| 2138 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind( |
| 2139 &Client::ProvidePictureBuffers, client_, gsc_output_buffer_count_, |
| 2140 gfx::Size(frame_buffer_size_.width(), frame_buffer_size_.height()), |
| 2141 GL_TEXTURE_2D)); |
| 2142 |
| 2143 return true; |
| 2144 } |
| 2145 |
| 2146 void ExynosVideoDecodeAccelerator::DestroyMfcInputBuffers() { |
| 2147 DVLOG(3) << "DestroyMfcInputBuffers()"; |
| 2148 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
| 2149 DCHECK(!mfc_input_streamon_); |
| 2150 |
| 2151 for (size_t i = 0; i < mfc_input_buffer_map_.size(); ++i) { |
| 2152 if (mfc_input_buffer_map_[i].address != NULL) { |
| 2153 munmap(mfc_input_buffer_map_[i].address, |
| 2154 mfc_input_buffer_map_[i].length); |
| 2155 } |
| 2156 } |
| 2157 |
| 2158 struct v4l2_requestbuffers reqbufs; |
| 2159 memset(&reqbufs, 0, sizeof(reqbufs)); |
| 2160 reqbufs.count = 0; |
| 2161 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 2162 reqbufs.memory = V4L2_MEMORY_MMAP; |
| 2163 if (ioctl(mfc_fd_, VIDIOC_REQBUFS, &reqbufs) != 0) |
| 2164 DPLOG(ERROR) << "DestroyMfcInputBuffers(): ioctl() failed: VIDIOC_REQBUFS"; |
| 2165 |
| 2166 mfc_input_buffer_map_.clear(); |
| 2167 mfc_free_input_buffers_.clear(); |
| 2168 mfc_input_buffer_count_ = 0; |
| 2169 } |
| 2170 |
| 2171 void ExynosVideoDecodeAccelerator::DestroyMfcOutputBuffers() { |
| 2172 DVLOG(3) << "DestroyMfcOutputBuffers()"; |
| 2173 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
| 2174 DCHECK(!mfc_output_streamon_); |
| 2175 |
| 2176 for (size_t i = 0; i < mfc_output_buffer_map_.size(); ++i) { |
| 2177 if (mfc_output_buffer_map_[i].address[0] != NULL) |
| 2178 munmap(mfc_output_buffer_map_[i].address[0], |
| 2179 mfc_output_buffer_map_[i].length[0]); |
| 2180 if (mfc_output_buffer_map_[i].address[1] != NULL) |
| 2181 munmap(mfc_output_buffer_map_[i].address[1], |
| 2182 mfc_output_buffer_map_[i].length[1]); |
| 2183 } |
| 2184 |
| 2185 struct v4l2_requestbuffers reqbufs; |
| 2186 memset(&reqbufs, 0, sizeof(reqbufs)); |
| 2187 reqbufs.count = 0; |
| 2188 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 2189 reqbufs.memory = V4L2_MEMORY_MMAP; |
| 2190 if (ioctl(mfc_fd_, VIDIOC_REQBUFS, &reqbufs) != 0) |
| 2191 DPLOG(ERROR) << "DestroyMfcOutputBuffers() ioctl() failed: VIDIOC_REQBUFS"; |
| 2192 |
| 2193 mfc_output_buffer_map_.clear(); |
| 2194 mfc_free_output_buffers_.clear(); |
| 2195 mfc_output_buffer_count_ = 0; |
| 2196 } |
| 2197 |
| 2198 void ExynosVideoDecodeAccelerator::DestroyGscInputBuffers() { |
| 2199 DVLOG(3) << "DestroyGscInputBuffers()"; |
| 2200 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
| 2201 DCHECK(!gsc_input_streamon_); |
| 2202 |
| 2203 struct v4l2_requestbuffers reqbufs; |
| 2204 memset(&reqbufs, 0, sizeof(reqbufs)); |
| 2205 reqbufs.count = 0; |
| 2206 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 2207 reqbufs.memory = V4L2_MEMORY_DMABUF; |
| 2208 if (ioctl(gsc_fd_, VIDIOC_REQBUFS, &reqbufs) != 0) |
| 2209 DPLOG(ERROR) << "DestroyGscInputBuffers(): ioctl() failed: VIDIOC_REQBUFS"; |
| 2210 |
| 2211 gsc_input_buffer_map_.clear(); |
| 2212 gsc_free_input_buffers_.clear(); |
| 2213 gsc_input_buffer_count_ = 0; |
| 2214 } |
| 2215 |
| 2216 void ExynosVideoDecodeAccelerator::DestroyGscOutputBuffers() { |
| 2217 DVLOG(3) << "DestroyGscOutputBuffers()"; |
| 2218 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); |
| 2219 DCHECK(!gsc_output_streamon_); |
| 2220 |
| 2221 if (gsc_output_buffer_map_.size() != 0) { |
| 2222 if (!make_context_current_.Run()) |
| 2223 DLOG(ERROR) << "DestroyGscOutputBuffers(): " |
| 2224 << "could not make context current"; |
| 2225 |
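| // Release each output record's dmabuf fd, EGLImage, and EGLSync, and tell the
| // client to dismiss the corresponding picture buffer.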
| 2226 for (size_t i = 0; i < gsc_output_buffer_map_.size(); ++i) {
| 2227 GscOutputRecord& output_record = gsc_output_buffer_map_[i];
| 2228 if (output_record.fd != -1)
| 2229 HANDLE_EINTR(close(output_record.fd));
| 2230 if (output_record.egl_image != EGL_NO_IMAGE_KHR)
| 2231 egl_destroy_image_khr(egl_display_, output_record.egl_image);
| 2232 if (output_record.egl_sync != EGL_NO_SYNC_KHR)
| 2233 egl_destroy_sync_khr(egl_display_, output_record.egl_sync);
| 2234 if (client_)
| 2235 client_->DismissPictureBuffer(output_record.picture_id);
| 2236 }
| 2239 } |
| 2240 |
| 2241 struct v4l2_requestbuffers reqbufs; |
| 2242 memset(&reqbufs, 0, sizeof(reqbufs)); |
| 2243 reqbufs.count = 0; |
| 2244 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 2245 reqbufs.memory = V4L2_MEMORY_DMABUF; |
| 2246 if (ioctl(gsc_fd_, VIDIOC_REQBUFS, &reqbufs) != 0) |
| 2247 DPLOG(ERROR) << "DestroyGscOutputBuffers(): ioctl() failed: VIDIOC_REQBUFS"; |
| 2248 |
| 2249 gsc_output_buffer_map_.clear(); |
| 2250 gsc_free_output_buffers_.clear(); |
| 2251 gsc_output_buffer_count_ = 0; |
| 2252 } |
| 2253 |
| 2254 } // namespace content |