Chromium Code Reviews

Side by Side Diff: content/common/gpu/media/v4l2_slice_video_decode_accelerator.cc

Issue 833063003: Add accelerated video decoder interface, VP8 and H.264 implementations and hook up to V4L2SVDA. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Created 5 years, 11 months ago
1 // Copyright 2015 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include <fcntl.h>
6 #include <linux/videodev2.h>
7 #include <poll.h>
8 #include <sys/eventfd.h>
9 #include <sys/ioctl.h>
10 #include <sys/mman.h>
11
12 #include "base/bind.h"
13 #include "base/bind_helpers.h"
14 #include "base/callback.h"
15 #include "base/callback_helpers.h"
16 #include "base/command_line.h"
17 #include "base/message_loop/message_loop_proxy.h"
18 #include "base/numerics/safe_conversions.h"
19 #include "base/strings/stringprintf.h"
20 #include "content/common/gpu/media/v4l2_slice_video_decode_accelerator.h"
21 #include "media/base/bind_to_current_loop.h"
22 #include "media/base/media_switches.h"
23 #include "ui/gl/scoped_binders.h"
24
25 #define LOGF(level) LOG(level) << __FUNCTION__ << "(): "
26 #define DVLOGF(level) DVLOG(level) << __FUNCTION__ << "(): "
27
28 #define NOTIFY_ERROR(x) \
29 do { \
30 SetDecoderState(kError); \
31 DLOG(ERROR) << "calling NotifyError(): " << x; \
32 NotifyError(x); \
33 } while (0)
34
35 #define IOCTL_OR_ERROR_RETURN_VALUE(type, arg, value) \
36 do { \
37 if (device_->Ioctl(type, arg) != 0) { \
38 PLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \
39 return value; \
40 } \
41 } while (0)
42
43 #define IOCTL_OR_ERROR_RETURN(type, arg) \
44 IOCTL_OR_ERROR_RETURN_VALUE(type, arg, ((void)0))
45
46 #define IOCTL_OR_ERROR_RETURN_FALSE(type, arg) \
47 IOCTL_OR_ERROR_RETURN_VALUE(type, arg, false)
48
49 #define IOCTL_OR_LOG_ERROR(type, arg) \
50 do { \
51 if (device_->Ioctl(type, arg) != 0) \
52 PLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \
53 } while (0)
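
A minimal usage sketch of the helpers above (illustrative only, not part of this patch; ExampleQueryCaps is a hypothetical method): IOCTL_OR_ERROR_RETURN_FALSE logs via PLOG and returns false from the enclosing function when the ioctl fails, while IOCTL_OR_LOG_ERROR only logs and continues, which is why the latter is used on teardown paths such as DestroyInputBuffers().

// Hypothetical sketch -- not part of this change.
bool V4L2SliceVideoDecodeAccelerator::ExampleQueryCaps() {
  struct v4l2_capability caps;
  memset(&caps, 0, sizeof(caps));
  // Returns false from ExampleQueryCaps() if the ioctl fails.
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYCAP, &caps);

  struct v4l2_requestbuffers reqbufs;
  memset(&reqbufs, 0, sizeof(reqbufs));
  reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  reqbufs.memory = V4L2_MEMORY_MMAP;
  // Logs on failure, but execution continues.
  IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs);
  return true;
}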
54
55 namespace content {
56
57 V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface::V4L2DecodeSurface(
58 int32 bitstream_id,
59 int input_record,
60 int output_record,
61 const ReleaseCB& release_cb)
62 : bitstream_id_(bitstream_id),
63 input_record_(input_record),
64 output_record_(output_record),
65 config_store_(input_record + 1),
66 decoded_(false),
67 release_cb_(release_cb) {
68 }
69
70 V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface::~V4L2DecodeSurface() {
71 DVLOGF(5) << "Releasing output record id=" << output_record_;
72 release_cb_.Run(output_record_);
73 }
74
75 void V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface::SetReferenceSurfaces(
76 const std::vector<scoped_refptr<V4L2DecodeSurface>>& ref_surfaces) {
77 DCHECK(reference_surfaces_.empty());
78 reference_surfaces_ = ref_surfaces;
79 }
80
81 void V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface::SetDecoded() {
82 DCHECK(!decoded_);
83 decoded_ = true;
84
85 // We can now drop references to all reference surfaces for this surface
86 // as we are done with decoding.
87 reference_surfaces_.clear();
88 }
89
90 std::string V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface::ToString()
91 const {
92 std::string out;
93 base::StringAppendF(&out, "Buffer %d -> %d. ", input_record_, output_record_);
94 base::StringAppendF(&out, "Reference surfaces:");
95 for (const auto& ref : reference_surfaces_) {
96 DCHECK_NE(ref->output_record(), output_record_);
97 base::StringAppendF(&out, " %d", ref->output_record());
98 }
99 return out;
100 }
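
Taken together, a V4L2DecodeSurface ties one input (bitstream) buffer to one output buffer for a single decode: reference surfaces keep the outputs this frame predicts from alive until SetDecoded(), and the release callback hands the output record back once the last reference is dropped. (The config store is derived as input_record + 1, apparently because store id 0 is reserved as invalid; cf. the DCHECK_GT(config_store, 0u) in EnqueueInputRecord().) A rough lifecycle sketch, assuming a hypothetical free-list callback:

// Hypothetical sketch -- not part of this change.
void ReturnOutputRecordToFreeList(int output_record);  // e.g. push onto free_output_buffers_

void DecodeOneSurfaceSketch(
    int bitstream_id, int input_record, int output_record,
    const std::vector<scoped_refptr<V4L2DecodeSurface>>& refs) {
  scoped_refptr<V4L2DecodeSurface> surface = new V4L2DecodeSurface(
      bitstream_id, input_record, output_record,
      base::Bind(&ReturnOutputRecordToFreeList));
  surface->SetReferenceSurfaces(refs);  // surfaces this frame predicts from
  // ... queue |surface| to the device; once the device has decoded it:
  surface->SetDecoded();  // drops the references held for decoding
  // When the last scoped_refptr to |surface| goes away, release_cb_ runs and
  // ReturnOutputRecordToFreeList(output_record) recycles the output buffer.
}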
101
102 V4L2SliceVideoDecodeAccelerator::InputRecord::InputRecord()
103 : input_id(-1),
104 address(nullptr),
105 length(0),
106 bytes_used(0),
107 at_device(false) {
108 }
109
110 V4L2SliceVideoDecodeAccelerator::OutputRecord::OutputRecord()
111 : at_device(false),
112 at_client(false),
113 picture_id(-1),
114 egl_image(EGL_NO_IMAGE_KHR),
115 egl_sync(EGL_NO_SYNC_KHR),
116 cleared(false) {
117 }
118
119 struct V4L2SliceVideoDecodeAccelerator::BitstreamBufferRef {
120 BitstreamBufferRef(
121 base::WeakPtr<VideoDecodeAccelerator::Client>& client,
122 const scoped_refptr<base::MessageLoopProxy>& client_message_loop_proxy,
123 base::SharedMemory* shm,
124 size_t size,
125 int32 input_id);
126 ~BitstreamBufferRef();
127 const base::WeakPtr<VideoDecodeAccelerator::Client> client;
128 const scoped_refptr<base::MessageLoopProxy> client_message_loop_proxy;
129 const scoped_ptr<base::SharedMemory> shm;
130 const size_t size;
131 off_t bytes_used;
132 const int32 input_id;
133 };
134
135 V4L2SliceVideoDecodeAccelerator::BitstreamBufferRef::BitstreamBufferRef(
136 base::WeakPtr<VideoDecodeAccelerator::Client>& client,
137 const scoped_refptr<base::MessageLoopProxy>& client_message_loop_proxy,
138 base::SharedMemory* shm,
139 size_t size,
140 int32 input_id)
141 : client(client),
142 client_message_loop_proxy(client_message_loop_proxy),
143 shm(shm),
144 size(size),
145 bytes_used(0),
146 input_id(input_id) {
147 }
148
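// Note: dropping the last reference to a BitstreamBufferRef is what returns
// the bitstream buffer to the client; the destructor below posts
// NotifyEndOfBitstreamBuffer() on the client message loop.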
149 V4L2SliceVideoDecodeAccelerator::BitstreamBufferRef::~BitstreamBufferRef() {
150 if (input_id >= 0) {
151 DVLOGF(5) << "returning input_id: " << input_id;
152 client_message_loop_proxy->PostTask(
153 FROM_HERE,
154 base::Bind(&VideoDecodeAccelerator::Client::NotifyEndOfBitstreamBuffer,
155 client, input_id));
156 }
157 }
158
159 struct V4L2SliceVideoDecodeAccelerator::EGLSyncKHRRef {
160 EGLSyncKHRRef(EGLDisplay egl_display, EGLSyncKHR egl_sync);
161 ~EGLSyncKHRRef();
162 EGLDisplay const egl_display;
163 EGLSyncKHR egl_sync;
164 };
165
166 V4L2SliceVideoDecodeAccelerator::EGLSyncKHRRef::EGLSyncKHRRef(
167 EGLDisplay egl_display,
168 EGLSyncKHR egl_sync)
169 : egl_display(egl_display), egl_sync(egl_sync) {
170 }
171
172 V4L2SliceVideoDecodeAccelerator::EGLSyncKHRRef::~EGLSyncKHRRef() {
173 // We don't check for eglDestroySyncKHR failures, because if we get here
174 // with a valid sync object, something went wrong and we are getting
175 // destroyed anyway.
176 if (egl_sync != EGL_NO_SYNC_KHR)
177 eglDestroySyncKHR(egl_display, egl_sync);
178 }
179
180 struct V4L2SliceVideoDecodeAccelerator::PictureRecord {
181 PictureRecord(bool cleared, const media::Picture& picture);
182 ~PictureRecord();
183 bool cleared; // Whether the texture is cleared and safe to render from.
184 media::Picture picture; // The decoded picture.
185 };
186
187 V4L2SliceVideoDecodeAccelerator::PictureRecord::PictureRecord(
188 bool cleared,
189 const media::Picture& picture)
190 : cleared(cleared), picture(picture) {
191 }
192
193 V4L2SliceVideoDecodeAccelerator::PictureRecord::~PictureRecord() {
194 }
195
196 V4L2SliceVideoDecodeAccelerator::V4L2SliceVideoDecodeAccelerator(
197 const scoped_refptr<V4L2Device>& device,
198 EGLDisplay egl_display,
199 EGLContext egl_context,
200 const base::WeakPtr<Client>& io_client,
201 const base::Callback<bool(void)>& make_context_current,
202 const scoped_refptr<base::MessageLoopProxy>& io_message_loop_proxy)
203 : input_planes_count_(0),
204 output_planes_count_(0),
205 child_message_loop_proxy_(base::MessageLoopProxy::current()),
206 io_message_loop_proxy_(io_message_loop_proxy),
207 io_client_(io_client),
208 device_(device),
209 decoder_thread_("V4L2SliceVideoDecodeAcceleratorThread"),
210 device_poll_thread_("V4L2SliceVideoDecodeAcceleratorDevicePollThread"),
211 input_streamon_(false),
212 input_buffer_queued_count_(0),
213 output_streamon_(false),
214 output_buffer_queued_count_(0),
215 video_profile_(media::VIDEO_CODEC_PROFILE_UNKNOWN),
216 output_format_fourcc_(0),
217 output_dpb_size_(0),
218 state_(kUninitialized),
219 decoder_flushing_(false),
220 decoder_resetting_(false),
221 surface_set_change_pending_(false),
222 picture_clearing_count_(0),
223 pictures_assigned_(false, false),
224 make_context_current_(make_context_current),
225 egl_display_(egl_display),
226 egl_context_(egl_context),
227 weak_this_factory_(this) {
228 weak_this_ = weak_this_factory_.GetWeakPtr();
229 }
230
231 V4L2SliceVideoDecodeAccelerator::~V4L2SliceVideoDecodeAccelerator() {
232 DVLOGF(2);
233
234 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
235 DCHECK(!decoder_thread_.IsRunning());
236 DCHECK(!device_poll_thread_.IsRunning());
237
238 DCHECK(input_buffer_map_.empty());
239 DCHECK(output_buffer_map_.empty());
240 }
241
242 void V4L2SliceVideoDecodeAccelerator::NotifyError(Error error) {
243 if (!child_message_loop_proxy_->BelongsToCurrentThread()) {
244 child_message_loop_proxy_->PostTask(
245 FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::NotifyError,
246 weak_this_, error));
247 return;
248 }
249
250 if (client_) {
251 client_->NotifyError(error);
252 client_ptr_factory_.reset();
253 }
254 }
255
256 bool V4L2SliceVideoDecodeAccelerator::Initialize(
257 media::VideoCodecProfile profile,
258 VideoDecodeAccelerator::Client* client) {
259 DVLOGF(3);
260 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
261 DCHECK_EQ(state_, kUninitialized);
262
263 client_ptr_factory_.reset(
264 new base::WeakPtrFactory<VideoDecodeAccelerator::Client>(client));
265 client_ = client_ptr_factory_->GetWeakPtr();
266
267 video_profile_ = profile;
268
269 if (video_profile_ >= media::H264PROFILE_MIN &&
270 video_profile_ <= media::H264PROFILE_MAX) {
271 h264_accelerator_.reset(new V4L2H264Accelerator(this));
272 decoder_.reset(new H264Decoder(h264_accelerator_.get()));
273 } else if (video_profile_ >= media::VP8PROFILE_MIN &&
274 video_profile_ <= media::VP8PROFILE_MAX) {
275 vp8_accelerator_.reset(new V4L2VP8Accelerator(this));
276 decoder_.reset(new VP8Decoder(vp8_accelerator_.get()));
277 } else {
278 DLOG(ERROR) << "Unsupported profile " << video_profile_;
279 return false;
280 }
281
282 // TODO(posciak): This needs to be queried once supported.
283 input_planes_count_ = 1;
284 output_planes_count_ = 1;
285
286 if (egl_display_ == EGL_NO_DISPLAY) {
287 LOG(ERROR) << "Initialize(): could not get EGLDisplay";
288 NOTIFY_ERROR(PLATFORM_FAILURE);
289 return false;
290 }
291
292 // We need the context to be initialized to query extensions.
293 if (!make_context_current_.Run()) {
294 LOG(ERROR) << "Initialize(): could not make context current";
295 NOTIFY_ERROR(PLATFORM_FAILURE);
296 return false;
297 }
298
299 if (!gfx::g_driver_egl.ext.b_EGL_KHR_fence_sync) {
300 LOG(ERROR) << "Initialize(): context does not have EGL_KHR_fence_sync";
301 NOTIFY_ERROR(PLATFORM_FAILURE);
302 return false;
303 }
304
305 // Capabilities check.
306 struct v4l2_capability caps;
307 const __u32 kCapsRequired = V4L2_CAP_VIDEO_CAPTURE_MPLANE |
308 V4L2_CAP_VIDEO_OUTPUT_MPLANE | V4L2_CAP_STREAMING;
309 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYCAP, &caps);
310 if ((caps.capabilities & kCapsRequired) != kCapsRequired) {
311 DLOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP"
312 ", caps check failed: 0x" << std::hex << caps.capabilities;
313 NOTIFY_ERROR(PLATFORM_FAILURE);
314 return false;
315 }
316
317 if (!SetupFormats())
318 return false;
319
320 if (!decoder_thread_.Start()) {
321 DLOG(ERROR) << "Initialize(): device thread failed to start";
322 NOTIFY_ERROR(PLATFORM_FAILURE);
323 return false;
324 }
325 decoder_thread_proxy_ = decoder_thread_.message_loop_proxy();
326
327 // InitializeTask will NOTIFY_ERROR on failure.
328 decoder_thread_proxy_->PostTask(
329 FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::InitializeTask,
330 base::Unretained(this)));
331
332 DVLOGF(1) << "V4L2SliceVideoDecodeAccelerator initialized";
333 return true;
334 }
335
336 void V4L2SliceVideoDecodeAccelerator::InitializeTask() {
337 DVLOGF(3);
338 DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
339
340 if (!CreateInputBuffers()) {
341 NOTIFY_ERROR(PLATFORM_FAILURE);
      return;
    }
342
343 if (!StartDevicePoll()) {
344 NOTIFY_ERROR(PLATFORM_FAILURE);
      return;
    }
345
346 SetDecoderState(kDecoding);
347 }
348
349 void V4L2SliceVideoDecodeAccelerator::Destroy() {
350 DVLOGF(3);
351 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
352
353 DCHECK(decoder_thread_.IsRunning());
354 decoder_thread_proxy_->PostTask(
355 FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::DestroyTask,
356 base::Unretained(this)));
357
358 // Wait for tasks to finish/early-exit.
359 decoder_thread_.Stop();
360
361 delete this;
362 }
363
364 void V4L2SliceVideoDecodeAccelerator::DestroyTask() {
365 DVLOGF(3);
366 DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
367
368 SetDecoderState(kIdle);
369
370 decoder_->Reset();
371
372 decoder_current_bitstream_buffer_.reset();
373 while (!decoder_input_queue_.empty())
374 decoder_input_queue_.pop();
375
376 // Stop streaming and the device_poll_thread_.
377 StopDevicePoll(false);
378
379 DestroyInputBuffers();
380 DestroyOutputs(false);
381
382 DCHECK(surfaces_at_device_.empty());
383 DCHECK(surfaces_at_display_.empty());
384 DCHECK(decoder_display_queue_.empty());
385 }
386
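// A note on V4L2 memory-to-memory naming used throughout this file: the OUTPUT
// (V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) queue carries the bitstream we feed to
// the device, while the CAPTURE (V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) queue
// carries the decoded frames we get back.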
387 bool V4L2SliceVideoDecodeAccelerator::SetupFormats() {
388 DCHECK_EQ(state_, kUninitialized);
389
390 __u32 input_format_fourcc =
391 V4L2Device::VideoCodecProfileToV4L2PixFmt(video_profile_, true);
392 if (!input_format_fourcc) {
393 NOTREACHED();
394 return false;
395 }
396
397 size_t input_size;
398 if (base::CommandLine::ForCurrentProcess()->HasSwitch(
399 switches::kIgnoreResolutionLimitsForAcceleratedVideoDecode))
400 input_size = kInputBufferMaxSizeFor4k;
401 else
402 input_size = kInputBufferMaxSizeFor1080p;
403
404 struct v4l2_format format;
405 memset(&format, 0, sizeof(format));
406 format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
407 format.fmt.pix_mp.pixelformat = input_format_fourcc;
408 format.fmt.pix_mp.plane_fmt[0].sizeimage = input_size;
409 format.fmt.pix_mp.num_planes = input_planes_count_;
410 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);
411
412 // We have to set up the format for output, because the driver may not allow
413 // changing it once we start streaming; whether it can support our chosen
414 // output format or not may depend on the input format.
415 struct v4l2_fmtdesc fmtdesc;
416 memset(&fmtdesc, 0, sizeof(fmtdesc));
417 fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
418 while (device_->Ioctl(VIDIOC_ENUM_FMT, &fmtdesc) == 0) {
419 if (device_->CanCreateEGLImageFrom(fmtdesc.pixelformat)) {
420 output_format_fourcc_ = fmtdesc.pixelformat;
421 break;
422 }
423 ++fmtdesc.index;
424 }
425
426 if (output_format_fourcc_ == 0) {
427 LOG(ERROR) << "Could not find a usable output format";
428 return false;
429 }
430
431 // Only set fourcc for output; resolution, etc., will come from the
432 // driver once it extracts it from the stream.
433 memset(&format, 0, sizeof(format));
434 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
435 format.fmt.pix_mp.pixelformat = output_format_fourcc_;
436 format.fmt.pix_mp.num_planes = output_planes_count_;
437 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);
438
439 return true;
440 }
441
442 bool V4L2SliceVideoDecodeAccelerator::CreateInputBuffers() {
443 DVLOGF(3);
444 DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
445 DCHECK(!input_streamon_);
446 DCHECK(input_buffer_map_.empty());
447
448 struct v4l2_requestbuffers reqbufs;
449 memset(&reqbufs, 0, sizeof(reqbufs));
450 reqbufs.count = 8;
451 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
452 reqbufs.memory = V4L2_MEMORY_MMAP;
453 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);
454 input_buffer_map_.resize(reqbufs.count);
455 for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
456 free_input_buffers_.push_back(i);
457
458 // Query for the MEMORY_MMAP pointer.
459 struct v4l2_plane planes[VIDEO_MAX_PLANES];
460 struct v4l2_buffer buffer;
461 memset(&buffer, 0, sizeof(buffer));
462 memset(planes, 0, sizeof(planes));
463 buffer.index = i;
464 buffer.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
465 buffer.memory = V4L2_MEMORY_MMAP;
466 buffer.m.planes = planes;
467 buffer.length = input_planes_count_;
468 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer);
469 void* address = device_->Mmap(nullptr, buffer.m.planes[0].length,
470 PROT_READ | PROT_WRITE, MAP_SHARED,
471 buffer.m.planes[0].m.mem_offset);
472 if (address == MAP_FAILED) {
473 PLOG(ERROR) << "CreateInputBuffers(): mmap() failed";
474 return false;
475 }
476 input_buffer_map_[i].address = address;
477 input_buffer_map_[i].length = buffer.m.planes[0].length;
478 }
479
480 return true;
481 }
482
483 bool V4L2SliceVideoDecodeAccelerator::CreateOutputBuffers() {
484 DVLOGF(3);
485 DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
486 DCHECK(!output_streamon_);
487 DCHECK(output_buffer_map_.empty());
488 DCHECK(surfaces_at_display_.empty());
489 DCHECK(surfaces_at_device_.empty());
490
491 frame_buffer_size_ = decoder_->GetPicSize();
492 output_dpb_size_ = decoder_->GetRequiredNumOfPictures();
493
494 DCHECK_GT(output_dpb_size_, 0u);
495 DCHECK(!frame_buffer_size_.IsEmpty());
496
497 struct v4l2_format format;
498 memset(&format, 0, sizeof(format));
499 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
500 format.fmt.pix_mp.pixelformat = output_format_fourcc_;
501 format.fmt.pix_mp.width = frame_buffer_size_.width();
502 format.fmt.pix_mp.height = frame_buffer_size_.height();
503 format.fmt.pix_mp.num_planes = output_planes_count_;
504
505 if (device_->Ioctl(VIDIOC_S_FMT, &format) != 0) {
506 PLOG(ERROR) << "Failed setting format to: " << output_format_fourcc_;
507 NOTIFY_ERROR(PLATFORM_FAILURE);
508 return false;
509 }
510
511 struct v4l2_requestbuffers reqbufs;
512 memset(&reqbufs, 0, sizeof(reqbufs));
513 reqbufs.count = output_dpb_size_;
514 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
515 reqbufs.memory = V4L2_MEMORY_MMAP;
516 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);
517
518 if (reqbufs.count < output_dpb_size_) {
519 LOG(ERROR) << "Could not allocate enough output buffers";
520 return false;
521 }
522
523 output_buffer_map_.resize(reqbufs.count);
524
525 DVLOGF(3) << "buffer_count=" << output_buffer_map_.size()
526 << ", width=" << frame_buffer_size_.width()
527 << ", height=" << frame_buffer_size_.height();
528
529 child_message_loop_proxy_->PostTask(
530 FROM_HERE,
531 base::Bind(&VideoDecodeAccelerator::Client::ProvidePictureBuffers,
532 client_, output_buffer_map_.size(), frame_buffer_size_,
533 device_->GetTextureTarget()));
534
535 // Wait for the client to call AssignPictureBuffers() on the Child thread.
536 // We do this, because if we continue decoding without finishing buffer
537 // allocation, we may end up Resetting before AssignPictureBuffers arrives,
538 // resulting in unnecessary complications and subtle bugs.
539 pictures_assigned_.Wait();
541 DVLOGF(3) << "Assigned " << free_output_buffers_.size() << " picture buffers";
541
542 return true;
543 }
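
For context, the client side of this handshake looks roughly as follows (a sketch only; ExampleClient, CreateTexture() and next_picture_id_ are hypothetical): ProvidePictureBuffers() arrives on the client's thread, the client allocates textures of the requested size and target, and its call to AssignPictureBuffers() eventually signals pictures_assigned_, unblocking the decoder thread here.

// Hypothetical client-side sketch -- not part of this change.
void ExampleClient::ProvidePictureBuffers(uint32 requested_num_of_buffers,
                                          const gfx::Size& dimensions,
                                          uint32 texture_target) {
  std::vector<media::PictureBuffer> buffers;
  for (uint32 i = 0; i < requested_num_of_buffers; ++i) {
    // Allocate a texture of the requested target/size (GL details elided).
    uint32 texture_id = CreateTexture(texture_target, dimensions);
    buffers.push_back(media::PictureBuffer(next_picture_id_++, dimensions,
                                           texture_id));
  }
  vda_->AssignPictureBuffers(buffers);  // resumes CreateOutputBuffers() above
}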
544
545 void V4L2SliceVideoDecodeAccelerator::DestroyInputBuffers() {
546 DVLOGF(3);
547 DCHECK(decoder_thread_proxy_->BelongsToCurrentThread() ||
548 !decoder_thread_.IsRunning());
549 DCHECK(!input_streamon_);
550
551 for (auto& input_record : input_buffer_map_) {
552 if (input_record.address != nullptr)
553 device_->Munmap(input_record.address, input_record.length);
554 }
555
556 struct v4l2_requestbuffers reqbufs;
557 memset(&reqbufs, 0, sizeof(reqbufs));
558 reqbufs.count = 0;
559 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
560 reqbufs.memory = V4L2_MEMORY_MMAP;
561 IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs);
562
563 input_buffer_map_.clear();
564 free_input_buffers_.clear();
565 }
566
567 void V4L2SliceVideoDecodeAccelerator::DismissPictures(
568 std::vector<int32> picture_buffer_ids,
569 base::WaitableEvent* done) {
570 DVLOGF(3);
571 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
572
573 for (auto picture_buffer_id : picture_buffer_ids) {
574 DVLOGF(1) << "dismissing PictureBuffer id=" << picture_buffer_id;
575 client_->DismissPictureBuffer(picture_buffer_id);
576 }
577
578 done->Signal();
579 }
580
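// Runs on device_poll_thread_: blocks in V4L2Device::Poll() until the device
// has work for us or the poll interrupt fires, then hands control back to the
// decoder thread via ServiceDeviceTask().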
581 void V4L2SliceVideoDecodeAccelerator::DevicePollTask(bool poll_device) {
582 DCHECK_EQ(device_poll_thread_.message_loop(), base::MessageLoop::current());
583
584 bool event_pending;
585 if (!device_->Poll(poll_device, &event_pending)) {
586 NOTIFY_ERROR(PLATFORM_FAILURE);
587 return;
588 }
589
590 // All processing should happen on ServiceDeviceTask(), since we shouldn't
591 // touch decoder state from this thread.
592 decoder_thread_proxy_->PostTask(
593 FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::ServiceDeviceTask,
594 base::Unretained(this)));
595 }
596
597 void V4L2SliceVideoDecodeAccelerator::ServiceDeviceTask() {
598 DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
599 // ServiceDeviceTask() should only ever be scheduled from DevicePollTask(),
600 // so either:
601 // * device_poll_thread_ is running normally
602 // * device_poll_thread_ scheduled us, but then a DestroyTask() shut it down,
603 // in which case we should early-out.
604 if (!device_poll_thread_.message_loop())
605 return;
606
607 Dequeue();
608
609 if (!device_->ClearDevicePollInterrupt())
610 return;
611
612 bool poll_device =
613 (input_buffer_queued_count_ > 0 && output_buffer_queued_count_ > 0);
614
615 device_poll_thread_.message_loop()->PostTask(
616 FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::DevicePollTask,
617 base::Unretained(this), poll_device));
618
619 DVLOGF(2) << ": buffer counts: "
620 << "INPUT[" << decoder_input_queue_.size() << "]"
621 << "DEVICE["
622 << free_input_buffers_.size() << "+"
623 << input_buffer_queued_count_ << "/"
624 << input_buffer_map_.size() << "]->["
625 << free_output_buffers_.size() << "+"
626 << output_buffer_queued_count_ << "/"
627 << output_buffer_map_.size() << "]"
628 << "=> DISPLAYQ[ " << decoder_display_queue_.size() << "]"
629 << "=> CLIENT[" << surfaces_at_display_.size() << "]";
630
631 ScheduleDecodeBufferTaskIfNeeded();
632 }
633
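// Queues one decode request to the device: the surface's input (bitstream)
// buffer and its output buffer are enqueued together, streaming is started on
// first use, and the surface is tracked in surfaces_at_device_ until Dequeue()
// sees it come back.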
634 void V4L2SliceVideoDecodeAccelerator::Enqueue(
635 const scoped_refptr<V4L2DecodeSurface>& dec_surface) {
636 DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
637
638 const int old_inputs_queued = input_buffer_queued_count_;
639 const int old_outputs_queued = output_buffer_queued_count_;
640
641 if (!EnqueueInputRecord(dec_surface->input_record(),
642 dec_surface->config_store())) {
643 DVLOGF(1) << "Failed queueing an input buffer";
644 NOTIFY_ERROR(PLATFORM_FAILURE);
645 return;
646 }
647
648 if (!EnqueueOutputRecord(dec_surface->output_record())) {
649 DVLOGF(1) << "Failed queueing an output buffer";
650 NOTIFY_ERROR(PLATFORM_FAILURE);
651 return;
652 }
653
654 bool inserted =
655 surfaces_at_device_.insert(std::make_pair(dec_surface->output_record(),
656 dec_surface)).second;
657 DCHECK(inserted);
658
659 if (old_inputs_queued == 0 && input_buffer_queued_count_ != 0) {
660 // We started up a previously empty queue.
661 // Queue state changed; signal interrupt.
662 if (!device_->SetDevicePollInterrupt()) {
663 PLOG(ERROR) << "SetDevicePollInterrupt(): failed";
664 NOTIFY_ERROR(PLATFORM_FAILURE);
665 return;
666 }
667 // VIDIOC_STREAMON if we haven't yet.
668 if (!input_streamon_) {
669 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
670 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type);
671 input_streamon_ = true;
672 }
673 }
674
675 if (old_outputs_queued == 0 && output_buffer_queued_count_ != 0) {
676 // We just started up a previously empty queue.
677 // Queue state changed; signal interrupt.
678 if (!device_->SetDevicePollInterrupt()) {
679 PLOG(ERROR) << "SetDevicePollInterrupt(): failed";
680 NOTIFY_ERROR(PLATFORM_FAILURE);
681 return;
682 }
683 // Start VIDIOC_STREAMON if we haven't yet.
684 if (!output_streamon_) {
685 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
686 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type);
687 output_streamon_ = true;
688 }
689 }
690 }
691
692 void V4L2SliceVideoDecodeAccelerator::Dequeue() {
693 DVLOGF(3);
694 DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
695
696 struct v4l2_buffer dqbuf;
697 struct v4l2_plane planes[VIDEO_MAX_PLANES];
698 while (input_buffer_queued_count_ > 0) {
699 DCHECK(input_streamon_);
700 memset(&dqbuf, 0, sizeof(dqbuf));
701 memset(&planes, 0, sizeof(planes));
702 dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
703 dqbuf.memory = V4L2_MEMORY_MMAP;
704 dqbuf.m.planes = planes;
705 dqbuf.length = input_planes_count_;
706 if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
707 if (errno == EAGAIN) {
708 // EAGAIN if we're just out of buffers to dequeue.
709 break;
710 }
711 PLOG(ERROR) << "ioctl() failed: VIDIOC_DQBUF";
712 NOTIFY_ERROR(PLATFORM_FAILURE);
713 return;
714 }
715 InputRecord& input_record = input_buffer_map_[dqbuf.index];
716 DCHECK(input_record.at_device);
717 input_record.at_device = false;
718 input_record.input_id = -1;
719 input_record.bytes_used = 0;
720 free_input_buffers_.push_back(dqbuf.index);
721 input_buffer_queued_count_--;
722 }
723
724 while (output_buffer_queued_count_ > 0) {
725 DCHECK(output_streamon_);
726 memset(&dqbuf, 0, sizeof(dqbuf));
727 memset(&planes, 0, sizeof(planes));
728 dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
729 dqbuf.memory = V4L2_MEMORY_MMAP;
730 dqbuf.m.planes = planes;
731 dqbuf.length = output_planes_count_;
732 if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
733 if (errno == EAGAIN) {
734 // EAGAIN if we're just out of buffers to dequeue.
735 break;
736 }
737 PLOG(ERROR) << "ioctl() failed: VIDIOC_DQBUF";
738 NOTIFY_ERROR(PLATFORM_FAILURE);
739 return;
740 }
741 OutputRecord& output_record = output_buffer_map_[dqbuf.index];
742 DCHECK(output_record.at_device);
743 output_record.at_device = false;
744 DCHECK_NE(output_record.picture_id, -1);
745 output_buffer_queued_count_--;
746 DVLOGF(3) << "Decoded output " << dqbuf.index;
747
748 V4L2DecodeSurfaceByOutputId::iterator it =
749 surfaces_at_device_.find(dqbuf.index);
750 if (it == surfaces_at_device_.end()) {
751 DLOG(ERROR) << "Got invalid surface from device.";
752 NOTIFY_ERROR(PLATFORM_FAILURE);
      return;
753 }
754
755 it->second->SetDecoded();
756 surfaces_at_device_.erase(it);
757 }
758
759 // A frame was decoded, see if we can output it.
760 TryOutputSurfaces();
761
762 ProcessPendingEventsIfNeeded();
763 }
764
765 void V4L2SliceVideoDecodeAccelerator::ProcessPendingEventsIfNeeded() {
766 // Process pending events, if any, in the correct order.
767 // We always first process the surface set change, as it is an internal
768 // event from the decoder and interleaving it with external requests would
769 // put the decoder in an undefined state.
770 FinishSurfaceSetChangeIfNeeded();
771
772 // Process external (client) requests.
773 FinishFlushIfNeeded();
774 FinishResetIfNeeded();
775 }
776
777 void V4L2SliceVideoDecodeAccelerator::ReuseOutputBuffer(int index) {
778 DCHECK_LT(index, static_cast<int>(output_buffer_map_.size()));
779 DVLOGF(4) << "Reusing output buffer, index=" << index;
780 DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
781
782 OutputRecord& output_record = output_buffer_map_[index];
783 DCHECK(!output_record.at_device);
784 output_record.at_client = false;
785
786 free_output_buffers_.push_back(index);
787
788 ScheduleDecodeBufferTaskIfNeeded();
789 }
790
791 bool V4L2SliceVideoDecodeAccelerator::EnqueueInputRecord(
792 int index,
793 uint32_t config_store) {
794 DVLOGF(3);
795 DCHECK_LT(index, static_cast<int>(input_buffer_map_.size()));
796 DCHECK_GT(config_store, 0u);
797
798 // Enqueue an input (VIDEO_OUTPUT) buffer for an input video frame.
799 InputRecord& input_record = input_buffer_map_[index];
800 DCHECK(!input_record.at_device);
801 struct v4l2_buffer qbuf;
802 struct v4l2_plane qbuf_planes[VIDEO_MAX_PLANES];
803 memset(&qbuf, 0, sizeof(qbuf));
804 memset(qbuf_planes, 0, sizeof(qbuf_planes));
805 qbuf.index = index;
806 qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
807 qbuf.memory = V4L2_MEMORY_MMAP;
808 qbuf.m.planes = qbuf_planes;
809 qbuf.m.planes[0].bytesused = input_record.bytes_used;
810 qbuf.length = input_planes_count_;
811 qbuf.config_store = config_store;
812 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
813 input_record.at_device = true;
814 input_buffer_queued_count_++;
815
816 return true;
817 }
818
819 bool V4L2SliceVideoDecodeAccelerator::EnqueueOutputRecord(int index) {
820 DVLOGF(3);
821 DCHECK_LT(index, static_cast<int>(output_buffer_map_.size()));
822
823 // Enqueue an output (VIDEO_CAPTURE) buffer.
824 OutputRecord& output_record = output_buffer_map_[index];
825 DCHECK(!output_record.at_device);
826 DCHECK(!output_record.at_client);
827 DCHECK_NE(output_record.egl_image, EGL_NO_IMAGE_KHR);
828 DCHECK_NE(output_record.picture_id, -1);
829 if (output_record.egl_sync != EGL_NO_SYNC_KHR) {
830 // If we have to wait for completion, wait. Note that
831 // free_output_buffers_ is a FIFO queue, so we always wait on the
832 // buffer that has been in the queue the longest.
833 if (eglClientWaitSyncKHR(egl_display_, output_record.egl_sync, 0,
834 EGL_FOREVER_KHR) == EGL_FALSE) {
835 // This will cause tearing, but is safe otherwise.
836 DVLOGF(1) << "eglClientWaitSyncKHR failed!";
837 }
838 if (eglDestroySyncKHR(egl_display_, output_record.egl_sync) != EGL_TRUE) {
839 LOGF(ERROR) << "eglDestroySyncKHR failed!";
840 NOTIFY_ERROR(PLATFORM_FAILURE);
841 return false;
842 }
843 output_record.egl_sync = EGL_NO_SYNC_KHR;
844 }
845
846 struct v4l2_buffer qbuf;
847 struct v4l2_plane qbuf_planes[VIDEO_MAX_PLANES];
848 memset(&qbuf, 0, sizeof(qbuf));
849 memset(qbuf_planes, 0, sizeof(qbuf_planes));
850 qbuf.index = index;
851 qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
852 qbuf.memory = V4L2_MEMORY_MMAP;
853 qbuf.m.planes = qbuf_planes;
854 qbuf.length = output_planes_count_;
855 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
856 output_record.at_device = true;
857 output_buffer_queued_count_++;
858
859 return true;
860 }
861
862 bool V4L2SliceVideoDecodeAccelerator::StartDevicePoll() {
863 DVLOGF(3) << "Starting device poll";
864 DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
865 DCHECK(!device_poll_thread_.IsRunning());
866
867 // Start up the device poll thread and schedule its first DevicePollTask().
868 if (!device_poll_thread_.Start()) {
869 DLOG(ERROR) << "StartDevicePoll(): Device thread failed to start";
870 NOTIFY_ERROR(PLATFORM_FAILURE);
871 return false;
872 }
873 // Enqueue a poll task with no devices to poll on - will wait only for the
874 // poll interrupt
875 device_poll_thread_.message_loop()->PostTask(
876 FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::DevicePollTask,
877 base::Unretained(this), false));
878
879 return true;
880 }
881
882 bool V4L2SliceVideoDecodeAccelerator::StopDevicePoll(bool keep_input_state) {
883 DVLOGF(3) << "Stopping device poll";
884 if (decoder_thread_.IsRunning())
885 DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
886
887 // Signal the DevicePollTask() to stop, and stop the device poll thread.
888 if (!device_->SetDevicePollInterrupt()) {
889 PLOG(ERROR) << "SetDevicePollInterrupt(): failed";
890 NOTIFY_ERROR(PLATFORM_FAILURE);
891 return false;
892 }
893 device_poll_thread_.Stop();
894
895 // Clear the interrupt now, to be sure.
896 if (!device_->ClearDevicePollInterrupt()) {
897 NOTIFY_ERROR(PLATFORM_FAILURE);
898 return false;
899 }
900
901 if (!keep_input_state) {
902 if (input_streamon_) {
903 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
904 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type);
905 }
906 input_streamon_ = false;
907 }
908
909 if (output_streamon_) {
910 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
911 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type);
912 }
913 output_streamon_ = false;
914
915 surfaces_at_device_.clear();
916
917 if (!keep_input_state) {
918 free_input_buffers_.clear();
919 for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
920 InputRecord& input_record = input_buffer_map_[i];
921 input_record.at_device = false;
922 input_record.bytes_used = 0;
923 input_record.input_id = -1;
924 free_input_buffers_.push_back(i);
925 }
926 input_buffer_queued_count_ = 0;
927 }
928
929 free_output_buffers_.clear();
930 for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
931 OutputRecord& output_record = output_buffer_map_[i];
932 DCHECK(!(output_record.at_client && output_record.at_device));
933 output_record.at_device = false;
934 if (!output_record.at_client)
935 free_output_buffers_.push_back(i);
936 }
937
938 output_buffer_queued_count_ = 0;
939
940 DVLOGF(3) << " device poll stopped";
941 return true;
942 }
943
944 void V4L2SliceVideoDecodeAccelerator::Decode(
945 const media::BitstreamBuffer& bitstream_buffer) {
946 DVLOGF(3) << "input_id=" << bitstream_buffer.id()
947 << ", size=" << bitstream_buffer.size();
948 DCHECK(io_message_loop_proxy_->BelongsToCurrentThread());
949
950 decoder_thread_proxy_->PostTask(
951 FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::DecodeTask,
952 base::Unretained(this), bitstream_buffer));
953 }
954
955 void V4L2SliceVideoDecodeAccelerator::DecodeTask(
956 const media::BitstreamBuffer& bitstream_buffer) {
957 DVLOGF(3) << "input_id=" << bitstream_buffer.id()
958 << " size=" << bitstream_buffer.size();
959 DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
960
961 scoped_ptr<BitstreamBufferRef> bitstream_record(new BitstreamBufferRef(
962 io_client_, io_message_loop_proxy_,
963 new base::SharedMemory(bitstream_buffer.handle(), true),
964 bitstream_buffer.size(), bitstream_buffer.id()));
965 if (!bitstream_record->shm->Map(bitstream_buffer.size())) {
966 LOGF(ERROR) << "Could not map bitstream_buffer";
967 NOTIFY_ERROR(UNREADABLE_INPUT);
968 return;
969 }
970 DVLOGF(3) << "mapped at=" << bitstream_record->shm->memory();
971
972 decoder_input_queue_.push(
973 linked_ptr<BitstreamBufferRef>(bitstream_record.release()));
974
975 ScheduleDecodeBufferTaskIfNeeded();
976 }
977
978 bool V4L2SliceVideoDecodeAccelerator::TrySetNewBistreamBuffer() {
979 DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
980 DCHECK(!decoder_current_bitstream_buffer_);
981
982 if (decoder_input_queue_.empty())
983 return false;
984
985 decoder_current_bitstream_buffer_.reset(
986 decoder_input_queue_.front().release());
987 decoder_input_queue_.pop();
988
989 if (decoder_current_bitstream_buffer_->input_id == kFlushBufferId) {
990 // This is a buffer we queued for ourselves to trigger flush at this time.
991 InitiateFlush();
992 return false;
993 }
994
995 const uint8* const data = reinterpret_cast<const uint8*>(
996 decoder_current_bitstream_buffer_->shm->memory());
997 const size_t data_size = decoder_current_bitstream_buffer_->size;
998 decoder_->SetStream(data, data_size);
999
1000 return true;
1001 }
1002
1003 void V4L2SliceVideoDecodeAccelerator::ScheduleDecodeBufferTaskIfNeeded() {
1004 DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
1005 if (state_ == kDecoding) {
1006 decoder_thread_proxy_->PostTask(
1007 FROM_HERE,
1008 base::Bind(&V4L2SliceVideoDecodeAccelerator::DecodeBufferTask,
1009 base::Unretained(this)));
1010 }
1011 }
1012
1013 void V4L2SliceVideoDecodeAccelerator::DecodeBufferTask() {
1014 DVLOGF(3);
1015 DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
1016
1017 if (state_ != kDecoding)
1018 return;
1019
1020 while (true) {
1021 AcceleratedVideoDecoder::DecResult res;
1022 res = decoder_->Decode();
1023 switch (res) {
1024 case AcceleratedVideoDecoder::kAllocateNewSurfaces:
1025 DVLOGF(2) << "Decoder requesting a new set of surfaces";
1026 InitiateSurfaceSetChange();
1027 return;
1028
1029 case AcceleratedVideoDecoder::kRanOutOfStreamData:
1030 decoder_current_bitstream_buffer_.reset();
kcwu 2015/01/09 11:03:00 Will decoder cache remain data in current buffer?
Pawel Osciak 2015/01/09 13:50:31 The VDA will always get a full NALU (H264) or fram
Pawel Osciak 2015/01/12 07:18:20 The above applies to H264. For VP8, we have no way
1031 if (!TrySetNewBistreamBuffer())
1032 return;
1033
1034 break;
1035
1036 case AcceleratedVideoDecoder::kRanOutOfSurfaces:
1037 // No more surfaces for the decoder, we'll come back once we have more.
1038 DVLOGF(5) << "Ran out of surfaces";
1039 return;
1040
1041 case AcceleratedVideoDecoder::kDecodeError:
1042 DVLOGF(1) << "Error decoding stream";
1043 NOTIFY_ERROR(PLATFORM_FAILURE);
1044 return;
1045 }
1046 }
1047 }
1048
1049 void V4L2SliceVideoDecodeAccelerator::InitiateSurfaceSetChange() {
1050 DVLOGF(1);
1051 DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
1052
1053 DCHECK_EQ(state_, kDecoding);
1054 SetDecoderState(kIdle);
1055
1056 DCHECK(!surface_set_change_pending_);
1057 surface_set_change_pending_ = true;
1058
1059 FinishSurfaceSetChangeIfNeeded();
1060 }
1061
1062 void V4L2SliceVideoDecodeAccelerator::FinishSurfaceSetChangeIfNeeded() {
1063 DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
1064
1065 if (!surface_set_change_pending_ || !surfaces_at_device_.empty())
1066 return;
1067
1068 DCHECK_EQ(state_, kIdle);
1069
1070 // Keep input queue running while we switch outputs.
1071 if (!StopDevicePoll(true))
1072 return;
1073
1074 // This will return only once all buffers are dismissed and destroyed.
1075 // This does not wait until they are displayed however, as display retains
1076 // references to the buffers bound to textures and will release them
1077 // after displaying.
1078 if (!DestroyOutputs(true)) {
1079 NOTIFY_ERROR(PLATFORM_FAILURE);
1080 return;
1081 }
1082
1083 if (!CreateOutputBuffers()) {
1084 NOTIFY_ERROR(PLATFORM_FAILURE);
1085 return;
1086 }
1087
1088 if (!StartDevicePoll()) {
1089 NOTIFY_ERROR(PLATFORM_FAILURE);
1090 return;
1091 }
1092
1093 surface_set_change_pending_ = false;
1094 SetDecoderState(kDecoding);
1095 ScheduleDecodeBufferTaskIfNeeded();
1096 }
1097
1098 bool V4L2SliceVideoDecodeAccelerator::DestroyOutputs(bool dismiss) {
1099 DVLOGF(3);
1100 DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
1101 std::vector<EGLImageKHR> egl_images_to_destroy;
1102 std::vector<int32> picture_buffers_to_dismiss;
1103
1104 if (output_buffer_map_.empty())
1105 return true;
1106
1107 for (auto& output_record : output_buffer_map_) {
1108 DCHECK(!output_record.at_device);
1109 output_record.at_client = false;
1110
1111 if (output_record.egl_sync != EGL_NO_SYNC_KHR) {
1112 if (eglDestroySyncKHR(egl_display_, output_record.egl_sync) != EGL_TRUE)
1113 DVLOGF(1) << "eglDestroySyncKHR failed.";
1114 }
1115
1116 if (output_record.egl_image != EGL_NO_IMAGE_KHR) {
1117 child_message_loop_proxy_->PostTask(
1118 FROM_HERE,
1119 base::Bind(base::IgnoreResult(&V4L2Device::DestroyEGLImage), device_,
1120 egl_display_, output_record.egl_image));
1121 }
1122
1123 picture_buffers_to_dismiss.push_back(output_record.picture_id);
1124 }
1125
1126 if (dismiss) {
1127 DVLOGF(2) << "Scheduling picture dismissal";
1128 base::WaitableEvent done(false, false);
1129 child_message_loop_proxy_->PostTask(
1130 FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::DismissPictures,
1131 weak_this_, picture_buffers_to_dismiss, &done));
1132 done.Wait();
1133 }
1134
1135 // At this point client can't call ReusePictureBuffer on any of the pictures
1136 // anymore, so it's safe to destroy.
1137 return DestroyOutputBuffers();
1138 }
1139
1140 bool V4L2SliceVideoDecodeAccelerator::DestroyOutputBuffers() {
1141 DVLOGF(3);
1142 DCHECK(decoder_thread_proxy_->BelongsToCurrentThread() ||
1143 !decoder_thread_.IsRunning());
1144 DCHECK(!output_streamon_);
1145 DCHECK(surfaces_at_device_.empty());
1146 DCHECK(decoder_display_queue_.empty());
1147 DCHECK(surfaces_at_display_.size() + free_output_buffers_.size() ==
1148 output_buffer_map_.size());
1149
1150 if (output_buffer_map_.empty())
1151 return true;
1152
1153 // It's ok to do this, client will retain references to textures, but we are
1154 // not interested in reusing the surfaces anymore.
1155 // This will prevent us from reusing old surfaces in case we have some
1156 // ReusePictureBuffer() pending on ChildThread already. It's ok to ignore
1157 // them, because we have already dismissed them (in DestroyOutputs()).
1158 surfaces_at_display_.clear();
1159 DCHECK_EQ(free_output_buffers_.size(), output_buffer_map_.size());
1160
1161 free_output_buffers_.clear();
1162 output_buffer_map_.clear();
1163
1164 struct v4l2_requestbuffers reqbufs;
1165 memset(&reqbufs, 0, sizeof(reqbufs));
1166 reqbufs.count = 0;
1167 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1168 reqbufs.memory = V4L2_MEMORY_MMAP;
1169 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);
1170
1171 return true;
1172 }
1173
1174 void V4L2SliceVideoDecodeAccelerator::AssignPictureBuffers(
1175 const std::vector<media::PictureBuffer>& buffers) {
1176 DVLOGF(3);
1177 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
1178
1179 if (buffers.size() != output_buffer_map_.size()) {
1180 DLOG(ERROR) << "Failed to provide requested picture buffers. "
1181 << "(Got " << buffers.size() << ", requested "
1182 << output_buffer_map_.size() << ")";
1183 NOTIFY_ERROR(INVALID_ARGUMENT);
1184 return;
1185 }
1186
1187 if (!make_context_current_.Run()) {
1188 DLOG(ERROR) << "could not make context current";
1189 NOTIFY_ERROR(PLATFORM_FAILURE);
1190 return;
1191 }
1192
1193 gfx::ScopedTextureBinder bind_restore(GL_TEXTURE_EXTERNAL_OES, 0);
1194
1195 // It's safe to manipulate all the buffer state here, because the decoder
1196 // thread is waiting on pictures_assigned_.
1197 DCHECK(free_output_buffers_.empty());
1198 for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
1199 DCHECK(buffers[i].size() == frame_buffer_size_);
1200
1201 OutputRecord& output_record = output_buffer_map_[i];
1202 DCHECK(!output_record.at_device);
1203 DCHECK(!output_record.at_client);
1204 DCHECK_EQ(output_record.egl_image, EGL_NO_IMAGE_KHR);
1205 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
1206 DCHECK_EQ(output_record.picture_id, -1);
1207 DCHECK_EQ(output_record.cleared, false);
1208
1209 EGLImageKHR egl_image = device_->CreateEGLImage(egl_display_,
1210 egl_context_,
1211 buffers[i].texture_id(),
1212 frame_buffer_size_,
1213 i,
1214 output_format_fourcc_,
1215 output_planes_count_);
1216 if (egl_image == EGL_NO_IMAGE_KHR) {
1217 LOGF(ERROR) << "Could not create EGLImageKHR";
1218 // Ownership of EGLImages allocated in previous iterations of this loop
1219 // has been transferred to output_buffer_map_. After we error-out here
1220 // the destructor will handle their cleanup.
1221 NOTIFY_ERROR(PLATFORM_FAILURE);
1222 return;
1223 }
1224
1225 output_record.egl_image = egl_image;
1226 output_record.picture_id = buffers[i].id();
1227 free_output_buffers_.push_back(i);
1228 DVLOGF(3) << "buffer[" << i << "]: picture_id=" << output_record.picture_id;
1229 }
1230
1231 pictures_assigned_.Signal();
1232 }
1233
1234 void V4L2SliceVideoDecodeAccelerator::ReusePictureBuffer(
1235 int32 picture_buffer_id) {
1236 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
1237 DVLOGF(4) << "picture_buffer_id=" << picture_buffer_id;
1238
1239 if (!make_context_current_.Run()) {
1240 LOGF(ERROR) << "could not make context current";
1241 NOTIFY_ERROR(PLATFORM_FAILURE);
1242 return;
1243 }
1244
1245 EGLSyncKHR egl_sync =
1246 eglCreateSyncKHR(egl_display_, EGL_SYNC_FENCE_KHR, NULL);
1247 if (egl_sync == EGL_NO_SYNC_KHR) {
1248 LOGF(ERROR) << "eglCreateSyncKHR() failed";
1249 NOTIFY_ERROR(PLATFORM_FAILURE);
1250 return;
1251 }
1252
1253 scoped_ptr<EGLSyncKHRRef> egl_sync_ref(
1254 new EGLSyncKHRRef(egl_display_, egl_sync));
1255 decoder_thread_proxy_->PostTask(
1256 FROM_HERE,
1257 base::Bind(&V4L2SliceVideoDecodeAccelerator::ReusePictureBufferTask,
1258 base::Unretained(this), picture_buffer_id,
1259 base::Passed(&egl_sync_ref)));
1260 }
1261
1262 void V4L2SliceVideoDecodeAccelerator::ReusePictureBufferTask(
1263 int32 picture_buffer_id,
1264 scoped_ptr<EGLSyncKHRRef> egl_sync_ref) {
1265 DVLOGF(3) << "picture_buffer_id=" << picture_buffer_id;
1266 DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
1267
1268 V4L2DecodeSurfaceByPictureBufferId::iterator it =
1269 surfaces_at_display_.find(picture_buffer_id);
1270 if (it == surfaces_at_display_.end()) {
1271 // It's possible that we've already posted a DismissPictureBuffer for this
1272 // picture, but it has not yet executed when this ReusePictureBuffer was
1273 // posted to us by the client. In that case just ignore this (we've already
1274 // dismissed it and accounted for that) and let the sync object get
1275 // destroyed.
1276 DVLOGF(3) << "got picture id= " << picture_buffer_id
1277 << " not in use (anymore?).";
1278 return;
1279 }
1280
1281 OutputRecord& output_record = output_buffer_map_[it->second->output_record()];
1282 if (output_record.at_device || !output_record.at_client) {
1283 DVLOGF(1) << "picture_buffer_id not reusable";
1284 NOTIFY_ERROR(INVALID_ARGUMENT);
1285 return;
1286 }
1287
1288 DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
1289 DCHECK(!output_record.at_device);
1290 output_record.at_client = false;
1291 output_record.egl_sync = egl_sync_ref->egl_sync;
1292 // Take ownership of the EGLSync.
1293 egl_sync_ref->egl_sync = EGL_NO_SYNC_KHR;
1294 surfaces_at_display_.erase(it);
1295 }
1296
1297 void V4L2SliceVideoDecodeAccelerator::Flush() {
1298 DVLOGF(3);
1299 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
1300
1301 decoder_thread_proxy_->PostTask(
1302 FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::FlushTask,
1303 base::Unretained(this)));
1304 }
1305
1306 void V4L2SliceVideoDecodeAccelerator::FlushTask() {
1307 DVLOGF(3);
1308 DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
1309
1310 if (!decoder_input_queue_.empty()) {
1311 // We are not done with pending inputs, so queue an empty buffer,
1312 // which - when reached - will trigger flush sequence.
1313 decoder_input_queue_.push(
1314 linked_ptr<BitstreamBufferRef>(new BitstreamBufferRef(
1315 io_client_, io_message_loop_proxy_, nullptr, 0, kFlushBufferId)));
1316 return;
1317 }
1318
1319 // No more inputs pending, so just finish flushing here.
1320 InitiateFlush();
1321 }
1322
1323 void V4L2SliceVideoDecodeAccelerator::InitiateFlush() {
1324 DVLOGF(3);
1325 DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
1326
1327 DCHECK(!decoder_flushing_);
1328 DCHECK_EQ(state_, kDecoding);
1329 SetDecoderState(kIdle);
1330
1331 // This will trigger output for all remaining surfaces in the decoder.
1332 // However, not all of them may be decoded yet (they would be queued
1333 // in hardware then).
1334 if (!decoder_->Flush()) {
1335 DVLOGF(1) << "Failed flushing the decoder.";
1336 NOTIFY_ERROR(PLATFORM_FAILURE);
1337 return;
1338 }
1339
1340 // Put the decoder in an idle state, ready to resume.
1341 decoder_->Reset();
1342
1343 decoder_flushing_ = true;
1344
1345 decoder_thread_proxy_->PostTask(
1346 FROM_HERE,
1347 base::Bind(&V4L2SliceVideoDecodeAccelerator::FinishFlushIfNeeded,
1348 base::Unretained(this)));
1349 }
1350
1351 void V4L2SliceVideoDecodeAccelerator::FinishFlushIfNeeded() {
1352 DVLOGF(3);
1353 DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
1354
1355 if (!decoder_flushing_ || !surfaces_at_device_.empty())
1356 return;
1357
1358 DCHECK_EQ(state_, kIdle);
1359
1360 // At this point, all remaining surfaces are decoded and dequeued, and since
1361 // we have already scheduled output for them in InitiateFlush(), their
1362 // respective PictureReady calls have been posted (or they have been queued on
1363 // pending_picture_ready_). So at this time, once we SendPictureReady(),
1364 // we will have all remaining PictureReady() posted to the client and we
1365 // can post NotifyFlushDone().
1366 DCHECK(decoder_display_queue_.empty());
1367 SendPictureReady();
1368
1369 child_message_loop_proxy_->PostTask(
1370 FROM_HERE, base::Bind(&Client::NotifyFlushDone, client_));
1371
1372 decoder_flushing_ = false;
1373
1374 DVLOGF(3) << "Flush finished";
1375 SetDecoderState(kDecoding);
1376 ScheduleDecodeBufferTaskIfNeeded();
1377 }
1378
1379 void V4L2SliceVideoDecodeAccelerator::Reset() {
1380 DVLOGF(3);
1381 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
1382
1383 decoder_thread_proxy_->PostTask(
1384 FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::ResetTask,
1385 base::Unretained(this)));
1386 }
1387
1388 void V4L2SliceVideoDecodeAccelerator::ResetTask() {
1389 DVLOGF(3);
1390 DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
1391
1392 if (decoder_resetting_) {
1393 // This is a bug in the client, multiple Reset()s before NotifyResetDone()
1394 // are not allowed.
1395 NOTREACHED() << "Client should not be requesting multiple Reset()s";
1396 return;
1397 }
1398
1399 DCHECK_EQ(state_, kDecoding);
1400 SetDecoderState(kIdle);
1401
1402 // Put the decoder in an idle state, ready to resume.
1403 decoder_->Reset();
1404
1405 decoder_resetting_ = true;
1406
1407 // Drop all remaining inputs.
1408 decoder_current_bitstream_buffer_.reset();
1409 while (!decoder_input_queue_.empty())
1410 decoder_input_queue_.pop();
1411
1412 FinishResetIfNeeded();
1413 }
1414
1415 void V4L2SliceVideoDecodeAccelerator::FinishResetIfNeeded() {
1416 DVLOGF(3);
1417 DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
1418
1419 if (!decoder_resetting_ || !surfaces_at_device_.empty())
1420 return;
1421
1422 DCHECK_EQ(state_, kIdle);
1423 DCHECK(!decoder_flushing_);
1424
1425 // Drop any pending outputs.
1426 while (!decoder_display_queue_.empty())
1427 decoder_display_queue_.pop();
1428
1429 SendPictureReady();
1430 decoder_resetting_ = false;
1431
1432 child_message_loop_proxy_->PostTask(
1433 FROM_HERE, base::Bind(&Client::NotifyResetDone, client_));
1434
1435 DVLOGF(3) << "Reset finished";
1436
1437 SetDecoderState(kDecoding);
1438 ScheduleDecodeBufferTaskIfNeeded();
1439 }
1440
1441 void V4L2SliceVideoDecodeAccelerator::SetDecoderState(State state) {
1442 DVLOGF(3) << "state=" << state;
1443
1444 // We can touch decoder_state_ only if this is the decoder thread or the
1445 // decoder thread isn't running.
1446 if (decoder_thread_.IsRunning() &&
1447 !decoder_thread_proxy_->BelongsToCurrentThread()) {
1448 decoder_thread_proxy_->PostTask(
1449 FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::SetDecoderState,
1450 base::Unretained(this), state));
1451 } else {
1452 state_ = state;
1453 }
1454 }
1455
1456 V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::V4L2H264Accelerator(
1457 V4L2SliceVideoDecodeAccelerator* v4l2_dec)
1458 : num_slices_(0), v4l2_dec_(v4l2_dec) {
1459 DCHECK(v4l2_dec_);
1460 }
1461
1462 scoped_refptr<H264Picture>
1463 V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::CreateH264Picture() {
1464 scoped_refptr<V4L2DecodeSurface> dec_surface = v4l2_dec_->CreateSurface();
1465 if (!dec_surface)
1466 return nullptr;
1467
1468 return new V4L2H264Picture(dec_surface);
1469 }
1470
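// Flattens a list of H264Pictures into the fixed-size array of DPB indices
// expected by the V4L2 API; unused entries are filled with VIDEO_MAX_FRAME.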
1471 void V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::
1472 H264PictureListToDPBIndicesList(const H264Picture::Vector& src_pic_list,
1473 uint8_t dst_list[32]) {
1474 size_t i = 0;
1475 for (auto& pic : src_pic_list)
1476 dst_list[i++] = pic ? pic->dpb_position : VIDEO_MAX_FRAME;
1477
1478 while (i < 32)
1479 dst_list[i++] = VIDEO_MAX_FRAME;
1480 }
1481
1482 void V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::H264DPBToV4L2DPB(
1483 const H264DPB& dpb,
1484 std::vector<scoped_refptr<V4L2DecodeSurface>>* ref_surfaces) {
1485 memset(v4l2_decode_param_.dpb, 0, sizeof(v4l2_decode_param_.dpb));
1486 size_t i = 0;
1487 for (const auto& pic : dpb) {
1488 struct v4l2_h264_dpb_entry& entry = v4l2_decode_param_.dpb[i++];
1489 scoped_refptr<V4L2DecodeSurface> dec_surface =
1490 H264PictureToV4L2DecodeSurface(pic);
1491 entry.buf_index = dec_surface->output_record();
1492 entry.frame_num = pic->frame_num;
1493 entry.pic_num = pic->pic_num;
1494 entry.top_field_order_cnt = pic->top_field_order_cnt;
1495 entry.bottom_field_order_cnt = pic->bottom_field_order_cnt;
1496 entry.flags = (pic->ref ? V4L2_H264_DPB_ENTRY_FLAG_ACTIVE : 0) |
1497 (pic->long_term ? V4L2_H264_DPB_ENTRY_FLAG_LONG_TERM : 0);
1498
1499 ref_surfaces->push_back(dec_surface);
1500 }
1501 }
1502
1503 bool V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::SubmitFrameMetadata(
1504 const media::H264SPS* sps,
1505 const media::H264PPS* pps,
1506 const H264DPB& dpb,
1507 const H264Picture::Vector& ref_pic_listp0,
1508 const H264Picture::Vector& ref_pic_listb0,
1509 const H264Picture::Vector& ref_pic_listb1,
1510 const scoped_refptr<H264Picture>& pic) {
1511 struct v4l2_ext_control ctrl;
1512 std::vector<struct v4l2_ext_control> ctrls;
1513
1514 struct v4l2_ctrl_h264_sps v4l2_sps;
1515 memset(&v4l2_sps, 0, sizeof(v4l2_sps));
1516 v4l2_sps.constraint_set_flags =
1517 (sps->constraint_set0_flag ? V4L2_H264_SPS_CONSTRAINT_SET0_FLAG : 0) |
1518 (sps->constraint_set1_flag ? V4L2_H264_SPS_CONSTRAINT_SET1_FLAG : 0) |
1519 (sps->constraint_set2_flag ? V4L2_H264_SPS_CONSTRAINT_SET2_FLAG : 0) |
1520 (sps->constraint_set3_flag ? V4L2_H264_SPS_CONSTRAINT_SET3_FLAG : 0) |
1521 (sps->constraint_set4_flag ? V4L2_H264_SPS_CONSTRAINT_SET4_FLAG : 0) |
1522 (sps->constraint_set5_flag ? V4L2_H264_SPS_CONSTRAINT_SET5_FLAG : 0);
1523 #define SPS_TO_V4L2SPS(a) v4l2_sps.a = sps->a
1524 SPS_TO_V4L2SPS(profile_idc);
1525 SPS_TO_V4L2SPS(level_idc);
1526 SPS_TO_V4L2SPS(seq_parameter_set_id);
1527 SPS_TO_V4L2SPS(chroma_format_idc);
1528 SPS_TO_V4L2SPS(bit_depth_luma_minus8);
1529 SPS_TO_V4L2SPS(bit_depth_chroma_minus8);
1530 SPS_TO_V4L2SPS(log2_max_frame_num_minus4);
1531 SPS_TO_V4L2SPS(pic_order_cnt_type);
1532 SPS_TO_V4L2SPS(log2_max_pic_order_cnt_lsb_minus4);
1533 SPS_TO_V4L2SPS(offset_for_non_ref_pic);
1534 SPS_TO_V4L2SPS(offset_for_top_to_bottom_field);
1535 SPS_TO_V4L2SPS(num_ref_frames_in_pic_order_cnt_cycle);
1536
1537 COMPILE_ASSERT(arraysize(v4l2_sps.offset_for_ref_frame) ==
1538 arraysize(sps->offset_for_ref_frame),
1539 offset_for_ref_frame_arrays_must_be_same_size);
1540 for (size_t i = 0; i < arraysize(v4l2_sps.offset_for_ref_frame); ++i)
1541 v4l2_sps.offset_for_ref_frame[i] = sps->offset_for_ref_frame[i];
1542 SPS_TO_V4L2SPS(max_num_ref_frames);
1543 SPS_TO_V4L2SPS(pic_width_in_mbs_minus1);
1544 SPS_TO_V4L2SPS(pic_height_in_map_units_minus1);
1545 #undef SPS_TO_V4L2SPS
1546
1547 #define SET_V4L2_SPS_FLAG_IF(cond, flag) \
1548 v4l2_sps.flags |= ((sps->cond) ? (flag) : 0)
1549 SET_V4L2_SPS_FLAG_IF(separate_colour_plane_flag,
1550 V4L2_H264_SPS_FLAG_SEPARATE_COLOUR_PLANE);
1551 SET_V4L2_SPS_FLAG_IF(qpprime_y_zero_transform_bypass_flag,
1552 V4L2_H264_SPS_FLAG_QPPRIME_Y_ZERO_TRANSFORM_BYPASS);
1553 SET_V4L2_SPS_FLAG_IF(delta_pic_order_always_zero_flag,
1554 V4L2_H264_SPS_FLAG_DELTA_PIC_ORDER_ALWAYS_ZERO);
1555 SET_V4L2_SPS_FLAG_IF(gaps_in_frame_num_value_allowed_flag,
1556 V4L2_H264_SPS_FLAG_GAPS_IN_FRAME_NUM_VALUE_ALLOWED);
1557 SET_V4L2_SPS_FLAG_IF(frame_mbs_only_flag, V4L2_H264_SPS_FLAG_FRAME_MBS_ONLY);
1558 SET_V4L2_SPS_FLAG_IF(mb_adaptive_frame_field_flag,
1559 V4L2_H264_SPS_FLAG_MB_ADAPTIVE_FRAME_FIELD);
1560 SET_V4L2_SPS_FLAG_IF(direct_8x8_inference_flag,
1561 V4L2_H264_SPS_FLAG_DIRECT_8X8_INFERENCE);
1562 #undef SET_V4L2_SPS_FLAG_IF
1563 memset(&ctrl, 0, sizeof(ctrl));
1564 ctrl.id = V4L2_CID_MPEG_VIDEO_H264_SPS;
1565 ctrl.size = sizeof(v4l2_sps);
1566 ctrl.p_h264_sps = &v4l2_sps;
1567 ctrls.push_back(ctrl);
1568
1569 struct v4l2_ctrl_h264_pps v4l2_pps;
1570 memset(&v4l2_pps, 0, sizeof(v4l2_pps));
1571 #define PPS_TO_V4L2PPS(a) v4l2_pps.a = pps->a
1572 PPS_TO_V4L2PPS(pic_parameter_set_id);
1573 PPS_TO_V4L2PPS(seq_parameter_set_id);
1574 PPS_TO_V4L2PPS(num_slice_groups_minus1);
1575 PPS_TO_V4L2PPS(num_ref_idx_l0_default_active_minus1);
1576 PPS_TO_V4L2PPS(num_ref_idx_l1_default_active_minus1);
1577 PPS_TO_V4L2PPS(weighted_bipred_idc);
1578 PPS_TO_V4L2PPS(pic_init_qp_minus26);
1579 PPS_TO_V4L2PPS(pic_init_qs_minus26);
1580 PPS_TO_V4L2PPS(chroma_qp_index_offset);
1581 PPS_TO_V4L2PPS(second_chroma_qp_index_offset);
1582 #undef PPS_TO_V4L2PPS
1583
1584 #define SET_V4L2_PPS_FLAG_IF(cond, flag) \
1585 v4l2_pps.flags |= ((pps->cond) ? (flag) : 0)
1586 SET_V4L2_PPS_FLAG_IF(entropy_coding_mode_flag,
1587 V4L2_H264_PPS_FLAG_ENTROPY_CODING_MODE);
1588 SET_V4L2_PPS_FLAG_IF(
1589 bottom_field_pic_order_in_frame_present_flag,
1590 V4L2_H264_PPS_FLAG_BOTTOM_FIELD_PIC_ORDER_IN_FRAME_PRESENT);
1591 SET_V4L2_PPS_FLAG_IF(weighted_pred_flag, V4L2_H264_PPS_FLAG_WEIGHTED_PRED);
1592 SET_V4L2_PPS_FLAG_IF(deblocking_filter_control_present_flag,
1593 V4L2_H264_PPS_FLAG_DEBLOCKING_FILTER_CONTROL_PRESENT);
1594 SET_V4L2_PPS_FLAG_IF(constrained_intra_pred_flag,
1595 V4L2_H264_PPS_FLAG_CONSTRAINED_INTRA_PRED);
1596 SET_V4L2_PPS_FLAG_IF(redundant_pic_cnt_present_flag,
1597 V4L2_H264_PPS_FLAG_REDUNDANT_PIC_CNT_PRESENT);
1598 SET_V4L2_PPS_FLAG_IF(transform_8x8_mode_flag,
1599 V4L2_H264_PPS_FLAG_TRANSFORM_8X8_MODE);
1600 SET_V4L2_PPS_FLAG_IF(pic_scaling_matrix_present_flag,
1601 V4L2_H264_PPS_FLAG_PIC_SCALING_MATRIX_PRESENT);
1602 #undef SET_V4L2_PPS_FLAG_IF
1603 memset(&ctrl, 0, sizeof(ctrl));
1604 ctrl.id = V4L2_CID_MPEG_VIDEO_H264_PPS;
1605 ctrl.size = sizeof(v4l2_pps);
1606 ctrl.p_h264_pps = &v4l2_pps;
1607 ctrls.push_back(ctrl);
1608
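// Copy the PPS 4x4 and 8x8 scaling lists into the V4L2 scaling matrix
// control; the checks below only require the V4L2 arrays to be no larger
// than the parser's.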
1609 struct v4l2_ctrl_h264_scaling_matrix v4l2_scaling_matrix;
1610 memset(&v4l2_scaling_matrix, 0, sizeof(v4l2_scaling_matrix));
1611 static_assert(arraysize(v4l2_scaling_matrix.scaling_list_4x4) <=
1612 arraysize(pps->scaling_list4x4) &&
1613 arraysize(v4l2_scaling_matrix.scaling_list_4x4[0]) <=
1614 arraysize(pps->scaling_list4x4[0]) &&
1615 arraysize(v4l2_scaling_matrix.scaling_list_8x8) <=
1616 arraysize(pps->scaling_list8x8) &&
1617 arraysize(v4l2_scaling_matrix.scaling_list_8x8[0]) <=
1618 arraysize(pps->scaling_list8x8[0]),
1619 "V4L2 scaling lists must not be larger than the parser's");
1620 for (size_t i = 0; i < arraysize(v4l2_scaling_matrix.scaling_list_4x4); ++i) {
1621 for (size_t j = 0; j < arraysize(v4l2_scaling_matrix.scaling_list_4x4[i]);
1622 ++j) {
1623 v4l2_scaling_matrix.scaling_list_4x4[i][j] = pps->scaling_list4x4[i][j];
1624 }
1625 }
1626 for (size_t i = 0; i < arraysize(v4l2_scaling_matrix.scaling_list_8x8); ++i) {
1627 for (size_t j = 0; j < arraysize(v4l2_scaling_matrix.scaling_list_8x8[i]);
1628 ++j) {
1629 v4l2_scaling_matrix.scaling_list_8x8[i][j] = pps->scaling_list8x8[i][j];
1630 }
1631 }
1632 memset(&ctrl, 0, sizeof(ctrl));
1633 ctrl.id = V4L2_CID_MPEG_VIDEO_H264_SCALING_MATRIX;
1634 ctrl.size = sizeof(v4l2_scaling_matrix);
1635 ctrl.p_h264_scal_mtrx = &v4l2_scaling_matrix;
1636 ctrls.push_back(ctrl);
1637
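// Bundle the SPS, PPS and scaling matrix controls and submit them tagged
// with this surface's config store, so that the driver can associate them
// with this particular decode.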
1638 scoped_refptr<V4L2DecodeSurface> dec_surface =
1639 H264PictureToV4L2DecodeSurface(pic);
1640
1641 struct v4l2_ext_controls ext_ctrls;
1642 memset(&ext_ctrls, 0, sizeof(ext_ctrls));
1643 ext_ctrls.count = ctrls.size();
1644 ext_ctrls.controls = &ctrls[0];
1645 ext_ctrls.config_store = dec_surface->config_store();
1646 if (!v4l2_dec_->SubmitExtControls(&ext_ctrls)) return false;
1647
1648 H264PictureListToDPBIndicesList(ref_pic_listp0,
1649 v4l2_decode_param_.ref_pic_list_p0);
1650 H264PictureListToDPBIndicesList(ref_pic_listb0,
1651 v4l2_decode_param_.ref_pic_list_b0);
1652 H264PictureListToDPBIndicesList(ref_pic_listb1,
1653 v4l2_decode_param_.ref_pic_list_b1);
1654
1655 std::vector<scoped_refptr<V4L2DecodeSurface>> ref_surfaces;
1656 H264DPBToV4L2DPB(dpb, &ref_surfaces);
1657 dec_surface->SetReferenceSurfaces(ref_surfaces);
1658
1659 return true;
1660 }
1661
1662 bool V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::SubmitSlice(
1663 const media::H264PPS* pps,
1664 const media::H264SliceHeader* slice_hdr,
1665 const H264Picture::Vector& ref_pic_list0,
1666 const H264Picture::Vector& ref_pic_list1,
1667 const scoped_refptr<H264Picture>& pic,
1668 const uint8_t* data,
1669 size_t size) {
1670 if (num_slices_ == kMaxSlices) {
1671 LOGF(ERROR) << "Over limit of supported slices per frame";
1672 return false;
1673 }
1674
1675 struct v4l2_ctrl_h264_slice_param& v4l2_slice_param =
1676 v4l2_slice_params_[num_slices_++];
1677 memset(&v4l2_slice_param, 0, sizeof(v4l2_slice_param));
1678
1679 v4l2_slice_param.size = size;
1680 #define SHDR_TO_V4L2SPARM(a) v4l2_slice_param.a = slice_hdr->a
1681 SHDR_TO_V4L2SPARM(header_bit_size);
1682 SHDR_TO_V4L2SPARM(first_mb_in_slice);
1683 SHDR_TO_V4L2SPARM(slice_type);
1684 SHDR_TO_V4L2SPARM(pic_parameter_set_id);
1685 SHDR_TO_V4L2SPARM(colour_plane_id);
1686 SHDR_TO_V4L2SPARM(frame_num);
1687 SHDR_TO_V4L2SPARM(idr_pic_id);
1688 SHDR_TO_V4L2SPARM(pic_order_cnt_lsb);
1689 SHDR_TO_V4L2SPARM(delta_pic_order_cnt_bottom);
1690 SHDR_TO_V4L2SPARM(delta_pic_order_cnt0);
1691 SHDR_TO_V4L2SPARM(delta_pic_order_cnt1);
1692 SHDR_TO_V4L2SPARM(redundant_pic_cnt);
1693 SHDR_TO_V4L2SPARM(dec_ref_pic_marking_bit_size);
1694 SHDR_TO_V4L2SPARM(cabac_init_idc);
1695 SHDR_TO_V4L2SPARM(slice_qp_delta);
1696 SHDR_TO_V4L2SPARM(slice_qs_delta);
1697 SHDR_TO_V4L2SPARM(disable_deblocking_filter_idc);
1698 SHDR_TO_V4L2SPARM(slice_alpha_c0_offset_div2);
1699 SHDR_TO_V4L2SPARM(slice_beta_offset_div2);
1700 SHDR_TO_V4L2SPARM(num_ref_idx_l0_active_minus1);
1701 SHDR_TO_V4L2SPARM(num_ref_idx_l1_active_minus1);
1702 SHDR_TO_V4L2SPARM(pic_order_cnt_bit_size);
1703 #undef SHDR_TO_V4L2SPARM
1704
1705 #define SET_V4L2_SPARM_FLAG_IF(cond, flag) \
1706 v4l2_slice_param.flags |= ((slice_hdr->cond) ? (flag) : 0)
1707 SET_V4L2_SPARM_FLAG_IF(field_pic_flag, V4L2_SLICE_FLAG_FIELD_PIC);
1708 SET_V4L2_SPARM_FLAG_IF(bottom_field_flag, V4L2_SLICE_FLAG_BOTTOM_FIELD);
1709 SET_V4L2_SPARM_FLAG_IF(direct_spatial_mv_pred_flag,
1710 V4L2_SLICE_FLAG_DIRECT_SPATIAL_MV_PRED);
1711 SET_V4L2_SPARM_FLAG_IF(sp_for_switch_flag, V4L2_SLICE_FLAG_SP_FOR_SWITCH);
1712 #undef SET_V4L2_SPARM_FLAG_IF
1713
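// Fill the prediction weight table only when explicit weighted prediction
// applies: P/SP slices with weighted_pred_flag set, or B slices with
// weighted_bipred_idc equal to 1.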
1714 struct v4l2_h264_pred_weight_table* pred_weight_table =
1715 &v4l2_slice_param.pred_weight_table;
1716
1717 if (((slice_hdr->IsPSlice() || slice_hdr->IsSPSlice()) &&
1718 pps->weighted_pred_flag) ||
1719 (slice_hdr->IsBSlice() && pps->weighted_bipred_idc == 1)) {
1720 pred_weight_table->luma_log2_weight_denom =
1721 slice_hdr->luma_log2_weight_denom;
1722 pred_weight_table->chroma_log2_weight_denom =
1723 slice_hdr->chroma_log2_weight_denom;
1724
1725 struct v4l2_h264_weight_factors* factorsl0 =
1726 &pred_weight_table->weight_factors[0];
1727
1728 for (int i = 0; i < 32; ++i) {
1729 factorsl0->luma_weight[i] =
1730 slice_hdr->pred_weight_table_l0.luma_weight[i];
1731 factorsl0->luma_offset[i] =
1732 slice_hdr->pred_weight_table_l0.luma_offset[i];
1733
1734 for (int j = 0; j < 2; ++j) {
1735 factorsl0->chroma_weight[i][j] =
1736 slice_hdr->pred_weight_table_l0.chroma_weight[i][j];
1737 factorsl0->chroma_offset[i][j] =
1738 slice_hdr->pred_weight_table_l0.chroma_offset[i][j];
1739 }
1740 }
1741
1742 if (slice_hdr->IsBSlice()) {
1743 struct v4l2_h264_weight_factors* factorsl1 =
1744 &pred_weight_table->weight_factors[1];
1745
1746 for (int i = 0; i < 32; ++i) {
1747 factorsl1->luma_weight[i] =
1748 slice_hdr->pred_weight_table_l1.luma_weight[i];
1749 factorsl1->luma_offset[i] =
1750 slice_hdr->pred_weight_table_l1.luma_offset[i];
1751
1752 for (int j = 0; j < 2; ++j) {
1753 factorsl1->chroma_weight[i][j] =
1754 slice_hdr->pred_weight_table_l1.chroma_weight[i][j];
1755 factorsl1->chroma_offset[i][j] =
1756 slice_hdr->pred_weight_table_l1.chroma_offset[i][j];
1757 }
1758 }
1759 }
1760 }
1761
1762 H264PictureListToDPBIndicesList(ref_pic_list0,
1763 v4l2_slice_param.ref_pic_list0);
1764 H264PictureListToDPBIndicesList(ref_pic_list1,
1765 v4l2_slice_param.ref_pic_list1);
1766
1767 scoped_refptr<V4L2DecodeSurface> dec_surface =
1768 H264PictureToV4L2DecodeSurface(pic);
1769
1770 v4l2_decode_param_.nal_ref_idc = slice_hdr->nal_ref_idc;
1771
1772 // TODO(posciak): Don't add start code back here, but have it passed from
1773 // the parser.
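// Prepend a three-byte Annex B start code (00 00 01), which the parser
// strips but the hardware appears to expect.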
1774 size_t data_copy_size = size + 3;
1775 scoped_ptr<uint8_t[]> data_copy(new uint8_t[data_copy_size]);
1776 memset(data_copy.get(), 0, data_copy_size);
1777 data_copy[2] = 0x01;
1778 memcpy(data_copy.get() + 3, data, size);
1779 return v4l2_dec_->SubmitSlice(dec_surface->input_record(), data_copy.get(),
1780 data_copy_size);
1781 }
1782
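// Append |size| bytes of slice data to the input buffer at |index|.
// Successive slices of the same frame accumulate in the same buffer.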
1783 bool V4L2SliceVideoDecodeAccelerator::SubmitSlice(int index,
1784 const uint8_t* data,
1785 size_t size) {
1786 DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
1787
1788 InputRecord& input_record = input_buffer_map_[index];
1789
1790 if (input_record.bytes_used + size > input_record.length) {
1791 DVLOGF(1) << "Input buffer too small";
1792 return false;
1793 }
1794
1795 memcpy(static_cast<uint8*>(input_record.address) + input_record.bytes_used,
1796 data, size);
1797 input_record.bytes_used += size;
1798
1799 return true;
1800 }
1801
1802 bool V4L2SliceVideoDecodeAccelerator::SubmitExtControls(
1803 struct v4l2_ext_controls* ext_ctrls) {
1804 DCHECK_GT(ext_ctrls->config_store, 0u);
1805 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_EXT_CTRLS, ext_ctrls);
1806 return true;
1807 }
1808
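// Submit the slice and decode parameters accumulated for this frame as
// extended controls, reset the per-frame state, and queue the surface for
// decoding.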
1809 bool V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::SubmitDecode(
1810 const scoped_refptr<H264Picture>& pic) {
1811 scoped_refptr<V4L2DecodeSurface> dec_surface =
1812 H264PictureToV4L2DecodeSurface(pic);
1813
1814 v4l2_decode_param_.num_slices = num_slices_;
1815 v4l2_decode_param_.idr_pic_flag = pic->idr;
1816 v4l2_decode_param_.top_field_order_cnt = pic->top_field_order_cnt;
1817 v4l2_decode_param_.bottom_field_order_cnt = pic->bottom_field_order_cnt;
1818
1819 struct v4l2_ext_control ctrl;
1820 std::vector<struct v4l2_ext_control> ctrls;
1821
1822 memset(&ctrl, 0, sizeof(ctrl));
1823 ctrl.id = V4L2_CID_MPEG_VIDEO_H264_SLICE_PARAM;
1824 ctrl.size = sizeof(v4l2_slice_params_);
1825 ctrl.p_h264_slice_param = v4l2_slice_params_;
1826 ctrls.push_back(ctrl);
1827
1828 memset(&ctrl, 0, sizeof(ctrl));
1829 ctrl.id = V4L2_CID_MPEG_VIDEO_H264_DECODE_PARAM;
1830 ctrl.size = sizeof(v4l2_decode_param_);
1831 ctrl.p_h264_decode_param = &v4l2_decode_param_;
1832 ctrls.push_back(ctrl);
1833
1834 struct v4l2_ext_controls ext_ctrls;
1835 memset(&ext_ctrls, 0, sizeof(ext_ctrls));
1836 ext_ctrls.count = ctrls.size();
1837 ext_ctrls.controls = &ctrls[0];
1838 ext_ctrls.config_store = dec_surface->config_store();
1839 if (!v4l2_dec_->SubmitExtControls(&ext_ctrls)) return false;
1840
1841 num_slices_ = 0;
1842 memset(&v4l2_decode_param_, 0, sizeof(v4l2_decode_param_));
1843 memset(&v4l2_slice_params_, 0, sizeof(v4l2_slice_params_));
1844
1845 v4l2_dec_->DecodeSurface(dec_surface);
1846 return true;
1847 }
1848
1849 bool V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::OutputPicture(
1850 const scoped_refptr<H264Picture>& pic) {
1851 scoped_refptr<V4L2DecodeSurface> dec_surface =
1852 H264PictureToV4L2DecodeSurface(pic);
1853 v4l2_dec_->SurfaceReady(dec_surface);
1854 return true;
1855 }
1856
1857 scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>
1858 V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::
1859 H264PictureToV4L2DecodeSurface(const scoped_refptr<H264Picture>& pic) {
1860 V4L2H264Picture* v4l2_pic = pic->AsV4L2H264Picture();
1861 CHECK(v4l2_pic);
1862 return v4l2_pic->dec_surface();
1863 }
1864
1865 V4L2SliceVideoDecodeAccelerator::V4L2VP8Accelerator::V4L2VP8Accelerator(
1866 V4L2SliceVideoDecodeAccelerator* v4l2_dec)
1867 : v4l2_dec_(v4l2_dec) {
1868 DCHECK(v4l2_dec_);
1869 }
1870
1871 scoped_refptr<VP8Picture>
1872 V4L2SliceVideoDecodeAccelerator::V4L2VP8Accelerator::CreateVP8Picture() {
1873 scoped_refptr<V4L2DecodeSurface> dec_surface = v4l2_dec_->CreateSurface();
1874 if (!dec_surface)
1875 return nullptr;
1876
1877 return new V4L2VP8Picture(dec_surface);
1878 }
1879
1880 #define ARRAY_MEMCPY_CHECKED(to, from) \
1881 do { \
1882 static_assert(sizeof(to) == sizeof(from), \
1883 #from " and " #to " arrays must be of same size"); \
1884 memcpy(to, from, sizeof(to)); \
1885 } while (0)
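// ARRAY_MEMCPY_CHECKED requires both arguments to be real arrays (not
// pointers); otherwise sizeof() would yield the pointer size. A minimal
// usage sketch, with hypothetical arrays for illustration only:
//
//   uint8_t src[4] = {1, 2, 3, 4};
//   uint8_t dst[4];
//   ARRAY_MEMCPY_CHECKED(dst, src);  // OK: both are arrays of equal size.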
1886
1887 static void FillV4L2SegmentationHeader(
1888 const media::VP8SegmentationHeader& vp8_sgmnt_hdr,
1889 struct v4l2_vp8_sgmnt_hdr* v4l2_sgmnt_hdr) {
1890 #define SET_V4L2_SGMNT_HDR_FLAG_IF(cond, flag) \
1891 v4l2_sgmnt_hdr->flags |= ((vp8_sgmnt_hdr.cond) ? (flag) : 0)
1892 SET_V4L2_SGMNT_HDR_FLAG_IF(segmentation_enabled,
1893 V4L2_VP8_SEGMNT_HDR_FLAG_ENABLED);
1894 SET_V4L2_SGMNT_HDR_FLAG_IF(update_mb_segmentation_map,
1895 V4L2_VP8_SEGMNT_HDR_FLAG_UPDATE_MAP);
1896 SET_V4L2_SGMNT_HDR_FLAG_IF(update_segment_feature_data,
1897 V4L2_VP8_SEGMNT_HDR_FLAG_UPDATE_FEATURE_DATA);
1898 #undef SET_V4L2_SGMNT_HDR_FLAG_IF
1899 v4l2_sgmnt_hdr->segment_feature_mode = vp8_sgmnt_hdr.segment_feature_mode;
1900
1901 ARRAY_MEMCPY_CHECKED(v4l2_sgmnt_hdr->quant_update,
1902 vp8_sgmnt_hdr.quantizer_update_value);
1903 ARRAY_MEMCPY_CHECKED(v4l2_sgmnt_hdr->lf_update,
1904 vp8_sgmnt_hdr.lf_update_value);
1905 ARRAY_MEMCPY_CHECKED(v4l2_sgmnt_hdr->segment_probs,
1906 vp8_sgmnt_hdr.segment_prob);
1907 }
1908
1909 static void FillV4L2LoopfilterHeader(
1910 const media::VP8LoopFilterHeader& vp8_loopfilter_hdr,
1911 struct v4l2_vp8_loopfilter_hdr* v4l2_lf_hdr) {
1912 #define SET_V4L2_LF_HDR_FLAG_IF(cond, flag) \
1913 v4l2_lf_hdr->flags |= ((vp8_loopfilter_hdr.cond) ? (flag) : 0)
1914 SET_V4L2_LF_HDR_FLAG_IF(loop_filter_adj_enable, V4L2_VP8_LF_HDR_ADJ_ENABLE);
1915 SET_V4L2_LF_HDR_FLAG_IF(mode_ref_lf_delta_update,
1916 V4L2_VP8_LF_HDR_DELTA_UPDATE);
1917 #undef SET_V4L2_LF_HDR_FLAG_IF
1918
1919 #define LF_HDR_TO_V4L2_LF_HDR(a) v4l2_lf_hdr->a = vp8_loopfilter_hdr.a;
1920 LF_HDR_TO_V4L2_LF_HDR(type);
1921 LF_HDR_TO_V4L2_LF_HDR(level);
1922 LF_HDR_TO_V4L2_LF_HDR(sharpness_level);
1923 #undef LF_HDR_TO_V4L2_LF_HDR
1924
1925 ARRAY_MEMCPY_CHECKED(v4l2_lf_hdr->ref_frm_delta_magnitude,
1926 vp8_loopfilter_hdr.ref_frame_delta_magnitude);
1927 ARRAY_MEMCPY_CHECKED(v4l2_lf_hdr->mb_mode_delta_magnitude,
1928 vp8_loopfilter_hdr.mb_mode_delta_magnitude);
1929 }
1930
1931 static void FillV4L2QuantizationHeader(
1932 const media::VP8QuantizationHeader& vp8_quant_hdr,
1933 struct v4l2_vp8_quantization_hdr* v4l2_quant_hdr) {
1934 v4l2_quant_hdr->y_ac_qi = vp8_quant_hdr.y_ac_qi;
1935 v4l2_quant_hdr->y_dc_delta = vp8_quant_hdr.y_dc_delta_magnitude;
1936 v4l2_quant_hdr->y2_dc_delta = vp8_quant_hdr.y2_dc_delta_magnitude;
1937 v4l2_quant_hdr->y2_ac_delta = vp8_quant_hdr.y2_ac_delta_magnitude;
1938 v4l2_quant_hdr->uv_dc_delta = vp8_quant_hdr.uv_dc_delta_magnitude;
1939 v4l2_quant_hdr->uv_ac_delta = vp8_quant_hdr.uv_ac_delta_magnitude;
1940 }
1941
1942 static void FillV4L2EntropyHeader(
1943 const media::VP8EntropyHeader& vp8_entropy_hdr,
1944 struct v4l2_vp8_entropy_hdr* v4l2_entropy_hdr) {
1945 ARRAY_MEMCPY_CHECKED(v4l2_entropy_hdr->coeff_probs,
1946 vp8_entropy_hdr.coeff_probs);
1947 ARRAY_MEMCPY_CHECKED(v4l2_entropy_hdr->y_mode_probs,
1948 vp8_entropy_hdr.y_mode_probs);
1949 ARRAY_MEMCPY_CHECKED(v4l2_entropy_hdr->uv_mode_probs,
1950 vp8_entropy_hdr.uv_mode_probs);
1951 ARRAY_MEMCPY_CHECKED(v4l2_entropy_hdr->mv_probs,
1952 vp8_entropy_hdr.mv_probs);
1953 }
1954
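// Translate the parsed VP8 frame header into the V4L2 frame header control,
// wire up the last/golden/alt reference frames, then submit the control, the
// frame data and the surface for decoding.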
1955 bool V4L2SliceVideoDecodeAccelerator::V4L2VP8Accelerator::SubmitDecode(
1956 const scoped_refptr<VP8Picture>& pic,
1957 const media::VP8FrameHeader* frame_hdr,
1958 const scoped_refptr<VP8Picture>& last_frame,
1959 const scoped_refptr<VP8Picture>& golden_frame,
1960 const scoped_refptr<VP8Picture>& alt_frame) {
1961 struct v4l2_ctrl_vp8_frame_hdr v4l2_frame_hdr;
1962 memset(&v4l2_frame_hdr, 0, sizeof(v4l2_frame_hdr));
1963
1964 #define FHDR_TO_V4L2_FHDR(a) v4l2_frame_hdr.a = frame_hdr->a
1965 FHDR_TO_V4L2_FHDR(key_frame);
1966 FHDR_TO_V4L2_FHDR(version);
1967 FHDR_TO_V4L2_FHDR(width);
1968 FHDR_TO_V4L2_FHDR(horizontal_scale);
1969 FHDR_TO_V4L2_FHDR(height);
1970 FHDR_TO_V4L2_FHDR(vertical_scale);
1971 FHDR_TO_V4L2_FHDR(sign_bias_golden);
1972 FHDR_TO_V4L2_FHDR(sign_bias_alternate);
1973 FHDR_TO_V4L2_FHDR(prob_skip_false);
1974 FHDR_TO_V4L2_FHDR(prob_intra);
1975 FHDR_TO_V4L2_FHDR(prob_last);
1976 FHDR_TO_V4L2_FHDR(prob_gf);
1977 FHDR_TO_V4L2_FHDR(bool_dec_range);
1978 FHDR_TO_V4L2_FHDR(bool_dec_value);
1979 FHDR_TO_V4L2_FHDR(bool_dec_count);
1980 #undef FHDR_TO_V4L2_FHDR
1981
1982 #define SET_V4L2_FRM_HDR_FLAG_IF(cond, flag) \
1983 v4l2_frame_hdr.flags |= ((frame_hdr->cond) ? (flag) : 0)
1984 SET_V4L2_FRM_HDR_FLAG_IF(is_experimental,
1985 V4L2_VP8_FRAME_HDR_FLAG_EXPERIMENTAL);
1986 SET_V4L2_FRM_HDR_FLAG_IF(show_frame, V4L2_VP8_FRAME_HDR_FLAG_SHOW_FRAME);
1987 SET_V4L2_FRM_HDR_FLAG_IF(mb_no_skip_coeff,
1988 V4L2_VP8_FRAME_HDR_FLAG_MB_NO_SKIP_COEFF);
1989 #undef SET_V4L2_FRM_HDR_FLAG_IF
1990
1991 FillV4L2SegmentationHeader(frame_hdr->segmentation_hdr,
1992 &v4l2_frame_hdr.sgmnt_hdr);
1993
1994 FillV4L2LoopfilterHeader(frame_hdr->loopfilter_hdr, &v4l2_frame_hdr.lf_hdr);
1995
1996 FillV4L2QuantizationHeader(frame_hdr->quantization_hdr,
1997 &v4l2_frame_hdr.quant_hdr);
1998
1999 FillV4L2EntropyHeader(frame_hdr->entropy_hdr, &v4l2_frame_hdr.entropy_hdr);
2000
2001 v4l2_frame_hdr.first_part_size =
2002 base::checked_cast<__u32>(frame_hdr->first_part_size);
2003 v4l2_frame_hdr.first_part_offset =
2004 base::checked_cast<__u32>(frame_hdr->first_part_offset);
2005 v4l2_frame_hdr.macroblock_bit_offset =
2006 base::checked_cast<__u32>(frame_hdr->macroblock_bit_offset);
2007 v4l2_frame_hdr.num_dct_parts = frame_hdr->num_of_dct_partitions;
2008
2009 static_assert(arraysize(v4l2_frame_hdr.dct_part_sizes) ==
2010 arraysize(frame_hdr->dct_partition_sizes),
2011 "DCT partition size arrays must have equal number of elements");
2012 for (size_t i = 0; i < frame_hdr->num_of_dct_partitions &&
2013 i < arraysize(v4l2_frame_hdr.dct_part_sizes); ++i)
2014 v4l2_frame_hdr.dct_part_sizes[i] = frame_hdr->dct_partition_sizes[i];
2015
2016 scoped_refptr<V4L2DecodeSurface> dec_surface =
2017 VP8PictureToV4L2DecodeSurface(pic);
2018 std::vector<scoped_refptr<V4L2DecodeSurface>> ref_surfaces;
2019
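// For each present reference frame, pass the output record of the surface
// holding it to the driver and collect that surface so it can be attached to
// |dec_surface| as a reference below; VIDEO_MAX_FRAME marks an absent
// reference.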
2020 if (last_frame) {
2021 scoped_refptr<V4L2DecodeSurface> last_frame_surface =
2022 VP8PictureToV4L2DecodeSurface(last_frame);
2023 v4l2_frame_hdr.last_frame = last_frame_surface->output_record();
2024 ref_surfaces.push_back(last_frame_surface);
2025 } else {
2026 v4l2_frame_hdr.last_frame = VIDEO_MAX_FRAME;
2027 }
2028
2029 if (golden_frame) {
2030 scoped_refptr<V4L2DecodeSurface> golden_frame_surface =
2031 VP8PictureToV4L2DecodeSurface(golden_frame);
2032 v4l2_frame_hdr.golden_frame = golden_frame_surface->output_record();
2033 ref_surfaces.push_back(golden_frame_surface);
2034 } else {
2035 v4l2_frame_hdr.golden_frame = VIDEO_MAX_FRAME;
2036 }
2037
2038 if (alt_frame) {
2039 scoped_refptr<V4L2DecodeSurface> alt_frame_surface =
2040 VP8PictureToV4L2DecodeSurface(alt_frame);
2041 v4l2_frame_hdr.alt_frame = alt_frame_surface->output_record();
2042 ref_surfaces.push_back(alt_frame_surface);
2043 } else {
2044 v4l2_frame_hdr.alt_frame = VIDEO_MAX_FRAME;
2045 }
2046
2047 struct v4l2_ext_control ctrl;
2048 memset(&ctrl, 0, sizeof(ctrl));
2049 ctrl.id = V4L2_CID_MPEG_VIDEO_VP8_FRAME_HDR;
2050 ctrl.size = sizeof(v4l2_frame_hdr);
2051 ctrl.p_vp8_frame_hdr = &v4l2_frame_hdr;
2052
2053 struct v4l2_ext_controls ext_ctrls;
2054 memset(&ext_ctrls, 0, sizeof(ext_ctrls));
2055 ext_ctrls.count = 1;
2056 ext_ctrls.controls = &ctrl;
2057 ext_ctrls.config_store = dec_surface->config_store();
2058
2059 if (!v4l2_dec_->SubmitExtControls(&ext_ctrls))
2060 return false;
2061
2062 dec_surface->SetReferenceSurfaces(ref_surfaces);
2063
2064 if (!v4l2_dec_->SubmitSlice(dec_surface->input_record(), frame_hdr->data,
2065 frame_hdr->frame_size))
2066 return false;
2067
2068 v4l2_dec_->DecodeSurface(dec_surface);
2069 return true;
2070 }
2071
2072 bool V4L2SliceVideoDecodeAccelerator::V4L2VP8Accelerator::OutputPicture(
2073 const scoped_refptr<VP8Picture>& pic) {
2074 scoped_refptr<V4L2DecodeSurface> dec_surface =
2075 VP8PictureToV4L2DecodeSurface(pic);
2076
2077 v4l2_dec_->SurfaceReady(dec_surface);
2078 return true;
2079 }
2080
2081 scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>
2082 V4L2SliceVideoDecodeAccelerator::V4L2VP8Accelerator::
2083 VP8PictureToV4L2DecodeSurface(const scoped_refptr<VP8Picture>& pic) {
2084 V4L2VP8Picture* v4l2_pic = pic->AsV4L2VP8Picture();
2085 CHECK(v4l2_pic);
2086 return v4l2_pic->dec_surface();
2087 }
2088
2089 void V4L2SliceVideoDecodeAccelerator::DecodeSurface(
2090 const scoped_refptr<V4L2DecodeSurface>& dec_surface) {
2091 DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
2092
2093 DVLOGF(3) << "Submitting decode for surface: " << dec_surface->ToString();
2094 Enqueue(dec_surface);
2095 }
2096
2097 void V4L2SliceVideoDecodeAccelerator::SurfaceReady(
2098 const scoped_refptr<V4L2DecodeSurface>& dec_surface) {
2099 DVLOGF(3);
2100 DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
2101
2102 decoder_display_queue_.push(dec_surface);
2103 TryOutputSurfaces();
2104 }
2105
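// Output decoded surfaces in display-queue order, stopping at the first
// surface that has not finished decoding yet.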
2106 void V4L2SliceVideoDecodeAccelerator::TryOutputSurfaces() {
2107 while (!decoder_display_queue_.empty()) {
2108 scoped_refptr<V4L2DecodeSurface> dec_surface =
2109 decoder_display_queue_.front();
2110
2111 if (!dec_surface->decoded())
2112 break;
2113
2114 decoder_display_queue_.pop();
2115 OutputSurface(dec_surface);
2116 }
2117 }
2118
2119 void V4L2SliceVideoDecodeAccelerator::OutputSurface(
2120 const scoped_refptr<V4L2DecodeSurface>& dec_surface) {
2121 DVLOGF(3);
2122 DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
2123
2124 OutputRecord& output_record =
2125 output_buffer_map_[dec_surface->output_record()];
2126
2127 bool inserted =
2128 surfaces_at_display_.insert(std::make_pair(output_record.picture_id,
2129 dec_surface)).second;
2130 DCHECK(inserted);
2131
2132 DCHECK(!output_record.at_client);
2133 DCHECK(!output_record.at_device);
2134 DCHECK_NE(output_record.egl_image, EGL_NO_IMAGE_KHR);
2135 DCHECK_NE(output_record.picture_id, -1);
2136 output_record.at_client = true;
2137
2138 media::Picture picture(output_record.picture_id, dec_surface->bitstream_id(),
2139 gfx::Rect(frame_buffer_size_));
2140 pending_picture_ready_.push(PictureRecord(output_record.cleared, picture));
2141 SendPictureReady();
2142 output_record.cleared = true;
2143 }
2144
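// Pair a free input buffer with a free output buffer into a new decode
// surface for the current bitstream buffer, or return nullptr if either pool
// is empty so the caller can retry once buffers are recycled.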
2145 scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>
2146 V4L2SliceVideoDecodeAccelerator::CreateSurface() {
2147 DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
2148
2149 if (free_input_buffers_.empty() || free_output_buffers_.empty())
2150 return nullptr;
2151
2152 for (const auto& input_id : free_input_buffers_)
2153 DVLOG(1) << "inputs: " << input_id;
2154 for (const auto& output_id : free_output_buffers_)
2155 DVLOG(1) << "outputs: " << output_id;
2156
2157 int input = free_input_buffers_.front();
2158 free_input_buffers_.pop_front();
2159 int output = free_output_buffers_.front();
2160 free_output_buffers_.pop_front();
2161
2162 InputRecord& input_record = input_buffer_map_[input];
2163 DCHECK_EQ(input_record.bytes_used, 0u);
2164 DCHECK_EQ(input_record.input_id, -1);
2165 DCHECK(decoder_current_bitstream_buffer_ != nullptr);
2166 input_record.input_id = decoder_current_bitstream_buffer_->input_id;
2167
2168 scoped_refptr<V4L2DecodeSurface> dec_surface = new V4L2DecodeSurface(
2169 decoder_current_bitstream_buffer_->input_id, input, output,
2170 base::Bind(&V4L2SliceVideoDecodeAccelerator::ReuseOutputBuffer,
2171 base::Unretained(this)));
2172
2173 DVLOGF(1) << "Created surface " << input << " -> " << output;
2174 return dec_surface;
2175 }
2176
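// Deliver pending PictureReady notifications while preserving output order:
// already-cleared pictures go straight to the IO thread, while pictures that
// still need clearing are routed through the child thread first.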
2177 void V4L2SliceVideoDecodeAccelerator::SendPictureReady() {
2178 DVLOGF(3);
2179 DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
2180 bool resetting_or_flushing = (decoder_resetting_ || decoder_flushing_);
2181 while (!pending_picture_ready_.empty()) {
2182 bool cleared = pending_picture_ready_.front().cleared;
2183 const media::Picture& picture = pending_picture_ready_.front().picture;
2184 if (cleared && picture_clearing_count_ == 0) {
2185 DVLOGF(3) << "To IO " << picture.picture_buffer_id();
2186 // This picture has already been cleared. Post it to the IO thread to reduce
2187 // latency; this is the usual path once all pictures have been cleared.
2188 io_message_loop_proxy_->PostTask(
2189 FROM_HERE, base::Bind(&Client::PictureReady, io_client_, picture));
2190 pending_picture_ready_.pop();
2191 } else if (!cleared || resetting_or_flushing) {
2192 DVLOGF(3) << "cleared=" << pending_picture_ready_.front().cleared
2193 << ", decoder_resetting_=" << decoder_resetting_
2194 << ", decoder_flushing_=" << decoder_flushing_
2195 << ", picture_clearing_count_=" << picture_clearing_count_;
2196 DVLOGF(3) << "To GPU " << picture.picture_buffer_id();
2197 // If the picture is not cleared, post it to the child thread, because it
2198 // has to be cleared there. A picture only needs to be cleared once. If the
2199 // decoder is resetting or flushing, send all pending pictures to ensure
2200 // PictureReady arrives before the reset or flush completes.
2201 child_message_loop_proxy_->PostTaskAndReply(
2202 FROM_HERE, base::Bind(&Client::PictureReady, client_, picture),
2203 // Unretained is safe: if Client::PictureReady gets to run, |this| is
2204 // still alive, because Destroy() waits for the decoder thread to finish.
2205 base::Bind(&V4L2SliceVideoDecodeAccelerator::PictureCleared,
2206 base::Unretained(this)));
2207 picture_clearing_count_++;
2208 pending_picture_ready_.pop();
2209 } else {
2210 // This picture has already been cleared, but earlier pictures are still
2211 // being cleared on the child thread. To preserve output order, do not
2212 // send this one until those have been cleared.
2213 break;
2214 }
2215 }
2216 }
2217
2218 void V4L2SliceVideoDecodeAccelerator::PictureCleared() {
2219 DVLOGF(3) << "clearing count=" << picture_clearing_count_;
2220 DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
2221 DCHECK_GT(picture_clearing_count_, 0);
2222 picture_clearing_count_--;
2223 SendPictureReady();
2224 }
2225
2226 bool V4L2SliceVideoDecodeAccelerator::CanDecodeOnIOThread() {
2227 return true;
2228 }
2229
2230 } // namespace content