Upstream version 9.38.198.0
src/content/common/gpu/media/v4l2_video_decode_accelerator.cc (platform/framework/web/crosswalk.git)
1 // Copyright 2014 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include <dlfcn.h>
6 #include <errno.h>
7 #include <fcntl.h>
8 #include <linux/videodev2.h>
9 #include <poll.h>
10 #include <sys/eventfd.h>
11 #include <sys/ioctl.h>
12 #include <sys/mman.h>
13
14 #include "base/bind.h"
15 #include "base/command_line.h"
16 #include "base/debug/trace_event.h"
17 #include "base/memory/shared_memory.h"
18 #include "base/message_loop/message_loop.h"
19 #include "base/message_loop/message_loop_proxy.h"
20 #include "base/numerics/safe_conversions.h"
21 #include "content/common/gpu/media/v4l2_video_decode_accelerator.h"
22 #include "media/base/media_switches.h"
23 #include "media/filters/h264_parser.h"
24 #include "ui/gl/scoped_binders.h"
25
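// Error-handling helpers used below: NOTIFY_ERROR() moves the decoder into the
// kError state and reports the error to the client; the IOCTL_OR_* macros wrap
// device_->Ioctl() and, on failure, either return from the calling function
// with the given value or just log the error.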
26 #define NOTIFY_ERROR(x)                            \
27   do {                                             \
28     SetDecoderState(kError);                       \
29     DLOG(ERROR) << "calling NotifyError(): " << x; \
30     NotifyError(x);                                \
31   } while (0)
32
33 #define IOCTL_OR_ERROR_RETURN_VALUE(type, arg, value)              \
34   do {                                                             \
35     if (device_->Ioctl(type, arg) != 0) {                          \
36       DPLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \
37       NOTIFY_ERROR(PLATFORM_FAILURE);                              \
38       return value;                                                \
39     }                                                              \
40   } while (0)
41
42 #define IOCTL_OR_ERROR_RETURN(type, arg) \
43   IOCTL_OR_ERROR_RETURN_VALUE(type, arg, ((void)0))
44
45 #define IOCTL_OR_ERROR_RETURN_FALSE(type, arg) \
46   IOCTL_OR_ERROR_RETURN_VALUE(type, arg, false)
47
48 #define IOCTL_OR_LOG_ERROR(type, arg)                              \
49   do {                                                             \
50     if (device_->Ioctl(type, arg) != 0)                            \
51       DPLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \
52   } while (0)
53
54 namespace content {
55
56 namespace {
57
58 // TODO(posciak): remove once we update linux-headers.
59 #ifndef V4L2_EVENT_RESOLUTION_CHANGE
60 #define V4L2_EVENT_RESOLUTION_CHANGE 5
61 #endif
62
63 }  // anonymous namespace
64
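// Holds one client-provided bitstream buffer while it is decoded: owns the
// mapped SharedMemory and, for buffers that actually came from the client
// (input_id >= 0), posts NotifyEndOfBitstreamBuffer() back to the client
// message loop from its destructor.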
65 struct V4L2VideoDecodeAccelerator::BitstreamBufferRef {
66   BitstreamBufferRef(
67       base::WeakPtr<Client>& client,
68       scoped_refptr<base::MessageLoopProxy>& client_message_loop_proxy,
69       base::SharedMemory* shm,
70       size_t size,
71       int32 input_id);
72   ~BitstreamBufferRef();
73   const base::WeakPtr<Client> client;
74   const scoped_refptr<base::MessageLoopProxy> client_message_loop_proxy;
75   const scoped_ptr<base::SharedMemory> shm;
76   const size_t size;
77   off_t bytes_used;
78   const int32 input_id;
79 };
80
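// Carries an EGLSyncKHR (and its display) from the child thread to the decoder
// thread. The destructor destroys the sync unless ownership was taken by
// resetting egl_sync to EGL_NO_SYNC_KHR, as ReusePictureBufferTask() does.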
81 struct V4L2VideoDecodeAccelerator::EGLSyncKHRRef {
82   EGLSyncKHRRef(EGLDisplay egl_display, EGLSyncKHR egl_sync);
83   ~EGLSyncKHRRef();
84   EGLDisplay const egl_display;
85   EGLSyncKHR egl_sync;
86 };
87
88 struct V4L2VideoDecodeAccelerator::PictureRecord {
89   PictureRecord(bool cleared, const media::Picture& picture);
90   ~PictureRecord();
91   bool cleared;  // Whether the texture is cleared and safe to render from.
92   media::Picture picture;  // The decoded picture.
93 };
94
95 V4L2VideoDecodeAccelerator::BitstreamBufferRef::BitstreamBufferRef(
96     base::WeakPtr<Client>& client,
97     scoped_refptr<base::MessageLoopProxy>& client_message_loop_proxy,
98     base::SharedMemory* shm, size_t size, int32 input_id)
99     : client(client),
100       client_message_loop_proxy(client_message_loop_proxy),
101       shm(shm),
102       size(size),
103       bytes_used(0),
104       input_id(input_id) {
105 }
106
107 V4L2VideoDecodeAccelerator::BitstreamBufferRef::~BitstreamBufferRef() {
108   if (input_id >= 0) {
109     client_message_loop_proxy->PostTask(FROM_HERE, base::Bind(
110         &Client::NotifyEndOfBitstreamBuffer, client, input_id));
111   }
112 }
113
114 V4L2VideoDecodeAccelerator::EGLSyncKHRRef::EGLSyncKHRRef(
115     EGLDisplay egl_display, EGLSyncKHR egl_sync)
116     : egl_display(egl_display),
117       egl_sync(egl_sync) {
118 }
119
120 V4L2VideoDecodeAccelerator::EGLSyncKHRRef::~EGLSyncKHRRef() {
121   // We don't check for eglDestroySyncKHR failures, because if we get here
122   // with a valid sync object, something went wrong and we are getting
123   // destroyed anyway.
124   if (egl_sync != EGL_NO_SYNC_KHR)
125     eglDestroySyncKHR(egl_display, egl_sync);
126 }
127
128 V4L2VideoDecodeAccelerator::InputRecord::InputRecord()
129     : at_device(false),
130       address(NULL),
131       length(0),
132       bytes_used(0),
133       input_id(-1) {
134 }
135
136 V4L2VideoDecodeAccelerator::InputRecord::~InputRecord() {
137 }
138
139 V4L2VideoDecodeAccelerator::OutputRecord::OutputRecord()
140     : at_device(false),
141       at_client(false),
142       egl_image(EGL_NO_IMAGE_KHR),
143       egl_sync(EGL_NO_SYNC_KHR),
144       picture_id(-1),
145       cleared(false) {
146 }
147
148 V4L2VideoDecodeAccelerator::OutputRecord::~OutputRecord() {}
149
150 V4L2VideoDecodeAccelerator::PictureRecord::PictureRecord(
151     bool cleared,
152     const media::Picture& picture)
153     : cleared(cleared), picture(picture) {}
154
155 V4L2VideoDecodeAccelerator::PictureRecord::~PictureRecord() {}
156
157 V4L2VideoDecodeAccelerator::V4L2VideoDecodeAccelerator(
158     EGLDisplay egl_display,
159     EGLContext egl_context,
160     const base::WeakPtr<Client>& io_client,
161     const base::Callback<bool(void)>& make_context_current,
162     scoped_ptr<V4L2Device> device,
163     const scoped_refptr<base::MessageLoopProxy>& io_message_loop_proxy)
164     : child_message_loop_proxy_(base::MessageLoopProxy::current()),
165       io_message_loop_proxy_(io_message_loop_proxy),
166       io_client_(io_client),
167       decoder_thread_("V4L2DecoderThread"),
168       decoder_state_(kUninitialized),
169       device_(device.Pass()),
170       decoder_delay_bitstream_buffer_id_(-1),
171       decoder_current_input_buffer_(-1),
172       decoder_decode_buffer_tasks_scheduled_(0),
173       decoder_frames_at_client_(0),
174       decoder_flushing_(false),
175       resolution_change_pending_(false),
176       resolution_change_reset_pending_(false),
177       decoder_partial_frame_pending_(false),
178       input_streamon_(false),
179       input_buffer_queued_count_(0),
180       output_streamon_(false),
181       output_buffer_queued_count_(0),
182       output_dpb_size_(0),
183       output_planes_count_(0),
184       picture_clearing_count_(0),
185       pictures_assigned_(false, false),
186       device_poll_thread_("V4L2DevicePollThread"),
187       make_context_current_(make_context_current),
188       egl_display_(egl_display),
189       egl_context_(egl_context),
190       video_profile_(media::VIDEO_CODEC_PROFILE_UNKNOWN),
191       weak_this_factory_(this) {
192   weak_this_ = weak_this_factory_.GetWeakPtr();
193 }
194
195 V4L2VideoDecodeAccelerator::~V4L2VideoDecodeAccelerator() {
196   DCHECK(!decoder_thread_.IsRunning());
197   DCHECK(!device_poll_thread_.IsRunning());
198
199   DestroyInputBuffers();
200   DestroyOutputBuffers();
201
202   // These maps have members that should be manually destroyed, e.g. file
203   // descriptors, mmap() segments, etc.
204   DCHECK(input_buffer_map_.empty());
205   DCHECK(output_buffer_map_.empty());
206 }
207
208 bool V4L2VideoDecodeAccelerator::Initialize(media::VideoCodecProfile profile,
209                                             Client* client) {
210   DVLOG(3) << "Initialize()";
211   DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
212   DCHECK_EQ(decoder_state_, kUninitialized);
213
214   client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client));
215   client_ = client_ptr_factory_->GetWeakPtr();
216
217   switch (profile) {
218     case media::H264PROFILE_BASELINE:
219       DVLOG(2) << "Initialize(): profile H264PROFILE_BASELINE";
220       break;
221     case media::H264PROFILE_MAIN:
222       DVLOG(2) << "Initialize(): profile H264PROFILE_MAIN";
223       break;
224     case media::H264PROFILE_HIGH:
225       DVLOG(2) << "Initialize(): profile H264PROFILE_HIGH";
226       break;
227     case media::VP8PROFILE_ANY:
228       DVLOG(2) << "Initialize(): profile VP8PROFILE_ANY";
229       break;
230     default:
231       DLOG(ERROR) << "Initialize(): unsupported profile=" << profile;
232       return false;
233   }
234   video_profile_ = profile;
235
236   if (egl_display_ == EGL_NO_DISPLAY) {
237     DLOG(ERROR) << "Initialize(): could not get EGLDisplay";
238     NOTIFY_ERROR(PLATFORM_FAILURE);
239     return false;
240   }
241
242   // We need the context to be initialized to query extensions.
243   if (!make_context_current_.Run()) {
244     DLOG(ERROR) << "Initialize(): could not make context current";
245     NOTIFY_ERROR(PLATFORM_FAILURE);
246     return false;
247   }
248
249   if (!gfx::g_driver_egl.ext.b_EGL_KHR_fence_sync) {
250     DLOG(ERROR) << "Initialize(): context does not have EGL_KHR_fence_sync";
251     NOTIFY_ERROR(PLATFORM_FAILURE);
252     return false;
253   }
254
255   // Capabilities check.
256   struct v4l2_capability caps;
257   const __u32 kCapsRequired =
258       V4L2_CAP_VIDEO_CAPTURE_MPLANE |
259       V4L2_CAP_VIDEO_OUTPUT_MPLANE |
260       V4L2_CAP_STREAMING;
261   IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYCAP, &caps);
262   if ((caps.capabilities & kCapsRequired) != kCapsRequired) {
263     DLOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP"
264         ", caps check failed: 0x" << std::hex << caps.capabilities;
265     NOTIFY_ERROR(PLATFORM_FAILURE);
266     return false;
267   }
268
269   if (!CreateInputBuffers())
270     return false;
271
272   // The output format has to be set up before streaming starts.
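  // Note on V4L2 naming used throughout this file: the OUTPUT queue carries
  // the compressed bitstream (our input) and the CAPTURE queue carries decoded
  // frames (our output); see the Enqueue()/Dequeue() comments below.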
273   struct v4l2_format format;
274   memset(&format, 0, sizeof(format));
275   format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
276   uint32 output_format_fourcc = device_->PreferredOutputFormat();
277   if (output_format_fourcc == 0) {
278     // TODO(posciak): We should enumerate available output formats, as well as
279     // take into account formats that the client is ready to accept.
280     return false;
281   }
282   format.fmt.pix_mp.pixelformat = output_format_fourcc;
283   IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);
284
285   // Subscribe to the resolution change event.
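  // (The event is consumed in DequeueEvents(), which decides whether a
  // resolution change is actually pending.)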
286   struct v4l2_event_subscription sub;
287   memset(&sub, 0, sizeof(sub));
288   sub.type = V4L2_EVENT_RESOLUTION_CHANGE;
289   IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_SUBSCRIBE_EVENT, &sub);
290
291   // Initialize format-specific bits.
292   if (video_profile_ >= media::H264PROFILE_MIN &&
293       video_profile_ <= media::H264PROFILE_MAX) {
294     decoder_h264_parser_.reset(new media::H264Parser());
295   }
296
297   if (!decoder_thread_.Start()) {
298     DLOG(ERROR) << "Initialize(): decoder thread failed to start";
299     NOTIFY_ERROR(PLATFORM_FAILURE);
300     return false;
301   }
302
303   // StartDevicePoll will NOTIFY_ERROR on failure, so IgnoreResult is fine here.
304   decoder_thread_.message_loop()->PostTask(
305       FROM_HERE,
306       base::Bind(
307           base::IgnoreResult(&V4L2VideoDecodeAccelerator::StartDevicePoll),
308           base::Unretained(this)));
309
310   SetDecoderState(kInitialized);
311   return true;
312 }
313
314 void V4L2VideoDecodeAccelerator::Decode(
315     const media::BitstreamBuffer& bitstream_buffer) {
316   DVLOG(1) << "Decode(): input_id=" << bitstream_buffer.id()
317            << ", size=" << bitstream_buffer.size();
318   DCHECK(io_message_loop_proxy_->BelongsToCurrentThread());
319
320   // DecodeTask() will take care of running a DecodeBufferTask().
321   decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
322       &V4L2VideoDecodeAccelerator::DecodeTask, base::Unretained(this),
323       bitstream_buffer));
324 }
325
326 void V4L2VideoDecodeAccelerator::AssignPictureBuffers(
327     const std::vector<media::PictureBuffer>& buffers) {
328   DVLOG(3) << "AssignPictureBuffers(): buffer_count=" << buffers.size();
329   DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
330
331   if (buffers.size() != output_buffer_map_.size()) {
332     DLOG(ERROR) << "AssignPictureBuffers(): Failed to provide requested picture"
333                    " buffers. (Got " << buffers.size()
334                 << ", requested " << output_buffer_map_.size() << ")";
335     NOTIFY_ERROR(INVALID_ARGUMENT);
336     return;
337   }
338
339   if (!make_context_current_.Run()) {
340     DLOG(ERROR) << "AssignPictureBuffers(): could not make context current";
341     NOTIFY_ERROR(PLATFORM_FAILURE);
342     return;
343   }
344
345   gfx::ScopedTextureBinder bind_restore(GL_TEXTURE_EXTERNAL_OES, 0);
346
347   // It's safe to manipulate all the buffer state here, because the decoder
348   // thread is waiting on pictures_assigned_.
349   DCHECK(free_output_buffers_.empty());
350   for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
351     DCHECK(buffers[i].size() == frame_buffer_size_);
352
353     OutputRecord& output_record = output_buffer_map_[i];
354     DCHECK(!output_record.at_device);
355     DCHECK(!output_record.at_client);
356     DCHECK_EQ(output_record.egl_image, EGL_NO_IMAGE_KHR);
357     DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
358     DCHECK_EQ(output_record.picture_id, -1);
359     DCHECK_EQ(output_record.cleared, false);
360
361     EGLImageKHR egl_image = device_->CreateEGLImage(egl_display_,
362                                                     egl_context_,
363                                                     buffers[i].texture_id(),
364                                                     frame_buffer_size_,
365                                                     i,
366                                                     output_planes_count_);
367     if (egl_image == EGL_NO_IMAGE_KHR) {
368       DLOG(ERROR) << "AssignPictureBuffers(): could not create EGLImageKHR";
369       // Ownership of EGLImages allocated in previous iterations of this loop
370       // has been transferred to output_buffer_map_. After we error-out here
371       // the destructor will handle their cleanup.
372       NOTIFY_ERROR(PLATFORM_FAILURE);
373       return;
374     }
375
376     output_record.egl_image = egl_image;
377     output_record.picture_id = buffers[i].id();
378     free_output_buffers_.push(i);
379     DVLOG(3) << "AssignPictureBuffers(): buffer[" << i
380              << "]: picture_id=" << output_record.picture_id;
381   }
382
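  // Unblock the decoder thread, which waits on pictures_assigned_ until the
  // client has provided the requested picture buffers.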
383   pictures_assigned_.Signal();
384 }
385
386 void V4L2VideoDecodeAccelerator::ReusePictureBuffer(int32 picture_buffer_id) {
387   DVLOG(3) << "ReusePictureBuffer(): picture_buffer_id=" << picture_buffer_id;
388   // Must be run on child thread, as we'll insert a sync in the EGL context.
389   DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
390
391   if (!make_context_current_.Run()) {
392     DLOG(ERROR) << "ReusePictureBuffer(): could not make context current";
393     NOTIFY_ERROR(PLATFORM_FAILURE);
394     return;
395   }
396
397   EGLSyncKHR egl_sync =
398       eglCreateSyncKHR(egl_display_, EGL_SYNC_FENCE_KHR, NULL);
399   if (egl_sync == EGL_NO_SYNC_KHR) {
400     DLOG(ERROR) << "ReusePictureBuffer(): eglCreateSyncKHR() failed";
401     NOTIFY_ERROR(PLATFORM_FAILURE);
402     return;
403   }
404
405   scoped_ptr<EGLSyncKHRRef> egl_sync_ref(new EGLSyncKHRRef(
406       egl_display_, egl_sync));
407   decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
408       &V4L2VideoDecodeAccelerator::ReusePictureBufferTask,
409       base::Unretained(this), picture_buffer_id, base::Passed(&egl_sync_ref)));
410 }
411
412 void V4L2VideoDecodeAccelerator::Flush() {
413   DVLOG(3) << "Flush()";
414   DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
415   decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
416       &V4L2VideoDecodeAccelerator::FlushTask, base::Unretained(this)));
417 }
418
419 void V4L2VideoDecodeAccelerator::Reset() {
420   DVLOG(3) << "Reset()";
421   DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
422   decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
423       &V4L2VideoDecodeAccelerator::ResetTask, base::Unretained(this)));
424 }
425
426 void V4L2VideoDecodeAccelerator::Destroy() {
427   DVLOG(3) << "Destroy()";
428   DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
429
430   // We're destroying; cancel all callbacks.
431   client_ptr_factory_.reset();
432   weak_this_factory_.InvalidateWeakPtrs();
433
434   // If the decoder thread is running, destroy using posted task.
435   if (decoder_thread_.IsRunning()) {
436     decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
437         &V4L2VideoDecodeAccelerator::DestroyTask, base::Unretained(this)));
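    // Wake the decoder thread in case it is blocked waiting on
    // pictures_assigned_, so that DestroyTask() can run and the thread can be
    // stopped.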
438     pictures_assigned_.Signal();
439     // DestroyTask() will cause the decoder_thread_ to flush all tasks.
440     decoder_thread_.Stop();
441   } else {
442     // Otherwise, call the destroy task directly.
443     DestroyTask();
444   }
445
446   // Set to kError state just in case.
447   SetDecoderState(kError);
448
449   delete this;
450 }
451
452 bool V4L2VideoDecodeAccelerator::CanDecodeOnIOThread() { return true; }
453
454 void V4L2VideoDecodeAccelerator::DecodeTask(
455     const media::BitstreamBuffer& bitstream_buffer) {
456   DVLOG(3) << "DecodeTask(): input_id=" << bitstream_buffer.id();
457   DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
458   DCHECK_NE(decoder_state_, kUninitialized);
459   TRACE_EVENT1("Video Decoder", "V4L2VDA::DecodeTask", "input_id",
460                bitstream_buffer.id());
461
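  // Wrap the client's buffer in a BitstreamBufferRef, which owns the
  // SharedMemory and notifies the client once the buffer has been consumed
  // (see ~BitstreamBufferRef()).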
462   scoped_ptr<BitstreamBufferRef> bitstream_record(new BitstreamBufferRef(
463       io_client_, io_message_loop_proxy_,
464       new base::SharedMemory(bitstream_buffer.handle(), true),
465       bitstream_buffer.size(), bitstream_buffer.id()));
466   if (!bitstream_record->shm->Map(bitstream_buffer.size())) {
467     DLOG(ERROR) << "Decode(): could not map bitstream_buffer";
468     NOTIFY_ERROR(UNREADABLE_INPUT);
469     return;
470   }
471   DVLOG(3) << "DecodeTask(): mapped at=" << bitstream_record->shm->memory();
472
473   if (decoder_state_ == kResetting || decoder_flushing_) {
474     // In the case that we're resetting or flushing, we need to delay decoding
475     // the BitstreamBuffers that come after the Reset() or Flush() call.  When
476     // we're here, we know that this DecodeTask() was scheduled by a Decode()
477     // call that came after (in the client thread) the Reset() or Flush() call;
478     // thus set up the delay if necessary.
479     if (decoder_delay_bitstream_buffer_id_ == -1)
480       decoder_delay_bitstream_buffer_id_ = bitstream_record->input_id;
481   } else if (decoder_state_ == kError) {
482     DVLOG(2) << "DecodeTask(): early out: kError state";
483     return;
484   }
485
486   decoder_input_queue_.push(
487       linked_ptr<BitstreamBufferRef>(bitstream_record.release()));
488   decoder_decode_buffer_tasks_scheduled_++;
489   DecodeBufferTask();
490 }
491
492 void V4L2VideoDecodeAccelerator::DecodeBufferTask() {
493   DVLOG(3) << "DecodeBufferTask()";
494   DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
495   DCHECK_NE(decoder_state_, kUninitialized);
496   TRACE_EVENT0("Video Decoder", "V4L2VDA::DecodeBufferTask");
497
498   decoder_decode_buffer_tasks_scheduled_--;
499
500   if (decoder_state_ == kResetting) {
501     DVLOG(2) << "DecodeBufferTask(): early out: kResetting state";
502     return;
503   } else if (decoder_state_ == kError) {
504     DVLOG(2) << "DecodeBufferTask(): early out: kError state";
505     return;
506   } else if (decoder_state_ == kChangingResolution) {
507     DVLOG(2) << "DecodeBufferTask(): early out: resolution change pending";
508     return;
509   }
510
511   if (decoder_current_bitstream_buffer_ == NULL) {
512     if (decoder_input_queue_.empty()) {
513       // We're waiting for a new buffer -- exit without scheduling a new task.
514       return;
515     }
516     linked_ptr<BitstreamBufferRef>& buffer_ref = decoder_input_queue_.front();
517     if (decoder_delay_bitstream_buffer_id_ == buffer_ref->input_id) {
518       // We're asked to delay decoding on this and subsequent buffers.
519       return;
520     }
521
522     // Set up to use the next buffer.
523     decoder_current_bitstream_buffer_.reset(buffer_ref.release());
524     decoder_input_queue_.pop();
525     DVLOG(3) << "DecodeBufferTask(): reading input_id="
526              << decoder_current_bitstream_buffer_->input_id
527              << ", addr=" << (decoder_current_bitstream_buffer_->shm ?
528                               decoder_current_bitstream_buffer_->shm->memory() :
529                               NULL)
530              << ", size=" << decoder_current_bitstream_buffer_->size;
531   }
532   bool schedule_task = false;
533   const size_t size = decoder_current_bitstream_buffer_->size;
534   size_t decoded_size = 0;
535   if (size == 0) {
536     const int32 input_id = decoder_current_bitstream_buffer_->input_id;
537     if (input_id >= 0) {
538       // This is a buffer queued from the client that has zero size.  Skip.
539       schedule_task = true;
540     } else {
541       // This is a buffer of zero size, queued to flush the pipe.  Flush.
542       DCHECK_EQ(decoder_current_bitstream_buffer_->shm.get(),
543                 static_cast<base::SharedMemory*>(NULL));
544       // Enqueue a buffer guaranteed to be empty.  To do that, we flush the
545       // current input, enqueue no data to the next frame, then flush that down.
546       schedule_task = true;
547       if (decoder_current_input_buffer_ != -1 &&
548           input_buffer_map_[decoder_current_input_buffer_].input_id !=
549               kFlushBufferId)
550         schedule_task = FlushInputFrame();
551
552       if (schedule_task && AppendToInputFrame(NULL, 0) && FlushInputFrame()) {
553         DVLOG(2) << "DecodeBufferTask(): enqueued flush buffer";
554         decoder_partial_frame_pending_ = false;
555         schedule_task = true;
556       } else {
557         // If we failed to enqueue the empty buffer (due to pipeline
558         // backpressure), don't advance the bitstream buffer queue, and don't
559         // schedule the next task.  This bitstream buffer queue entry will get
560         // reprocessed when the pipeline frees up.
561         schedule_task = false;
562       }
563     }
564   } else {
565     // This is a buffer queued from the client, with actual contents.  Decode.
566     const uint8* const data =
567         reinterpret_cast<const uint8*>(
568             decoder_current_bitstream_buffer_->shm->memory()) +
569         decoder_current_bitstream_buffer_->bytes_used;
570     const size_t data_size =
571         decoder_current_bitstream_buffer_->size -
572         decoder_current_bitstream_buffer_->bytes_used;
573     if (!AdvanceFrameFragment(data, data_size, &decoded_size)) {
574       NOTIFY_ERROR(UNREADABLE_INPUT);
575       return;
576     }
577     // AdvanceFrameFragment should not return a size larger than the buffer
578     // size, even on invalid data.
579     CHECK_LE(decoded_size, data_size);
580
581     switch (decoder_state_) {
582       case kInitialized:
583       case kAfterReset:
584         schedule_task = DecodeBufferInitial(data, decoded_size, &decoded_size);
585         break;
586       case kDecoding:
587         schedule_task = DecodeBufferContinue(data, decoded_size);
588         break;
589       default:
590         NOTIFY_ERROR(ILLEGAL_STATE);
591         return;
592     }
593   }
594   if (decoder_state_ == kError) {
595     // Failed during decode.
596     return;
597   }
598
599   if (schedule_task) {
600     decoder_current_bitstream_buffer_->bytes_used += decoded_size;
601     if (decoder_current_bitstream_buffer_->bytes_used ==
602         decoder_current_bitstream_buffer_->size) {
603       // Our current bitstream buffer is done; return it.
604       int32 input_id = decoder_current_bitstream_buffer_->input_id;
605       DVLOG(3) << "DecodeBufferTask(): finished input_id=" << input_id;
606       // BitstreamBufferRef destructor calls NotifyEndOfBitstreamBuffer().
607       decoder_current_bitstream_buffer_.reset();
608     }
609     ScheduleDecodeBufferTaskIfNeeded();
610   }
611 }
612
613 bool V4L2VideoDecodeAccelerator::AdvanceFrameFragment(
614     const uint8* data,
615     size_t size,
616     size_t* endpos) {
617   if (video_profile_ >= media::H264PROFILE_MIN &&
618       video_profile_ <= media::H264PROFILE_MAX) {
619     // For H264, we need to feed HW one frame at a time.  This is going to take
620     // some parsing of our input stream.
621     decoder_h264_parser_->SetStream(data, size);
622     media::H264NALU nalu;
623     media::H264Parser::Result result;
624     *endpos = 0;
625
626     // Keep on peeking the next NALs while they don't indicate a frame
627     // boundary.
628     for (;;) {
629       bool end_of_frame = false;
630       result = decoder_h264_parser_->AdvanceToNextNALU(&nalu);
631       if (result == media::H264Parser::kInvalidStream ||
632           result == media::H264Parser::kUnsupportedStream)
633         return false;
634       if (result == media::H264Parser::kEOStream) {
635         // We've reached the end of the buffer before finding a frame boundary.
636         decoder_partial_frame_pending_ = true;
637         return true;
638       }
639       switch (nalu.nal_unit_type) {
640         case media::H264NALU::kNonIDRSlice:
641         case media::H264NALU::kIDRSlice:
642           if (nalu.size < 1)
643             return false;
644           // For these two, if the "first_mb_in_slice" field is zero, start a
645           // new frame and return.  This field is Exp-Golomb coded starting on
646           // the eighth data bit of the NAL; a zero value is encoded with a
647           // leading '1' bit in the byte, which we can detect as the byte being
648           // (unsigned) greater than or equal to 0x80.
649           if (nalu.data[1] >= 0x80) {
650             end_of_frame = true;
651             break;
652           }
653           break;
654         case media::H264NALU::kSEIMessage:
655         case media::H264NALU::kSPS:
656         case media::H264NALU::kPPS:
657         case media::H264NALU::kAUD:
658         case media::H264NALU::kEOSeq:
659         case media::H264NALU::kEOStream:
660         case media::H264NALU::kReserved14:
661         case media::H264NALU::kReserved15:
662         case media::H264NALU::kReserved16:
663         case media::H264NALU::kReserved17:
664         case media::H264NALU::kReserved18:
665           // These unconditionally signal a frame boundary.
666           end_of_frame = true;
667           break;
668         default:
669           // For all others, keep going.
670           break;
671       }
672       if (end_of_frame) {
673         if (!decoder_partial_frame_pending_ && *endpos == 0) {
674           // The frame was previously restarted, and we haven't filled the
675           // current frame with any contents yet.  Start the new frame here and
676           // continue parsing NALs.
677         } else {
678           // The frame wasn't previously restarted and/or we have contents for
679           // the current frame; signal the start of a new frame here: we don't
680           // have a partial frame anymore.
681           decoder_partial_frame_pending_ = false;
682           return true;
683         }
684       }
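      // Consume this NAL into the current frame. When a frame boundary was
      // found above and the current frame already had contents, we returned
      // before reaching this point, so the boundary NAL is left to start the
      // next frame.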
685       *endpos = (nalu.data + nalu.size) - data;
686     }
687     NOTREACHED();
688     return false;
689   } else {
690     DCHECK_GE(video_profile_, media::VP8PROFILE_MIN);
691     DCHECK_LE(video_profile_, media::VP8PROFILE_MAX);
692     // For VP8, we can just dump the entire buffer.  No fragmentation needed,
693     // and we never return a partial frame.
694     *endpos = size;
695     decoder_partial_frame_pending_ = false;
696     return true;
697   }
698 }
699
700 void V4L2VideoDecodeAccelerator::ScheduleDecodeBufferTaskIfNeeded() {
701   DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
702
703   // If we're behind on tasks, schedule another one.
704   int buffers_to_decode = decoder_input_queue_.size();
705   if (decoder_current_bitstream_buffer_ != NULL)
706     buffers_to_decode++;
707   if (decoder_decode_buffer_tasks_scheduled_ < buffers_to_decode) {
708     decoder_decode_buffer_tasks_scheduled_++;
709     decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
710         &V4L2VideoDecodeAccelerator::DecodeBufferTask,
711         base::Unretained(this)));
712   }
713 }
714
715 bool V4L2VideoDecodeAccelerator::DecodeBufferInitial(
716     const void* data, size_t size, size_t* endpos) {
717   DVLOG(3) << "DecodeBufferInitial(): data=" << data << ", size=" << size;
718   DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
719   DCHECK_NE(decoder_state_, kUninitialized);
720   DCHECK_NE(decoder_state_, kDecoding);
721   // Initial decode.  We haven't been able to get output stream format info yet.
722   // Get it, and start decoding.
723
724   // Copy in and send to HW.
725   if (!AppendToInputFrame(data, size))
726     return false;
727
728   // If we only have a partial frame, don't flush and process yet.
729   if (decoder_partial_frame_pending_)
730     return true;
731
732   if (!FlushInputFrame())
733     return false;
734
735   // Recycle buffers.
736   Dequeue();
737
738   // Check and see if we have format info yet.
739   struct v4l2_format format;
740   bool again = false;
741   if (!GetFormatInfo(&format, &again))
742     return false;
743
744   if (again) {
745     // Need more stream to decode format, return true and schedule next buffer.
746     *endpos = size;
747     return true;
748   }
749
750   // Run this initialization only on first startup.
751   if (decoder_state_ == kInitialized) {
752     DVLOG(3) << "DecodeBufferInitial(): running initialization";
753     // Success! Setup our parameters.
754     if (!CreateBuffersForFormat(format))
755       return false;
756
757     // We expect to process the initial buffer once during stream init to
758     // configure stream parameters, but will not consume the stream data on that
759     // iteration.  Subsequent iterations (including after reset) do not require
760     // the stream init step.
761     *endpos = 0;
762   } else {
763     *endpos = size;
764   }
765
766   decoder_state_ = kDecoding;
767   ScheduleDecodeBufferTaskIfNeeded();
768   return true;
769 }
770
771 bool V4L2VideoDecodeAccelerator::DecodeBufferContinue(
772     const void* data, size_t size) {
773   DVLOG(3) << "DecodeBufferContinue(): data=" << data << ", size=" << size;
774   DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
775   DCHECK_EQ(decoder_state_, kDecoding);
776
777   // Both of these calls will set kError state if they fail.
778   // Only flush the frame if it's complete.
779   return (AppendToInputFrame(data, size) &&
780           (decoder_partial_frame_pending_ || FlushInputFrame()));
781 }
782
783 bool V4L2VideoDecodeAccelerator::AppendToInputFrame(
784     const void* data, size_t size) {
785   DVLOG(3) << "AppendToInputFrame()";
786   DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
787   DCHECK_NE(decoder_state_, kUninitialized);
788   DCHECK_NE(decoder_state_, kResetting);
789   DCHECK_NE(decoder_state_, kError);
790   // This routine can handle data == NULL and size == 0, which occurs when
791   // we queue an empty buffer for the purposes of flushing the pipe.
792
793   // Flush if we're too big
794   if (decoder_current_input_buffer_ != -1) {
795     InputRecord& input_record =
796         input_buffer_map_[decoder_current_input_buffer_];
797     if (input_record.bytes_used + size > input_record.length) {
798       if (!FlushInputFrame())
799         return false;
800       decoder_current_input_buffer_ = -1;
801     }
802   }
803
804   // Try to get an available input buffer
805   if (decoder_current_input_buffer_ == -1) {
806     if (free_input_buffers_.empty()) {
807       // See if we can get more free buffers from HW
808       Dequeue();
809       if (free_input_buffers_.empty()) {
810         // Nope!
811         DVLOG(2) << "AppendToInputFrame(): stalled for input buffers";
812         return false;
813       }
814     }
815     decoder_current_input_buffer_ = free_input_buffers_.back();
816     free_input_buffers_.pop_back();
817     InputRecord& input_record =
818         input_buffer_map_[decoder_current_input_buffer_];
819     DCHECK_EQ(input_record.bytes_used, 0);
820     DCHECK_EQ(input_record.input_id, -1);
821     DCHECK(decoder_current_bitstream_buffer_ != NULL);
822     input_record.input_id = decoder_current_bitstream_buffer_->input_id;
823   }
824
825   DCHECK(data != NULL || size == 0);
826   if (size == 0) {
827     // If we asked for an empty buffer, return now.  We return only after
828     // getting the next input buffer, since we might actually want an empty
829     // input buffer for flushing purposes.
830     return true;
831   }
832
833   // Copy in to the buffer.
834   InputRecord& input_record =
835       input_buffer_map_[decoder_current_input_buffer_];
836   if (size > input_record.length - input_record.bytes_used) {
837     LOG(ERROR) << "AppendToInputFrame(): over-size frame, erroring";
838     NOTIFY_ERROR(UNREADABLE_INPUT);
839     return false;
840   }
841   memcpy(
842       reinterpret_cast<uint8*>(input_record.address) + input_record.bytes_used,
843       data,
844       size);
845   input_record.bytes_used += size;
846
847   return true;
848 }
849
850 bool V4L2VideoDecodeAccelerator::FlushInputFrame() {
851   DVLOG(3) << "FlushInputFrame()";
852   DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
853   DCHECK_NE(decoder_state_, kUninitialized);
854   DCHECK_NE(decoder_state_, kResetting);
855   DCHECK_NE(decoder_state_, kError);
856
857   if (decoder_current_input_buffer_ == -1)
858     return true;
859
860   InputRecord& input_record =
861       input_buffer_map_[decoder_current_input_buffer_];
862   DCHECK_NE(input_record.input_id, -1);
863   DCHECK(input_record.input_id != kFlushBufferId ||
864          input_record.bytes_used == 0);
865   // * if input_id >= 0, this input buffer was prompted by a bitstream buffer we
866   //   got from the client.  We can skip it if it is empty.
867   // * if input_id < 0 (should be kFlushBufferId in this case), this input
868   //   buffer was prompted by a flush buffer, and should be queued even when
869   //   empty.
870   if (input_record.input_id >= 0 && input_record.bytes_used == 0) {
871     input_record.input_id = -1;
872     free_input_buffers_.push_back(decoder_current_input_buffer_);
873     decoder_current_input_buffer_ = -1;
874     return true;
875   }
876
877   // Queue it.
878   input_ready_queue_.push(decoder_current_input_buffer_);
879   decoder_current_input_buffer_ = -1;
880   DVLOG(3) << "FlushInputFrame(): submitting input_id="
881            << input_record.input_id;
882   // Enqueue once since there's new available input for it.
883   Enqueue();
884
885   return (decoder_state_ != kError);
886 }
887
888 void V4L2VideoDecodeAccelerator::ServiceDeviceTask(bool event_pending) {
889   DVLOG(3) << "ServiceDeviceTask()";
890   DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
891   DCHECK_NE(decoder_state_, kUninitialized);
892   TRACE_EVENT0("Video Decoder", "V4L2VDA::ServiceDeviceTask");
893
894   if (decoder_state_ == kResetting) {
895     DVLOG(2) << "ServiceDeviceTask(): early out: kResetting state";
896     return;
897   } else if (decoder_state_ == kError) {
898     DVLOG(2) << "ServiceDeviceTask(): early out: kError state";
899     return;
900   } else if (decoder_state_ == kChangingResolution) {
901     DVLOG(2) << "ServiceDeviceTask(): early out: kChangingResolution state";
902     return;
903   }
904
905   if (event_pending)
906     DequeueEvents();
907   Dequeue();
908   Enqueue();
909
910   // Clear the interrupt fd.
911   if (!device_->ClearDevicePollInterrupt()) {
912     NOTIFY_ERROR(PLATFORM_FAILURE);
913     return;
914   }
915
916   bool poll_device = false;
917   // Add fd, if we should poll on it.
918   // Can be polled as soon as either input or output buffers are queued.
919   if (input_buffer_queued_count_ + output_buffer_queued_count_ > 0)
920     poll_device = true;
921
922   // ServiceDeviceTask() should only ever be scheduled from DevicePollTask(),
923   // so either:
924   // * device_poll_thread_ is running normally
925   // * device_poll_thread_ scheduled us, but then a ResetTask() or DestroyTask()
926   //   shut it down, in which case we're either in kResetting or kError states
927   //   respectively, and we should have early-outed already.
928   DCHECK(device_poll_thread_.message_loop());
929   // Queue the DevicePollTask() now.
930   device_poll_thread_.message_loop()->PostTask(
931       FROM_HERE,
932       base::Bind(&V4L2VideoDecodeAccelerator::DevicePollTask,
933                  base::Unretained(this),
934                  poll_device));
935
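  // Log format: DEC[awaiting decode -> ready to enqueue] => DEVICE[free +
  // queued / total input buffers -> free + queued / total output buffers] =>
  // VDA[frames held by the client].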
936   DVLOG(1) << "ServiceDeviceTask(): buffer counts: DEC["
937            << decoder_input_queue_.size() << "->"
938            << input_ready_queue_.size() << "] => DEVICE["
939            << free_input_buffers_.size() << "+"
940            << input_buffer_queued_count_ << "/"
941            << input_buffer_map_.size() << "->"
942            << free_output_buffers_.size() << "+"
943            << output_buffer_queued_count_ << "/"
944            << output_buffer_map_.size() << "] => VDA["
945            << decoder_frames_at_client_ << "]";
946
947   ScheduleDecodeBufferTaskIfNeeded();
948   StartResolutionChangeIfNeeded();
949 }
950
951 void V4L2VideoDecodeAccelerator::Enqueue() {
952   DVLOG(3) << "Enqueue()";
953   DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
954   DCHECK_NE(decoder_state_, kUninitialized);
955   TRACE_EVENT0("Video Decoder", "V4L2VDA::Enqueue");
956
957   // Drain the pipe of completed decode buffers.
958   const int old_inputs_queued = input_buffer_queued_count_;
959   while (!input_ready_queue_.empty()) {
960     if (!EnqueueInputRecord())
961       return;
962   }
963   if (old_inputs_queued == 0 && input_buffer_queued_count_ != 0) {
964     // We just started up a previously empty queue.
965     // Queue state changed; signal interrupt.
966     if (!device_->SetDevicePollInterrupt()) {
967       DPLOG(ERROR) << "SetDevicePollInterrupt(): failed";
968       NOTIFY_ERROR(PLATFORM_FAILURE);
969       return;
970     }
971     // Start VIDIOC_STREAMON if we haven't yet.
972     if (!input_streamon_) {
973       __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
974       IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type);
975       input_streamon_ = true;
976     }
977   }
978
979   // Enqueue all the outputs we can.
980   const int old_outputs_queued = output_buffer_queued_count_;
981   while (!free_output_buffers_.empty()) {
982     if (!EnqueueOutputRecord())
983       return;
984   }
985   if (old_outputs_queued == 0 && output_buffer_queued_count_ != 0) {
986     // We just started up a previously empty queue.
987     // Queue state changed; signal interrupt.
988     if (!device_->SetDevicePollInterrupt()) {
989       DPLOG(ERROR) << "SetDevicePollInterrupt(): failed";
990       NOTIFY_ERROR(PLATFORM_FAILURE);
991       return;
992     }
993     // Start VIDIOC_STREAMON if we haven't yet.
994     if (!output_streamon_) {
995       __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
996       IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type);
997       output_streamon_ = true;
998     }
999   }
1000 }
1001
1002 void V4L2VideoDecodeAccelerator::DequeueEvents() {
1003   DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1004   DCHECK_NE(decoder_state_, kUninitialized);
1005   DVLOG(3) << "DequeueEvents()";
1006
1007   struct v4l2_event ev;
1008   memset(&ev, 0, sizeof(ev));
1009
1010   while (device_->Ioctl(VIDIOC_DQEVENT, &ev) == 0) {
1011     if (ev.type == V4L2_EVENT_RESOLUTION_CHANGE) {
1012       DVLOG(3) << "DequeueEvents(): got resolution change event.";
1013       DCHECK(!resolution_change_pending_);
1014       resolution_change_pending_ = IsResolutionChangeNecessary();
1015     } else {
1016       DLOG(FATAL) << "DequeueEvents(): got an event (" << ev.type
1017                   << ") we haven't subscribed to.";
1018     }
1019   }
1020 }
1021
1022 void V4L2VideoDecodeAccelerator::Dequeue() {
1023   DVLOG(3) << "Dequeue()";
1024   DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1025   DCHECK_NE(decoder_state_, kUninitialized);
1026   TRACE_EVENT0("Video Decoder", "V4L2VDA::Dequeue");
1027
1028   // Dequeue completed input (VIDEO_OUTPUT) buffers, and recycle to the free
1029   // list.
1030   while (input_buffer_queued_count_ > 0) {
1031     DCHECK(input_streamon_);
1032     struct v4l2_buffer dqbuf;
1033     struct v4l2_plane planes[1];
1034     memset(&dqbuf, 0, sizeof(dqbuf));
1035     memset(planes, 0, sizeof(planes));
1036     dqbuf.type   = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1037     dqbuf.memory = V4L2_MEMORY_MMAP;
1038     dqbuf.m.planes = planes;
1039     dqbuf.length = 1;
1040     if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
1041       if (errno == EAGAIN) {
1042         // EAGAIN if we're just out of buffers to dequeue.
1043         break;
1044       }
1045       DPLOG(ERROR) << "Dequeue(): ioctl() failed: VIDIOC_DQBUF";
1046       NOTIFY_ERROR(PLATFORM_FAILURE);
1047       return;
1048     }
1049     InputRecord& input_record = input_buffer_map_[dqbuf.index];
1050     DCHECK(input_record.at_device);
1051     free_input_buffers_.push_back(dqbuf.index);
1052     input_record.at_device = false;
1053     input_record.bytes_used = 0;
1054     input_record.input_id = -1;
1055     input_buffer_queued_count_--;
1056   }
1057
1058   // Dequeue completed output (VIDEO_CAPTURE) buffers, and queue to the
1059   // completed queue.
1060   while (output_buffer_queued_count_ > 0) {
1061     DCHECK(output_streamon_);
1062     struct v4l2_buffer dqbuf;
1063     scoped_ptr<struct v4l2_plane[]> planes(
1064         new v4l2_plane[output_planes_count_]);
1065     memset(&dqbuf, 0, sizeof(dqbuf));
1066     memset(planes.get(), 0, sizeof(struct v4l2_plane) * output_planes_count_);
1067     dqbuf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1068     dqbuf.memory = V4L2_MEMORY_MMAP;
1069     dqbuf.m.planes = planes.get();
1070     dqbuf.length = output_planes_count_;
1071     if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
1072       if (errno == EAGAIN) {
1073         // EAGAIN if we're just out of buffers to dequeue.
1074         break;
1075       }
1076       DPLOG(ERROR) << "Dequeue(): ioctl() failed: VIDIOC_DQBUF";
1077       NOTIFY_ERROR(PLATFORM_FAILURE);
1078       return;
1079     }
1080     OutputRecord& output_record = output_buffer_map_[dqbuf.index];
1081     DCHECK(output_record.at_device);
1082     DCHECK(!output_record.at_client);
1083     DCHECK_NE(output_record.egl_image, EGL_NO_IMAGE_KHR);
1084     DCHECK_NE(output_record.picture_id, -1);
1085     output_record.at_device = false;
1086     if (dqbuf.m.planes[0].bytesused + dqbuf.m.planes[1].bytesused == 0) {
1087       // This is an empty output buffer returned as part of a flush.
1088       free_output_buffers_.push(dqbuf.index);
1089     } else {
1090       DCHECK_GE(dqbuf.timestamp.tv_sec, 0);
1091       output_record.at_client = true;
1092       DVLOG(3) << "Dequeue(): returning input_id=" << dqbuf.timestamp.tv_sec
1093                << " as picture_id=" << output_record.picture_id;
1094       const media::Picture& picture =
1095           media::Picture(output_record.picture_id, dqbuf.timestamp.tv_sec);
1096       pending_picture_ready_.push(
1097           PictureRecord(output_record.cleared, picture));
1098       SendPictureReady();
1099       output_record.cleared = true;
1100       decoder_frames_at_client_++;
1101     }
1102     output_buffer_queued_count_--;
1103   }
1104
1105   NotifyFlushDoneIfNeeded();
1106 }
1107
1108 bool V4L2VideoDecodeAccelerator::EnqueueInputRecord() {
1109   DVLOG(3) << "EnqueueInputRecord()";
1110   DCHECK(!input_ready_queue_.empty());
1111
1112   // Enqueue an input (VIDEO_OUTPUT) buffer.
1113   const int buffer = input_ready_queue_.front();
1114   InputRecord& input_record = input_buffer_map_[buffer];
1115   DCHECK(!input_record.at_device);
1116   struct v4l2_buffer qbuf;
1117   struct v4l2_plane qbuf_plane;
1118   memset(&qbuf, 0, sizeof(qbuf));
1119   memset(&qbuf_plane, 0, sizeof(qbuf_plane));
1120   qbuf.index                 = buffer;
1121   qbuf.type                  = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
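  // The bitstream input_id travels with the buffer in the V4L2 timestamp
  // field; Dequeue() reads dqbuf.timestamp.tv_sec on the CAPTURE side to match
  // decoded pictures back to their input buffer.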
1122   qbuf.timestamp.tv_sec      = input_record.input_id;
1123   qbuf.memory                = V4L2_MEMORY_MMAP;
1124   qbuf.m.planes              = &qbuf_plane;
1125   qbuf.m.planes[0].bytesused = input_record.bytes_used;
1126   qbuf.length                = 1;
1127   IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
1128   input_ready_queue_.pop();
1129   input_record.at_device = true;
1130   input_buffer_queued_count_++;
1131   DVLOG(3) << "EnqueueInputRecord(): enqueued input_id="
1132            << input_record.input_id << " size="  << input_record.bytes_used;
1133   return true;
1134 }
1135
1136 bool V4L2VideoDecodeAccelerator::EnqueueOutputRecord() {
1137   DVLOG(3) << "EnqueueOutputRecord()";
1138   DCHECK(!free_output_buffers_.empty());
1139
1140   // Enqueue an output (VIDEO_CAPTURE) buffer.
1141   const int buffer = free_output_buffers_.front();
1142   OutputRecord& output_record = output_buffer_map_[buffer];
1143   DCHECK(!output_record.at_device);
1144   DCHECK(!output_record.at_client);
1145   DCHECK_NE(output_record.egl_image, EGL_NO_IMAGE_KHR);
1146   DCHECK_NE(output_record.picture_id, -1);
1147   if (output_record.egl_sync != EGL_NO_SYNC_KHR) {
1148     TRACE_EVENT0("Video Decoder",
1149                  "V4L2VDA::EnqueueOutputRecord: eglClientWaitSyncKHR");
1150     // If we have to wait for completion, wait.  Note that
1151     // free_output_buffers_ is a FIFO queue, so we always wait on the
1152     // buffer that has been in the queue the longest.
1153     if (eglClientWaitSyncKHR(egl_display_, output_record.egl_sync, 0,
1154                              EGL_FOREVER_KHR) == EGL_FALSE) {
1155       // This will cause tearing, but is safe otherwise.
1156       DVLOG(1) << __func__ << " eglClientWaitSyncKHR failed!";
1157     }
1158     if (eglDestroySyncKHR(egl_display_, output_record.egl_sync) != EGL_TRUE) {
1159       DLOG(FATAL) << __func__ << " eglDestroySyncKHR failed!";
1160       NOTIFY_ERROR(PLATFORM_FAILURE);
1161       return false;
1162     }
1163     output_record.egl_sync = EGL_NO_SYNC_KHR;
1164   }
1165   struct v4l2_buffer qbuf;
1166   scoped_ptr<struct v4l2_plane[]> qbuf_planes(
1167       new v4l2_plane[output_planes_count_]);
1168   memset(&qbuf, 0, sizeof(qbuf));
1169   memset(
1170       qbuf_planes.get(), 0, sizeof(struct v4l2_plane) * output_planes_count_);
1171   qbuf.index    = buffer;
1172   qbuf.type     = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1173   qbuf.memory   = V4L2_MEMORY_MMAP;
1174   qbuf.m.planes = qbuf_planes.get();
1175   qbuf.length = output_planes_count_;
1176   IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
1177   free_output_buffers_.pop();
1178   output_record.at_device = true;
1179   output_buffer_queued_count_++;
1180   return true;
1181 }
1182
1183 void V4L2VideoDecodeAccelerator::ReusePictureBufferTask(
1184     int32 picture_buffer_id, scoped_ptr<EGLSyncKHRRef> egl_sync_ref) {
1185   DVLOG(3) << "ReusePictureBufferTask(): picture_buffer_id="
1186            << picture_buffer_id;
1187   DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1188   TRACE_EVENT0("Video Decoder", "V4L2VDA::ReusePictureBufferTask");
1189
1190   // We run ReusePictureBufferTask even if we're in kResetting.
1191   if (decoder_state_ == kError) {
1192     DVLOG(2) << "ReusePictureBufferTask(): early out: kError state";
1193     return;
1194   }
1195
1196   if (decoder_state_ == kChangingResolution) {
1197     DVLOG(2) << "ReusePictureBufferTask(): early out: kChangingResolution";
1198     return;
1199   }
1200
1201   size_t index;
1202   for (index = 0; index < output_buffer_map_.size(); ++index)
1203     if (output_buffer_map_[index].picture_id == picture_buffer_id)
1204       break;
1205
1206   if (index >= output_buffer_map_.size()) {
1207     // It's possible that we've already posted a DismissPictureBuffer for this
1208     // picture, but it has not yet executed when this ReusePictureBuffer was
1209     // posted to us by the client. In that case just ignore this (we've already
1210     // dismissed it and accounted for that) and let the sync object get
1211     // destroyed.
1212     DVLOG(4) << "ReusePictureBufferTask(): got picture id= "
1213              << picture_buffer_id << " not in use (anymore?).";
1214     return;
1215   }
1216
1217   OutputRecord& output_record = output_buffer_map_[index];
1218   if (output_record.at_device || !output_record.at_client) {
1219     DLOG(ERROR) << "ReusePictureBufferTask(): picture_buffer_id not reusable";
1220     NOTIFY_ERROR(INVALID_ARGUMENT);
1221     return;
1222   }
1223
1224   DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
1225   DCHECK(!output_record.at_device);
1226   output_record.at_client = false;
1227   output_record.egl_sync = egl_sync_ref->egl_sync;
1228   free_output_buffers_.push(index);
1229   decoder_frames_at_client_--;
1230   // Take ownership of the EGLSync.
1231   egl_sync_ref->egl_sync = EGL_NO_SYNC_KHR;
1232   // We got a buffer back, so enqueue it back.
1233   Enqueue();
1234 }
1235
1236 void V4L2VideoDecodeAccelerator::FlushTask() {
1237   DVLOG(3) << "FlushTask()";
1238   DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1239   TRACE_EVENT0("Video Decoder", "V4L2VDA::FlushTask");
1240
1241   // Flush outstanding buffers.
1242   if (decoder_state_ == kInitialized || decoder_state_ == kAfterReset) {
1243     // There's nothing in the pipe, so return done immediately.
1244     DVLOG(3) << "FlushTask(): returning flush";
1245     child_message_loop_proxy_->PostTask(
1246         FROM_HERE, base::Bind(&Client::NotifyFlushDone, client_));
1247     return;
1248   } else if (decoder_state_ == kError) {
1249     DVLOG(2) << "FlushTask(): early out: kError state";
1250     return;
1251   }
1252
1253   // We don't support stacked flushing.
1254   DCHECK(!decoder_flushing_);
1255
1256   // Queue up an empty buffer -- this triggers the flush.
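  // The sentinel is a BitstreamBufferRef with no shared memory, zero size and
  // input_id == kFlushBufferId; DecodeBufferTask() turns it into an empty
  // input buffer that is flushed down the pipe.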
1257   decoder_input_queue_.push(
1258       linked_ptr<BitstreamBufferRef>(new BitstreamBufferRef(
1259           io_client_, io_message_loop_proxy_, NULL, 0, kFlushBufferId)));
1260   decoder_flushing_ = true;
1261   SendPictureReady();  // Send all pending PictureReady.
1262
1263   ScheduleDecodeBufferTaskIfNeeded();
1264 }
1265
1266 void V4L2VideoDecodeAccelerator::NotifyFlushDoneIfNeeded() {
1267   if (!decoder_flushing_)
1268     return;
1269
1270   // Pipeline is empty when:
1271   // * Decoder input queue is empty of non-delayed buffers.
1272   // * There is no currently filling input buffer.
1273   // * Input holding queue is empty.
1274   // * All input (VIDEO_OUTPUT) buffers are returned.
1275   if (!decoder_input_queue_.empty()) {
1276     if (decoder_input_queue_.front()->input_id !=
1277         decoder_delay_bitstream_buffer_id_)
1278       return;
1279   }
1280   if (decoder_current_input_buffer_ != -1)
1281     return;
1282   if ((input_ready_queue_.size() + input_buffer_queued_count_) != 0)
1283     return;
1284
1285   // TODO(posciak): crbug.com/270039. Exynos requires a streamoff-streamon
1286   // sequence after flush to continue, even if we are not resetting. This would
1287   // make sense, because we don't really want to resume from a non-resume point
1288   // (e.g. not from an IDR) if we are flushed.
1289   // MSE player however triggers a Flush() on chunk end, but never Reset(). One
1290   // could argue either way, or even say that Flush() is not needed/harmful when
1291   // transitioning to next chunk.
1292   // For now, do the streamoff-streamon cycle to satisfy Exynos and not freeze
1293   // when doing MSE. This should be harmless otherwise.
1294   if (!StopDevicePoll(false))
1295     return;
1296
1297   if (!StartDevicePoll())
1298     return;
1299
1300   decoder_delay_bitstream_buffer_id_ = -1;
1301   decoder_flushing_ = false;
1302   DVLOG(3) << "NotifyFlushDoneIfNeeded(): returning flush";
1303   child_message_loop_proxy_->PostTask(
1304       FROM_HERE, base::Bind(&Client::NotifyFlushDone, client_));
1305
1306   // While we were flushing, we early-outed DecodeBufferTask()s.
1307   ScheduleDecodeBufferTaskIfNeeded();
1308 }
1309
1310 void V4L2VideoDecodeAccelerator::ResetTask() {
1311   DVLOG(3) << "ResetTask()";
1312   DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1313   TRACE_EVENT0("Video Decoder", "V4L2VDA::ResetTask");
1314
1315   if (decoder_state_ == kError) {
1316     DVLOG(2) << "ResetTask(): early out: kError state";
1317     return;
1318   }
1319
1320   // If we are in the middle of switching resolutions, postpone reset until
1321   // it's done. We don't have to worry about timing of this wrt decoding,
1322   // because input pipe is already stopped if we are changing resolution.
1323   // We will come back here after we are done with the resolution change.
1324   DCHECK(!resolution_change_reset_pending_);
1325   if (resolution_change_pending_ || decoder_state_ == kChangingResolution) {
1326     resolution_change_reset_pending_ = true;
1327     return;
1328   }
1329
1330   // We stop streaming and clear buffer tracking info (not preserving inputs).
1331   // StopDevicePoll() unconditionally does _not_ destroy buffers, however.
1332   if (!StopDevicePoll(false))
1333     return;
1334
1335   decoder_current_bitstream_buffer_.reset();
1336   while (!decoder_input_queue_.empty())
1337     decoder_input_queue_.pop();
1338
1339   decoder_current_input_buffer_ = -1;
1340
1341   // If we were flushing, we'll never return any more BitstreamBuffers or
1342   // PictureBuffers; they have all been dropped and returned by now.
1343   NotifyFlushDoneIfNeeded();
1344
1345   // Mark that we're resetting, then enqueue a ResetDoneTask().  All intervening
1346   // jobs will early-out in the kResetting state.
1347   decoder_state_ = kResetting;
1348   SendPictureReady();  // Send all pending PictureReady.
1349   decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
1350       &V4L2VideoDecodeAccelerator::ResetDoneTask, base::Unretained(this)));
1351 }
1352
1353 void V4L2VideoDecodeAccelerator::ResetDoneTask() {
1354   DVLOG(3) << "ResetDoneTask()";
1355   DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1356   TRACE_EVENT0("Video Decoder", "V4L2VDA::ResetDoneTask");
1357
1358   if (decoder_state_ == kError) {
1359     DVLOG(2) << "ResetDoneTask(): early out: kError state";
1360     return;
1361   }
1362
1363   if (!StartDevicePoll())
1364     return;
1365
1366   // We might have received a resolution change event while we were waiting
1367   // for the reset to finish. The codec will not post another event if the
1368   // resolution after reset remains the same as the one to which we were just
1369   // about to switch, so preserve the event across reset so we can address
1370   // it after resuming.
1371
1372   // Reset format-specific bits.
1373   if (video_profile_ >= media::H264PROFILE_MIN &&
1374       video_profile_ <= media::H264PROFILE_MAX) {
1375     decoder_h264_parser_.reset(new media::H264Parser());
1376   }
1377
1378   // Jobs drained, we're finished resetting.
1379   DCHECK_EQ(decoder_state_, kResetting);
1380   if (output_buffer_map_.empty()) {
1381     // We must have gotten Reset() before we had a chance to request buffers
1382     // from the client.
1383     decoder_state_ = kInitialized;
1384   } else {
1385     decoder_state_ = kAfterReset;
1386   }
1387
1388   decoder_partial_frame_pending_ = false;
1389   decoder_delay_bitstream_buffer_id_ = -1;
1390   child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
1391       &Client::NotifyResetDone, client_));
1392
1393   // While we were resetting, we early-outed DecodeBufferTask()s.
1394   ScheduleDecodeBufferTaskIfNeeded();
1395 }
1396
1397 void V4L2VideoDecodeAccelerator::DestroyTask() {
1398   DVLOG(3) << "DestroyTask()";
1399   TRACE_EVENT0("Video Decoder", "V4L2VDA::DestroyTask");
1400
1401   // DestroyTask() should run regardless of decoder_state_.
1402
1403   // Stop streaming and the device_poll_thread_.
1404   StopDevicePoll(false);
1405
1406   decoder_current_bitstream_buffer_.reset();
1407   decoder_current_input_buffer_ = -1;
1408   decoder_decode_buffer_tasks_scheduled_ = 0;
1409   decoder_frames_at_client_ = 0;
1410   while (!decoder_input_queue_.empty())
1411     decoder_input_queue_.pop();
1412   decoder_flushing_ = false;
1413
1414   // Set our state to kError.  Just in case.
1415   decoder_state_ = kError;
1416 }
1417
1418 bool V4L2VideoDecodeAccelerator::StartDevicePoll() {
1419   DVLOG(3) << "StartDevicePoll()";
1420   DCHECK(!device_poll_thread_.IsRunning());
1421   DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1422
1423   // Start up the device poll thread and schedule its first DevicePollTask().
1424   if (!device_poll_thread_.Start()) {
1425     DLOG(ERROR) << "StartDevicePoll(): Device thread failed to start";
1426     NOTIFY_ERROR(PLATFORM_FAILURE);
1427     return false;
1428   }
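       // The first DevicePollTask() is posted with poll_device = false (the 0
       // below), since nothing has been queued to the device yet.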
1429   device_poll_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
1430       &V4L2VideoDecodeAccelerator::DevicePollTask,
1431       base::Unretained(this),
1432       0));
1433
1434   return true;
1435 }
1436
1437 bool V4L2VideoDecodeAccelerator::StopDevicePoll(bool keep_input_state) {
1438   DVLOG(3) << "StopDevicePoll()";
1439   if (decoder_thread_.IsRunning())
1440     DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1441
1442   // Signal the DevicePollTask() to stop, and stop the device poll thread.
1443   if (!device_->SetDevicePollInterrupt()) {
1444     DPLOG(ERROR) << "SetDevicePollInterrupt(): failed";
1445     NOTIFY_ERROR(PLATFORM_FAILURE);
1446     return false;
1447   }
1448   device_poll_thread_.Stop();
1449   // Clear the interrupt now, to be sure.
1450   if (!device_->ClearDevicePollInterrupt()) {
1451     NOTIFY_ERROR(PLATFORM_FAILURE);
1452     return false;
1453   }
1454
1455   // Stop streaming.
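       // Note that in V4L2 mem-to-mem terms, the OUTPUT queue carries our input
       // (bitstream) buffers and the CAPTURE queue carries decoded frames.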
1456   if (!keep_input_state) {
1457     if (input_streamon_) {
1458       __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1459       IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type);
1460     }
1461     input_streamon_ = false;
1462   }
1463   if (output_streamon_) {
1464     __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1465     IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type);
1466   }
1467   output_streamon_ = false;
1468
1469   // Reset all our accounting info.
1470   if (!keep_input_state) {
1471     while (!input_ready_queue_.empty())
1472       input_ready_queue_.pop();
1473     free_input_buffers_.clear();
1474     for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
1475       free_input_buffers_.push_back(i);
1476       input_buffer_map_[i].at_device = false;
1477       input_buffer_map_[i].bytes_used = 0;
1478       input_buffer_map_[i].input_id = -1;
1479     }
1480     input_buffer_queued_count_ = 0;
1481   }
1482
1483   while (!free_output_buffers_.empty())
1484     free_output_buffers_.pop();
1485
1486   for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
1487     OutputRecord& output_record = output_buffer_map_[i];
1488     DCHECK(!(output_record.at_client && output_record.at_device));
1489
1490     // After streamoff, the device drops ownership of all buffers, even if
1491     // we don't dequeue them explicitly.
1492     output_buffer_map_[i].at_device = false;
1493     // Some of them may still be owned by the client however.
1494     // Reuse only those that aren't.
1495     if (!output_record.at_client) {
1496       DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
1497       free_output_buffers_.push(i);
1498     }
1499   }
1500   output_buffer_queued_count_ = 0;
1501
1502   DVLOG(3) << "StopDevicePoll(): device poll stopped";
1503   return true;
1504 }
1505
1506 void V4L2VideoDecodeAccelerator::StartResolutionChangeIfNeeded() {
1507   DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1508   DCHECK_NE(decoder_state_, kUninitialized);
1509   DCHECK_NE(decoder_state_, kResetting);
1510
1511   if (!resolution_change_pending_)
1512     return;
1513
1514   DVLOG(3) << "No more work, initiate resolution change";
1515
1516   // Keep the input queue; queued inputs are decoded after the change.
1517   if (!StopDevicePoll(true))
1518     return;
1519
1520   decoder_state_ = kChangingResolution;
1521   DCHECK(resolution_change_pending_);
1522   resolution_change_pending_ = false;
1523
1524   // Post a task to clean up buffers on the child thread. This also ensures
1525   // that we won't accept ReusePictureBuffer() calls from then on.
1526   child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
1527       &V4L2VideoDecodeAccelerator::ResolutionChangeDestroyBuffers,
1528       weak_this_));
1529 }
1530
1531 void V4L2VideoDecodeAccelerator::FinishResolutionChange() {
1532   DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1533   DCHECK_EQ(decoder_state_, kChangingResolution);
1534   DVLOG(3) << "FinishResolutionChange()";
1535
1536   if (decoder_state_ == kError) {
1537     DVLOG(2) << "FinishResolutionChange(): early out: kError state";
1538     return;
1539   }
1540
1541   struct v4l2_format format;
1542   bool again;
1543   bool ret = GetFormatInfo(&format, &again);
1544   if (!ret || again) {
1545     DVLOG(3) << "Couldn't get format information after resolution change";
1546     NOTIFY_ERROR(PLATFORM_FAILURE);
1547     return;
1548   }
1549
1550   if (!CreateBuffersForFormat(format)) {
1551     DVLOG(3) << "Couldn't reallocate buffers after resolution change";
1552     NOTIFY_ERROR(PLATFORM_FAILURE);
1553     return;
1554   }
1555
1556   decoder_state_ = kDecoding;
1557
1558   if (resolution_change_reset_pending_) {
1559     resolution_change_reset_pending_ = false;
1560     ResetTask();
1561     return;
1562   }
1563
1564   if (!StartDevicePoll())
1565     return;
1566
1567   Enqueue();
1568   ScheduleDecodeBufferTaskIfNeeded();
1569 }
1570
1571 void V4L2VideoDecodeAccelerator::DevicePollTask(bool poll_device) {
1572   DVLOG(3) << "DevicePollTask()";
1573   DCHECK_EQ(device_poll_thread_.message_loop(), base::MessageLoop::current());
1574   TRACE_EVENT0("Video Decoder", "V4L2VDA::DevicePollTask");
1575
1576   bool event_pending = false;
1577
1578   if (!device_->Poll(poll_device, &event_pending)) {
1579     NOTIFY_ERROR(PLATFORM_FAILURE);
1580     return;
1581   }
1582
1583   // All processing should happen on ServiceDeviceTask(), since we shouldn't
1584   // touch decoder state from this thread.
1585   decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
1586       &V4L2VideoDecodeAccelerator::ServiceDeviceTask,
1587       base::Unretained(this), event_pending));
1588 }
1589
1590 void V4L2VideoDecodeAccelerator::NotifyError(Error error) {
1591   DVLOG(2) << "NotifyError()";
1592
1593   if (!child_message_loop_proxy_->BelongsToCurrentThread()) {
1594     child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
1595         &V4L2VideoDecodeAccelerator::NotifyError, weak_this_, error));
1596     return;
1597   }
1598
1599   if (client_) {
1600     client_->NotifyError(error);
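         // Invalidate client WeakPtrs so no further callbacks reach the client.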
1601     client_ptr_factory_.reset();
1602   }
1603 }
1604
1605 void V4L2VideoDecodeAccelerator::SetDecoderState(State state) {
1606   DVLOG(3) << "SetDecoderState(): state=" << state;
1607
1608   // We can touch decoder_state_ only if this is the decoder thread or the
1609   // decoder thread isn't running.
1610   if (decoder_thread_.message_loop() != NULL &&
1611       decoder_thread_.message_loop() != base::MessageLoop::current()) {
1612     decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
1613         &V4L2VideoDecodeAccelerator::SetDecoderState,
1614         base::Unretained(this), state));
1615   } else {
1616     decoder_state_ = state;
1617   }
1618 }
1619
1620 bool V4L2VideoDecodeAccelerator::GetFormatInfo(struct v4l2_format* format,
1621                                                  bool* again) {
1622   DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1623
1624   *again = false;
1625   memset(format, 0, sizeof(*format));
1626   format->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1627   if (device_->Ioctl(VIDIOC_G_FMT, format) != 0) {
1628     if (errno == EINVAL) {
1629       // EINVAL means we haven't seen sufficient stream to decode the format.
1630       *again = true;
1631       return true;
1632     } else {
1633       DPLOG(ERROR) << __func__ << "(): ioctl() failed: VIDIOC_G_FMT";
1634       NOTIFY_ERROR(PLATFORM_FAILURE);
1635       return false;
1636     }
1637   }
1638
1639   return true;
1640 }
1641
1642 bool V4L2VideoDecodeAccelerator::CreateBuffersForFormat(
1643     const struct v4l2_format& format) {
1644   DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1645   output_planes_count_ = format.fmt.pix_mp.num_planes;
1646   frame_buffer_size_.SetSize(
1647       format.fmt.pix_mp.width, format.fmt.pix_mp.height);
1648   DVLOG(3) << "CreateBuffersForFormat(): new resolution: "
1649            << frame_buffer_size_.ToString();
1650
1651   if (!CreateOutputBuffers())
1652     return false;
1653
1654   return true;
1655 }
1656
1657 bool V4L2VideoDecodeAccelerator::CreateInputBuffers() {
1658   DVLOG(3) << "CreateInputBuffers()";
1659   // We always run this as we prepare to initialize.
1660   DCHECK_EQ(decoder_state_, kUninitialized);
1661   DCHECK(!input_streamon_);
1662   DCHECK(input_buffer_map_.empty());
1663
1664   __u32 pixelformat = V4L2Device::VideoCodecProfileToV4L2PixFmt(video_profile_);
1665   if (!pixelformat) {
1666     NOTREACHED();
1667     return false;
1668   }
1669
1670   struct v4l2_format format;
1671   memset(&format, 0, sizeof(format));
1672   format.type                              = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1673   format.fmt.pix_mp.pixelformat            = pixelformat;
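       // sizeimage bounds how much compressed data one input buffer can hold;
       // allow a larger worst case when decoding above 1080p is permitted.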
1674   if (CommandLine::ForCurrentProcess()->HasSwitch(
1675           switches::kIgnoreResolutionLimitsForAcceleratedVideoDecode))
1676     format.fmt.pix_mp.plane_fmt[0].sizeimage = kInputBufferMaxSizeFor4k;
1677   else
1678     format.fmt.pix_mp.plane_fmt[0].sizeimage = kInputBufferMaxSizeFor1080p;
1679   format.fmt.pix_mp.num_planes             = 1;
1680   IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);
1681
1682   struct v4l2_requestbuffers reqbufs;
1683   memset(&reqbufs, 0, sizeof(reqbufs));
1684   reqbufs.count  = kInputBufferCount;
1685   reqbufs.type   = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1686   reqbufs.memory = V4L2_MEMORY_MMAP;
1687   IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);
1688   input_buffer_map_.resize(reqbufs.count);
1689   for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
1690     free_input_buffers_.push_back(i);
1691
1692     // Query for the MEMORY_MMAP pointer.
1693     struct v4l2_plane planes[1];
1694     struct v4l2_buffer buffer;
1695     memset(&buffer, 0, sizeof(buffer));
1696     memset(planes, 0, sizeof(planes));
1697     buffer.index    = i;
1698     buffer.type     = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1699     buffer.memory   = V4L2_MEMORY_MMAP;
1700     buffer.m.planes = planes;
1701     buffer.length   = 1;
1702     IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer);
1703     void* address = device_->Mmap(NULL,
1704                                   buffer.m.planes[0].length,
1705                                   PROT_READ | PROT_WRITE,
1706                                   MAP_SHARED,
1707                                   buffer.m.planes[0].m.mem_offset);
1708     if (address == MAP_FAILED) {
1709       DPLOG(ERROR) << "CreateInputBuffers(): mmap() failed";
1710       return false;
1711     }
1712     input_buffer_map_[i].address = address;
1713     input_buffer_map_[i].length = buffer.m.planes[0].length;
1714   }
1715
1716   return true;
1717 }
1718
1719 bool V4L2VideoDecodeAccelerator::CreateOutputBuffers() {
1720   DVLOG(3) << "CreateOutputBuffers()";
1721   DCHECK(decoder_state_ == kInitialized ||
1722          decoder_state_ == kChangingResolution);
1723   DCHECK(!output_streamon_);
1724   DCHECK(output_buffer_map_.empty());
1725
1726   // Number of output buffers we need.
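       // V4L2_CID_MIN_BUFFERS_FOR_CAPTURE is the driver-reported minimum number
       // of CAPTURE buffers needed to decode the stream (its DPB size).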
1727   struct v4l2_control ctrl;
1728   memset(&ctrl, 0, sizeof(ctrl));
1729   ctrl.id = V4L2_CID_MIN_BUFFERS_FOR_CAPTURE;
1730   IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_G_CTRL, &ctrl);
1731   output_dpb_size_ = ctrl.value;
1732
1733   // Output format setup in Initialize().
1734
1735   // Allocate the output buffers.
1736   struct v4l2_requestbuffers reqbufs;
1737   memset(&reqbufs, 0, sizeof(reqbufs));
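       // kDpbOutputBufferExtraCount adds headroom above the DPB minimum so
       // decoding can make progress while the client still holds some pictures.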
1738   reqbufs.count  = output_dpb_size_ + kDpbOutputBufferExtraCount;
1739   reqbufs.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1740   reqbufs.memory = V4L2_MEMORY_MMAP;
1741   IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);
1742
1743   output_buffer_map_.resize(reqbufs.count);
1744
1745   DVLOG(3) << "CreateOutputBuffers(): ProvidePictureBuffers(): "
1746            << "buffer_count=" << output_buffer_map_.size()
1747            << ", width=" << frame_buffer_size_.width()
1748            << ", height=" << frame_buffer_size_.height();
1749   child_message_loop_proxy_->PostTask(FROM_HERE,
1750                                       base::Bind(&Client::ProvidePictureBuffers,
1751                                                  client_,
1752                                                  output_buffer_map_.size(),
1753                                                  frame_buffer_size_,
1754                                                  device_->GetTextureTarget()));
1755
1756   // Wait for the client to call AssignPictureBuffers() on the Child thread.
1757   // We do this because if we continue decoding without finishing buffer
1758   // allocation, we may end up Resetting before AssignPictureBuffers arrives,
1759   // resulting in unnecessary complications and subtle bugs.
1760   // For example, if the client calls Decode(Input1), Reset(), Decode(Input2)
1761   // in a sequence, and Decode(Input1) results in us getting here and exiting
1762   // without waiting, we might end up running Reset{,Done}Task() before
1763   // AssignPictureBuffers is scheduled, thus cleaning up and pushing buffers
1764   // to the free_output_buffers_ queue twice. If we somehow marked buffers as
1765   // not ready, we'd need special handling for restarting the second Decode
1766   // task and delaying it anyway.
1767   // Waiting here is not very costly and makes reasoning about different
1768   // situations much simpler.
1769   pictures_assigned_.Wait();
1770
1771   Enqueue();
1772   return true;
1773 }
1774
1775 void V4L2VideoDecodeAccelerator::DestroyInputBuffers() {
1776   DVLOG(3) << "DestroyInputBuffers()";
1777   DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
1778   DCHECK(!input_streamon_);
1779
1780   for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
1781     if (input_buffer_map_[i].address != NULL) {
1782       device_->Munmap(input_buffer_map_[i].address,
1783                       input_buffer_map_[i].length);
1784     }
1785   }
1786
1787   struct v4l2_requestbuffers reqbufs;
1788   memset(&reqbufs, 0, sizeof(reqbufs));
1789   reqbufs.count = 0;
1790   reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1791   reqbufs.memory = V4L2_MEMORY_MMAP;
1792   IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs);
1793
1794   input_buffer_map_.clear();
1795   free_input_buffers_.clear();
1796 }
1797
1798 bool V4L2VideoDecodeAccelerator::DestroyOutputBuffers() {
1799   DVLOG(3) << "DestroyOutputBuffers()";
1800   DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
1801   DCHECK(!output_streamon_);
1802   bool success = true;
1803
1804   for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
1805     OutputRecord& output_record = output_buffer_map_[i];
1806
1807     if (output_record.egl_image != EGL_NO_IMAGE_KHR) {
1808       if (device_->DestroyEGLImage(egl_display_, output_record.egl_image) !=
1809           EGL_TRUE) {
1810         DVLOG(1) << __func__ << " DestroyEGLImage failed.";
1811         success = false;
1812       }
1813     }
1814
1815     if (output_record.egl_sync != EGL_NO_SYNC_KHR) {
1816       if (eglDestroySyncKHR(egl_display_, output_record.egl_sync) != EGL_TRUE) {
1817         DVLOG(1) << __func__ << " eglDestroySyncKHR failed.";
1818         success = false;
1819       }
1820     }
1821
1822     DVLOG(1) << "DestroyOutputBuffers(): dismissing PictureBuffer id="
1823              << output_record.picture_id;
1824     child_message_loop_proxy_->PostTask(
1825         FROM_HERE,
1826         base::Bind(
1827             &Client::DismissPictureBuffer, client_, output_record.picture_id));
1828   }
1829
1830   struct v4l2_requestbuffers reqbufs;
1831   memset(&reqbufs, 0, sizeof(reqbufs));
1832   reqbufs.count = 0;
1833   reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1834   reqbufs.memory = V4L2_MEMORY_MMAP;
1835   if (device_->Ioctl(VIDIOC_REQBUFS, &reqbufs) != 0) {
1836     DPLOG(ERROR) << "DestroyOutputBuffers() ioctl() failed: VIDIOC_REQBUFS";
1837     success = false;
1838   }
1839
1840   output_buffer_map_.clear();
1841   while (!free_output_buffers_.empty())
1842     free_output_buffers_.pop();
1843
1844   return success;
1845 }
1846
1847 void V4L2VideoDecodeAccelerator::ResolutionChangeDestroyBuffers() {
1848   DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
1849   DVLOG(3) << "ResolutionChangeDestroyBuffers()";
1850
1851   if (!DestroyOutputBuffers()) {
1852     DLOG(FATAL) << __func__ << " Failed destroying output buffers.";
1853     NOTIFY_ERROR(PLATFORM_FAILURE);
1854     return;
1855   }
1856
1857   // Finish resolution change on decoder thread.
1858   decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
1859       &V4L2VideoDecodeAccelerator::FinishResolutionChange,
1860       base::Unretained(this)));
1861 }
1862
1863 void V4L2VideoDecodeAccelerator::SendPictureReady() {
1864   DVLOG(3) << "SendPictureReady()";
1865   DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1866   bool resetting_or_flushing =
1867       (decoder_state_ == kResetting || decoder_flushing_);
1868   while (pending_picture_ready_.size() > 0) {
1869     bool cleared = pending_picture_ready_.front().cleared;
1870     const media::Picture& picture = pending_picture_ready_.front().picture;
1871     if (cleared && picture_clearing_count_ == 0) {
1872       // This picture is cleared. Post it to the IO thread to reduce latency.
1873       // This is the common case once the initial pictures have been cleared.
1874       io_message_loop_proxy_->PostTask(
1875           FROM_HERE, base::Bind(&Client::PictureReady, io_client_, picture));
1876       pending_picture_ready_.pop();
1877     } else if (!cleared || resetting_or_flushing) {
1878       DVLOG(3) << "SendPictureReady()"
1879                << ". cleared=" << pending_picture_ready_.front().cleared
1880                << ", decoder_state_=" << decoder_state_
1881                << ", decoder_flushing_=" << decoder_flushing_
1882                << ", picture_clearing_count_=" << picture_clearing_count_;
1883       // If the picture is not cleared, post it to the child thread because it
1884       // has to be cleared in the child thread. A picture only needs to be
1885       // cleared once. If the decoder is resetting or flushing, send all
1886       // pictures to ensure PictureReady arrives before reset/flush completes.
1887       child_message_loop_proxy_->PostTaskAndReply(
1888           FROM_HERE,
1889           base::Bind(&Client::PictureReady, client_, picture),
1890           // Unretained is safe. If Client::PictureReady gets to run, |this| is
1891           // alive. Destroy() waits for the decoder thread to finish.
1892           base::Bind(&V4L2VideoDecodeAccelerator::PictureCleared,
1893                      base::Unretained(this)));
1894       picture_clearing_count_++;
1895       pending_picture_ready_.pop();
1896     } else {
1897       // This picture is cleared. But some pictures are about to be cleared on
1898       // the child thread. To preserve the order, do not send this until those
1899       // pictures are cleared.
1900       break;
1901     }
1902   }
1903 }
1904
1905 void V4L2VideoDecodeAccelerator::PictureCleared() {
1906   DVLOG(3) << "PictureCleared(). clearing count=" << picture_clearing_count_;
1907   DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1908   DCHECK_GT(picture_clearing_count_, 0);
1909   picture_clearing_count_--;
1910   SendPictureReady();
1911 }
1912
1913 bool V4L2VideoDecodeAccelerator::IsResolutionChangeNecessary() {
1914   DVLOG(3) << "IsResolutionChangeNecessary() ";
1915
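       // If the driver now reports a different minimum CAPTURE buffer count,
       // the stream's DPB requirement changed and buffers must be reallocated.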
1916   struct v4l2_control ctrl;
1917   memset(&ctrl, 0, sizeof(ctrl));
1918   ctrl.id = V4L2_CID_MIN_BUFFERS_FOR_CAPTURE;
1919   IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_G_CTRL, &ctrl);
1920   if (ctrl.value != output_dpb_size_) {
1921     DVLOG(3)
1922         << "IsResolutionChangeNecessary(): returning true, DPB size mismatch";
1923     return true;
1924   }
1925   struct v4l2_format format;
1926   bool again = false;
1927   bool ret = GetFormatInfo(&format, &again);
1928   if (!ret || again) {
1929     DVLOG(3) << "IsResolutionChangeNecessary(): GetFormatInfo() failed";
1930     return false;
1931   }
1932   gfx::Size new_size(base::checked_cast<int>(format.fmt.pix_mp.width),
1933                      base::checked_cast<int>(format.fmt.pix_mp.height));
1934   if (frame_buffer_size_ != new_size) {
1935     DVLOG(3) << "IsResolutionChangeNecessary(): Resolution change detected";
1936     return true;
1937   }
1938   return false;
1939 }
1940
1941 }  // namespace content