// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <fcntl.h>
#include <linux/videodev2.h>
#include <poll.h>
#include <sys/eventfd.h>
#include <sys/ioctl.h>
#include <sys/mman.h>

#include "base/callback.h"
#include "base/command_line.h"
#include "base/debug/trace_event.h"
#include "base/message_loop/message_loop_proxy.h"
#include "base/numerics/safe_conversions.h"
#include "content/common/gpu/media/v4l2_video_encode_accelerator.h"
#include "content/public/common/content_switches.h"
#include "media/base/bitstream_buffer.h"

#define NOTIFY_ERROR(x)                            \
  do {                                             \
    SetEncoderState(kError);                       \
    DLOG(ERROR) << "calling NotifyError(): " << x; \
    NotifyError(x);                                \
  } while (0)

#define IOCTL_OR_ERROR_RETURN_VALUE(type, arg, value)              \
  do {                                                             \
    if (device_->Ioctl(type, arg) != 0) {                          \
      DPLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \
      NOTIFY_ERROR(kPlatformFailureError);                         \
      return value;                                                \
    }                                                              \
  } while (0)

#define IOCTL_OR_ERROR_RETURN(type, arg) \
  IOCTL_OR_ERROR_RETURN_VALUE(type, arg, ((void)0))

#define IOCTL_OR_ERROR_RETURN_FALSE(type, arg) \
  IOCTL_OR_ERROR_RETURN_VALUE(type, arg, false)

#define IOCTL_OR_LOG_ERROR(type, arg)                              \
  do {                                                             \
    if (device_->Ioctl(type, arg) != 0)                            \
      DPLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \
  } while (0)

namespace content {

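// Tracks a client-provided output bitstream buffer (its id plus the mapped
// shared memory) for as long as the encoder owns it; it is handed back to the
// client via Client::BitstreamBufferReady() once filled.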
struct V4L2VideoEncodeAccelerator::BitstreamBufferRef {
  BitstreamBufferRef(int32 id, scoped_ptr<base::SharedMemory> shm, size_t size)
      : id(id), shm(shm.Pass()), size(size) {}
  const int32 id;
  const scoped_ptr<base::SharedMemory> shm;
  const size_t size;
};

V4L2VideoEncodeAccelerator::InputRecord::InputRecord() : at_device(false) {
}

V4L2VideoEncodeAccelerator::OutputRecord::OutputRecord()
    : at_device(false), address(NULL), length(0) {
}

V4L2VideoEncodeAccelerator::V4L2VideoEncodeAccelerator(
    scoped_ptr<V4L2Device> device)
    : child_message_loop_proxy_(base::MessageLoopProxy::current()),
      output_buffer_byte_size_(0),
      device_input_format_(media::VideoFrame::UNKNOWN),
      input_planes_count_(0),
      output_format_fourcc_(0),
      encoder_state_(kUninitialized),
      stream_header_size_(0),
      device_(device.Pass()),
      input_streamon_(false),
      input_buffer_queued_count_(0),
      input_memory_type_(V4L2_MEMORY_USERPTR),
      output_streamon_(false),
      output_buffer_queued_count_(0),
      encoder_thread_("V4L2EncoderThread"),
      device_poll_thread_("V4L2EncoderDevicePollThread"),
      weak_this_ptr_factory_(this) {
  weak_this_ = weak_this_ptr_factory_.GetWeakPtr();
}

V4L2VideoEncodeAccelerator::~V4L2VideoEncodeAccelerator() {
  DCHECK(!encoder_thread_.IsRunning());
  DCHECK(!device_poll_thread_.IsRunning());
  DVLOG(4) << __func__;

  DestroyInputBuffers();
  DestroyOutputBuffers();
}

bool V4L2VideoEncodeAccelerator::Initialize(
    media::VideoFrame::Format input_format,
    const gfx::Size& input_visible_size,
    media::VideoCodecProfile output_profile,
    uint32 initial_bitrate,
    Client* client) {
  DVLOG(3) << __func__ << ": input_format="
           << media::VideoFrame::FormatToString(input_format)
           << ", input_visible_size=" << input_visible_size.ToString()
           << ", output_profile=" << output_profile
           << ", initial_bitrate=" << initial_bitrate;

  visible_size_ = input_visible_size;

  client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client));
  client_ = client_ptr_factory_->GetWeakPtr();

  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  DCHECK_EQ(encoder_state_, kUninitialized);

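  // The encoder requires a multi-planar memory-to-memory device: raw frames
  // are queued on the OUTPUT queue and encoded bitstream is dequeued from the
  // CAPTURE queue, so both MPLANE capabilities plus streaming I/O must be
  // present.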
  struct v4l2_capability caps;
  memset(&caps, 0, sizeof(caps));
  const __u32 kCapsRequired = V4L2_CAP_VIDEO_CAPTURE_MPLANE |
                              V4L2_CAP_VIDEO_OUTPUT_MPLANE | V4L2_CAP_STREAMING;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYCAP, &caps);
  if ((caps.capabilities & kCapsRequired) != kCapsRequired) {
    DLOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP: "
                   "caps check failed: 0x" << std::hex << caps.capabilities;
    return false;
  }

  if (!SetFormats(input_format, output_profile)) {
    DLOG(ERROR) << "Failed setting up formats";
    return false;
  }

  if (input_format != device_input_format_) {
    DVLOG(1) << "Input format not supported by the HW, will convert to "
             << media::VideoFrame::FormatToString(device_input_format_);

    scoped_ptr<V4L2Device> device =
        V4L2Device::Create(V4L2Device::kImageProcessor);
    image_processor_.reset(new V4L2ImageProcessor(device.Pass()));

    // Convert from input_format to device_input_format_, keeping the size
    // at visible_size_ and requiring the output buffers to be of at least
    // input_allocated_size_.
    if (!image_processor_->Initialize(
            input_format,
            device_input_format_,
            visible_size_,
            visible_size_,
            input_allocated_size_,
            base::Bind(&V4L2VideoEncodeAccelerator::ImageProcessorError,
                       weak_this_))) {
      DLOG(ERROR) << "Failed initializing image processor";
      return false;
    }
  }

  if (!InitControls())
    return false;

  if (!CreateOutputBuffers())
    return false;

  if (!encoder_thread_.Start()) {
    DLOG(ERROR) << "Initialize(): encoder thread failed to start";
    return false;
  }

  RequestEncodingParametersChange(initial_bitrate, kInitialFramerate);

  SetEncoderState(kInitialized);

  child_message_loop_proxy_->PostTask(
      FROM_HERE,
      base::Bind(&Client::RequireBitstreamBuffers,
                 client_,
                 kInputBufferCount,
                 image_processor_.get() ?
                     image_processor_->input_allocated_size() :
                     input_allocated_size_,
                 output_buffer_byte_size_));
  return true;
}

void V4L2VideoEncodeAccelerator::ImageProcessorError() {
  DVLOG(1) << "Image processor error";
  NOTIFY_ERROR(kPlatformFailureError);
}

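// Encode() is called on the child (client) thread.  If the device could not
// accept the client's pixel format, the frame is first routed through the
// V4L2ImageProcessor for conversion; otherwise it is posted straight to
// encoder_thread_.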
void V4L2VideoEncodeAccelerator::Encode(
    const scoped_refptr<media::VideoFrame>& frame,
    bool force_keyframe) {
  DVLOG(3) << "Encode(): force_keyframe=" << force_keyframe;
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());

  if (image_processor_) {
    image_processor_->Process(
        frame,
        base::Bind(&V4L2VideoEncodeAccelerator::FrameProcessed,
                   weak_this_,
                   force_keyframe));
  } else {
    encoder_thread_.message_loop()->PostTask(
        FROM_HERE,
        base::Bind(&V4L2VideoEncodeAccelerator::EncodeTask,
                   base::Unretained(this),
                   frame,
                   force_keyframe));
  }
}

void V4L2VideoEncodeAccelerator::UseOutputBitstreamBuffer(
    const media::BitstreamBuffer& buffer) {
  DVLOG(3) << "UseOutputBitstreamBuffer(): id=" << buffer.id();
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());

  if (buffer.size() < output_buffer_byte_size_) {
    NOTIFY_ERROR(kInvalidArgumentError);
    return;
  }

  scoped_ptr<base::SharedMemory> shm(
      new base::SharedMemory(buffer.handle(), false));
  if (!shm->Map(buffer.size())) {
    NOTIFY_ERROR(kPlatformFailureError);
    return;
  }

  scoped_ptr<BitstreamBufferRef> buffer_ref(
      new BitstreamBufferRef(buffer.id(), shm.Pass(), buffer.size()));
  encoder_thread_.message_loop()->PostTask(
      FROM_HERE,
      base::Bind(&V4L2VideoEncodeAccelerator::UseOutputBitstreamBufferTask,
                 base::Unretained(this),
                 base::Passed(&buffer_ref)));
}

void V4L2VideoEncodeAccelerator::RequestEncodingParametersChange(
    uint32 bitrate,
    uint32 framerate) {
  DVLOG(3) << "RequestEncodingParametersChange(): bitrate=" << bitrate
           << ", framerate=" << framerate;
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());

  encoder_thread_.message_loop()->PostTask(
      FROM_HERE,
      base::Bind(
          &V4L2VideoEncodeAccelerator::RequestEncodingParametersChangeTask,
          base::Unretained(this),
          bitrate,
          framerate));
}

void V4L2VideoEncodeAccelerator::Destroy() {
  DVLOG(3) << "Destroy()";
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());

  // We're destroying; cancel all callbacks.
  client_ptr_factory_.reset();
  weak_this_ptr_factory_.InvalidateWeakPtrs();

  if (image_processor_.get())
    image_processor_.release()->Destroy();

  // If the encoder thread is running, destroy using posted task.
  if (encoder_thread_.IsRunning()) {
    encoder_thread_.message_loop()->PostTask(
        FROM_HERE,
        base::Bind(&V4L2VideoEncodeAccelerator::DestroyTask,
                   base::Unretained(this)));
    // DestroyTask() will put the encoder into kError state and cause all tasks
    // to no-op.
    encoder_thread_.Stop();
  } else {
    // Otherwise, call the destroy task directly.
    DestroyTask();
  }

  // Set to kError state just in case.
  SetEncoderState(kError);

  delete this;
}

std::vector<media::VideoEncodeAccelerator::SupportedProfile>
V4L2VideoEncodeAccelerator::GetSupportedProfiles() {
  std::vector<SupportedProfile> profiles;
  SupportedProfile profile;

  const base::CommandLine* cmd_line = base::CommandLine::ForCurrentProcess();
  if (cmd_line->HasSwitch(switches::kEnableWebRtcHWVp8Encoding)) {
    profile.profile = media::VP8PROFILE_ANY;
    profile.max_resolution.SetSize(1920, 1088);
    profile.max_framerate_numerator = 30;
    profile.max_framerate_denominator = 1;
    profiles.push_back(profile);
  }

  profile.profile = media::H264PROFILE_MAIN;
  profile.max_resolution.SetSize(1920, 1088);
  profile.max_framerate_numerator = 30;
  profile.max_framerate_denominator = 1;
  profiles.push_back(profile);

  return profiles;
}

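// Callback from the image processor, on the child thread, with a frame
// converted to device_input_format_; from here it follows the same path as an
// unconverted frame.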
void V4L2VideoEncodeAccelerator::FrameProcessed(
    bool force_keyframe,
    const scoped_refptr<media::VideoFrame>& frame) {
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  DVLOG(3) << "FrameProcessed(): force_keyframe=" << force_keyframe;

  encoder_thread_.message_loop()->PostTask(
      FROM_HERE,
      base::Bind(&V4L2VideoEncodeAccelerator::EncodeTask,
                 base::Unretained(this),
                 frame,
                 force_keyframe));
}

void V4L2VideoEncodeAccelerator::EncodeTask(
    const scoped_refptr<media::VideoFrame>& frame,
    bool force_keyframe) {
  DVLOG(3) << "EncodeTask(): force_keyframe=" << force_keyframe;
  DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());
  DCHECK_NE(encoder_state_, kUninitialized);

  if (encoder_state_ == kError) {
    DVLOG(2) << "EncodeTask(): early out: kError state";
    return;
  }

  encoder_input_queue_.push_back(frame);
  Enqueue();

  if (force_keyframe) {
    // TODO(posciak): this presently makes for slightly imprecise encoding
    // parameter updates.  To precisely align the parameter updates with the
    // incoming input frame, we should queue the parameters together with the
    // frame onto encoder_input_queue_ and apply them when the input is about
    // to be queued to the codec.
    struct v4l2_ext_control ctrls[1];
    struct v4l2_ext_controls control;
    memset(&ctrls, 0, sizeof(ctrls));
    memset(&control, 0, sizeof(control));
    ctrls[0].id = V4L2_CID_MPEG_MFC51_VIDEO_FORCE_FRAME_TYPE;
    ctrls[0].value = V4L2_MPEG_MFC51_VIDEO_FORCE_FRAME_TYPE_I_FRAME;
    control.ctrl_class = V4L2_CTRL_CLASS_MPEG;
    control.count = 1;
    control.controls = ctrls;
    IOCTL_OR_ERROR_RETURN(VIDIOC_S_EXT_CTRLS, &control);
  }
}

void V4L2VideoEncodeAccelerator::UseOutputBitstreamBufferTask(
    scoped_ptr<BitstreamBufferRef> buffer_ref) {
  DVLOG(3) << "UseOutputBitstreamBufferTask(): id=" << buffer_ref->id;
  DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());

  encoder_output_queue_.push_back(
      linked_ptr<BitstreamBufferRef>(buffer_ref.release()));
  Enqueue();

  if (encoder_state_ == kInitialized) {
    // Finish setting up our OUTPUT queue.  See: Initialize().
    // VIDIOC_REQBUFS on OUTPUT queue.
    if (!CreateInputBuffers())
      return;
    if (!StartDevicePoll())
      return;
    encoder_state_ = kEncoding;
  }
}

void V4L2VideoEncodeAccelerator::DestroyTask() {
  DVLOG(3) << "DestroyTask()";

  // DestroyTask() should run regardless of encoder_state_.

  // Stop streaming and the device_poll_thread_.
  StopDevicePoll();

  // Set our state to kError, and early-out all tasks.
  encoder_state_ = kError;
}

void V4L2VideoEncodeAccelerator::ServiceDeviceTask() {
  DVLOG(3) << "ServiceDeviceTask()";
  DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());
  DCHECK_NE(encoder_state_, kUninitialized);
  DCHECK_NE(encoder_state_, kInitialized);

  if (encoder_state_ == kError) {
    DVLOG(2) << "ServiceDeviceTask(): early out: kError state";
    return;
  }

  Dequeue();
  Enqueue();

  // Clear the interrupt fd.
  if (!device_->ClearDevicePollInterrupt())
    return;

  // Device can be polled as soon as either input or output buffers are queued.
  bool poll_device =
      (input_buffer_queued_count_ + output_buffer_queued_count_ > 0);

  // ServiceDeviceTask() should only ever be scheduled from DevicePollTask(),
  // so either:
  // * device_poll_thread_ is running normally
  // * device_poll_thread_ scheduled us, but then a DestroyTask() shut it down,
  //   in which case we're in kError state, and we should have early-outed
  //   already.
  DCHECK(device_poll_thread_.message_loop());
  // Queue the DevicePollTask() now.
  device_poll_thread_.message_loop()->PostTask(
      FROM_HERE,
      base::Bind(&V4L2VideoEncodeAccelerator::DevicePollTask,
                 base::Unretained(this),
                 poll_device));

  DVLOG(2) << __func__ << ": buffer counts: ENC["
           << encoder_input_queue_.size() << "] => DEVICE["
           << free_input_buffers_.size() << "+"
           << input_buffer_queued_count_ << "/"
           << input_buffer_map_.size() << "->"
           << free_output_buffers_.size() << "+"
           << output_buffer_queued_count_ << "/"
           << output_buffer_map_.size() << "] => OUT["
           << encoder_output_queue_.size() << "]";
}

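// In V4L2 memory-to-memory terms the encoder's OUTPUT queue carries raw input
// frames and its CAPTURE queue carries the encoded bitstream, which is why
// "input" below maps to VIDEO_OUTPUT buffers and "output" to VIDEO_CAPTURE.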
void V4L2VideoEncodeAccelerator::Enqueue() {
  DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());

  DVLOG(3) << "Enqueue() "
           << "free_input_buffers: " << free_input_buffers_.size()
           << ", input_queue: " << encoder_input_queue_.size();

  // Enqueue all the inputs we can.
  const int old_inputs_queued = input_buffer_queued_count_;
  while (!encoder_input_queue_.empty() && !free_input_buffers_.empty()) {
    if (!EnqueueInputRecord())
      return;
  }
  if (old_inputs_queued == 0 && input_buffer_queued_count_ != 0) {
    // We just started up a previously empty queue.
    // Queue state changed; signal interrupt.
    if (!device_->SetDevicePollInterrupt())
      return;
    // Start VIDIOC_STREAMON if we haven't yet.
    if (!input_streamon_) {
      __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
      IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type);
      input_streamon_ = true;
    }
  }

  // Enqueue all the outputs we can.
  const int old_outputs_queued = output_buffer_queued_count_;
  while (!free_output_buffers_.empty() && !encoder_output_queue_.empty()) {
    if (!EnqueueOutputRecord())
      return;
  }
  if (old_outputs_queued == 0 && output_buffer_queued_count_ != 0) {
    // We just started up a previously empty queue.
    // Queue state changed; signal interrupt.
    if (!device_->SetDevicePollInterrupt())
      return;
    // Start VIDIOC_STREAMON if we haven't yet.
    if (!output_streamon_) {
      __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
      IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type);
      output_streamon_ = true;
    }
  }
}

void V4L2VideoEncodeAccelerator::Dequeue() {
  DVLOG(3) << "Dequeue()";
  DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());

  // Dequeue completed input (VIDEO_OUTPUT) buffers, and recycle to the free
  // list.
  struct v4l2_buffer dqbuf;
  struct v4l2_plane planes[VIDEO_MAX_PLANES];
  while (input_buffer_queued_count_ > 0) {
    DVLOG(4) << "inputs queued: " << input_buffer_queued_count_;
    DCHECK(input_streamon_);
    memset(&dqbuf, 0, sizeof(dqbuf));
    memset(&planes, 0, sizeof(planes));
    dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    dqbuf.memory = V4L2_MEMORY_MMAP;
    dqbuf.m.planes = planes;
    dqbuf.length = input_planes_count_;
    if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
      if (errno == EAGAIN) {
        // EAGAIN if we're just out of buffers to dequeue.
        break;
      }
      DPLOG(ERROR) << "Dequeue(): ioctl() failed: VIDIOC_DQBUF";
      NOTIFY_ERROR(kPlatformFailureError);
      return;
    }
    InputRecord& input_record = input_buffer_map_[dqbuf.index];
    DCHECK(input_record.at_device);
    input_record.at_device = false;

    input_record.frame = NULL;
    free_input_buffers_.push_back(dqbuf.index);
    input_buffer_queued_count_--;
  }

  // Dequeue completed output (VIDEO_CAPTURE) buffers, and recycle to the
  // free list.  Notify the client that an output buffer is complete.
  while (output_buffer_queued_count_ > 0) {
    DCHECK(output_streamon_);
    memset(&dqbuf, 0, sizeof(dqbuf));
    memset(planes, 0, sizeof(planes));
    dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    dqbuf.memory = V4L2_MEMORY_MMAP;
    dqbuf.m.planes = planes;
    dqbuf.length = 1;
    if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
      if (errno == EAGAIN) {
        // EAGAIN if we're just out of buffers to dequeue.
        break;
      }
      DPLOG(ERROR) << "Dequeue(): ioctl() failed: VIDIOC_DQBUF";
      NOTIFY_ERROR(kPlatformFailureError);
      return;
    }
    const bool key_frame = ((dqbuf.flags & V4L2_BUF_FLAG_KEYFRAME) != 0);
    OutputRecord& output_record = output_buffer_map_[dqbuf.index];
    DCHECK(output_record.at_device);
    DCHECK(output_record.buffer_ref.get());

    void* output_data = output_record.address;
    size_t output_size = dqbuf.m.planes[0].bytesused;
    // This shouldn't happen, but just in case. We should be able to recover
    // after the next keyframe, after showing some corruption.
    DCHECK_LE(output_size, output_buffer_byte_size_);
    if (output_size > output_buffer_byte_size_)
      output_size = output_buffer_byte_size_;
    uint8* target_data =
        reinterpret_cast<uint8*>(output_record.buffer_ref->shm->memory());
    if (output_format_fourcc_ == V4L2_PIX_FMT_H264) {
      if (stream_header_size_ == 0) {
        // Assume that the first buffer dequeued is the stream header.
        stream_header_size_ = output_size;
        stream_header_.reset(new uint8[stream_header_size_]);
        memcpy(stream_header_.get(), output_data, stream_header_size_);
      }
      if (key_frame &&
          output_buffer_byte_size_ - stream_header_size_ >= output_size) {
        // Insert stream header before every keyframe.
        memcpy(target_data, stream_header_.get(), stream_header_size_);
        memcpy(target_data + stream_header_size_, output_data, output_size);
        output_size += stream_header_size_;
      } else {
        memcpy(target_data, output_data, output_size);
      }
    } else {
      memcpy(target_data, output_data, output_size);
    }

    DVLOG(3) << "Dequeue(): returning "
                "bitstream_buffer_id=" << output_record.buffer_ref->id
             << ", size=" << output_size << ", key_frame=" << key_frame;
    child_message_loop_proxy_->PostTask(
        FROM_HERE,
        base::Bind(&Client::BitstreamBufferReady,
                   client_,
                   output_record.buffer_ref->id,
                   output_size,
                   key_frame));
    output_record.at_device = false;
    output_record.buffer_ref.reset();
    free_output_buffers_.push_back(dqbuf.index);
    output_buffer_queued_count_--;
  }
}

bool V4L2VideoEncodeAccelerator::EnqueueInputRecord() {
  DVLOG(3) << "EnqueueInputRecord()";
  DCHECK(!free_input_buffers_.empty());
  DCHECK(!encoder_input_queue_.empty());

  // Enqueue an input (VIDEO_OUTPUT) buffer.
  scoped_refptr<media::VideoFrame> frame = encoder_input_queue_.front();
  const int index = free_input_buffers_.back();
  InputRecord& input_record = input_buffer_map_[index];
  DCHECK(!input_record.at_device);
  struct v4l2_buffer qbuf;
  struct v4l2_plane qbuf_planes[VIDEO_MAX_PLANES];
  memset(&qbuf, 0, sizeof(qbuf));
  memset(qbuf_planes, 0, sizeof(qbuf_planes));
  qbuf.index = index;
  qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  qbuf.m.planes = qbuf_planes;

  DCHECK_EQ(device_input_format_, frame->format());
  for (size_t i = 0; i < input_planes_count_; ++i) {
    qbuf.m.planes[i].bytesused =
        base::checked_cast<__u32>(media::VideoFrame::PlaneAllocationSize(
            frame->format(), i, input_allocated_size_));

    switch (input_memory_type_) {
      case V4L2_MEMORY_USERPTR:
        qbuf.m.planes[i].length = qbuf.m.planes[i].bytesused;
        qbuf.m.planes[i].m.userptr =
            reinterpret_cast<unsigned long>(frame->data(i));
        DCHECK(qbuf.m.planes[i].m.userptr);
        break;

      case V4L2_MEMORY_DMABUF:
        qbuf.m.planes[i].m.fd = frame->dmabuf_fd(i);
        DCHECK_NE(qbuf.m.planes[i].m.fd, -1);
        break;

      default:
        NOTREACHED();
        return false;
    }
  }

  qbuf.memory = input_memory_type_;
  qbuf.length = input_planes_count_;

  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
  input_record.at_device = true;
  input_record.frame = frame;
  encoder_input_queue_.pop_front();
  free_input_buffers_.pop_back();
  input_buffer_queued_count_++;
  return true;
}

bool V4L2VideoEncodeAccelerator::EnqueueOutputRecord() {
  DVLOG(3) << "EnqueueOutputRecord()";
  DCHECK(!free_output_buffers_.empty());
  DCHECK(!encoder_output_queue_.empty());

  // Enqueue an output (VIDEO_CAPTURE) buffer.
  linked_ptr<BitstreamBufferRef> output_buffer = encoder_output_queue_.back();
  const int index = free_output_buffers_.back();
  OutputRecord& output_record = output_buffer_map_[index];
  DCHECK(!output_record.at_device);
  DCHECK(!output_record.buffer_ref.get());
  struct v4l2_buffer qbuf;
  struct v4l2_plane qbuf_planes[1];
  memset(&qbuf, 0, sizeof(qbuf));
  memset(qbuf_planes, 0, sizeof(qbuf_planes));
  qbuf.index = index;
  qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  qbuf.memory = V4L2_MEMORY_MMAP;
  qbuf.m.planes = qbuf_planes;
  qbuf.length = 1;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
  output_record.at_device = true;
  output_record.buffer_ref = output_buffer;
  encoder_output_queue_.pop_back();
  free_output_buffers_.pop_back();
  output_buffer_queued_count_++;
  return true;
}

bool V4L2VideoEncodeAccelerator::StartDevicePoll() {
  DVLOG(3) << "StartDevicePoll()";
  DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());
  DCHECK(!device_poll_thread_.IsRunning());

  // Start up the device poll thread and schedule its first DevicePollTask().
  if (!device_poll_thread_.Start()) {
    DLOG(ERROR) << "StartDevicePoll(): Device thread failed to start";
    NOTIFY_ERROR(kPlatformFailureError);
    return false;
  }
  // Enqueue a poll task with no devices to poll on -- it will wait only on the
  // interrupt fd.
  device_poll_thread_.message_loop()->PostTask(
      FROM_HERE,
      base::Bind(&V4L2VideoEncodeAccelerator::DevicePollTask,
                 base::Unretained(this),
                 false));

  return true;
}

bool V4L2VideoEncodeAccelerator::StopDevicePoll() {
  DVLOG(3) << "StopDevicePoll()";

  // Signal the DevicePollTask() to stop, and stop the device poll thread.
  if (!device_->SetDevicePollInterrupt())
    return false;
  device_poll_thread_.Stop();
  // Clear the interrupt now, to be sure.
  if (!device_->ClearDevicePollInterrupt())
    return false;

  if (input_streamon_) {
    __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type);
  }
  input_streamon_ = false;

  if (output_streamon_) {
    __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type);
  }
  output_streamon_ = false;

  // Reset all our accounting info.
  encoder_input_queue_.clear();
  free_input_buffers_.clear();
  for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
    InputRecord& input_record = input_buffer_map_[i];
    input_record.at_device = false;
    input_record.frame = NULL;
    free_input_buffers_.push_back(i);
  }
  input_buffer_queued_count_ = 0;

  free_output_buffers_.clear();
  for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
    OutputRecord& output_record = output_buffer_map_[i];
    output_record.at_device = false;
    output_record.buffer_ref.reset();
    free_output_buffers_.push_back(i);
  }
  output_buffer_queued_count_ = 0;

  encoder_output_queue_.clear();

  DVLOG(3) << "StopDevicePoll(): device poll stopped";
  return true;
}

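// Runs on device_poll_thread_: blocks in the device's Poll() call until either
// the V4L2 fd or the interrupt fd (see Set/ClearDevicePollInterrupt()) becomes
// ready, then bounces back to encoder_thread_ for the actual work.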
void V4L2VideoEncodeAccelerator::DevicePollTask(bool poll_device) {
  DVLOG(3) << "DevicePollTask()";
  DCHECK_EQ(device_poll_thread_.message_loop(), base::MessageLoop::current());

  bool event_pending;
  if (!device_->Poll(poll_device, &event_pending)) {
    NOTIFY_ERROR(kPlatformFailureError);
    return;
  }

  // All processing should happen on ServiceDeviceTask(), since we shouldn't
  // touch encoder state from this thread.
  encoder_thread_.message_loop()->PostTask(
      FROM_HERE,
      base::Bind(&V4L2VideoEncodeAccelerator::ServiceDeviceTask,
                 base::Unretained(this)));
}

void V4L2VideoEncodeAccelerator::NotifyError(Error error) {
  DVLOG(1) << "NotifyError(): error=" << error;

  if (!child_message_loop_proxy_->BelongsToCurrentThread()) {
    child_message_loop_proxy_->PostTask(
        FROM_HERE,
        base::Bind(
            &V4L2VideoEncodeAccelerator::NotifyError, weak_this_, error));
    return;
  }

  if (client_) {
    client_->NotifyError(error);
    client_ptr_factory_.reset();
  }
}

void V4L2VideoEncodeAccelerator::SetEncoderState(State state) {
  DVLOG(3) << "SetEncoderState(): state=" << state;

  // We can touch encoder_state_ only if this is the encoder thread or the
  // encoder thread isn't running.
  if (encoder_thread_.message_loop() != NULL &&
      encoder_thread_.message_loop() != base::MessageLoop::current()) {
    encoder_thread_.message_loop()->PostTask(
        FROM_HERE,
        base::Bind(&V4L2VideoEncodeAccelerator::SetEncoderState,
                   base::Unretained(this),
                   state));
  } else {
    encoder_state_ = state;
  }
}

void V4L2VideoEncodeAccelerator::RequestEncodingParametersChangeTask(
    uint32 bitrate,
    uint32 framerate) {
  DVLOG(3) << "RequestEncodingParametersChangeTask(): bitrate=" << bitrate
           << ", framerate=" << framerate;
  DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());

  if (bitrate < 1)
    bitrate = 1;
  if (framerate < 1)
    framerate = 1;

  struct v4l2_ext_control ctrls[1];
  struct v4l2_ext_controls control;
  memset(&ctrls, 0, sizeof(ctrls));
  memset(&control, 0, sizeof(control));
  ctrls[0].id = V4L2_CID_MPEG_VIDEO_BITRATE;
  ctrls[0].value = bitrate;
  control.ctrl_class = V4L2_CTRL_CLASS_MPEG;
  control.count = arraysize(ctrls);
  control.controls = ctrls;
  IOCTL_OR_ERROR_RETURN(VIDIOC_S_EXT_CTRLS, &control);

  struct v4l2_streamparm parms;
  memset(&parms, 0, sizeof(parms));
  parms.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  // Note that we are provided "frames per second" but V4L2 expects "time per
  // frame"; hence we provide the reciprocal of the framerate here.
  parms.parm.output.timeperframe.numerator = 1;
  parms.parm.output.timeperframe.denominator = framerate;
  IOCTL_OR_ERROR_RETURN(VIDIOC_S_PARM, &parms);
}

bool V4L2VideoEncodeAccelerator::SetOutputFormat(
    media::VideoCodecProfile output_profile) {
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  DCHECK(!input_streamon_);
  DCHECK(!output_streamon_);

  output_format_fourcc_ =
      V4L2Device::VideoCodecProfileToV4L2PixFmt(output_profile);
  if (!output_format_fourcc_) {
    DLOG(ERROR) << "Initialize(): invalid output_profile=" << output_profile;
    return false;
  }

  output_buffer_byte_size_ = kOutputBufferSize;

  struct v4l2_format format;
  memset(&format, 0, sizeof(format));
  format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  format.fmt.pix_mp.width = visible_size_.width();
  format.fmt.pix_mp.height = visible_size_.height();
  format.fmt.pix_mp.pixelformat = output_format_fourcc_;
  format.fmt.pix_mp.plane_fmt[0].sizeimage =
      base::checked_cast<__u32>(output_buffer_byte_size_);
  format.fmt.pix_mp.num_planes = 1;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);

  // Device might have adjusted the required output size.
  size_t adjusted_output_buffer_size =
      base::checked_cast<size_t>(format.fmt.pix_mp.plane_fmt[0].sizeimage);
  DCHECK_GE(adjusted_output_buffer_size, output_buffer_byte_size_);
  output_buffer_byte_size_ = adjusted_output_buffer_size;

  return true;
}

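// Try the client's format on the OUTPUT (raw input) queue first; if the driver
// rejects it, fall back to the driver's preferred input format, which makes
// Initialize() insert the image processor conversion step.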
bool V4L2VideoEncodeAccelerator::NegotiateInputFormat(
    media::VideoFrame::Format input_format) {
  DVLOG(3) << "NegotiateInputFormat()";
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  DCHECK(!input_streamon_);
  DCHECK(!output_streamon_);

  device_input_format_ = media::VideoFrame::UNKNOWN;
  input_planes_count_ = 0;

  uint32 input_format_fourcc =
      V4L2Device::VideoFrameFormatToV4L2PixFmt(input_format);
  if (!input_format_fourcc) {
    DVLOG(1) << "Unsupported input format";
    return false;
  }

  size_t input_planes_count = media::VideoFrame::NumPlanes(input_format);
  DCHECK_LE(input_planes_count, static_cast<size_t>(VIDEO_MAX_PLANES));

  // First see if the device can use the provided input_format directly.
  struct v4l2_format format;
  memset(&format, 0, sizeof(format));
  format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  format.fmt.pix_mp.width = visible_size_.width();
  format.fmt.pix_mp.height = visible_size_.height();
  format.fmt.pix_mp.pixelformat = input_format_fourcc;
  format.fmt.pix_mp.num_planes = input_planes_count;
  if (device_->Ioctl(VIDIOC_S_FMT, &format) != 0) {
    // Error or format unsupported by device, try to negotiate a fallback.
    input_format_fourcc = device_->PreferredInputFormat();
    input_format =
        V4L2Device::V4L2PixFmtToVideoFrameFormat(input_format_fourcc);
    if (input_format == media::VideoFrame::UNKNOWN)
      return false;

    input_planes_count = media::VideoFrame::NumPlanes(input_format);
    DCHECK_LE(input_planes_count, static_cast<size_t>(VIDEO_MAX_PLANES));

    // Device might have adjusted parameters, reset them along with the format.
    memset(&format, 0, sizeof(format));
    format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    format.fmt.pix_mp.width = visible_size_.width();
    format.fmt.pix_mp.height = visible_size_.height();
    format.fmt.pix_mp.pixelformat = input_format_fourcc;
    format.fmt.pix_mp.num_planes = input_planes_count;
    IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);
    DCHECK_EQ(format.fmt.pix_mp.num_planes, input_planes_count);
  }

  // Take device-adjusted sizes for allocated size.
  input_allocated_size_ = V4L2Device::CodedSizeFromV4L2Format(format);
  DCHECK(gfx::Rect(input_allocated_size_).Contains(gfx::Rect(visible_size_)));

  device_input_format_ = input_format;
  input_planes_count_ = input_planes_count;
  return true;
}

bool V4L2VideoEncodeAccelerator::SetFormats(
    media::VideoFrame::Format input_format,
    media::VideoCodecProfile output_profile) {
  DVLOG(3) << "SetFormats()";
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  DCHECK(!input_streamon_);
  DCHECK(!output_streamon_);

  if (!SetOutputFormat(output_profile))
    return false;

  if (!NegotiateInputFormat(input_format))
    return false;

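  // Crop back to the visible size; NegotiateInputFormat() may have rounded the
  // coded (allocated) size up to whatever alignment the hardware requires.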
  struct v4l2_crop crop;
  memset(&crop, 0, sizeof(crop));
  crop.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
  crop.c.left = 0;
  crop.c.top = 0;
  crop.c.width = visible_size_.width();
  crop.c.height = visible_size_.height();
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_CROP, &crop);

  return true;
}

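// One-time codec control setup via VIDIOC_S_EXT_CTRLS.  Note that the
// V4L2_CID_MPEG_MFC51_* controls are driver-specific (Samsung MFC) rather
// than standard V4L2 controls.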
bool V4L2VideoEncodeAccelerator::InitControls() {
  struct v4l2_ext_control ctrls[9];
  struct v4l2_ext_controls control;
  memset(&ctrls, 0, sizeof(ctrls));
  memset(&control, 0, sizeof(control));
  // No B-frames, for lowest decoding latency.
  ctrls[0].id = V4L2_CID_MPEG_VIDEO_B_FRAMES;
  ctrls[0].value = 0;
  // Enable frame-level bitrate control.
  ctrls[1].id = V4L2_CID_MPEG_VIDEO_FRAME_RC_ENABLE;
  ctrls[1].value = 1;
  // Enable "tight" bitrate mode. For this to work properly, frame- and mb-level
  // bitrate controls have to be enabled as well.
  ctrls[2].id = V4L2_CID_MPEG_MFC51_VIDEO_RC_REACTION_COEFF;
  ctrls[2].value = 1;
  // Force bitrate control to average over a GOP (for tight bitrate
  // tolerance).
  ctrls[3].id = V4L2_CID_MPEG_MFC51_VIDEO_RC_FIXED_TARGET_BIT;
  ctrls[3].value = 1;
  // Quantization parameter maximum value (for variable bitrate control).
  ctrls[4].id = V4L2_CID_MPEG_VIDEO_H264_MAX_QP;
  ctrls[4].value = 51;
  // Separate stream header so we can cache it and insert into the stream.
  ctrls[5].id = V4L2_CID_MPEG_VIDEO_HEADER_MODE;
  ctrls[5].value = V4L2_MPEG_VIDEO_HEADER_MODE_SEPARATE;
  // Enable macroblock-level bitrate control.
  ctrls[6].id = V4L2_CID_MPEG_VIDEO_MB_RC_ENABLE;
  ctrls[6].value = 1;
  // Use H.264 level 4.0 to match the supported max resolution.
  ctrls[7].id = V4L2_CID_MPEG_VIDEO_H264_LEVEL;
  ctrls[7].value = V4L2_MPEG_VIDEO_H264_LEVEL_4_0;
  // Disable periodic key frames.
  ctrls[8].id = V4L2_CID_MPEG_VIDEO_GOP_SIZE;
  ctrls[8].value = 0;
  control.ctrl_class = V4L2_CTRL_CLASS_MPEG;
  control.count = arraysize(ctrls);
  control.controls = ctrls;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_EXT_CTRLS, &control);

  return true;
}

bool V4L2VideoEncodeAccelerator::CreateInputBuffers() {
  DVLOG(3) << "CreateInputBuffers()";
  // This function runs on encoder_thread_ after output buffers have been
  // provided by the client.
  DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());
  DCHECK(!input_streamon_);

  struct v4l2_requestbuffers reqbufs;
  memset(&reqbufs, 0, sizeof(reqbufs));
  // The driver will adjust this to the appropriate number of buffers.
  reqbufs.count = 1;
  reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  // TODO(posciak): Once we start doing zero-copy, we should decide based on
  // the current pipeline setup which memory type to use. This should probably
  // be decided based on an argument to Initialize().
  if (image_processor_.get())
    input_memory_type_ = V4L2_MEMORY_DMABUF;
  else
    input_memory_type_ = V4L2_MEMORY_USERPTR;

  reqbufs.memory = input_memory_type_;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);

  DCHECK(input_buffer_map_.empty());
  input_buffer_map_.resize(reqbufs.count);
  for (size_t i = 0; i < input_buffer_map_.size(); ++i)
    free_input_buffers_.push_back(i);

  return true;
}

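// CAPTURE (bitstream) buffers are allocated by the driver and mmap()ed into
// this process; Dequeue() later copies the encoded data out of them into the
// client's shared-memory bitstream buffers.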
bool V4L2VideoEncodeAccelerator::CreateOutputBuffers() {
  DVLOG(3) << "CreateOutputBuffers()";
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  DCHECK(!output_streamon_);

  struct v4l2_requestbuffers reqbufs;
  memset(&reqbufs, 0, sizeof(reqbufs));
  reqbufs.count = kOutputBufferCount;
  reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  reqbufs.memory = V4L2_MEMORY_MMAP;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);

  DCHECK(output_buffer_map_.empty());
  output_buffer_map_.resize(reqbufs.count);
  for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
    struct v4l2_plane planes[1];
    struct v4l2_buffer buffer;
    memset(&buffer, 0, sizeof(buffer));
    memset(planes, 0, sizeof(planes));
    buffer.index = i;
    buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    buffer.memory = V4L2_MEMORY_MMAP;
    buffer.m.planes = planes;
    buffer.length = arraysize(planes);
    IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer);
    void* address = device_->Mmap(NULL,
                                  buffer.m.planes[0].length,
                                  PROT_READ | PROT_WRITE,
                                  MAP_SHARED,
                                  buffer.m.planes[0].m.mem_offset);
    if (address == MAP_FAILED) {
      DPLOG(ERROR) << "CreateOutputBuffers(): mmap() failed";
      return false;
    }
    output_buffer_map_[i].address = address;
    output_buffer_map_[i].length = buffer.m.planes[0].length;
    free_output_buffers_.push_back(i);
  }

  return true;
}

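// A VIDIOC_REQBUFS call with count = 0 releases the driver-side buffers for
// the queue.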
void V4L2VideoEncodeAccelerator::DestroyInputBuffers() {
  DVLOG(3) << "DestroyInputBuffers()";
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  DCHECK(!input_streamon_);

  struct v4l2_requestbuffers reqbufs;
  memset(&reqbufs, 0, sizeof(reqbufs));
  reqbufs.count = 0;
  reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  reqbufs.memory = input_memory_type_;
  IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs);

  input_buffer_map_.clear();
  free_input_buffers_.clear();
}

void V4L2VideoEncodeAccelerator::DestroyOutputBuffers() {
  DVLOG(3) << "DestroyOutputBuffers()";
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  DCHECK(!output_streamon_);

  for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
    if (output_buffer_map_[i].address != NULL)
      device_->Munmap(output_buffer_map_[i].address,
                      output_buffer_map_[i].length);
  }

  struct v4l2_requestbuffers reqbufs;
  memset(&reqbufs, 0, sizeof(reqbufs));
  reqbufs.count = 0;
  reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  reqbufs.memory = V4L2_MEMORY_MMAP;
  IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs);

  output_buffer_map_.clear();
  free_output_buffers_.clear();
}

}  // namespace content