Upstream version 11.40.277.0
[platform/framework/web/crosswalk.git] / src / content / renderer / media / rtc_video_encoder.cc
1 // Copyright 2013 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "content/renderer/media/rtc_video_encoder.h"
6
7 #include "base/bind.h"
8 #include "base/location.h"
9 #include "base/logging.h"
10 #include "base/memory/scoped_vector.h"
11 #include "base/message_loop/message_loop_proxy.h"
12 #include "base/metrics/histogram.h"
13 #include "base/rand_util.h"
14 #include "base/synchronization/waitable_event.h"
15 #include "media/base/bitstream_buffer.h"
16 #include "media/base/video_frame.h"
17 #include "media/base/video_util.h"
18 #include "media/filters/gpu_video_accelerator_factories.h"
19 #include "media/filters/h264_parser.h"
20 #include "media/video/video_encode_accelerator.h"
21 #include "third_party/webrtc/system_wrappers/interface/tick_util.h"
22
// Logs the error and forwards it to the enclosing class's NotifyError().
// Wrapped in do/while(0) so the macro expands to a single statement that is
// safe inside unbraced if/else bodies.
#define NOTIFY_ERROR(x)                             \
  do {                                              \
    DLOG(ERROR) << "calling NotifyError(): " << x;  \
    NotifyError(x);                                 \
  } while (0)
28
29 namespace content {
30
31 namespace {
32
33 // Translate from webrtc::VideoCodecType and webrtc::VideoCodec to
34 // media::VideoCodecProfile.
35 media::VideoCodecProfile WebRTCVideoCodecToVideoCodecProfile(
36     webrtc::VideoCodecType type, const webrtc::VideoCodec* codec_settings) {
37   DCHECK_EQ(type, codec_settings->codecType);
38   switch (type) {
39     case webrtc::kVideoCodecVP8:
40       return media::VP8PROFILE_ANY;
41     case webrtc::kVideoCodecH264: {
42       switch (codec_settings->codecSpecific.H264.profile) {
43         case webrtc::kProfileBase:
44           return media::H264PROFILE_BASELINE;
45         case webrtc::kProfileMain:
46           return media::H264PROFILE_MAIN;
47       }
48     }
49     default:
50       NOTREACHED() << "Unrecognized video codec type";
51       return media::VIDEO_CODEC_PROFILE_UNKNOWN;
52   }
53 }
54
55 // Populates struct webrtc::RTPFragmentationHeader for H264 codec.
56 // Each entry specifies the offset and length (excluding start code) of a NALU.
57 // Returns true if successful.
58 bool GetRTPFragmentationHeaderH264(webrtc::RTPFragmentationHeader* header,
59                                    const uint8_t* data, uint32_t length) {
60   media::H264Parser parser;
61   parser.SetStream(data, length);
62
63   std::vector<media::H264NALU> nalu_vector;
64   while (true) {
65     media::H264NALU nalu;
66     const media::H264Parser::Result result = parser.AdvanceToNextNALU(&nalu);
67     if (result == media::H264Parser::kOk) {
68       nalu_vector.push_back(nalu);
69     } else if (result == media::H264Parser::kEOStream) {
70       break;
71     } else {
72       DLOG(ERROR) << "Unexpected H264 parser result";
73       return false;
74     }
75   }
76
77   header->VerifyAndAllocateFragmentationHeader(nalu_vector.size());
78   for (size_t i = 0; i < nalu_vector.size(); ++i) {
79     header->fragmentationOffset[i] = nalu_vector[i].data - data;
80     header->fragmentationLength[i] = nalu_vector[i].size;
81     header->fragmentationPlType[i] = 0;
82     header->fragmentationTimeDiff[i] = 0;
83   }
84   return true;
85 }
86
87 }  // namespace
88
// This private class of RTCVideoEncoder does the actual work of communicating
// with a media::VideoEncodeAccelerator for handling video encoding.  It can
// be created on any thread, but should subsequently be posted to (and Destroy()
// called on) a single thread.  Callbacks to RTCVideoEncoder are posted to the
// thread on which the instance was constructed.
//
// This class separates state related to the thread that RTCVideoEncoder
// operates on (presently the libjingle worker thread) from the thread that
// |gpu_factories_| provides for accelerator operations (presently the media
// thread).  The RTCVideoEncoder class can be deleted directly by WebRTC, while
// RTCVideoEncoder::Impl stays around long enough to properly shut down the VEA.
class RTCVideoEncoder::Impl
    : public media::VideoEncodeAccelerator::Client,
      public base::RefCountedThreadSafe<RTCVideoEncoder::Impl> {
 public:
  Impl(const base::WeakPtr<RTCVideoEncoder>& weak_encoder,
       const scoped_refptr<media::GpuVideoAcceleratorFactories>& gpu_factories);

  // Create the VEA and call Initialize() on it.  Called once per instantiation,
  // and then the instance is bound forevermore to whichever thread made the
  // call.
  // RTCVideoEncoder expects to be able to call this function synchronously from
  // its own thread, hence the |async_waiter| and |async_retval| arguments.
  void CreateAndInitializeVEA(const gfx::Size& input_visible_size,
                              uint32 bitrate,
                              media::VideoCodecProfile profile,
                              base::WaitableEvent* async_waiter,
                              int32_t* async_retval);
  // Enqueue a frame from WebRTC for encoding.
  // RTCVideoEncoder expects to be able to call this function synchronously from
  // its own thread, hence the |async_waiter| and |async_retval| arguments.
  void Enqueue(const webrtc::I420VideoFrame* input_frame,
               bool force_keyframe,
               base::WaitableEvent* async_waiter,
               int32_t* async_retval);

  // RTCVideoEncoder is given a buffer to be passed to WebRTC through the
  // RTCVideoEncoder::ReturnEncodedImage() function.  When that is complete,
  // the buffer is returned to Impl by its index using this function.
  void UseOutputBitstreamBufferId(int32 bitstream_buffer_id);

  // Request encoding parameter change for the underlying encoder.
  void RequestEncodingParametersChange(uint32 bitrate, uint32 framerate);

  // Destroy this Impl's encoder.  The destructor is not explicitly called, as
  // Impl is a base::RefCountedThreadSafe.
  void Destroy();

  // media::VideoEncodeAccelerator::Client implementation.
  void RequireBitstreamBuffers(unsigned int input_count,
                               const gfx::Size& input_coded_size,
                               size_t output_buffer_size) override;
  void BitstreamBufferReady(int32 bitstream_buffer_id,
                            size_t payload_size,
                            bool key_frame) override;
  void NotifyError(media::VideoEncodeAccelerator::Error error) override;

 private:
  friend class base::RefCountedThreadSafe<Impl>;

  enum {
    kInputBufferExtraCount = 1,  // The number of input buffers allocated, more
                                 // than what is requested by
                                 // VEA::RequireBitstreamBuffers().
    kOutputBufferCount = 3,
  };

  ~Impl() override;

  // Perform encoding on an input frame from the input queue.
  void EncodeOneFrame();

  // Notify that an input frame is finished for encoding.  |index| is the index
  // of the completed frame in |input_buffers_|.
  void EncodeFrameFinished(int index);

  // Set up/signal |async_waiter_| and |async_retval_|; see declarations below.
  void RegisterAsyncWaiter(base::WaitableEvent* waiter, int32_t* retval);
  void SignalAsyncWaiter(int32_t retval);

  // Checks that all operations after construction run on one thread (the VEA
  // thread); detached in the constructor so binding happens on first use.
  base::ThreadChecker thread_checker_;

  // Weak pointer to the parent RTCVideoEncoder, for posting back VEA::Client
  // notifications.
  const base::WeakPtr<RTCVideoEncoder> weak_encoder_;

  // The message loop on which to post callbacks to |weak_encoder_|.
  const scoped_refptr<base::MessageLoopProxy> encoder_message_loop_proxy_;

  // Factory for creating VEAs, shared memory buffers, etc.
  const scoped_refptr<media::GpuVideoAcceleratorFactories> gpu_factories_;

  // webrtc::VideoEncoder expects InitEncode() and Encode() to be synchronous.
  // Do this by waiting on the |async_waiter_| and returning the return value in
  // |async_retval_| when initialization completes, encoding completes, or
  // an error occurs.  Both are NULL when no synchronous call is in flight.
  base::WaitableEvent* async_waiter_;
  int32_t* async_retval_;

  // The underlying VEA to perform encoding on.  Reset to NULL by Destroy()
  // and on error; all entry points check it before use.
  scoped_ptr<media::VideoEncodeAccelerator> video_encoder_;

  // Next input frame.  Since there is at most one next frame, a single-element
  // queue is sufficient.
  const webrtc::I420VideoFrame* input_next_frame_;

  // Whether to encode a keyframe next.
  bool input_next_frame_keyframe_;

  // Frame sizes.
  gfx::Size input_frame_coded_size_;
  gfx::Size input_visible_size_;

  // Shared memory buffers for input/output with the VEA.
  ScopedVector<base::SharedMemory> input_buffers_;
  ScopedVector<base::SharedMemory> output_buffers_;

  // Input buffers ready to be filled with input from Encode().  As a LIFO since
  // we don't care about ordering.
  std::vector<int> input_buffers_free_;

  // The number of output buffers ready to be filled with output from the
  // encoder.
  int output_buffers_free_count_;

  // 15 bits running index of the VP8 frames. See VP8 RTP spec for details.
  uint16 picture_id_;

  DISALLOW_COPY_AND_ASSIGN(Impl);
};
219
// Constructs Impl on the encoder thread.  The thread checker is detached
// here because all subsequent calls (and the eventual Destroy()) happen on
// the VEA thread, which binds the checker on first use.
RTCVideoEncoder::Impl::Impl(
    const base::WeakPtr<RTCVideoEncoder>& weak_encoder,
    const scoped_refptr<media::GpuVideoAcceleratorFactories>& gpu_factories)
    : weak_encoder_(weak_encoder),
      encoder_message_loop_proxy_(base::MessageLoopProxy::current()),
      gpu_factories_(gpu_factories),
      async_waiter_(NULL),
      async_retval_(NULL),
      input_next_frame_(NULL),
      input_next_frame_keyframe_(false),
      output_buffers_free_count_(0) {
  thread_checker_.DetachFromThread();
  // Picture ID should start on a random number.
  picture_id_ = static_cast<uint16_t>(base::RandInt(0, 0x7FFF));
}
235
236 void RTCVideoEncoder::Impl::CreateAndInitializeVEA(
237     const gfx::Size& input_visible_size,
238     uint32 bitrate,
239     media::VideoCodecProfile profile,
240     base::WaitableEvent* async_waiter,
241     int32_t* async_retval) {
242   DVLOG(3) << "Impl::CreateAndInitializeVEA()";
243   DCHECK(thread_checker_.CalledOnValidThread());
244
245   RegisterAsyncWaiter(async_waiter, async_retval);
246
247   // Check for overflow converting bitrate (kilobits/sec) to bits/sec.
248   if (bitrate > kuint32max / 1000) {
249     NOTIFY_ERROR(media::VideoEncodeAccelerator::kInvalidArgumentError);
250     return;
251   }
252
253   video_encoder_ = gpu_factories_->CreateVideoEncodeAccelerator().Pass();
254   if (!video_encoder_) {
255     NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
256     return;
257   }
258   input_visible_size_ = input_visible_size;
259   if (!video_encoder_->Initialize(media::VideoFrame::I420,
260                                   input_visible_size_,
261                                   profile,
262                                   bitrate * 1000,
263                                   this)) {
264     NOTIFY_ERROR(media::VideoEncodeAccelerator::kInvalidArgumentError);
265     return;
266   }
267 }
268
// Accepts one frame from WebRTC for encoding.  At most one frame may be
// pending (DCHECK below); Encode() on the encoder thread blocks on
// |async_waiter| until the frame is consumed, dropped, or fails.
void RTCVideoEncoder::Impl::Enqueue(const webrtc::I420VideoFrame* input_frame,
                                    bool force_keyframe,
                                    base::WaitableEvent* async_waiter,
                                    int32_t* async_retval) {
  DVLOG(3) << "Impl::Enqueue()";
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(!input_next_frame_);

  RegisterAsyncWaiter(async_waiter, async_retval);
  // If there are no free input and output buffers, drop the frame to avoid a
  // deadlock. If there is a free input buffer, EncodeOneFrame will run and
  // unblock Encode(). If there are no free input buffers but there is a free
  // output buffer, EncodeFrameFinished will be called later to unblock
  // Encode().
  //
  // The caller of Encode() holds a webrtc lock. The deadlock happens when:
  // (1) Encode() is waiting for the frame to be encoded in EncodeOneFrame().
  // (2) There are no free input buffers and they cannot be freed because
  //     the encoder has no output buffers.
  // (3) Output buffers cannot be freed because ReturnEncodedImage is queued
  //     on libjingle worker thread to be run. But the worker thread is waiting
  //     for the same webrtc lock held by the caller of Encode().
  //
  // Dropping a frame is fine. The encoder has been filled with all input
  // buffers. Returning an error in Encode() is not fatal and WebRTC will just
  // continue. If this is a key frame, WebRTC will request a key frame again.
  // Besides, webrtc will drop a frame if Encode() blocks too long.
  if (input_buffers_free_.empty() && output_buffers_free_count_ == 0) {
    DVLOG(2) << "Run out of input and output buffers. Drop the frame.";
    SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_ERROR);
    return;
  }
  input_next_frame_ = input_frame;
  input_next_frame_keyframe_ = force_keyframe;

  // If no input buffer is free right now, EncodeFrameFinished() will pick the
  // pending frame up when one is returned.
  if (!input_buffers_free_.empty())
    EncodeOneFrame();
}
307
308 void RTCVideoEncoder::Impl::UseOutputBitstreamBufferId(
309     int32 bitstream_buffer_id) {
310   DVLOG(3) << "Impl::UseOutputBitstreamBufferIndex(): "
311               "bitstream_buffer_id=" << bitstream_buffer_id;
312   DCHECK(thread_checker_.CalledOnValidThread());
313   if (video_encoder_) {
314     video_encoder_->UseOutputBitstreamBuffer(media::BitstreamBuffer(
315         bitstream_buffer_id,
316         output_buffers_[bitstream_buffer_id]->handle(),
317         output_buffers_[bitstream_buffer_id]->mapped_size()));
318     output_buffers_free_count_++;
319   }
320 }
321
322 void RTCVideoEncoder::Impl::RequestEncodingParametersChange(uint32 bitrate,
323                                                             uint32 framerate) {
324   DVLOG(3) << "Impl::RequestEncodingParametersChange(): bitrate=" << bitrate
325            << ", framerate=" << framerate;
326   DCHECK(thread_checker_.CalledOnValidThread());
327
328   // Check for overflow converting bitrate (kilobits/sec) to bits/sec.
329   if (bitrate > kuint32max / 1000) {
330     NOTIFY_ERROR(media::VideoEncodeAccelerator::kInvalidArgumentError);
331     return;
332   }
333
334   if (video_encoder_)
335     video_encoder_->RequestEncodingParametersChange(bitrate * 1000, framerate);
336 }
337
// Releases the underlying VEA.  After this, every entry point that checks
// |video_encoder_| becomes a no-op or reports an error; the Impl itself is
// freed later when its last reference is dropped.
void RTCVideoEncoder::Impl::Destroy() {
  DVLOG(3) << "Impl::Destroy()";
  DCHECK(thread_checker_.CalledOnValidThread());
  video_encoder_.reset();
}
343
344 void RTCVideoEncoder::Impl::RequireBitstreamBuffers(
345     unsigned int input_count,
346     const gfx::Size& input_coded_size,
347     size_t output_buffer_size) {
348   DVLOG(3) << "Impl::RequireBitstreamBuffers(): input_count=" << input_count
349            << ", input_coded_size=" << input_coded_size.ToString()
350            << ", output_buffer_size=" << output_buffer_size;
351   DCHECK(thread_checker_.CalledOnValidThread());
352
353   if (!video_encoder_)
354     return;
355
356   input_frame_coded_size_ = input_coded_size;
357
358   for (unsigned int i = 0; i < input_count + kInputBufferExtraCount; ++i) {
359     base::SharedMemory* shm =
360         gpu_factories_->CreateSharedMemory(media::VideoFrame::AllocationSize(
361             media::VideoFrame::I420, input_coded_size));
362     if (!shm) {
363       DLOG(ERROR) << "Impl::RequireBitstreamBuffers(): "
364                      "failed to create input buffer " << i;
365       NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
366       return;
367     }
368     input_buffers_.push_back(shm);
369     input_buffers_free_.push_back(i);
370   }
371
372   for (int i = 0; i < kOutputBufferCount; ++i) {
373     base::SharedMemory* shm =
374         gpu_factories_->CreateSharedMemory(output_buffer_size);
375     if (!shm) {
376       DLOG(ERROR) << "Impl::RequireBitstreamBuffers(): "
377                      "failed to create output buffer " << i;
378       NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
379       return;
380     }
381     output_buffers_.push_back(shm);
382   }
383
384   // Immediately provide all output buffers to the VEA.
385   for (size_t i = 0; i < output_buffers_.size(); ++i) {
386     video_encoder_->UseOutputBitstreamBuffer(media::BitstreamBuffer(
387         i, output_buffers_[i]->handle(), output_buffers_[i]->mapped_size()));
388     output_buffers_free_count_++;
389   }
390   SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK);
391 }
392
// VEA::Client callback: |payload_size| bytes of encoded bitstream are ready
// in output buffer |bitstream_buffer_id|.  Wraps the buffer in a
// webrtc::EncodedImage and posts it to the encoder thread for delivery to
// WebRTC; the buffer is recycled later via UseOutputBitstreamBufferId().
void RTCVideoEncoder::Impl::BitstreamBufferReady(int32 bitstream_buffer_id,
                                                 size_t payload_size,
                                                 bool key_frame) {
  DVLOG(3) << "Impl::BitstreamBufferReady(): "
              "bitstream_buffer_id=" << bitstream_buffer_id
           << ", payload_size=" << payload_size
           << ", key_frame=" << key_frame;
  DCHECK(thread_checker_.CalledOnValidThread());

  // Validate the VEA-provided id/size before trusting them to index/read
  // our buffers.
  if (bitstream_buffer_id < 0 ||
      bitstream_buffer_id >= static_cast<int>(output_buffers_.size())) {
    DLOG(ERROR) << "Impl::BitstreamBufferReady(): invalid bitstream_buffer_id="
                << bitstream_buffer_id;
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
    return;
  }
  base::SharedMemory* output_buffer = output_buffers_[bitstream_buffer_id];
  if (payload_size > output_buffer->mapped_size()) {
    DLOG(ERROR) << "Impl::BitstreamBufferReady(): invalid payload_size="
                << payload_size;
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
    return;
  }
  output_buffers_free_count_--;

  // Use webrtc timestamps to ensure correct RTP sender behavior.
  // TODO(hshi): obtain timestamp from the capturer, see crbug.com/350106.
  const int64 capture_time_us = webrtc::TickTime::MicrosecondTimestamp();

  // Derive the capture time (in ms) and RTP timestamp (in 90KHz ticks).
  int64 capture_time_ms = capture_time_us / 1000;
  uint32_t rtp_timestamp = static_cast<uint32_t>(capture_time_us * 90 / 1000);

  // The EncodedImage refers to the shared memory directly; it stays valid
  // because the buffer is not recycled until ReturnEncodedImage() finishes.
  scoped_ptr<webrtc::EncodedImage> image(new webrtc::EncodedImage(
      reinterpret_cast<uint8_t*>(output_buffer->memory()),
      payload_size,
      output_buffer->mapped_size()));
  image->_encodedWidth = input_visible_size_.width();
  image->_encodedHeight = input_visible_size_.height();
  image->_timeStamp = rtp_timestamp;
  image->capture_time_ms_ = capture_time_ms;
  image->_frameType = (key_frame ? webrtc::kKeyFrame : webrtc::kDeltaFrame);
  image->_completeFrame = true;

  encoder_message_loop_proxy_->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::ReturnEncodedImage,
                 weak_encoder_,
                 base::Passed(&image),
                 bitstream_buffer_id,
                 picture_id_));
  // Picture ID must wrap after reaching the maximum.
  picture_id_ = (picture_id_ + 1) & 0x7FFF;
}
447
448 void RTCVideoEncoder::Impl::NotifyError(
449     media::VideoEncodeAccelerator::Error error) {
450   DVLOG(3) << "Impl::NotifyError(): error=" << error;
451   DCHECK(thread_checker_.CalledOnValidThread());
452   int32_t retval;
453   switch (error) {
454     case media::VideoEncodeAccelerator::kInvalidArgumentError:
455       retval = WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
456       break;
457     default:
458       retval = WEBRTC_VIDEO_CODEC_ERROR;
459   }
460
461   video_encoder_.reset();
462
463   if (async_waiter_) {
464     SignalAsyncWaiter(retval);
465   } else {
466     encoder_message_loop_proxy_->PostTask(
467         FROM_HERE,
468         base::Bind(&RTCVideoEncoder::NotifyError, weak_encoder_, retval));
469   }
470 }
471
472 RTCVideoEncoder::Impl::~Impl() { DCHECK(!video_encoder_); }
473
// Copies the pending WebRTC frame into a free shared-memory input buffer and
// submits it to the VEA, then releases the blocked Encode() caller.
void RTCVideoEncoder::Impl::EncodeOneFrame() {
  DVLOG(3) << "Impl::EncodeOneFrame()";
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(input_next_frame_);
  DCHECK(!input_buffers_free_.empty());

  // EncodeOneFrame() may re-enter EncodeFrameFinished() if VEA::Encode() fails,
  // we receive a VEA::NotifyError(), and the media::VideoFrame we pass to
  // Encode() gets destroyed early.  Handle this by resetting our
  // input_next_frame_* state before we hand off the VideoFrame to the VEA.
  const webrtc::I420VideoFrame* next_frame = input_next_frame_;
  bool next_frame_keyframe = input_next_frame_keyframe_;
  input_next_frame_ = NULL;
  input_next_frame_keyframe_ = false;

  // The encoder may already be gone (Destroy()/error); fail the waiting
  // Encode() call rather than touching a dead VEA.
  if (!video_encoder_) {
    SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_ERROR);
    return;
  }

  const int index = input_buffers_free_.back();
  base::SharedMemory* input_buffer = input_buffers_[index];
  // Wrap the shared memory as a media::VideoFrame; EncodeFrameFinished(index)
  // runs when the VEA drops its last reference to the frame, recycling the
  // input buffer.
  scoped_refptr<media::VideoFrame> frame =
      media::VideoFrame::WrapExternalPackedMemory(
          media::VideoFrame::I420,
          input_frame_coded_size_,
          gfx::Rect(input_visible_size_),
          input_visible_size_,
          reinterpret_cast<uint8*>(input_buffer->memory()),
          input_buffer->mapped_size(),
          input_buffer->handle(),
          base::TimeDelta(),
          base::Bind(&RTCVideoEncoder::Impl::EncodeFrameFinished, this, index));
  if (!frame.get()) {
    DLOG(ERROR) << "Impl::EncodeOneFrame(): failed to create frame";
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
    return;
  }

  // Do a strided copy of the input frame to match the input requirements for
  // the encoder.
  // TODO(sheu): support zero-copy from WebRTC.  http://crbug.com/269312
  media::CopyYPlane(next_frame->buffer(webrtc::kYPlane),
                    next_frame->stride(webrtc::kYPlane),
                    next_frame->height(),
                    frame.get());
  media::CopyUPlane(next_frame->buffer(webrtc::kUPlane),
                    next_frame->stride(webrtc::kUPlane),
                    next_frame->height(),
                    frame.get());
  media::CopyVPlane(next_frame->buffer(webrtc::kVPlane),
                    next_frame->stride(webrtc::kVPlane),
                    next_frame->height(),
                    frame.get());

  video_encoder_->Encode(frame, next_frame_keyframe);
  input_buffers_free_.pop_back();
  // The frame has been copied; Encode() no longer needs to block.
  SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK);
}
533
// Called when the VEA is done with the input buffer at |index| (the wrapping
// media::VideoFrame's destruction callback).  Returns the buffer to the free
// list and, if Enqueue() left a frame pending, starts encoding it.
void RTCVideoEncoder::Impl::EncodeFrameFinished(int index) {
  DVLOG(3) << "Impl::EncodeFrameFinished(): index=" << index;
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK_GE(index, 0);
  DCHECK_LT(index, static_cast<int>(input_buffers_.size()));
  input_buffers_free_.push_back(index);
  if (input_next_frame_)
    EncodeOneFrame();
}
543
// Records the waiter/retval pair for an in-flight synchronous call from the
// encoder thread.  Only one such call may be pending at a time (DCHECKs).
void RTCVideoEncoder::Impl::RegisterAsyncWaiter(base::WaitableEvent* waiter,
                                                int32_t* retval) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(!async_waiter_);
  DCHECK(!async_retval_);
  async_waiter_ = waiter;
  async_retval_ = retval;
}
552
// Completes the pending synchronous call: publishes |retval| and then wakes
// the waiting encoder thread.  The retval must be written before Signal(),
// since the waiter reads it as soon as it unblocks.
void RTCVideoEncoder::Impl::SignalAsyncWaiter(int32_t retval) {
  DCHECK(thread_checker_.CalledOnValidThread());
  *async_retval_ = retval;
  async_waiter_->Signal();
  async_retval_ = NULL;
  async_waiter_ = NULL;
}
560
561 #undef NOTIFY_ERROR
562
563 ////////////////////////////////////////////////////////////////////////////////
564 //
565 // RTCVideoEncoder
566 //
567 ////////////////////////////////////////////////////////////////////////////////
568
// Constructs the WebRTC-facing encoder wrapper.  No Impl/VEA exists until
// InitEncode() is called; until then the sticky status is UNINITIALIZED.
RTCVideoEncoder::RTCVideoEncoder(
    webrtc::VideoCodecType type,
    const scoped_refptr<media::GpuVideoAcceleratorFactories>& gpu_factories)
    : video_codec_type_(type),
      gpu_factories_(gpu_factories),
      encoded_image_callback_(NULL),
      impl_status_(WEBRTC_VIDEO_CODEC_UNINITIALIZED),
      weak_factory_(this) {
  DVLOG(1) << "RTCVideoEncoder(): codec type=" << type;
}
579
// Release() posts Destroy() to the Impl on the VEA thread and drops our
// reference, so no Impl may remain after it returns.
RTCVideoEncoder::~RTCVideoEncoder() {
  DVLOG(3) << "~RTCVideoEncoder";
  DCHECK(thread_checker_.CalledOnValidThread());
  Release();
  DCHECK(!impl_.get());
}
586
// webrtc::VideoEncoder implementation: creates the Impl, posts VEA creation
// and initialization to the VEA thread, and blocks until it completes.
// Returns a WEBRTC_VIDEO_CODEC_* status; also records the result in UMA.
int32_t RTCVideoEncoder::InitEncode(const webrtc::VideoCodec* codec_settings,
                                    int32_t number_of_cores,
                                    uint32_t max_payload_size) {
  DVLOG(1) << "InitEncode(): codecType=" << codec_settings->codecType
           << ", width=" << codec_settings->width
           << ", height=" << codec_settings->height
           << ", startBitrate=" << codec_settings->startBitrate;
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(!impl_.get());

  media::VideoCodecProfile profile = WebRTCVideoCodecToVideoCodecProfile(
      video_codec_type_, codec_settings);

  // Invalidate any weak pointers handed to a previous Impl so stale
  // callbacks cannot reach us.
  weak_factory_.InvalidateWeakPtrs();
  impl_ = new Impl(weak_factory_.GetWeakPtr(), gpu_factories_);
  base::WaitableEvent initialization_waiter(true, false);
  int32_t initialization_retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  gpu_factories_->GetTaskRunner()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::Impl::CreateAndInitializeVEA,
                 impl_,
                 gfx::Size(codec_settings->width, codec_settings->height),
                 codec_settings->startBitrate,
                 profile,
                 &initialization_waiter,
                 &initialization_retval));

  // webrtc::VideoEncoder expects this call to be synchronous.
  initialization_waiter.Wait();
  RecordInitEncodeUMA(initialization_retval, profile);
  return initialization_retval;
}
619
// webrtc::VideoEncoder implementation: posts |input_image| to the Impl on
// the VEA thread and blocks until the frame is consumed or dropped.  After a
// fatal error (|impl_| gone), returns the sticky error status instead.
int32_t RTCVideoEncoder::Encode(
    const webrtc::I420VideoFrame& input_image,
    const webrtc::CodecSpecificInfo* codec_specific_info,
    const std::vector<webrtc::VideoFrameType>* frame_types) {
  DVLOG(3) << "Encode()";
  if (!impl_.get()) {
    DVLOG(3) << "Encode(): returning impl_status_=" << impl_status_;
    return impl_status_;
  }

  // Only the first entry of |frame_types| is considered for the keyframe
  // request.
  bool want_key_frame = frame_types && frame_types->size() &&
                        frame_types->front() == webrtc::kKeyFrame;
  base::WaitableEvent encode_waiter(true, false);
  int32_t encode_retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  gpu_factories_->GetTaskRunner()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::Impl::Enqueue,
                 impl_,
                 &input_image,
                 want_key_frame,
                 &encode_waiter,
                 &encode_retval));

  // webrtc::VideoEncoder expects this call to be synchronous.
  encode_waiter.Wait();
  DVLOG(3) << "Encode(): returning encode_retval=" << encode_retval;
  return encode_retval;
}
648
649 int32_t RTCVideoEncoder::RegisterEncodeCompleteCallback(
650     webrtc::EncodedImageCallback* callback) {
651   DVLOG(3) << "RegisterEncodeCompleteCallback()";
652   DCHECK(thread_checker_.CalledOnValidThread());
653   if (!impl_.get()) {
654     DVLOG(3) << "RegisterEncodeCompleteCallback(): returning " << impl_status_;
655     return impl_status_;
656   }
657
658   encoded_image_callback_ = callback;
659   return WEBRTC_VIDEO_CODEC_OK;
660 }
661
662 int32_t RTCVideoEncoder::Release() {
663   DVLOG(3) << "Release()";
664   DCHECK(thread_checker_.CalledOnValidThread());
665
666   if (impl_.get()) {
667     gpu_factories_->GetTaskRunner()->PostTask(
668         FROM_HERE, base::Bind(&RTCVideoEncoder::Impl::Destroy, impl_));
669     impl_ = NULL;
670     weak_factory_.InvalidateWeakPtrs();
671     impl_status_ = WEBRTC_VIDEO_CODEC_UNINITIALIZED;
672   }
673   return WEBRTC_VIDEO_CODEC_OK;
674 }
675
// webrtc::VideoEncoder implementation.  Packet loss / RTT feedback is not
// forwarded to the hardware encoder; this is intentionally a no-op.
int32_t RTCVideoEncoder::SetChannelParameters(uint32_t packet_loss, int rtt) {
  DVLOG(3) << "SetChannelParameters(): packet_loss=" << packet_loss
           << ", rtt=" << rtt;
  // Ignored.
  return WEBRTC_VIDEO_CODEC_OK;
}
682
683 int32_t RTCVideoEncoder::SetRates(uint32_t new_bit_rate, uint32_t frame_rate) {
684   DVLOG(3) << "SetRates(): new_bit_rate=" << new_bit_rate
685            << ", frame_rate=" << frame_rate;
686   if (!impl_.get()) {
687     DVLOG(3) << "SetRates(): returning " << impl_status_;
688     return impl_status_;
689   }
690
691   gpu_factories_->GetTaskRunner()->PostTask(
692       FROM_HERE,
693       base::Bind(&RTCVideoEncoder::Impl::RequestEncodingParametersChange,
694                  impl_,
695                  new_bit_rate,
696                  frame_rate));
697   return WEBRTC_VIDEO_CODEC_OK;
698 }
699
// Runs on the encoder thread (posted by Impl::BitstreamBufferReady): builds
// the codec-specific RTP fragmentation header and codec info, delivers the
// encoded image to WebRTC, then posts the output buffer back to the Impl for
// reuse.
void RTCVideoEncoder::ReturnEncodedImage(scoped_ptr<webrtc::EncodedImage> image,
                                         int32 bitstream_buffer_id,
                                         uint16 picture_id) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DVLOG(3) << "ReturnEncodedImage(): "
           << "bitstream_buffer_id=" << bitstream_buffer_id
           << ", picture_id=" << picture_id;

  // Without a registered callback there is nowhere to deliver the image.
  // NOTE(review): in this path the output buffer is not recycled either —
  // presumably callers always register a callback before encoding; confirm.
  if (!encoded_image_callback_)
    return;

  webrtc::RTPFragmentationHeader header;
  memset(&header, 0, sizeof(header));
  switch (video_codec_type_) {
    case webrtc::kVideoCodecVP8:
      // Generate a header describing a single fragment.
      header.VerifyAndAllocateFragmentationHeader(1);
      header.fragmentationOffset[0] = 0;
      header.fragmentationLength[0] = image->_length;
      header.fragmentationPlType[0] = 0;
      header.fragmentationTimeDiff[0] = 0;
      break;
    case webrtc::kVideoCodecH264:
      // H264 needs one fragment per NALU, parsed out of the bitstream.
      if (!GetRTPFragmentationHeaderH264(
          &header, image->_buffer, image->_length)) {
        DLOG(ERROR) << "Failed to get RTP fragmentation header for H264";
        NotifyError(WEBRTC_VIDEO_CODEC_ERROR);
        return;
      }
      break;
    default:
      NOTREACHED() << "Invalid video codec type";
      return;
  }

  webrtc::CodecSpecificInfo info;
  memset(&info, 0, sizeof(info));
  info.codecType = video_codec_type_;
  if (video_codec_type_ == webrtc::kVideoCodecVP8) {
    info.codecSpecific.VP8.pictureId = picture_id;
    info.codecSpecific.VP8.tl0PicIdx = -1;
    info.codecSpecific.VP8.keyIdx = -1;
  }

  int32_t retval = encoded_image_callback_->Encoded(*image, &info, &header);
  if (retval < 0) {
    DVLOG(2) << "ReturnEncodedImage(): encoded_image_callback_ returned "
             << retval;
  }

  // The call through webrtc::EncodedImageCallback is synchronous, so we can
  // immediately recycle the output buffer back to the Impl.
  gpu_factories_->GetTaskRunner()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::Impl::UseOutputBitstreamBufferId,
                 impl_,
                 bitstream_buffer_id));
}
758
759 void RTCVideoEncoder::NotifyError(int32_t error) {
760   DCHECK(thread_checker_.CalledOnValidThread());
761   DVLOG(1) << "NotifyError(): error=" << error;
762
763   impl_status_ = error;
764   gpu_factories_->GetTaskRunner()->PostTask(
765       FROM_HERE, base::Bind(&RTCVideoEncoder::Impl::Destroy, impl_));
766   impl_ = NULL;
767 }
768
// Reports InitEncode() success/failure to UMA, plus the negotiated codec
// profile when initialization succeeded.
void RTCVideoEncoder::RecordInitEncodeUMA(
    int32_t init_retval, media::VideoCodecProfile profile) {
  UMA_HISTOGRAM_BOOLEAN("Media.RTCVideoEncoderInitEncodeSuccess",
                        init_retval == WEBRTC_VIDEO_CODEC_OK);
  if (init_retval == WEBRTC_VIDEO_CODEC_OK) {
    UMA_HISTOGRAM_ENUMERATION("Media.RTCVideoEncoderProfile",
                              profile,
                              media::VIDEO_CODEC_PROFILE_MAX + 1);
  }
}
779
780 }  // namespace content