[platform/framework/web/crosswalk.git] / src/content/renderer/media/rtc_video_decoder.cc
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/renderer/media/rtc_video_decoder.h"

#include "base/bind.h"
#include "base/logging.h"
#include "base/memory/ref_counted.h"
#include "base/message_loop/message_loop_proxy.h"
#include "base/metrics/histogram.h"
#include "base/safe_numerics.h"
#include "base/stl_util.h"
#include "base/task_runner_util.h"
#include "content/child/child_thread.h"
#include "content/renderer/media/native_handle_impl.h"
#include "media/base/bind_to_loop.h"
#include "media/filters/gpu_video_accelerator_factories.h"
#include "third_party/webrtc/common_video/interface/texture_video_frame.h"
#include "third_party/webrtc/system_wrappers/interface/ref_count.h"

namespace content {

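// Bitstream buffer ids are kept in the range [0, ID_LAST] and wrap around.
// ID_HALF is the window IsBufferAfterReset() uses to decide whether a buffer
// id falls after the most recent reset, and ID_INVALID means no reset has
// been requested yet.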
const int32 RTCVideoDecoder::ID_LAST = 0x3FFFFFFF;
const int32 RTCVideoDecoder::ID_HALF = 0x20000000;
const int32 RTCVideoDecoder::ID_INVALID = -1;

// Maximum number of concurrent VDA::Decode() operations RVD will maintain.
// Higher values allow better pipelining in the GPU, but also require more
// resources.
static const size_t kMaxInFlightDecodes = 8;

// Size of shared-memory segments we allocate.  Since we reuse them we let them
// be on the beefy side.
static const size_t kSharedMemorySegmentBytes = 100 << 10;

// Maximum number of allocated shared-memory segments.
static const int kMaxNumSharedMemorySegments = 16;

// Maximum number of pending WebRTC buffers that are waiting for the shared
// memory. 10 seconds for 30 fps.
static const size_t kMaxNumOfPendingBuffers = 300;

// A shared memory segment and its allocated size. This class has the ownership
// of |shm|.
class RTCVideoDecoder::SHMBuffer {
 public:
  SHMBuffer(base::SharedMemory* shm, size_t size);
  ~SHMBuffer();
  base::SharedMemory* const shm;
  const size_t size;
};

RTCVideoDecoder::SHMBuffer::SHMBuffer(base::SharedMemory* shm, size_t size)
    : shm(shm), size(size) {}

RTCVideoDecoder::SHMBuffer::~SHMBuffer() { shm->Close(); }

RTCVideoDecoder::BufferData::BufferData(int32 bitstream_buffer_id,
                                        uint32_t timestamp,
                                        int width,
                                        int height,
                                        size_t size)
    : bitstream_buffer_id(bitstream_buffer_id),
      timestamp(timestamp),
      width(width),
      height(height),
      size(size) {}

RTCVideoDecoder::BufferData::BufferData() {}

RTCVideoDecoder::BufferData::~BufferData() {}

RTCVideoDecoder::RTCVideoDecoder(
    const scoped_refptr<media::GpuVideoAcceleratorFactories>& factories)
    : weak_factory_(this),
      weak_this_(weak_factory_.GetWeakPtr()),
      factories_(factories),
      vda_loop_proxy_(factories->GetMessageLoop()),
      decoder_texture_target_(0),
      next_picture_buffer_id_(0),
      state_(UNINITIALIZED),
      decode_complete_callback_(NULL),
      num_shm_buffers_(0),
      next_bitstream_buffer_id_(0),
      reset_bitstream_buffer_id_(ID_INVALID) {
  DCHECK(!vda_loop_proxy_->BelongsToCurrentThread());
  base::WaitableEvent message_loop_async_waiter(false, false);
  // Waiting here is safe. The media thread is stopped in the child thread and
  // the child thread is blocked when VideoDecoderFactory::CreateVideoDecoder
  // runs.
  vda_loop_proxy_->PostTask(FROM_HERE,
                            base::Bind(&RTCVideoDecoder::Initialize,
                                       base::Unretained(this),
                                       &message_loop_async_waiter));
  message_loop_async_waiter.Wait();
}

RTCVideoDecoder::~RTCVideoDecoder() {
  DVLOG(2) << "~RTCVideoDecoder";
  // Destroy the VDA and remove |this| as a destruction observer if this is
  // the VDA thread.
  if (vda_loop_proxy_->BelongsToCurrentThread()) {
    base::MessageLoop::current()->RemoveDestructionObserver(this);
    DestroyVDA();
  } else {
    // VDA should have been destroyed in WillDestroyCurrentMessageLoop.
    DCHECK(!vda_);
  }

  // Delete all shared memories.
  STLDeleteElements(&available_shm_segments_);
  STLDeleteValues(&bitstream_buffers_in_decoder_);
  STLDeleteContainerPairFirstPointers(decode_buffers_.begin(),
                                      decode_buffers_.end());
  decode_buffers_.clear();

  // Delete WebRTC input buffers.
  for (std::deque<std::pair<webrtc::EncodedImage, BufferData> >::iterator it =
           pending_buffers_.begin();
       it != pending_buffers_.end();
       ++it) {
    delete[] it->first._buffer;
  }
}

scoped_ptr<RTCVideoDecoder> RTCVideoDecoder::Create(
    webrtc::VideoCodecType type,
    const scoped_refptr<media::GpuVideoAcceleratorFactories>& factories) {
  scoped_ptr<RTCVideoDecoder> decoder;
  // Convert WebRTC codec type to media codec profile.
  media::VideoCodecProfile profile;
  switch (type) {
    case webrtc::kVideoCodecVP8:
      profile = media::VP8PROFILE_MAIN;
      break;
    default:
      DVLOG(2) << "Video codec not supported:" << type;
      return decoder.Pass();
  }

  decoder.reset(new RTCVideoDecoder(factories));
  decoder->vda_ =
      factories->CreateVideoDecodeAccelerator(profile, decoder.get()).Pass();
  // vda can be NULL if VP8 is not supported.
  if (decoder->vda_ != NULL) {
    decoder->state_ = INITIALIZED;
  } else {
    factories->GetMessageLoop()->DeleteSoon(FROM_HERE, decoder.release());
  }
  return decoder.Pass();
}

int32_t RTCVideoDecoder::InitDecode(const webrtc::VideoCodec* codecSettings,
                                    int32_t /*numberOfCores*/) {
  DVLOG(2) << "InitDecode";
  DCHECK_EQ(codecSettings->codecType, webrtc::kVideoCodecVP8);
  if (codecSettings->codecSpecific.VP8.feedbackModeOn) {
    LOG(ERROR) << "Feedback mode not supported";
    return RecordInitDecodeUMA(WEBRTC_VIDEO_CODEC_ERROR);
  }

  base::AutoLock auto_lock(lock_);
  if (state_ == UNINITIALIZED || state_ == DECODE_ERROR) {
    LOG(ERROR) << "VDA is not initialized. state=" << state_;
    return RecordInitDecodeUMA(WEBRTC_VIDEO_CODEC_UNINITIALIZED);
  }
  // Create some shared memory if the queue is empty.
  if (available_shm_segments_.size() == 0) {
    vda_loop_proxy_->PostTask(FROM_HERE,
                              base::Bind(&RTCVideoDecoder::CreateSHM,
                                         weak_this_,
                                         kMaxInFlightDecodes,
                                         kSharedMemorySegmentBytes));
  }
  return RecordInitDecodeUMA(WEBRTC_VIDEO_CODEC_OK);
}

int32_t RTCVideoDecoder::Decode(
    const webrtc::EncodedImage& inputImage,
    bool missingFrames,
    const webrtc::RTPFragmentationHeader* /*fragmentation*/,
    const webrtc::CodecSpecificInfo* /*codecSpecificInfo*/,
    int64_t /*renderTimeMs*/) {
  DVLOG(3) << "Decode";

  base::AutoLock auto_lock(lock_);

  if (state_ == UNINITIALIZED || decode_complete_callback_ == NULL) {
    LOG(ERROR) << "The decoder has not been initialized.";
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }

  if (state_ == DECODE_ERROR) {
    LOG(ERROR) << "Decoding error occurred.";
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  if (missingFrames || !inputImage._completeFrame) {
    DLOG(ERROR) << "Missing or incomplete frames.";
    // Unlike the SW decoder in libvpx, hw decoder cannot handle broken frames.
    // Return an error to request a key frame.
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  // Most platforms' VDA implementations support mid-stream resolution change
  // internally.  Platforms whose VDAs fail to support mid-stream resolution
  // change gracefully need to have their clients cover for them, and we do that
  // here.
#ifdef ANDROID
  const bool kVDACanHandleMidstreamResize = false;
#else
  const bool kVDACanHandleMidstreamResize = true;
#endif

  bool need_to_reset_for_midstream_resize = false;
  if (inputImage._frameType == webrtc::kKeyFrame) {
    DVLOG(2) << "Got key frame. size=" << inputImage._encodedWidth << "x"
             << inputImage._encodedHeight;
    gfx::Size prev_frame_size = frame_size_;
    frame_size_.SetSize(inputImage._encodedWidth, inputImage._encodedHeight);
    if (!kVDACanHandleMidstreamResize && !prev_frame_size.IsEmpty() &&
        prev_frame_size != frame_size_) {
      need_to_reset_for_midstream_resize = true;
    }
  } else if (IsFirstBufferAfterReset(next_bitstream_buffer_id_,
                                     reset_bitstream_buffer_id_)) {
    // TODO(wuchengli): VDA should handle it. Remove this when
    // http://crosbug.com/p/21913 is fixed.
    DVLOG(1) << "The first frame should be a key frame. Drop this.";
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  // Create buffer metadata.
  BufferData buffer_data(next_bitstream_buffer_id_,
                         inputImage._timeStamp,
                         frame_size_.width(),
                         frame_size_.height(),
                         inputImage._length);
  // Mask against 30 bits, to avoid (undefined) wraparound on signed integer.
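  // With ID_LAST = 0x3FFFFFFF the increment wraps 0x3FFFFFFF + 1 back to 0,
  // so ids always stay within 30 bits instead of overflowing int32.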
  next_bitstream_buffer_id_ = (next_bitstream_buffer_id_ + 1) & ID_LAST;

  // If a shared memory segment is available, there are no pending buffers, and
  // this isn't a mid-stream resolution change, then send the buffer for decode
  // immediately. Otherwise, save the buffer in the queue for later decode.
  scoped_ptr<SHMBuffer> shm_buffer;
  if (!need_to_reset_for_midstream_resize && pending_buffers_.size() == 0)
    shm_buffer = GetSHM_Locked(inputImage._length);
  if (!shm_buffer) {
    if (!SaveToPendingBuffers_Locked(inputImage, buffer_data))
      return WEBRTC_VIDEO_CODEC_ERROR;
    if (need_to_reset_for_midstream_resize) {
      base::AutoUnlock auto_unlock(lock_);
      Reset();
    }
    return WEBRTC_VIDEO_CODEC_OK;
  }

  SaveToDecodeBuffers_Locked(inputImage, shm_buffer.Pass(), buffer_data);
  vda_loop_proxy_->PostTask(
      FROM_HERE, base::Bind(&RTCVideoDecoder::RequestBufferDecode, weak_this_));
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t RTCVideoDecoder::RegisterDecodeCompleteCallback(
    webrtc::DecodedImageCallback* callback) {
  DVLOG(2) << "RegisterDecodeCompleteCallback";
  base::AutoLock auto_lock(lock_);
  decode_complete_callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t RTCVideoDecoder::Release() {
  DVLOG(2) << "Release";
  // Do not destroy VDA because WebRTC can call InitDecode and start decoding
  // again.
  return Reset();
}

int32_t RTCVideoDecoder::Reset() {
  DVLOG(2) << "Reset";
  base::AutoLock auto_lock(lock_);
  if (state_ == UNINITIALIZED) {
    LOG(ERROR) << "Decoder not initialized.";
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  if (next_bitstream_buffer_id_ != 0)
    reset_bitstream_buffer_id_ = next_bitstream_buffer_id_ - 1;
  else
    reset_bitstream_buffer_id_ = ID_LAST;
  // If VDA is already resetting, no need to request the reset again.
  if (state_ != RESETTING) {
    state_ = RESETTING;
    vda_loop_proxy_->PostTask(
        FROM_HERE, base::Bind(&RTCVideoDecoder::ResetInternal, weak_this_));
  }
  return WEBRTC_VIDEO_CODEC_OK;
}

void RTCVideoDecoder::NotifyInitializeDone() {
  DVLOG(2) << "NotifyInitializeDone";
  NOTREACHED();
}

void RTCVideoDecoder::ProvidePictureBuffers(uint32 count,
                                            const gfx::Size& size,
                                            uint32 texture_target) {
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());
  DVLOG(3) << "ProvidePictureBuffers. texture_target=" << texture_target;

  if (!vda_)
    return;

  std::vector<uint32> texture_ids;
  std::vector<gpu::Mailbox> texture_mailboxes;
  decoder_texture_target_ = texture_target;
  // Discards the sync point returned here since PictureReady will imply that
  // the produce has already happened, and the texture is ready for use.
  if (!factories_->CreateTextures(count,
                                  size,
                                  &texture_ids,
                                  &texture_mailboxes,
                                  decoder_texture_target_)) {
    NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
    return;
  }
  DCHECK_EQ(count, texture_ids.size());
  DCHECK_EQ(count, texture_mailboxes.size());

  std::vector<media::PictureBuffer> picture_buffers;
  for (size_t i = 0; i < texture_ids.size(); ++i) {
    picture_buffers.push_back(media::PictureBuffer(
        next_picture_buffer_id_++, size, texture_ids[i], texture_mailboxes[i]));
    bool inserted = assigned_picture_buffers_.insert(std::make_pair(
        picture_buffers.back().id(), picture_buffers.back())).second;
    DCHECK(inserted);
  }
  vda_->AssignPictureBuffers(picture_buffers);
}

void RTCVideoDecoder::DismissPictureBuffer(int32 id) {
  DVLOG(3) << "DismissPictureBuffer. id=" << id;
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());

  std::map<int32, media::PictureBuffer>::iterator it =
      assigned_picture_buffers_.find(id);
  if (it == assigned_picture_buffers_.end()) {
    NOTREACHED() << "Missing picture buffer: " << id;
    return;
  }

  media::PictureBuffer buffer_to_dismiss = it->second;
  assigned_picture_buffers_.erase(it);

  std::set<int32>::iterator at_display_it =
      picture_buffers_at_display_.find(id);

  if (at_display_it == picture_buffers_at_display_.end()) {
    // We can delete the texture immediately as it's not being displayed.
    factories_->DeleteTexture(buffer_to_dismiss.texture_id());
  } else {
    // Texture in display. Postpone deletion until after it's returned to us.
    bool inserted = dismissed_picture_buffers_
        .insert(std::make_pair(id, buffer_to_dismiss)).second;
    DCHECK(inserted);
  }
}

void RTCVideoDecoder::PictureReady(const media::Picture& picture) {
  DVLOG(3) << "PictureReady";
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());

  std::map<int32, media::PictureBuffer>::iterator it =
      assigned_picture_buffers_.find(picture.picture_buffer_id());
  if (it == assigned_picture_buffers_.end()) {
    NOTREACHED() << "Missing picture buffer: " << picture.picture_buffer_id();
    NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
    return;
  }
  const media::PictureBuffer& pb = it->second;

  // Create a media::VideoFrame.
  uint32_t timestamp = 0, width = 0, height = 0;
  size_t size = 0;
  GetBufferData(
      picture.bitstream_buffer_id(), &timestamp, &width, &height, &size);
  scoped_refptr<media::VideoFrame> frame =
      CreateVideoFrame(picture, pb, timestamp, width, height, size);
  bool inserted =
      picture_buffers_at_display_.insert(picture.picture_buffer_id()).second;
  DCHECK(inserted);

  // Create a WebRTC video frame.
  webrtc::RefCountImpl<NativeHandleImpl>* handle =
      new webrtc::RefCountImpl<NativeHandleImpl>(frame);
  webrtc::TextureVideoFrame decoded_image(handle, width, height, timestamp, 0);

  // Invoke decode callback. WebRTC expects no callback after Reset or Release.
  {
    base::AutoLock auto_lock(lock_);
    DCHECK(decode_complete_callback_ != NULL);
    if (IsBufferAfterReset(picture.bitstream_buffer_id(),
                           reset_bitstream_buffer_id_)) {
      decode_complete_callback_->Decoded(decoded_image);
    }
  }
}

scoped_refptr<media::VideoFrame> RTCVideoDecoder::CreateVideoFrame(
    const media::Picture& picture,
    const media::PictureBuffer& pb,
    uint32_t timestamp,
    uint32_t width,
    uint32_t height,
    size_t size) {
  gfx::Rect visible_rect(width, height);
  gfx::Size natural_size(width, height);
  DCHECK(decoder_texture_target_);
  // Convert timestamp from 90KHz to ms.
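  // The RTP timestamp is in 90 kHz ticks; ticks * 1000 / 90 is the equivalent
  // number of microseconds (TimeDelta's internal unit), e.g. 90000 ticks ->
  // 1,000,000 microseconds = 1 second.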
  base::TimeDelta timestamp_ms = base::TimeDelta::FromInternalValue(
      base::checked_numeric_cast<uint64_t>(timestamp) * 1000 / 90);
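  // The mailbox release callback below (bound to ReusePictureBuffer()) runs
  // once the texture is no longer in use, which is what eventually returns
  // the picture buffer to the VDA for reuse.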
  return media::VideoFrame::WrapNativeTexture(
      new media::VideoFrame::MailboxHolder(
          pb.texture_mailbox(),
          0,  // sync_point
          media::BindToCurrentLoop(
              base::Bind(&RTCVideoDecoder::ReusePictureBuffer,
                         weak_this_,
                         picture.picture_buffer_id()))),
      decoder_texture_target_,
      pb.size(),
      visible_rect,
      natural_size,
      timestamp_ms,
      base::Bind(&media::GpuVideoAcceleratorFactories::ReadPixels,
                 factories_,
                 pb.texture_id(),
                 natural_size),
      base::Closure());
}

void RTCVideoDecoder::NotifyEndOfBitstreamBuffer(int32 id) {
  DVLOG(3) << "NotifyEndOfBitstreamBuffer. id=" << id;
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());

  std::map<int32, SHMBuffer*>::iterator it =
      bitstream_buffers_in_decoder_.find(id);
  if (it == bitstream_buffers_in_decoder_.end()) {
    NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
    NOTREACHED() << "Missing bitstream buffer: " << id;
    return;
  }

  {
    base::AutoLock auto_lock(lock_);
    PutSHM_Locked(scoped_ptr<SHMBuffer>(it->second));
  }
  bitstream_buffers_in_decoder_.erase(it);

  RequestBufferDecode();
}

void RTCVideoDecoder::NotifyFlushDone() {
  DVLOG(3) << "NotifyFlushDone";
  NOTREACHED() << "Unexpected flush done notification.";
}

void RTCVideoDecoder::NotifyResetDone() {
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());
  DVLOG(3) << "NotifyResetDone";

  if (!vda_)
    return;

  input_buffer_data_.clear();
  {
    base::AutoLock auto_lock(lock_);
    state_ = INITIALIZED;
  }
  // Send the pending buffers for decoding.
  RequestBufferDecode();
}

void RTCVideoDecoder::NotifyError(media::VideoDecodeAccelerator::Error error) {
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());
  if (!vda_)
    return;

  LOG(ERROR) << "VDA Error:" << error;
  UMA_HISTOGRAM_ENUMERATION("Media.RTCVideoDecoderError",
                            error,
                            media::VideoDecodeAccelerator::LARGEST_ERROR_ENUM);
  DestroyVDA();

  base::AutoLock auto_lock(lock_);
  state_ = DECODE_ERROR;
}

void RTCVideoDecoder::WillDestroyCurrentMessageLoop() {
  DVLOG(2) << "WillDestroyCurrentMessageLoop";
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());
  factories_->Abort();
  weak_factory_.InvalidateWeakPtrs();
  DestroyVDA();
}

void RTCVideoDecoder::Initialize(base::WaitableEvent* waiter) {
  DVLOG(2) << "Initialize";
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());
  base::MessageLoop::current()->AddDestructionObserver(this);
  waiter->Signal();
}

void RTCVideoDecoder::RequestBufferDecode() {
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());
  if (!vda_)
    return;

  MovePendingBuffersToDecodeBuffers();

  while (CanMoreDecodeWorkBeDone()) {
    // Get a buffer and data from the queue.
    SHMBuffer* shm_buffer = NULL;
    BufferData buffer_data;
    {
      base::AutoLock auto_lock(lock_);
      // Do not request decode if VDA is resetting.
      if (decode_buffers_.size() == 0 || state_ == RESETTING)
        return;
      shm_buffer = decode_buffers_.front().first;
      buffer_data = decode_buffers_.front().second;
      decode_buffers_.pop_front();
      // Drop buffers that were queued before Reset or Release was called.
      if (!IsBufferAfterReset(buffer_data.bitstream_buffer_id,
                              reset_bitstream_buffer_id_)) {
        PutSHM_Locked(scoped_ptr<SHMBuffer>(shm_buffer));
        continue;
      }
    }

    // Create a BitstreamBuffer and send to VDA to decode.
    media::BitstreamBuffer bitstream_buffer(buffer_data.bitstream_buffer_id,
                                            shm_buffer->shm->handle(),
                                            buffer_data.size);
    bool inserted = bitstream_buffers_in_decoder_
        .insert(std::make_pair(bitstream_buffer.id(), shm_buffer)).second;
    DCHECK(inserted);
    RecordBufferData(buffer_data);
    vda_->Decode(bitstream_buffer);
  }
}

bool RTCVideoDecoder::CanMoreDecodeWorkBeDone() {
  return bitstream_buffers_in_decoder_.size() < kMaxInFlightDecodes;
}

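// Worked example of the wraparound check below: with ID_LAST = 0x3FFFFFFF, a
// buffer id of 0 right after a reset at id_reset = ID_LAST gives
// diff = -ID_LAST, which becomes 1 after adding ID_LAST + 1 and is accepted;
// a buffer id equal to id_reset gives diff = ID_LAST + 1 >= ID_HALF and is
// treated as coming before the reset.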
bool RTCVideoDecoder::IsBufferAfterReset(int32 id_buffer, int32 id_reset) {
  if (id_reset == ID_INVALID)
    return true;
  int32 diff = id_buffer - id_reset;
  if (diff <= 0)
    diff += ID_LAST + 1;
  return diff < ID_HALF;
}

bool RTCVideoDecoder::IsFirstBufferAfterReset(int32 id_buffer, int32 id_reset) {
  if (id_reset == ID_INVALID)
    return id_buffer == 0;
  return id_buffer == ((id_reset + 1) & ID_LAST);
}

void RTCVideoDecoder::SaveToDecodeBuffers_Locked(
    const webrtc::EncodedImage& input_image,
    scoped_ptr<SHMBuffer> shm_buffer,
    const BufferData& buffer_data) {
  memcpy(shm_buffer->shm->memory(), input_image._buffer, input_image._length);
  std::pair<SHMBuffer*, BufferData> buffer_pair =
      std::make_pair(shm_buffer.release(), buffer_data);

  // Store the buffer and the metadata to the queue.
  decode_buffers_.push_back(buffer_pair);
}

bool RTCVideoDecoder::SaveToPendingBuffers_Locked(
    const webrtc::EncodedImage& input_image,
    const BufferData& buffer_data) {
  DVLOG(2) << "SaveToPendingBuffers_Locked"
           << ". pending_buffers size=" << pending_buffers_.size()
           << ". decode_buffers_ size=" << decode_buffers_.size()
           << ". available_shm size=" << available_shm_segments_.size();
  // Too many buffers are queued. Something has gone wrong.
  if (pending_buffers_.size() >= kMaxNumOfPendingBuffers) {
    LOG(WARNING) << "Too many pending buffers!";
    return false;
  }

  // Clone the input image and save it to the queue.
  uint8_t* buffer = new uint8_t[input_image._length];
  // TODO(wuchengli): avoid memcpy. Extend webrtc::VideoDecoder::Decode()
  // interface to take a non-const ptr to the frame and add a method to the
  // frame that will swap buffers with another.
  memcpy(buffer, input_image._buffer, input_image._length);
  webrtc::EncodedImage encoded_image(
      buffer, input_image._length, input_image._length);
  std::pair<webrtc::EncodedImage, BufferData> buffer_pair =
      std::make_pair(encoded_image, buffer_data);

  pending_buffers_.push_back(buffer_pair);
  return true;
}

void RTCVideoDecoder::MovePendingBuffersToDecodeBuffers() {
  base::AutoLock auto_lock(lock_);
  while (pending_buffers_.size() > 0) {
    // Get a pending buffer from the queue.
    const webrtc::EncodedImage& input_image = pending_buffers_.front().first;
    const BufferData& buffer_data = pending_buffers_.front().second;

    // Drop the frame if it comes before Reset or Release.
    if (!IsBufferAfterReset(buffer_data.bitstream_buffer_id,
                            reset_bitstream_buffer_id_)) {
      delete[] input_image._buffer;
      pending_buffers_.pop_front();
      continue;
    }
    // Get shared memory and save it to decode buffers.
    scoped_ptr<SHMBuffer> shm_buffer = GetSHM_Locked(input_image._length);
    if (!shm_buffer)
      return;
    SaveToDecodeBuffers_Locked(input_image, shm_buffer.Pass(), buffer_data);
    delete[] input_image._buffer;
    pending_buffers_.pop_front();
  }
}

void RTCVideoDecoder::ResetInternal() {
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());
  DVLOG(2) << "ResetInternal";
  if (vda_)
    vda_->Reset();
}

void RTCVideoDecoder::ReusePictureBuffer(int64 picture_buffer_id,
                                         uint32 sync_point) {
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());
  DVLOG(3) << "ReusePictureBuffer. id=" << picture_buffer_id;

  if (!vda_)
    return;

  CHECK(!picture_buffers_at_display_.empty());

  size_t num_erased = picture_buffers_at_display_.erase(picture_buffer_id);
  DCHECK(num_erased);

  std::map<int32, media::PictureBuffer>::iterator it =
      assigned_picture_buffers_.find(picture_buffer_id);

  if (it == assigned_picture_buffers_.end()) {
    // This picture was dismissed while in display, so we postponed deletion.
    it = dismissed_picture_buffers_.find(picture_buffer_id);
    DCHECK(it != dismissed_picture_buffers_.end());
    factories_->DeleteTexture(it->second.texture_id());
    dismissed_picture_buffers_.erase(it);
    return;
  }

  factories_->WaitSyncPoint(sync_point);

  vda_->ReusePictureBuffer(picture_buffer_id);
}

void RTCVideoDecoder::DestroyTextures() {
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());
  std::map<int32, media::PictureBuffer>::iterator it;

  for (it = assigned_picture_buffers_.begin();
       it != assigned_picture_buffers_.end();
       ++it) {
    factories_->DeleteTexture(it->second.texture_id());
  }
  assigned_picture_buffers_.clear();

  for (it = dismissed_picture_buffers_.begin();
       it != dismissed_picture_buffers_.end();
       ++it) {
    factories_->DeleteTexture(it->second.texture_id());
  }
  dismissed_picture_buffers_.clear();
}

void RTCVideoDecoder::DestroyVDA() {
  DVLOG(2) << "DestroyVDA";
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());
  if (vda_)
    vda_.release()->Destroy();
  DestroyTextures();
  base::AutoLock auto_lock(lock_);
  state_ = UNINITIALIZED;
}

scoped_ptr<RTCVideoDecoder::SHMBuffer> RTCVideoDecoder::GetSHM_Locked(
    size_t min_size) {
  // Reuse a SHM if possible.
  SHMBuffer* ret = NULL;
  if (!available_shm_segments_.empty() &&
      available_shm_segments_.back()->size >= min_size) {
    ret = available_shm_segments_.back();
    available_shm_segments_.pop_back();
  }
  // Post to vda thread to create shared memory if SHM cannot be reused or the
  // queue is almost empty.
  if (num_shm_buffers_ < kMaxNumSharedMemorySegments &&
      (ret == NULL || available_shm_segments_.size() <= 1)) {
    vda_loop_proxy_->PostTask(
        FROM_HERE,
        base::Bind(&RTCVideoDecoder::CreateSHM, weak_this_, 1, min_size));
  }
  return scoped_ptr<SHMBuffer>(ret);
}

void RTCVideoDecoder::PutSHM_Locked(scoped_ptr<SHMBuffer> shm_buffer) {
  available_shm_segments_.push_back(shm_buffer.release());
}

void RTCVideoDecoder::CreateSHM(int number, size_t min_size) {
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());
  DVLOG(2) << "CreateSHM. size=" << min_size;
  int number_to_allocate;
  {
    base::AutoLock auto_lock(lock_);
    number_to_allocate =
        std::min(kMaxNumSharedMemorySegments - num_shm_buffers_, number);
  }
  size_t size_to_allocate = std::max(min_size, kSharedMemorySegmentBytes);
  for (int i = 0; i < number_to_allocate; i++) {
    base::SharedMemory* shm = factories_->CreateSharedMemory(size_to_allocate);
    if (shm != NULL) {
      base::AutoLock auto_lock(lock_);
      num_shm_buffers_++;
      PutSHM_Locked(
          scoped_ptr<SHMBuffer>(new SHMBuffer(shm, size_to_allocate)));
    }
  }
  // Kick off the decoding.
  RequestBufferDecode();
}

void RTCVideoDecoder::RecordBufferData(const BufferData& buffer_data) {
  input_buffer_data_.push_front(buffer_data);
  // Why this value?  Because why not.  avformat.h:MAX_REORDER_DELAY is 16, but
  // that's too small for some pathological B-frame test videos.  The cost of
  // using too-high a value is low (192 bits per extra slot).
  static const size_t kMaxInputBufferDataSize = 128;
  // Pop from the back of the list, because that's the oldest entry and the
  // least likely to be useful in the future.
  if (input_buffer_data_.size() > kMaxInputBufferDataSize)
    input_buffer_data_.pop_back();
}

void RTCVideoDecoder::GetBufferData(int32 bitstream_buffer_id,
                                    uint32_t* timestamp,
                                    uint32_t* width,
                                    uint32_t* height,
                                    size_t* size) {
  for (std::list<BufferData>::iterator it = input_buffer_data_.begin();
       it != input_buffer_data_.end();
       ++it) {
    if (it->bitstream_buffer_id != bitstream_buffer_id)
      continue;
    *timestamp = it->timestamp;
    *width = it->width;
    *height = it->height;
    return;
  }
  NOTREACHED() << "Missing bitstream buffer id: " << bitstream_buffer_id;
}

int32_t RTCVideoDecoder::RecordInitDecodeUMA(int32_t status) {
  // Logging boolean is enough to know if HW decoding has been used. Also,
  // InitDecode is less likely to return an error so enum is not used here.
  bool sample = (status == WEBRTC_VIDEO_CODEC_OK) ? true : false;
  UMA_HISTOGRAM_BOOLEAN("Media.RTCVideoDecoderInitDecodeSuccess", sample);
  return status;
}

}  // namespace content