Upstream version 7.36.149.0
[platform/framework/web/crosswalk.git] / src / content / renderer / media / rtc_video_decoder.cc
1 // Copyright 2013 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "content/renderer/media/rtc_video_decoder.h"
6
7 #include "base/bind.h"
8 #include "base/logging.h"
9 #include "base/memory/ref_counted.h"
10 #include "base/message_loop/message_loop_proxy.h"
11 #include "base/metrics/histogram.h"
12 #include "base/numerics/safe_conversions.h"
13 #include "base/stl_util.h"
14 #include "base/synchronization/waitable_event.h"
15 #include "base/task_runner_util.h"
16 #include "content/child/child_thread.h"
17 #include "content/renderer/media/native_handle_impl.h"
18 #include "gpu/command_buffer/common/mailbox_holder.h"
19 #include "media/base/bind_to_current_loop.h"
20 #include "media/filters/gpu_video_accelerator_factories.h"
21 #include "third_party/skia/include/core/SkBitmap.h"
22 #include "third_party/webrtc/common_video/interface/texture_video_frame.h"
23 #include "third_party/webrtc/system_wrappers/interface/ref_count.h"
24
25 namespace content {
26
// Bitstream buffer ids live in a 30-bit space (see Decode(), which masks with
// ID_LAST). ID_HALF is used to order ids across wraparound, and ID_INVALID
// means "no reset has happened yet".
const int32 RTCVideoDecoder::ID_LAST = 0x3FFFFFFF;
const int32 RTCVideoDecoder::ID_HALF = 0x20000000;
const int32 RTCVideoDecoder::ID_INVALID = -1;

// Maximum number of concurrent VDA::Decode() operations RVD will maintain.
// Higher values allow better pipelining in the GPU, but also require more
// resources.
static const size_t kMaxInFlightDecodes = 8;

// Size of shared-memory segments we allocate.  Since we reuse them we let them
// be on the beefy side.
static const size_t kSharedMemorySegmentBytes = 100 << 10;

// Maximum number of allocated shared-memory segments.
static const int kMaxNumSharedMemorySegments = 16;

// Maximum number of pending WebRTC buffers that are waiting for the shared
// memory. 10 seconds for 30 fps.
static const size_t kMaxNumOfPendingBuffers = 300;
46
47 // A shared memory segment and its allocated size. This class has the ownership
48 // of |shm|.
class RTCVideoDecoder::SHMBuffer {
 public:
  // Takes ownership of |shm|; |size| is the allocated byte count of the
  // segment (may exceed the payload actually stored in it).
  SHMBuffer(base::SharedMemory* shm, size_t size);
  ~SHMBuffer();
  base::SharedMemory* const shm;  // Owned.
  const size_t size;              // Allocated size in bytes.
};
56
57 RTCVideoDecoder::SHMBuffer::SHMBuffer(base::SharedMemory* shm, size_t size)
58     : shm(shm), size(size) {}
59
60 RTCVideoDecoder::SHMBuffer::~SHMBuffer() { shm->Close(); }
61
// Per-frame metadata recorded when an encoded buffer is queued, so that the
// decoded picture can later be matched back to its timestamp and dimensions
// (see RecordBufferData() / GetBufferData()).
RTCVideoDecoder::BufferData::BufferData(int32 bitstream_buffer_id,
                                        uint32_t timestamp,
                                        int width,
                                        int height,
                                        size_t size)
    : bitstream_buffer_id(bitstream_buffer_id),
      timestamp(timestamp),
      width(width),
      height(height),
      size(size) {}

// Default instances are populated later by assignment (e.g. in
// RequestBufferDecode()).
RTCVideoDecoder::BufferData::BufferData() {}

RTCVideoDecoder::BufferData::~BufferData() {}
76
RTCVideoDecoder::RTCVideoDecoder(
    const scoped_refptr<media::GpuVideoAcceleratorFactories>& factories)
    : factories_(factories),
      decoder_texture_target_(0),
      next_picture_buffer_id_(0),
      state_(UNINITIALIZED),
      decode_complete_callback_(NULL),
      num_shm_buffers_(0),
      next_bitstream_buffer_id_(0),
      reset_bitstream_buffer_id_(ID_INVALID),
      weak_factory_(this) {
  // Construction must happen off the factories' (media) task runner;
  // Create() then posts CreateVDA() onto that runner.
  DCHECK(!factories_->GetTaskRunner()->BelongsToCurrentThread());
}
90
RTCVideoDecoder::~RTCVideoDecoder() {
  DVLOG(2) << "~RTCVideoDecoder";
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
  DestroyVDA();

  // Delete all shared memories.
  STLDeleteElements(&available_shm_segments_);
  STLDeleteValues(&bitstream_buffers_in_decoder_);
  // |decode_buffers_| holds (SHMBuffer*, BufferData) pairs; only the first
  // element of each pair is heap-owned here.
  STLDeleteContainerPairFirstPointers(decode_buffers_.begin(),
                                      decode_buffers_.end());
  decode_buffers_.clear();

  // Delete WebRTC input buffers. These were cloned with new[] in
  // SaveToPendingBuffers_Locked().
  for (std::deque<std::pair<webrtc::EncodedImage, BufferData> >::iterator it =
           pending_buffers_.begin();
       it != pending_buffers_.end();
       ++it) {
    delete[] it->first._buffer;
  }
}
111
// static
// Factory: returns NULL for unsupported codec types or when the platform has
// no usable VDA. Blocks the calling thread until VDA creation has completed
// on the factories' task runner.
scoped_ptr<RTCVideoDecoder> RTCVideoDecoder::Create(
    webrtc::VideoCodecType type,
    const scoped_refptr<media::GpuVideoAcceleratorFactories>& factories) {
  scoped_ptr<RTCVideoDecoder> decoder;
  // Convert WebRTC codec type to media codec profile.
  media::VideoCodecProfile profile;
  switch (type) {
    case webrtc::kVideoCodecVP8:
      profile = media::VP8PROFILE_MAIN;
      break;
    default:
      DVLOG(2) << "Video codec not supported:" << type;
      return decoder.Pass();
  }

  // |waiter| lets us synchronously observe the result of CreateVDA(), which
  // must run on the media task runner. base::Unretained is safe because we
  // wait for the task to finish before |decoder| can be destroyed.
  base::WaitableEvent waiter(true, false);
  decoder.reset(new RTCVideoDecoder(factories));
  decoder->factories_->GetTaskRunner()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoDecoder::CreateVDA,
                 base::Unretained(decoder.get()),
                 profile,
                 &waiter));
  waiter.Wait();
  // vda can be NULL if VP8 is not supported.
  if (decoder->vda_ != NULL) {
    decoder->state_ = INITIALIZED;
  } else {
    // The decoder must be destroyed on the media task runner it is bound to.
    factories->GetTaskRunner()->DeleteSoon(FROM_HERE, decoder.release());
  }
  return decoder.Pass();
}
145
// webrtc::VideoDecoder implementation. Validates the VP8 settings and warms
// up the shared-memory pool; the VDA itself was already created in Create().
int32_t RTCVideoDecoder::InitDecode(const webrtc::VideoCodec* codecSettings,
                                    int32_t /*numberOfCores*/) {
  DVLOG(2) << "InitDecode";
  DCHECK_EQ(codecSettings->codecType, webrtc::kVideoCodecVP8);
  if (codecSettings->codecSpecific.VP8.feedbackModeOn) {
    LOG(ERROR) << "Feedback mode not supported";
    return RecordInitDecodeUMA(WEBRTC_VIDEO_CODEC_ERROR);
  }

  base::AutoLock auto_lock(lock_);
  if (state_ == UNINITIALIZED || state_ == DECODE_ERROR) {
    LOG(ERROR) << "VDA is not initialized. state=" << state_;
    return RecordInitDecodeUMA(WEBRTC_VIDEO_CODEC_UNINITIALIZED);
  }
  // Create some shared memory if the queue is empty.
  if (available_shm_segments_.size() == 0) {
    factories_->GetTaskRunner()->PostTask(
        FROM_HERE,
        base::Bind(&RTCVideoDecoder::CreateSHM,
                   weak_factory_.GetWeakPtr(),
                   kMaxInFlightDecodes,
                   kSharedMemorySegmentBytes));
  }
  return RecordInitDecodeUMA(WEBRTC_VIDEO_CODEC_OK);
}
171
// Called on the WebRTC decoder thread. Copies the encoded frame into shared
// memory (or the pending queue when no SHM is available) and kicks off
// decoding on the factories' task runner.
int32_t RTCVideoDecoder::Decode(
    const webrtc::EncodedImage& inputImage,
    bool missingFrames,
    const webrtc::RTPFragmentationHeader* /*fragmentation*/,
    const webrtc::CodecSpecificInfo* /*codecSpecificInfo*/,
    int64_t /*renderTimeMs*/) {
  DVLOG(3) << "Decode";

  base::AutoLock auto_lock(lock_);

  if (state_ == UNINITIALIZED || decode_complete_callback_ == NULL) {
    LOG(ERROR) << "The decoder has not initialized.";
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }

  if (state_ == DECODE_ERROR) {
    LOG(ERROR) << "Decoding error occurred.";
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  if (missingFrames || !inputImage._completeFrame) {
    DLOG(ERROR) << "Missing or incomplete frames.";
    // Unlike the SW decoder in libvpx, hw decoder cannot handle broken frames.
    // Return an error to request a key frame.
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  // Most platforms' VDA implementations support mid-stream resolution change
  // internally.  Platforms whose VDAs fail to support mid-stream resolution
  // change gracefully need to have their clients cover for them, and we do that
  // here.
#ifdef ANDROID
  const bool kVDACanHandleMidstreamResize = false;
#else
  const bool kVDACanHandleMidstreamResize = true;
#endif

  bool need_to_reset_for_midstream_resize = false;
  if (inputImage._frameType == webrtc::kKeyFrame) {
    DVLOG(2) << "Got key frame. size=" << inputImage._encodedWidth << "x"
             << inputImage._encodedHeight;
    gfx::Size prev_frame_size = frame_size_;
    frame_size_.SetSize(inputImage._encodedWidth, inputImage._encodedHeight);
    if (!kVDACanHandleMidstreamResize && !prev_frame_size.IsEmpty() &&
        prev_frame_size != frame_size_) {
      need_to_reset_for_midstream_resize = true;
    }
  } else if (IsFirstBufferAfterReset(next_bitstream_buffer_id_,
                                     reset_bitstream_buffer_id_)) {
    // TODO(wuchengli): VDA should handle it. Remove this when
    // http://crosbug.com/p/21913 is fixed.
    DVLOG(1) << "The first frame should be a key frame. Drop this.";
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  // Create buffer metadata.
  BufferData buffer_data(next_bitstream_buffer_id_,
                         inputImage._timeStamp,
                         frame_size_.width(),
                         frame_size_.height(),
                         inputImage._length);
  // Mask against 30 bits, to avoid (undefined) wraparound on signed integer.
  next_bitstream_buffer_id_ = (next_bitstream_buffer_id_ + 1) & ID_LAST;

  // If a shared memory segment is available, there are no pending buffers, and
  // this isn't a mid-stream resolution change, then send the buffer for decode
  // immediately. Otherwise, save the buffer in the queue for later decode.
  scoped_ptr<SHMBuffer> shm_buffer;
  if (!need_to_reset_for_midstream_resize && pending_buffers_.size() == 0)
    shm_buffer = GetSHM_Locked(inputImage._length);
  if (!shm_buffer) {
    if (!SaveToPendingBuffers_Locked(inputImage, buffer_data))
      return WEBRTC_VIDEO_CODEC_ERROR;
    if (need_to_reset_for_midstream_resize) {
      // Reset() takes |lock_| itself, so drop it around the call.
      base::AutoUnlock auto_unlock(lock_);
      Reset();
    }
    return WEBRTC_VIDEO_CODEC_OK;
  }

  SaveToDecodeBuffers_Locked(inputImage, shm_buffer.Pass(), buffer_data);
  // Actual VDA::Decode() calls must happen on the factories' task runner.
  factories_->GetTaskRunner()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoDecoder::RequestBufferDecode,
                 weak_factory_.GetWeakPtr()));
  return WEBRTC_VIDEO_CODEC_OK;
}
259
260 int32_t RTCVideoDecoder::RegisterDecodeCompleteCallback(
261     webrtc::DecodedImageCallback* callback) {
262   DVLOG(2) << "RegisterDecodeCompleteCallback";
263   base::AutoLock auto_lock(lock_);
264   decode_complete_callback_ = callback;
265   return WEBRTC_VIDEO_CODEC_OK;
266 }
267
268 int32_t RTCVideoDecoder::Release() {
269   DVLOG(2) << "Release";
270   // Do not destroy VDA because WebRTC can call InitDecode and start decoding
271   // again.
272   return Reset();
273 }
274
// Records the reset point so in-flight/queued buffers older than it are
// dropped, and asks the VDA (on its own thread) to reset.
int32_t RTCVideoDecoder::Reset() {
  DVLOG(2) << "Reset";
  base::AutoLock auto_lock(lock_);
  if (state_ == UNINITIALIZED) {
    LOG(ERROR) << "Decoder not initialized.";
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  // The reset id is the last id handed out; ids wrap within [0, ID_LAST].
  if (next_bitstream_buffer_id_ != 0)
    reset_bitstream_buffer_id_ = next_bitstream_buffer_id_ - 1;
  else
    reset_bitstream_buffer_id_ = ID_LAST;
  // If VDA is already resetting, no need to request the reset again.
  if (state_ != RESETTING) {
    state_ = RESETTING;
    factories_->GetTaskRunner()->PostTask(
        FROM_HERE,
        base::Bind(&RTCVideoDecoder::ResetInternal,
                   weak_factory_.GetWeakPtr()));
  }
  return WEBRTC_VIDEO_CODEC_OK;
}
296
// VDA::Client implementation: allocates |count| textures of |size| and hands
// them to the VDA as picture buffers. Runs on the factories' task runner.
void RTCVideoDecoder::ProvidePictureBuffers(uint32 count,
                                            const gfx::Size& size,
                                            uint32 texture_target) {
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
  DVLOG(3) << "ProvidePictureBuffers. texture_target=" << texture_target;

  // The VDA may already have been destroyed by an error.
  if (!vda_)
    return;

  std::vector<uint32> texture_ids;
  std::vector<gpu::Mailbox> texture_mailboxes;
  decoder_texture_target_ = texture_target;
  if (!factories_->CreateTextures(count,
                                  size,
                                  &texture_ids,
                                  &texture_mailboxes,
                                  decoder_texture_target_)) {
    NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
    return;
  }
  DCHECK_EQ(count, texture_ids.size());
  DCHECK_EQ(count, texture_mailboxes.size());

  // Track each buffer in |assigned_picture_buffers_| so later Dismiss/Reuse
  // callbacks can find the backing texture.
  std::vector<media::PictureBuffer> picture_buffers;
  for (size_t i = 0; i < texture_ids.size(); ++i) {
    picture_buffers.push_back(media::PictureBuffer(
        next_picture_buffer_id_++, size, texture_ids[i], texture_mailboxes[i]));
    bool inserted = assigned_picture_buffers_.insert(std::make_pair(
        picture_buffers.back().id(), picture_buffers.back())).second;
    DCHECK(inserted);
  }
  vda_->AssignPictureBuffers(picture_buffers);
}
330
// VDA::Client implementation: the VDA no longer needs picture buffer |id|.
// The backing texture is freed now unless it is still out at the renderer.
void RTCVideoDecoder::DismissPictureBuffer(int32 id) {
  DVLOG(3) << "DismissPictureBuffer. id=" << id;
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();

  std::map<int32, media::PictureBuffer>::iterator it =
      assigned_picture_buffers_.find(id);
  if (it == assigned_picture_buffers_.end()) {
    NOTREACHED() << "Missing picture buffer: " << id;
    return;
  }

  media::PictureBuffer buffer_to_dismiss = it->second;
  assigned_picture_buffers_.erase(it);

  if (!picture_buffers_at_display_.count(id)) {
    // We can delete the texture immediately as it's not being displayed.
    factories_->DeleteTexture(buffer_to_dismiss.texture_id());
    return;
  }
  // Not destroying a texture in display in |picture_buffers_at_display_|.
  // Postpone deletion until after it's returned to us.
  // (See ReusePictureBuffer(), which performs the deferred delete.)
}
353
// VDA::Client implementation: a decoded picture is available. Wraps the
// texture in a media::VideoFrame / webrtc::TextureVideoFrame and delivers it
// to WebRTC via the registered callback.
void RTCVideoDecoder::PictureReady(const media::Picture& picture) {
  DVLOG(3) << "PictureReady";
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();

  std::map<int32, media::PictureBuffer>::iterator it =
      assigned_picture_buffers_.find(picture.picture_buffer_id());
  if (it == assigned_picture_buffers_.end()) {
    NOTREACHED() << "Missing picture buffer: " << picture.picture_buffer_id();
    NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
    return;
  }
  const media::PictureBuffer& pb = it->second;

  // Create a media::VideoFrame.
  uint32_t timestamp = 0, width = 0, height = 0;
  size_t size = 0;
  GetBufferData(
      picture.bitstream_buffer_id(), &timestamp, &width, &height, &size);
  scoped_refptr<media::VideoFrame> frame =
      CreateVideoFrame(picture, pb, timestamp, width, height, size);
  // Remember the texture is out at the display; ReusePictureBuffer() removes
  // it when the frame's mailbox is released.
  bool inserted =
      picture_buffers_at_display_.insert(std::make_pair(
                                             picture.picture_buffer_id(),
                                             pb.texture_id())).second;
  DCHECK(inserted);

  // Create a WebRTC video frame.
  webrtc::RefCountImpl<NativeHandleImpl>* handle =
      new webrtc::RefCountImpl<NativeHandleImpl>(frame);
  webrtc::TextureVideoFrame decoded_image(handle, width, height, timestamp, 0);

  // Invoke decode callback. WebRTC expects no callback after Reset or Release.
  {
    base::AutoLock auto_lock(lock_);
    DCHECK(decode_complete_callback_ != NULL);
    if (IsBufferAfterReset(picture.bitstream_buffer_id(),
                           reset_bitstream_buffer_id_)) {
      decode_complete_callback_->Decoded(decoded_image);
    }
  }
}
395
// Runs on the factories' task runner: performs the actual ReadPixels and
// then wakes the caller blocked in ReadPixelsSync().
static void ReadPixelsSyncInner(
    const scoped_refptr<media::GpuVideoAcceleratorFactories>& factories,
    uint32 texture_id,
    const gfx::Rect& visible_rect,
    const SkBitmap& pixels,
    base::WaitableEvent* event) {
  factories->ReadPixels(texture_id, visible_rect, pixels);
  event->Signal();
}
405
406 static void ReadPixelsSync(
407     const scoped_refptr<media::GpuVideoAcceleratorFactories>& factories,
408     uint32 texture_id,
409     const gfx::Rect& visible_rect,
410     const SkBitmap& pixels) {
411   base::WaitableEvent event(true, false);
412   if (!factories->GetTaskRunner()->PostTask(FROM_HERE,
413                                             base::Bind(&ReadPixelsSyncInner,
414                                                        factories,
415                                                        texture_id,
416                                                        visible_rect,
417                                                        pixels,
418                                                        &event)))
419     return;
420   event.Wait();
421 }
422
// Wraps the picture buffer's texture mailbox in a media::VideoFrame. The
// release callback routes through ReleaseMailbox() so the picture buffer can
// be recycled (or its texture deleted) once the frame is no longer displayed.
scoped_refptr<media::VideoFrame> RTCVideoDecoder::CreateVideoFrame(
    const media::Picture& picture,
    const media::PictureBuffer& pb,
    uint32_t timestamp,
    uint32_t width,
    uint32_t height,
    size_t size) {
  gfx::Rect visible_rect(width, height);
  DCHECK(decoder_texture_target_);
  // Convert timestamp from 90KHz to ms.
  base::TimeDelta timestamp_ms = base::TimeDelta::FromInternalValue(
      base::checked_cast<uint64_t>(timestamp) * 1000 / 90);
  return media::VideoFrame::WrapNativeTexture(
      make_scoped_ptr(new gpu::MailboxHolder(
          pb.texture_mailbox(), decoder_texture_target_, 0)),
      // BindToCurrentLoop ensures ReleaseMailbox runs on this (media) thread.
      media::BindToCurrentLoop(base::Bind(&RTCVideoDecoder::ReleaseMailbox,
                                          weak_factory_.GetWeakPtr(),
                                          factories_,
                                          picture.picture_buffer_id(),
                                          pb.texture_id())),
      pb.size(),
      visible_rect,
      visible_rect.size(),
      timestamp_ms,
      base::Bind(&ReadPixelsSync, factories_, pb.texture_id(), visible_rect));
}
449
// VDA::Client implementation: the VDA is done with bitstream buffer |id|.
// Recycles its shared-memory segment and tries to feed more work to the VDA.
void RTCVideoDecoder::NotifyEndOfBitstreamBuffer(int32 id) {
  DVLOG(3) << "NotifyEndOfBitstreamBuffer. id=" << id;
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();

  std::map<int32, SHMBuffer*>::iterator it =
      bitstream_buffers_in_decoder_.find(id);
  if (it == bitstream_buffers_in_decoder_.end()) {
    NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
    NOTREACHED() << "Missing bitstream buffer: " << id;
    return;
  }

  {
    base::AutoLock auto_lock(lock_);
    // Return the segment to the free pool for reuse.
    PutSHM_Locked(scoped_ptr<SHMBuffer>(it->second));
  }
  bitstream_buffers_in_decoder_.erase(it);

  RequestBufferDecode();
}
470
471 void RTCVideoDecoder::NotifyFlushDone() {
472   DVLOG(3) << "NotifyFlushDone";
473   NOTREACHED() << "Unexpected flush done notification.";
474 }
475
// VDA::Client implementation: the VDA finished the Reset() requested in
// ResetInternal(). Returns to INITIALIZED and resumes decoding.
void RTCVideoDecoder::NotifyResetDone() {
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
  DVLOG(3) << "NotifyResetDone";

  if (!vda_)
    return;

  // Metadata for pre-reset buffers is no longer needed.
  input_buffer_data_.clear();
  {
    base::AutoLock auto_lock(lock_);
    state_ = INITIALIZED;
  }
  // Send the pending buffers for decoding.
  RequestBufferDecode();
}
491
// VDA::Client implementation: unrecoverable VDA failure. Records UMA, tears
// down the VDA/textures, and puts the decoder into the terminal DECODE_ERROR
// state (subsequent Decode() calls will fail).
void RTCVideoDecoder::NotifyError(media::VideoDecodeAccelerator::Error error) {
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
  if (!vda_)
    return;

  LOG(ERROR) << "VDA Error:" << error;
  UMA_HISTOGRAM_ENUMERATION("Media.RTCVideoDecoderError",
                            error,
                            media::VideoDecodeAccelerator::LARGEST_ERROR_ENUM);
  DestroyVDA();

  base::AutoLock auto_lock(lock_);
  state_ = DECODE_ERROR;
}
506
// Runs on the factories' task runner. Drains queued buffers into the VDA,
// up to kMaxInFlightDecodes outstanding Decode() calls.
void RTCVideoDecoder::RequestBufferDecode() {
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
  if (!vda_)
    return;

  MovePendingBuffersToDecodeBuffers();

  while (CanMoreDecodeWorkBeDone()) {
    // Get a buffer and data from the queue.
    SHMBuffer* shm_buffer = NULL;
    BufferData buffer_data;
    {
      base::AutoLock auto_lock(lock_);
      // Do not request decode if VDA is resetting.
      if (decode_buffers_.size() == 0 || state_ == RESETTING)
        return;
      shm_buffer = decode_buffers_.front().first;
      buffer_data = decode_buffers_.front().second;
      decode_buffers_.pop_front();
      // Drop the buffers before Reset or Release is called.
      if (!IsBufferAfterReset(buffer_data.bitstream_buffer_id,
                              reset_bitstream_buffer_id_)) {
        PutSHM_Locked(scoped_ptr<SHMBuffer>(shm_buffer));
        continue;
      }
    }
    // |lock_| is intentionally dropped before calling into the VDA.

    // Create a BitstreamBuffer and send to VDA to decode.
    media::BitstreamBuffer bitstream_buffer(buffer_data.bitstream_buffer_id,
                                            shm_buffer->shm->handle(),
                                            buffer_data.size);
    // |bitstream_buffers_in_decoder_| keeps ownership of the SHM until
    // NotifyEndOfBitstreamBuffer() returns it.
    bool inserted = bitstream_buffers_in_decoder_
        .insert(std::make_pair(bitstream_buffer.id(), shm_buffer)).second;
    DCHECK(inserted);
    RecordBufferData(buffer_data);
    vda_->Decode(bitstream_buffer);
  }
}
545
546 bool RTCVideoDecoder::CanMoreDecodeWorkBeDone() {
547   return bitstream_buffers_in_decoder_.size() < kMaxInFlightDecodes;
548 }
549
// Returns true if |id_buffer| was issued after the reset point |id_reset|,
// accounting for the 30-bit wraparound of bitstream buffer ids.
bool RTCVideoDecoder::IsBufferAfterReset(int32 id_buffer, int32 id_reset) {
  // No reset has happened yet: every buffer counts as "after".
  if (id_reset == ID_INVALID)
    return true;
  // Forward distance from |id_reset| to |id_buffer| modulo ID_LAST + 1.
  // Distances within half the id space are treated as "after" the reset.
  int32 diff = id_buffer - id_reset;
  if (diff <= 0)
    diff += ID_LAST + 1;
  return diff < ID_HALF;
}
558
559 bool RTCVideoDecoder::IsFirstBufferAfterReset(int32 id_buffer, int32 id_reset) {
560   if (id_reset == ID_INVALID)
561     return id_buffer == 0;
562   return id_buffer == ((id_reset + 1) & ID_LAST);
563 }
564
565 void RTCVideoDecoder::SaveToDecodeBuffers_Locked(
566     const webrtc::EncodedImage& input_image,
567     scoped_ptr<SHMBuffer> shm_buffer,
568     const BufferData& buffer_data) {
569   memcpy(shm_buffer->shm->memory(), input_image._buffer, input_image._length);
570   std::pair<SHMBuffer*, BufferData> buffer_pair =
571       std::make_pair(shm_buffer.release(), buffer_data);
572
573   // Store the buffer and the metadata to the queue.
574   decode_buffers_.push_back(buffer_pair);
575 }
576
// Clones |input_image| onto the heap and queues it until shared memory
// becomes available. Returns false when the pending queue is full (the
// caller then reports a decode error). Caller must hold |lock_|.
bool RTCVideoDecoder::SaveToPendingBuffers_Locked(
    const webrtc::EncodedImage& input_image,
    const BufferData& buffer_data) {
  DVLOG(2) << "SaveToPendingBuffers_Locked"
           << ". pending_buffers size=" << pending_buffers_.size()
           << ". decode_buffers_ size=" << decode_buffers_.size()
           << ". available_shm size=" << available_shm_segments_.size();
  // Queued too many buffers. Something goes wrong.
  if (pending_buffers_.size() >= kMaxNumOfPendingBuffers) {
    LOG(WARNING) << "Too many pending buffers!";
    return false;
  }

  // Clone the input image and save it to the queue. The clone is freed in
  // MovePendingBuffersToDecodeBuffers() or the destructor.
  uint8_t* buffer = new uint8_t[input_image._length];
  // TODO(wuchengli): avoid memcpy. Extend webrtc::VideoDecoder::Decode()
  // interface to take a non-const ptr to the frame and add a method to the
  // frame that will swap buffers with another.
  memcpy(buffer, input_image._buffer, input_image._length);
  webrtc::EncodedImage encoded_image(
      buffer, input_image._length, input_image._length);
  std::pair<webrtc::EncodedImage, BufferData> buffer_pair =
      std::make_pair(encoded_image, buffer_data);

  pending_buffers_.push_back(buffer_pair);
  return true;
}
604
// Moves as many pending (heap-cloned) buffers as possible into shared-memory
// decode buffers, stopping when no SHM segment is available. Frames queued
// before the last Reset/Release are dropped.
void RTCVideoDecoder::MovePendingBuffersToDecodeBuffers() {
  base::AutoLock auto_lock(lock_);
  while (pending_buffers_.size() > 0) {
    // Get a pending buffer from the queue.
    const webrtc::EncodedImage& input_image = pending_buffers_.front().first;
    const BufferData& buffer_data = pending_buffers_.front().second;

    // Drop the frame if it comes before Reset or Release.
    if (!IsBufferAfterReset(buffer_data.bitstream_buffer_id,
                            reset_bitstream_buffer_id_)) {
      delete[] input_image._buffer;
      pending_buffers_.pop_front();
      continue;
    }
    // Get shared memory and save it to decode buffers.
    scoped_ptr<SHMBuffer> shm_buffer = GetSHM_Locked(input_image._length);
    if (!shm_buffer)
      return;  // Out of SHM; retry when a segment is freed or created.
    SaveToDecodeBuffers_Locked(input_image, shm_buffer.Pass(), buffer_data);
    // The heap clone made in SaveToPendingBuffers_Locked() is no longer
    // needed once the payload is in shared memory.
    delete[] input_image._buffer;
    pending_buffers_.pop_front();
  }
}
628
629 void RTCVideoDecoder::ResetInternal() {
630   DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
631   DVLOG(2) << "ResetInternal";
632   if (vda_)
633     vda_->Reset();
634 }
635
// static
// Runs on the factories' task runner when the renderer releases a wrapped
// VideoFrame. Static (with a WeakPtr) because the frame can outlive the
// decoder; in that case the texture is deleted here instead of recycled.
void RTCVideoDecoder::ReleaseMailbox(
    base::WeakPtr<RTCVideoDecoder> decoder,
    const scoped_refptr<media::GpuVideoAcceleratorFactories>& factories,
    int64 picture_buffer_id,
    uint32 texture_id,
    const std::vector<uint32>& release_sync_points) {
  DCHECK(factories->GetTaskRunner()->BelongsToCurrentThread());

  // Wait for the compositor to be done with the texture before reuse/delete.
  for (size_t i = 0; i < release_sync_points.size(); i++)
    factories->WaitSyncPoint(release_sync_points[i]);

  if (decoder) {
    decoder->ReusePictureBuffer(picture_buffer_id);
    return;
  }
  // It's the last chance to delete the texture after display,
  // because RTCVideoDecoder was destructed.
  factories->DeleteTexture(texture_id);
}
656
657 void RTCVideoDecoder::ReusePictureBuffer(int64 picture_buffer_id) {
658   DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
659   DVLOG(3) << "ReusePictureBuffer. id=" << picture_buffer_id;
660
661   DCHECK(!picture_buffers_at_display_.empty());
662   PictureBufferTextureMap::iterator display_iterator =
663       picture_buffers_at_display_.find(picture_buffer_id);
664   uint32 texture_id = display_iterator->second;
665   DCHECK(display_iterator != picture_buffers_at_display_.end());
666   picture_buffers_at_display_.erase(display_iterator);
667
668   if (!assigned_picture_buffers_.count(picture_buffer_id)) {
669     // This picture was dismissed while in display, so we postponed deletion.
670     factories_->DeleteTexture(texture_id);
671     return;
672   }
673
674   // DestroyVDA() might already have been called.
675   if (vda_)
676     vda_->ReusePictureBuffer(picture_buffer_id);
677 }
678
// Runs on the factories' task runner (posted from Create()). Leaves |vda_|
// NULL on failure; |waiter| unblocks the thread waiting in Create().
void RTCVideoDecoder::CreateVDA(media::VideoCodecProfile profile,
                                base::WaitableEvent* waiter) {
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
  vda_ = factories_->CreateVideoDecodeAccelerator();
  // A VDA must be destroyed via Destroy(), not deleted; release then Destroy.
  if (vda_ && !vda_->Initialize(profile, this))
    vda_.release()->Destroy();
  waiter->Signal();
}
687
// Deletes the textures of all assigned picture buffers that are not currently
// out at the display; displayed ones are deleted later in ReleaseMailbox().
void RTCVideoDecoder::DestroyTextures() {
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();

  // Not destroying PictureBuffers in |picture_buffers_at_display_| yet, since
  // their textures may still be in use by the user of this RTCVideoDecoder.
  for (PictureBufferTextureMap::iterator it =
           picture_buffers_at_display_.begin();
       it != picture_buffers_at_display_.end();
       ++it) {
    assigned_picture_buffers_.erase(it->first);
  }

  // Whatever remains assigned is safe to delete now.
  for (std::map<int32, media::PictureBuffer>::iterator it =
           assigned_picture_buffers_.begin();
       it != assigned_picture_buffers_.end();
       ++it) {
    factories_->DeleteTexture(it->second.texture_id());
  }
  assigned_picture_buffers_.clear();
}
708
// Tears down the VDA and its textures and marks the decoder UNINITIALIZED.
// Called from the destructor and from NotifyError().
void RTCVideoDecoder::DestroyVDA() {
  DVLOG(2) << "DestroyVDA";
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
  // VDAs self-delete via Destroy(); release ownership first.
  if (vda_)
    vda_.release()->Destroy();
  DestroyTextures();
  base::AutoLock auto_lock(lock_);
  state_ = UNINITIALIZED;
}
718
// Takes a shared-memory segment of at least |min_size| bytes from the free
// pool, or returns NULL if none fits. May also post asynchronous creation of
// more segments. Caller must hold |lock_|.
scoped_ptr<RTCVideoDecoder::SHMBuffer> RTCVideoDecoder::GetSHM_Locked(
    size_t min_size) {
  // Reuse a SHM if possible.
  SHMBuffer* ret = NULL;
  if (!available_shm_segments_.empty() &&
      available_shm_segments_.back()->size >= min_size) {
    ret = available_shm_segments_.back();
    available_shm_segments_.pop_back();
  }
  // Post to vda thread to create shared memory if SHM cannot be reused or the
  // queue is almost empty.
  if (num_shm_buffers_ < kMaxNumSharedMemorySegments &&
      (ret == NULL || available_shm_segments_.size() <= 1)) {
    factories_->GetTaskRunner()->PostTask(
        FROM_HERE,
        base::Bind(&RTCVideoDecoder::CreateSHM,
                   weak_factory_.GetWeakPtr(),
                   1,
                   min_size));
  }
  return scoped_ptr<SHMBuffer>(ret);
}
741
742 void RTCVideoDecoder::PutSHM_Locked(scoped_ptr<SHMBuffer> shm_buffer) {
743   available_shm_segments_.push_back(shm_buffer.release());
744 }
745
// Runs on the factories' task runner. Allocates up to |number| segments of at
// least |min_size| bytes (capped at kMaxNumSharedMemorySegments total) and
// then retries decoding, since queued frames may have been waiting for SHM.
void RTCVideoDecoder::CreateSHM(int number, size_t min_size) {
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
  DVLOG(2) << "CreateSHM. size=" << min_size;
  int number_to_allocate;
  {
    base::AutoLock auto_lock(lock_);
    number_to_allocate =
        std::min(kMaxNumSharedMemorySegments - num_shm_buffers_, number);
  }
  // Segments are reused, so round small requests up to the standard size.
  size_t size_to_allocate = std::max(min_size, kSharedMemorySegmentBytes);
  for (int i = 0; i < number_to_allocate; i++) {
    base::SharedMemory* shm = factories_->CreateSharedMemory(size_to_allocate);
    if (shm != NULL) {
      base::AutoLock auto_lock(lock_);
      num_shm_buffers_++;
      PutSHM_Locked(
          scoped_ptr<SHMBuffer>(new SHMBuffer(shm, size_to_allocate)));
    }
  }
  // Kick off the decoding.
  RequestBufferDecode();
}
768
// Remembers per-buffer metadata so PictureReady() can look it up by
// bitstream buffer id via GetBufferData(). The list is bounded.
void RTCVideoDecoder::RecordBufferData(const BufferData& buffer_data) {
  input_buffer_data_.push_front(buffer_data);
  // Why this value?  Because why not.  avformat.h:MAX_REORDER_DELAY is 16, but
  // that's too small for some pathological B-frame test videos.  The cost of
  // using too-high a value is low (192 bits per extra slot).
  static const size_t kMaxInputBufferDataSize = 128;
  // Pop from the back of the list, because that's the oldest and least likely
  // to be useful in the future data.
  if (input_buffer_data_.size() > kMaxInputBufferDataSize)
    input_buffer_data_.pop_back();
}
780
781 void RTCVideoDecoder::GetBufferData(int32 bitstream_buffer_id,
782                                     uint32_t* timestamp,
783                                     uint32_t* width,
784                                     uint32_t* height,
785                                     size_t* size) {
786   for (std::list<BufferData>::iterator it = input_buffer_data_.begin();
787        it != input_buffer_data_.end();
788        ++it) {
789     if (it->bitstream_buffer_id != bitstream_buffer_id)
790       continue;
791     *timestamp = it->timestamp;
792     *width = it->width;
793     *height = it->height;
794     return;
795   }
796   NOTREACHED() << "Missing bitstream buffer id: " << bitstream_buffer_id;
797 }
798
799 int32_t RTCVideoDecoder::RecordInitDecodeUMA(int32_t status) {
800   // Logging boolean is enough to know if HW decoding has been used. Also,
801   // InitDecode is less likely to return an error so enum is not used here.
802   bool sample = (status == WEBRTC_VIDEO_CODEC_OK) ? true : false;
803   UMA_HISTOGRAM_BOOLEAN("Media.RTCVideoDecoderInitDecodeSuccess", sample);
804   return status;
805 }
806
// Debug-asserts that the caller is on the GPU factories' task runner, where
// all VDA interaction must happen.
void RTCVideoDecoder::DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent()
    const {
  DCHECK(factories_->GetTaskRunner()->BelongsToCurrentThread());
}
811
812 }  // namespace content