1 // Copyright 2013 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #include "content/renderer/media/rtc_video_decoder.h"
8 #include "base/logging.h"
9 #include "base/memory/ref_counted.h"
10 #include "base/message_loop/message_loop_proxy.h"
11 #include "base/metrics/histogram.h"
12 #include "base/numerics/safe_conversions.h"
13 #include "base/stl_util.h"
14 #include "base/synchronization/waitable_event.h"
15 #include "base/task_runner_util.h"
16 #include "content/child/child_thread.h"
17 #include "content/renderer/media/native_handle_impl.h"
18 #include "gpu/command_buffer/common/mailbox_holder.h"
19 #include "media/base/bind_to_current_loop.h"
20 #include "media/filters/gpu_video_accelerator_factories.h"
21 #include "third_party/skia/include/core/SkBitmap.h"
22 #include "third_party/webrtc/common_video/interface/texture_video_frame.h"
23 #include "third_party/webrtc/system_wrappers/interface/ref_count.h"
// Bitstream buffer ids live in a 30-bit space and wrap around at ID_LAST.
// ID_HALF is the midpoint used to order two ids across the wraparound
// (see IsBufferAfterReset), and ID_INVALID means "no Reset/Release has been
// recorded yet".
27 const int32 RTCVideoDecoder::ID_LAST = 0x3FFFFFFF;
28 const int32 RTCVideoDecoder::ID_HALF = 0x20000000;
29 const int32 RTCVideoDecoder::ID_INVALID = -1;
31 // Maximum number of concurrent VDA::Decode() operations RVD will maintain.
32 // Higher values allow better pipelining in the GPU, but also require more
// resources (e.g. shared-memory segments held in flight).
34 static const size_t kMaxInFlightDecodes = 8;
36 // Size of shared-memory segments we allocate. Since we reuse them we let them
37 // be on the beefy side.
38 static const size_t kSharedMemorySegmentBytes = 100 << 10;
40 // Maximum number of allocated shared-memory segments.
41 static const int kMaxNumSharedMemorySegments = 16;
43 // Maximum number of pending WebRTC buffers that are waiting for the shared
44 // memory. 10 seconds for 30 fps.
45 static const size_t kMaxNumOfPendingBuffers = 300;
47 // A shared memory segment and its allocated size. This class has the ownership
// of the segment handle: the destructor closes it.
49 class RTCVideoDecoder::SHMBuffer {
51 SHMBuffer(base::SharedMemory* shm, size_t size);
53 base::SharedMemory* const shm;
57 RTCVideoDecoder::SHMBuffer::SHMBuffer(base::SharedMemory* shm, size_t size)
58 : shm(shm), size(size) {}
// NOTE(review): shm->Close() releases the OS handle but does not delete the
// base::SharedMemory object itself — confirm the raw |shm| pointer is freed
// by whoever deletes this SHMBuffer (see ~RTCVideoDecoder).
60 RTCVideoDecoder::SHMBuffer::~SHMBuffer() { shm->Close(); }
// Per-bitstream-buffer metadata (id, timestamp, dimensions) carried alongside
// the encoded data so decoded pictures can be matched back to their input
// buffer (see GetBufferData / RecordBufferData).
62 RTCVideoDecoder::BufferData::BufferData(int32 bitstream_buffer_id,
67 : bitstream_buffer_id(bitstream_buffer_id),
73 RTCVideoDecoder::BufferData::BufferData() {}
75 RTCVideoDecoder::BufferData::~BufferData() {}
// Constructor. Must NOT run on the GPU factories' task runner (asserted
// below); all subsequent VDA interaction is posted to that runner.
77 RTCVideoDecoder::RTCVideoDecoder(
78 const scoped_refptr<media::GpuVideoAcceleratorFactories>& factories)
79 : factories_(factories),
80 decoder_texture_target_(0),
81 next_picture_buffer_id_(0),
82 state_(UNINITIALIZED),
83 decode_complete_callback_(NULL),
85 next_bitstream_buffer_id_(0),
86 reset_bitstream_buffer_id_(ID_INVALID),
88 DCHECK(!factories_->GetTaskRunner()->BelongsToCurrentThread());
// Destructor. Runs on the GPU factories' task runner and releases every
// buffer this object still owns.
91 RTCVideoDecoder::~RTCVideoDecoder() {
92 DVLOG(2) << "~RTCVideoDecoder";
93 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
96 // Delete all shared memories.
97 STLDeleteElements(&available_shm_segments_);
98 STLDeleteValues(&bitstream_buffers_in_decoder_);
99 STLDeleteContainerPairFirstPointers(decode_buffers_.begin(),
100 decode_buffers_.end());
101 decode_buffers_.clear();
103 // Delete WebRTC input buffers.
// |pending_buffers_| holds deep copies made in SaveToPendingBuffers_Locked,
// so the raw |_buffer| arrays are owned here and must be delete[]'d.
104 for (std::deque<std::pair<webrtc::EncodedImage, BufferData> >::iterator it =
105 pending_buffers_.begin();
106 it != pending_buffers_.end();
108 delete[] it->first._buffer;
// Factory method. Returns an empty scoped_ptr if |type| is unsupported or no
// usable VDA exists on this platform. CreateVDA is posted to the GPU
// factories' task runner; the WaitableEvent presumably blocks this thread
// until that task signals completion (the Wait() call is outside this view —
// TODO confirm).
113 scoped_ptr<RTCVideoDecoder> RTCVideoDecoder::Create(
114 webrtc::VideoCodecType type,
115 const scoped_refptr<media::GpuVideoAcceleratorFactories>& factories) {
116 scoped_ptr<RTCVideoDecoder> decoder;
117 // Convert WebRTC codec type to media codec profile.
118 media::VideoCodecProfile profile;
120 case webrtc::kVideoCodecVP8:
121 profile = media::VP8PROFILE_MAIN;
124 DVLOG(2) << "Video codec not supported:" << type;
125 return decoder.Pass();
// Manual-reset, initially-unsignaled event used to synchronize with CreateVDA.
128 base::WaitableEvent waiter(true, false);
129 decoder.reset(new RTCVideoDecoder(factories));
130 decoder->factories_->GetTaskRunner()->PostTask(
132 base::Bind(&RTCVideoDecoder::CreateVDA,
133 base::Unretained(decoder.get()),
137 // vda can be NULL if VP8 is not supported.
138 if (decoder->vda_ != NULL) {
139 decoder->state_ = INITIALIZED;
// On failure the half-constructed decoder must be destroyed on the GPU
// thread, hence DeleteSoon rather than letting scoped_ptr delete it here.
141 factories->GetTaskRunner()->DeleteSoon(FROM_HERE, decoder.release());
143 return decoder.Pass();
// webrtc::VideoDecoder implementation. Validates the VP8 settings, checks
// that the VDA was created successfully, and pre-allocates shared memory so
// the first Decode() call does not have to wait for it. Records the outcome
// to UMA via RecordInitDecodeUMA.
146 int32_t RTCVideoDecoder::InitDecode(const webrtc::VideoCodec* codecSettings,
147 int32_t /*numberOfCores*/) {
148 DVLOG(2) << "InitDecode";
149 DCHECK_EQ(codecSettings->codecType, webrtc::kVideoCodecVP8);
150 if (codecSettings->codecSpecific.VP8.feedbackModeOn) {
151 LOG(ERROR) << "Feedback mode not supported";
152 return RecordInitDecodeUMA(WEBRTC_VIDEO_CODEC_ERROR);
155 base::AutoLock auto_lock(lock_);
156 if (state_ == UNINITIALIZED || state_ == DECODE_ERROR) {
157 LOG(ERROR) << "VDA is not initialized. state=" << state_;
158 return RecordInitDecodeUMA(WEBRTC_VIDEO_CODEC_UNINITIALIZED);
160 // Create some shared memory if the queue is empty.
161 if (available_shm_segments_.size() == 0) {
162 factories_->GetTaskRunner()->PostTask(
164 base::Bind(&RTCVideoDecoder::CreateSHM,
165 weak_factory_.GetWeakPtr(),
167 kSharedMemorySegmentBytes));
169 return RecordInitDecodeUMA(WEBRTC_VIDEO_CODEC_OK);
// webrtc::VideoDecoder implementation, called on WebRTC's decoding thread.
// Rejects input until initialized, rejects broken frames (the HW decoder
// cannot conceal them), tracks key-frame resolution changes, and either sends
// the buffer straight to the VDA (fast path) or queues it in
// |pending_buffers_| until shared memory is available.
172 int32_t RTCVideoDecoder::Decode(
173 const webrtc::EncodedImage& inputImage,
175 const webrtc::RTPFragmentationHeader* /*fragmentation*/,
176 const webrtc::CodecSpecificInfo* /*codecSpecificInfo*/,
177 int64_t /*renderTimeMs*/) {
178 DVLOG(3) << "Decode";
180 base::AutoLock auto_lock(lock_);
182 if (state_ == UNINITIALIZED || decode_complete_callback_ == NULL) {
183 LOG(ERROR) << "The decoder has not initialized.";
184 return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
187 if (state_ == DECODE_ERROR) {
188 LOG(ERROR) << "Decoding error occurred.";
189 return WEBRTC_VIDEO_CODEC_ERROR;
192 if (missingFrames || !inputImage._completeFrame) {
193 DLOG(ERROR) << "Missing or incomplete frames.";
194 // Unlike the SW decoder in libvpx, hw decoder cannot handle broken frames.
195 // Return an error to request a key frame.
196 return WEBRTC_VIDEO_CODEC_ERROR;
199 // Most platforms' VDA implementations support mid-stream resolution change
200 // internally. Platforms whose VDAs fail to support mid-stream resolution
201 // change gracefully need to have their clients cover for them, and we do that
// here by resetting the decoder when a key frame arrives with a new size
// (platform-selected via the preprocessor branch below).
204 const bool kVDACanHandleMidstreamResize = false;
206 const bool kVDACanHandleMidstreamResize = true;
209 bool need_to_reset_for_midstream_resize = false;
210 if (inputImage._frameType == webrtc::kKeyFrame) {
211 DVLOG(2) << "Got key frame. size=" << inputImage._encodedWidth << "x"
212 << inputImage._encodedHeight;
213 gfx::Size prev_frame_size = frame_size_;
214 frame_size_.SetSize(inputImage._encodedWidth, inputImage._encodedHeight);
215 if (!kVDACanHandleMidstreamResize && !prev_frame_size.IsEmpty() &&
216 prev_frame_size != frame_size_) {
217 need_to_reset_for_midstream_resize = true;
219 } else if (IsFirstBufferAfterReset(next_bitstream_buffer_id_,
220 reset_bitstream_buffer_id_)) {
221 // TODO(wuchengli): VDA should handle it. Remove this when
222 // http://crosbug.com/p/21913 is fixed.
223 DVLOG(1) << "The first frame should be a key frame. Drop this.";
224 return WEBRTC_VIDEO_CODEC_ERROR;
227 // Create buffer metadata.
228 BufferData buffer_data(next_bitstream_buffer_id_,
229 inputImage._timeStamp,
231 frame_size_.height(),
233 // Mask against 30 bits, to avoid (undefined) wraparound on signed integer.
234 next_bitstream_buffer_id_ = (next_bitstream_buffer_id_ + 1) & ID_LAST;
236 // If a shared memory segment is available, there are no pending buffers, and
237 // this isn't a mid-stream resolution change, then send the buffer for decode
238 // immediately. Otherwise, save the buffer in the queue for later decode.
239 scoped_ptr<SHMBuffer> shm_buffer;
240 if (!need_to_reset_for_midstream_resize && pending_buffers_.size() == 0)
241 shm_buffer = GetSHM_Locked(inputImage._length);
243 if (!SaveToPendingBuffers_Locked(inputImage, buffer_data))
244 return WEBRTC_VIDEO_CODEC_ERROR;
245 if (need_to_reset_for_midstream_resize) {
// Reset must be called without |lock_| held, so temporarily unlock.
246 base::AutoUnlock auto_unlock(lock_);
249 return WEBRTC_VIDEO_CODEC_OK;
// Fast path: copy into shared memory and ask the GPU thread to decode.
252 SaveToDecodeBuffers_Locked(inputImage, shm_buffer.Pass(), buffer_data);
253 factories_->GetTaskRunner()->PostTask(
255 base::Bind(&RTCVideoDecoder::RequestBufferDecode,
256 weak_factory_.GetWeakPtr()));
257 return WEBRTC_VIDEO_CODEC_OK;
// Stores the sink that will receive decoded frames (see PictureReady).
260 int32_t RTCVideoDecoder::RegisterDecodeCompleteCallback(
261 webrtc::DecodedImageCallback* callback) {
262 DVLOG(2) << "RegisterDecodeCompleteCallback";
263 base::AutoLock auto_lock(lock_);
264 decode_complete_callback_ = callback;
265 return WEBRTC_VIDEO_CODEC_OK;
// webrtc::VideoDecoder::Release. Intentionally keeps the VDA alive because
// WebRTC may call InitDecode and resume decoding afterwards.
268 int32_t RTCVideoDecoder::Release() {
269 DVLOG(2) << "Release";
270 // Do not destroy VDA because WebRTC can call InitDecode and start decoding
// again after a Release.
275 int32_t RTCVideoDecoder::Reset() {
277 base::AutoLock auto_lock(lock_);
278 if (state_ == UNINITIALIZED) {
279 LOG(ERROR) << "Decoder not initialized.";
280 return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
// Remember the id of the last buffer issued before this reset, so that
// buffers queued earlier can be recognized and dropped. If no buffer has
// been issued yet, ID_LAST makes id 0 the "first buffer after reset".
282 if (next_bitstream_buffer_id_ != 0)
283 reset_bitstream_buffer_id_ = next_bitstream_buffer_id_ - 1;
285 reset_bitstream_buffer_id_ = ID_LAST;
286 // If VDA is already resetting, no need to request the reset again.
287 if (state_ != RESETTING) {
289 factories_->GetTaskRunner()->PostTask(
291 base::Bind(&RTCVideoDecoder::ResetInternal,
292 weak_factory_.GetWeakPtr()));
294 return WEBRTC_VIDEO_CODEC_OK;
// VDA::Client callback: allocate |count| textures of |size| and hand them to
// the VDA as PictureBuffers. Runs on the GPU factories' task runner.
297 void RTCVideoDecoder::ProvidePictureBuffers(uint32 count,
298 const gfx::Size& size,
299 uint32 texture_target) {
300 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
301 DVLOG(3) << "ProvidePictureBuffers. texture_target=" << texture_target;
306 std::vector<uint32> texture_ids;
307 std::vector<gpu::Mailbox> texture_mailboxes;
308 decoder_texture_target_ = texture_target;
309 if (!factories_->CreateTextures(count,
313 decoder_texture_target_)) {
314 NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
317 DCHECK_EQ(count, texture_ids.size());
318 DCHECK_EQ(count, texture_mailboxes.size());
320 std::vector<media::PictureBuffer> picture_buffers;
321 for (size_t i = 0; i < texture_ids.size(); ++i) {
322 picture_buffers.push_back(media::PictureBuffer(
323 next_picture_buffer_id_++, size, texture_ids[i], texture_mailboxes[i]));
// Track ownership so DismissPictureBuffer/DestroyTextures can clean up.
324 bool inserted = assigned_picture_buffers_.insert(std::make_pair(
325 picture_buffers.back().id(), picture_buffers.back())).second;
328 vda_->AssignPictureBuffers(picture_buffers);
// VDA::Client callback: the VDA no longer needs picture buffer |id|. Delete
// its texture now unless the frame is still being displayed, in which case
// deletion is deferred to ReusePictureBuffer.
331 void RTCVideoDecoder::DismissPictureBuffer(int32 id) {
332 DVLOG(3) << "DismissPictureBuffer. id=" << id;
333 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
335 std::map<int32, media::PictureBuffer>::iterator it =
336 assigned_picture_buffers_.find(id);
337 if (it == assigned_picture_buffers_.end()) {
338 NOTREACHED() << "Missing picture buffer: " << id;
342 media::PictureBuffer buffer_to_dismiss = it->second;
343 assigned_picture_buffers_.erase(it);
345 if (!picture_buffers_at_display_.count(id)) {
346 // We can delete the texture immediately as it's not being displayed.
347 factories_->DeleteTexture(buffer_to_dismiss.texture_id());
350 // Not destroying a texture in display in |picture_buffers_at_display_|.
351 // Postpone deletion until after it's returned to us.
// VDA::Client callback: a decoded picture is ready. Wraps the texture in a
// media::VideoFrame / webrtc::TextureVideoFrame and delivers it to WebRTC,
// unless the originating buffer predates the last Reset/Release.
354 void RTCVideoDecoder::PictureReady(const media::Picture& picture) {
355 DVLOG(3) << "PictureReady";
356 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
358 std::map<int32, media::PictureBuffer>::iterator it =
359 assigned_picture_buffers_.find(picture.picture_buffer_id());
360 if (it == assigned_picture_buffers_.end()) {
361 NOTREACHED() << "Missing picture buffer: " << picture.picture_buffer_id();
362 NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
365 const media::PictureBuffer& pb = it->second;
367 // Create a media::VideoFrame.
// Look up the metadata recorded when the bitstream buffer was submitted.
368 uint32_t timestamp = 0, width = 0, height = 0;
371 picture.bitstream_buffer_id(), &timestamp, &width, &height, &size);
372 scoped_refptr<media::VideoFrame> frame =
373 CreateVideoFrame(picture, pb, timestamp, width, height, size);
// Mark the buffer as "at display" until ReleaseMailbox returns it.
375 picture_buffers_at_display_.insert(std::make_pair(
376 picture.picture_buffer_id(),
377 pb.texture_id())).second;
380 // Create a WebRTC video frame.
381 webrtc::RefCountImpl<NativeHandleImpl>* handle =
382 new webrtc::RefCountImpl<NativeHandleImpl>(frame);
383 webrtc::TextureVideoFrame decoded_image(handle, width, height, timestamp, 0);
385 // Invoke decode callback. WebRTC expects no callback after Reset or Release.
387 base::AutoLock auto_lock(lock_);
388 DCHECK(decode_complete_callback_ != NULL);
389 if (IsBufferAfterReset(picture.bitstream_buffer_id(),
390 reset_bitstream_buffer_id_)) {
391 decode_complete_callback_->Decoded(decoded_image);
// Runs on the GPU factories' task runner: performs the actual ReadPixels and
// presumably signals |event| when done (the Signal() call is outside this
// view — TODO confirm).
396 static void ReadPixelsSyncInner(
397 const scoped_refptr<media::GpuVideoAcceleratorFactories>& factories,
399 const gfx::Rect& visible_rect,
400 const SkBitmap& pixels,
401 base::WaitableEvent* event) {
402 factories->ReadPixels(texture_id, visible_rect, pixels);
// Synchronous wrapper: posts ReadPixelsSyncInner to the GPU thread and blocks
// on a manual-reset WaitableEvent until it completes.
406 static void ReadPixelsSync(
407 const scoped_refptr<media::GpuVideoAcceleratorFactories>& factories,
409 const gfx::Rect& visible_rect,
410 const SkBitmap& pixels) {
411 base::WaitableEvent event(true, false);
412 if (!factories->GetTaskRunner()->PostTask(FROM_HERE,
413 base::Bind(&ReadPixelsSyncInner,
// Wraps a decoded texture in a media::VideoFrame. ReleaseMailbox (bound to
// the current loop) returns the picture buffer to the VDA when the frame is
// dropped; ReadPixelsSync backs software access to the texture.
423 scoped_refptr<media::VideoFrame> RTCVideoDecoder::CreateVideoFrame(
424 const media::Picture& picture,
425 const media::PictureBuffer& pb,
430 gfx::Rect visible_rect(width, height);
431 DCHECK(decoder_texture_target_);
432 // Convert timestamp from 90KHz to ms.
433 base::TimeDelta timestamp_ms = base::TimeDelta::FromInternalValue(
434 base::checked_cast<uint64_t>(timestamp) * 1000 / 90);
435 return media::VideoFrame::WrapNativeTexture(
436 make_scoped_ptr(new gpu::MailboxHolder(
437 pb.texture_mailbox(), decoder_texture_target_, 0)),
438 media::BindToCurrentLoop(base::Bind(&RTCVideoDecoder::ReleaseMailbox,
439 weak_factory_.GetWeakPtr(),
441 picture.picture_buffer_id(),
447 base::Bind(&ReadPixelsSync, factories_, pb.texture_id(), visible_rect));
// VDA::Client callback: the VDA is done reading bitstream buffer |id|.
// Returns its shared-memory segment to the free pool and pumps the decode
// queue again.
450 void RTCVideoDecoder::NotifyEndOfBitstreamBuffer(int32 id) {
451 DVLOG(3) << "NotifyEndOfBitstreamBuffer. id=" << id;
452 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
454 std::map<int32, SHMBuffer*>::iterator it =
455 bitstream_buffers_in_decoder_.find(id);
456 if (it == bitstream_buffers_in_decoder_.end()) {
457 NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
458 NOTREACHED() << "Missing bitstream buffer: " << id;
463 base::AutoLock auto_lock(lock_);
464 PutSHM_Locked(scoped_ptr<SHMBuffer>(it->second));
466 bitstream_buffers_in_decoder_.erase(it);
468 RequestBufferDecode();
// This class never calls VDA::Flush, so a flush-done is a logic error.
471 void RTCVideoDecoder::NotifyFlushDone() {
472 DVLOG(3) << "NotifyFlushDone";
473 NOTREACHED() << "Unexpected flush done notification.";
// VDA::Client callback: the reset requested in ResetInternal has completed.
// Returns the decoder to INITIALIZED and resumes decoding queued buffers.
476 void RTCVideoDecoder::NotifyResetDone() {
477 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
478 DVLOG(3) << "NotifyResetDone";
483 input_buffer_data_.clear();
485 base::AutoLock auto_lock(lock_);
486 state_ = INITIALIZED;
488 // Send the pending buffers for decoding.
489 RequestBufferDecode();
// VDA::Client callback: unrecoverable decoder error. Records UMA and moves to
// DECODE_ERROR so subsequent Decode() calls fail fast.
492 void RTCVideoDecoder::NotifyError(media::VideoDecodeAccelerator::Error error) {
493 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
497 LOG(ERROR) << "VDA Error:" << error;
498 UMA_HISTOGRAM_ENUMERATION("Media.RTCVideoDecoderError",
500 media::VideoDecodeAccelerator::LARGEST_ERROR_ENUM);
503 base::AutoLock auto_lock(lock_);
504 state_ = DECODE_ERROR;
// Runs on the GPU factories' task runner. Drains |decode_buffers_| into the
// VDA while fewer than kMaxInFlightDecodes are outstanding, dropping any
// buffer issued before the last Reset/Release.
507 void RTCVideoDecoder::RequestBufferDecode() {
508 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
512 MovePendingBuffersToDecodeBuffers();
514 while (CanMoreDecodeWorkBeDone()) {
515 // Get a buffer and data from the queue.
516 SHMBuffer* shm_buffer = NULL;
517 BufferData buffer_data;
// Scope for |lock_|; the VDA call below happens without the lock held.
519 base::AutoLock auto_lock(lock_);
520 // Do not request decode if VDA is resetting.
521 if (decode_buffers_.size() == 0 || state_ == RESETTING)
523 shm_buffer = decode_buffers_.front().first;
524 buffer_data = decode_buffers_.front().second;
525 decode_buffers_.pop_front();
526 // Drop the buffers before Reset or Release is called.
527 if (!IsBufferAfterReset(buffer_data.bitstream_buffer_id,
528 reset_bitstream_buffer_id_)) {
529 PutSHM_Locked(scoped_ptr<SHMBuffer>(shm_buffer));
534 // Create a BitstreamBuffer and send to VDA to decode.
535 media::BitstreamBuffer bitstream_buffer(buffer_data.bitstream_buffer_id,
536 shm_buffer->shm->handle(),
// |shm_buffer| ownership passes to |bitstream_buffers_in_decoder_| until
// NotifyEndOfBitstreamBuffer returns it.
538 bool inserted = bitstream_buffers_in_decoder_
539 .insert(std::make_pair(bitstream_buffer.id(), shm_buffer)).second;
541 RecordBufferData(buffer_data);
542 vda_->Decode(bitstream_buffer);
// True while more VDA::Decode() calls may be issued (bounded pipelining).
546 bool RTCVideoDecoder::CanMoreDecodeWorkBeDone() {
547 return bitstream_buffers_in_decoder_.size() < kMaxInFlightDecodes;
// Returns true if |id_buffer| was issued after |id_reset|, accounting for the
// 30-bit wraparound of the id space: a positive distance smaller than ID_HALF
// means "after".
550 bool RTCVideoDecoder::IsBufferAfterReset(int32 id_buffer, int32 id_reset) {
551 if (id_reset == ID_INVALID)
553 int32 diff = id_buffer - id_reset;
556 return diff < ID_HALF;
// Returns true if |id_buffer| is the very first buffer after |id_reset|
// (or buffer 0 when no reset has happened), modulo the 30-bit id space.
559 bool RTCVideoDecoder::IsFirstBufferAfterReset(int32 id_buffer, int32 id_reset) {
560 if (id_reset == ID_INVALID)
561 return id_buffer == 0;
562 return id_buffer == ((id_reset + 1) & ID_LAST);
// Copies the encoded frame into |shm_buffer| and queues it (with its
// metadata) for submission to the VDA. Caller must hold |lock_|; ownership of
// the SHMBuffer transfers to |decode_buffers_|.
565 void RTCVideoDecoder::SaveToDecodeBuffers_Locked(
566 const webrtc::EncodedImage& input_image,
567 scoped_ptr<SHMBuffer> shm_buffer,
568 const BufferData& buffer_data) {
569 memcpy(shm_buffer->shm->memory(), input_image._buffer, input_image._length);
570 std::pair<SHMBuffer*, BufferData> buffer_pair =
571 std::make_pair(shm_buffer.release(), buffer_data);
573 // Store the buffer and the metadata to the queue.
574 decode_buffers_.push_back(buffer_pair);
// Deep-copies the encoded frame into |pending_buffers_| to wait for shared
// memory. Caller must hold |lock_|. Returns false (per the early-exit below)
// when the queue is full — presumably signalling an error to the caller;
// the return statements themselves are outside this view.
577 bool RTCVideoDecoder::SaveToPendingBuffers_Locked(
578 const webrtc::EncodedImage& input_image,
579 const BufferData& buffer_data) {
580 DVLOG(2) << "SaveToPendingBuffers_Locked"
581 << ". pending_buffers size=" << pending_buffers_.size()
582 << ". decode_buffers_ size=" << decode_buffers_.size()
583 << ". available_shm size=" << available_shm_segments_.size();
584 // Queued too many buffers. Something goes wrong.
585 if (pending_buffers_.size() >= kMaxNumOfPendingBuffers) {
586 LOG(WARNING) << "Too many pending buffers!";
590 // Clone the input image and save it to the queue.
591 uint8_t* buffer = new uint8_t[input_image._length];
592 // TODO(wuchengli): avoid memcpy. Extend webrtc::VideoDecoder::Decode()
593 // interface to take a non-const ptr to the frame and add a method to the
594 // frame that will swap buffers with another.
595 memcpy(buffer, input_image._buffer, input_image._length);
596 webrtc::EncodedImage encoded_image(
597 buffer, input_image._length, input_image._length);
598 std::pair<webrtc::EncodedImage, BufferData> buffer_pair =
599 std::make_pair(encoded_image, buffer_data);
601 pending_buffers_.push_back(buffer_pair);
// Moves frames from |pending_buffers_| into |decode_buffers_| as shared
// memory becomes available, dropping frames issued before the last Reset.
// The deep-copied |_buffer| arrays are freed once consumed or dropped.
605 void RTCVideoDecoder::MovePendingBuffersToDecodeBuffers() {
606 base::AutoLock auto_lock(lock_);
607 while (pending_buffers_.size() > 0) {
608 // Get a pending buffer from the queue.
609 const webrtc::EncodedImage& input_image = pending_buffers_.front().first;
610 const BufferData& buffer_data = pending_buffers_.front().second;
612 // Drop the frame if it comes before Reset or Release.
613 if (!IsBufferAfterReset(buffer_data.bitstream_buffer_id,
614 reset_bitstream_buffer_id_)) {
615 delete[] input_image._buffer;
616 pending_buffers_.pop_front();
619 // Get shared memory and save it to decode buffers.
620 scoped_ptr<SHMBuffer> shm_buffer = GetSHM_Locked(input_image._length);
623 SaveToDecodeBuffers_Locked(input_image, shm_buffer.Pass(), buffer_data);
624 delete[] input_image._buffer;
625 pending_buffers_.pop_front();
// Runs on the GPU factories' task runner; issues the actual VDA reset
// requested by Reset() (the vda_->Reset() call is outside this view).
629 void RTCVideoDecoder::ResetInternal() {
630 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
631 DVLOG(2) << "ResetInternal";
// Static release callback bound into each VideoFrame. Runs on the GPU
// factories' task runner; waits on the consumer's sync points, then either
// returns the picture buffer to the (still-alive) decoder or, if the decoder
// is gone, deletes the texture directly.
637 void RTCVideoDecoder::ReleaseMailbox(
638 base::WeakPtr<RTCVideoDecoder> decoder,
639 const scoped_refptr<media::GpuVideoAcceleratorFactories>& factories,
640 int64 picture_buffer_id,
642 const std::vector<uint32>& release_sync_points) {
643 DCHECK(factories->GetTaskRunner()->BelongsToCurrentThread());
645 for (size_t i = 0; i < release_sync_points.size(); i++)
646 factories->WaitSyncPoint(release_sync_points[i]);
649 decoder->ReusePictureBuffer(picture_buffer_id);
652 // It's the last chance to delete the texture after display,
653 // because RTCVideoDecoder was destructed.
654 factories->DeleteTexture(texture_id);
// The displayed frame for |picture_buffer_id| has been released. Hand the
// buffer back to the VDA, or delete its texture if it was dismissed while
// on display.
657 void RTCVideoDecoder::ReusePictureBuffer(int64 picture_buffer_id) {
658 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
659 DVLOG(3) << "ReusePictureBuffer. id=" << picture_buffer_id;
661 DCHECK(!picture_buffers_at_display_.empty());
662 PictureBufferTextureMap::iterator display_iterator =
663 picture_buffers_at_display_.find(picture_buffer_id);
// NOTE(review): |display_iterator| is dereferenced on the line above the
// DCHECK that validates it — consider moving the DCHECK before the read.
664 uint32 texture_id = display_iterator->second;
665 DCHECK(display_iterator != picture_buffers_at_display_.end());
666 picture_buffers_at_display_.erase(display_iterator);
668 if (!assigned_picture_buffers_.count(picture_buffer_id)) {
669 // This picture was dismissed while in display, so we postponed deletion.
670 factories_->DeleteTexture(texture_id);
674 // DestroyVDA() might already have been called.
676 vda_->ReusePictureBuffer(picture_buffer_id);
// Runs on the GPU factories' task runner (posted from Create). Leaves |vda_|
// NULL on failure; Destroy() is the VDA's required self-deletion path, hence
// release()->Destroy() instead of plain deletion.
679 void RTCVideoDecoder::CreateVDA(media::VideoCodecProfile profile,
680 base::WaitableEvent* waiter) {
681 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
682 vda_ = factories_->CreateVideoDecodeAccelerator();
683 if (vda_ && !vda_->Initialize(profile, this))
684 vda_.release()->Destroy();
// Deletes all textures owned by this decoder except those still on display
// (those are handed off via |picture_buffers_at_display_| and freed in
// ReusePictureBuffer / ReleaseMailbox).
688 void RTCVideoDecoder::DestroyTextures() {
689 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
691 // Not destroying PictureBuffers in |picture_buffers_at_display_| yet, since
692 // their textures may still be in use by the user of this RTCVideoDecoder.
693 for (PictureBufferTextureMap::iterator it =
694 picture_buffers_at_display_.begin();
695 it != picture_buffers_at_display_.end();
697 assigned_picture_buffers_.erase(it->first);
700 for (std::map<int32, media::PictureBuffer>::iterator it =
701 assigned_picture_buffers_.begin();
702 it != assigned_picture_buffers_.end();
704 factories_->DeleteTexture(it->second.texture_id());
706 assigned_picture_buffers_.clear();
// Tears down the VDA (via its mandatory Destroy() self-deletion path) and
// marks the decoder UNINITIALIZED. Runs on the GPU factories' task runner.
709 void RTCVideoDecoder::DestroyVDA() {
710 DVLOG(2) << "DestroyVDA";
711 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
713 vda_.release()->Destroy();
715 base::AutoLock auto_lock(lock_);
716 state_ = UNINITIALIZED;
// Pops a reusable shared-memory segment of at least |min_size| bytes, or
// returns NULL (empty scoped_ptr) if none fits. Caller must hold |lock_|.
// Also tops up the pool asynchronously when it is running low.
719 scoped_ptr<RTCVideoDecoder::SHMBuffer> RTCVideoDecoder::GetSHM_Locked(
721 // Reuse a SHM if possible.
722 SHMBuffer* ret = NULL;
723 if (!available_shm_segments_.empty() &&
724 available_shm_segments_.back()->size >= min_size) {
725 ret = available_shm_segments_.back();
726 available_shm_segments_.pop_back();
728 // Post to vda thread to create shared memory if SHM cannot be reused or the
729 // queue is almost empty.
730 if (num_shm_buffers_ < kMaxNumSharedMemorySegments &&
731 (ret == NULL || available_shm_segments_.size() <= 1)) {
732 factories_->GetTaskRunner()->PostTask(
734 base::Bind(&RTCVideoDecoder::CreateSHM,
735 weak_factory_.GetWeakPtr(),
739 return scoped_ptr<SHMBuffer>(ret);
// Returns a segment to the reuse pool. Caller must hold |lock_|; ownership
// transfers back to |available_shm_segments_|.
742 void RTCVideoDecoder::PutSHM_Locked(scoped_ptr<SHMBuffer> shm_buffer) {
743 available_shm_segments_.push_back(shm_buffer.release());
// Allocates up to |number| shared-memory segments of at least |min_size|
// bytes each (capped at kMaxNumSharedMemorySegments total), then kicks the
// decode loop so waiting buffers can proceed. Runs on the GPU factories'
// task runner.
746 void RTCVideoDecoder::CreateSHM(int number, size_t min_size) {
747 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
748 DVLOG(2) << "CreateSHM. size=" << min_size;
749 int number_to_allocate;
751 base::AutoLock auto_lock(lock_);
753 std::min(kMaxNumSharedMemorySegments - num_shm_buffers_, number);
755 size_t size_to_allocate = std::max(min_size, kSharedMemorySegmentBytes);
756 for (int i = 0; i < number_to_allocate; i++) {
757 base::SharedMemory* shm = factories_->CreateSharedMemory(size_to_allocate);
759 base::AutoLock auto_lock(lock_);
762 scoped_ptr<SHMBuffer>(new SHMBuffer(shm, size_to_allocate)));
765 // Kick off the decoding.
766 RequestBufferDecode();
// Remembers metadata for an in-flight buffer so PictureReady/GetBufferData
// can recover timestamp and dimensions. Bounded FIFO of recent entries.
769 void RTCVideoDecoder::RecordBufferData(const BufferData& buffer_data) {
770 input_buffer_data_.push_front(buffer_data);
771 // Why this value? Because why not. avformat.h:MAX_REORDER_DELAY is 16, but
772 // that's too small for some pathological B-frame test videos. The cost of
773 // using too-high a value is low (192 bits per extra slot).
774 static const size_t kMaxInputBufferDataSize = 128;
775 // Pop from the back of the list, because that's the oldest and least likely
776 // to be useful in the future data.
777 if (input_buffer_data_.size() > kMaxInputBufferDataSize)
778 input_buffer_data_.pop_back();
// Linear scan of the recent-metadata list for |bitstream_buffer_id|; fills
// the out-params on a match, NOTREACHED otherwise.
781 void RTCVideoDecoder::GetBufferData(int32 bitstream_buffer_id,
786 for (std::list<BufferData>::iterator it = input_buffer_data_.begin();
787 it != input_buffer_data_.end();
789 if (it->bitstream_buffer_id != bitstream_buffer_id)
791 *timestamp = it->timestamp;
793 *height = it->height;
796 NOTREACHED() << "Missing bitstream buffer id: " << bitstream_buffer_id;
// Records HW-decode init success/failure to UMA and passes |status| through
// unchanged so it can be used inline in return statements.
799 int32_t RTCVideoDecoder::RecordInitDecodeUMA(int32_t status) {
800 // Logging boolean is enough to know if HW decoding has been used. Also,
801 // InitDecode is less likely to return an error so enum is not used here.
802 bool sample = (status == WEBRTC_VIDEO_CODEC_OK) ? true : false;
803 UMA_HISTOGRAM_BOOLEAN("Media.RTCVideoDecoderInitDecodeSuccess", sample);
// Thread-affinity assertion helper: DCHECKs that the caller is on the GPU
// factories' task runner.
807 void RTCVideoDecoder::DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent()
809 DCHECK(factories_->GetTaskRunner()->BelongsToCurrentThread());
812 } // namespace content