X-Git-Url: http://review.tizen.org/git/?a=blobdiff_plain;f=src%2Fmedia%2Ffilters%2Fffmpeg_video_decoder.cc;h=bc2346ddf8843526fde8ae980e847488e7ba0b13;hb=004985e17e624662a4c85c76a7654039dc83f028;hp=d52bc71690232564c8c384fa7a79e1d098aab847;hpb=2f108dbacb161091e42a3479f4e171339b7e7623;p=platform%2Fframework%2Fweb%2Fcrosswalk.git

diff --git a/src/media/filters/ffmpeg_video_decoder.cc b/src/media/filters/ffmpeg_video_decoder.cc
index d52bc71..bc2346d 100644
--- a/src/media/filters/ffmpeg_video_decoder.cc
+++ b/src/media/filters/ffmpeg_video_decoder.cc
@@ -72,8 +72,8 @@ int FFmpegVideoDecoder::GetVideoBuffer(AVCodecContext* codec_context,
          format == VideoFrame::YV12J);
 
   gfx::Size size(codec_context->width, codec_context->height);
-  int ret;
-  if ((ret = av_image_check_size(size.width(), size.height(), 0, NULL)) < 0)
+  const int ret = av_image_check_size(size.width(), size.height(), 0, NULL);
+  if (ret < 0)
     return ret;
 
   gfx::Size natural_size;
@@ -85,12 +85,22 @@ int FFmpegVideoDecoder::GetVideoBuffer(AVCodecContext* codec_context,
     natural_size = config_.natural_size();
   }
 
-  if (!VideoFrame::IsValidConfig(format, size, gfx::Rect(size), natural_size))
+  // FFmpeg has specific requirements on the allocation size of the frame. The
+  // following logic replicates FFmpeg's allocation strategy to ensure buffers
+  // are not overread / overwritten. See ff_init_buffer_info() for details.
+  //
+  // When lowres is non-zero, dimensions should be divided by 2^(lowres), but
+  // since we don't use this, just DCHECK that it's zero.
+  DCHECK_EQ(codec_context->lowres, 0);
+  gfx::Size coded_size(std::max(size.width(), codec_context->coded_width),
+                       std::max(size.height(), codec_context->coded_height));
+
+  if (!VideoFrame::IsValidConfig(
+          format, coded_size, gfx::Rect(size), natural_size))
     return AVERROR(EINVAL);
 
-  scoped_refptr<VideoFrame> video_frame =
-      frame_pool_.CreateFrame(format, size, gfx::Rect(size),
-                              natural_size, kNoTimestamp());
+  scoped_refptr<VideoFrame> video_frame = frame_pool_.CreateFrame(
+      format, coded_size, gfx::Rect(size), natural_size, kNoTimestamp());
 
   for (int i = 0; i < 3; i++) {
     frame->base[i] = video_frame->data(i);
@@ -101,8 +111,8 @@ int FFmpegVideoDecoder::GetVideoBuffer(AVCodecContext* codec_context,
   frame->opaque = NULL;
   video_frame.swap(reinterpret_cast<VideoFrame**>(&frame->opaque));
   frame->type = FF_BUFFER_TYPE_USER;
-  frame->width = codec_context->width;
-  frame->height = codec_context->height;
+  frame->width = coded_size.width();
+  frame->height = coded_size.height();
   frame->format = codec_context->pix_fmt;
 
   return 0;
@@ -124,10 +134,10 @@ static void ReleaseVideoBufferImpl(AVCodecContext* s, AVFrame* frame) {
 }
 
 void FFmpegVideoDecoder::Initialize(const VideoDecoderConfig& config,
+                                    bool low_delay,
                                     const PipelineStatusCB& status_cb) {
   DCHECK(task_runner_->BelongsToCurrentThread());
   DCHECK(decode_cb_.is_null());
-  DCHECK(reset_cb_.is_null());
   DCHECK(!config.is_encrypted());
 
   FFmpegGlue::InitializeFFmpeg();
@@ -135,7 +145,7 @@ void FFmpegVideoDecoder::Initialize(const VideoDecoderConfig& config,
   config_ = config;
   PipelineStatusCB initialize_cb = BindToCurrentLoop(status_cb);
 
-  if (!config.IsValidConfig() || !ConfigureDecoder()) {
+  if (!config.IsValidConfig() || !ConfigureDecoder(low_delay)) {
     initialize_cb.Run(DECODER_ERROR_NOT_SUPPORTED);
     return;
   }
@@ -169,38 +179,19 @@ void FFmpegVideoDecoder::Decode(const scoped_refptr<DecoderBuffer>& buffer,
 
 void FFmpegVideoDecoder::Reset(const base::Closure& closure) {
   DCHECK(task_runner_->BelongsToCurrentThread());
-  DCHECK(reset_cb_.is_null());
-  reset_cb_ = BindToCurrentLoop(closure);
-
-  // Defer the reset if a decode is pending.
-  if (!decode_cb_.is_null())
-    return;
-
-  DoReset();
-}
-
-void FFmpegVideoDecoder::DoReset() {
   DCHECK(decode_cb_.is_null());
 
   avcodec_flush_buffers(codec_context_.get());
   state_ = kNormal;
-  base::ResetAndReturn(&reset_cb_).Run();
+  task_runner_->PostTask(FROM_HERE, closure);
 }
 
-void FFmpegVideoDecoder::Stop(const base::Closure& closure) {
+void FFmpegVideoDecoder::Stop() {
   DCHECK(task_runner_->BelongsToCurrentThread());
-  base::ScopedClosureRunner runner(BindToCurrentLoop(closure));
 
   if (state_ == kUninitialized)
     return;
 
-  if (!decode_cb_.is_null()) {
-    base::ResetAndReturn(&decode_cb_).Run(kAborted, NULL);
-    // Reset is pending only when decode is pending.
-    if (!reset_cb_.is_null())
-      base::ResetAndReturn(&reset_cb_).Run();
-  }
-
   ReleaseFFmpegResources();
   state_ = kUninitialized;
 }
@@ -217,7 +208,6 @@ void FFmpegVideoDecoder::DecodeBuffer(
   DCHECK_NE(state_, kUninitialized);
   DCHECK_NE(state_, kDecodeFinished);
   DCHECK_NE(state_, kError);
-  DCHECK(reset_cb_.is_null());
   DCHECK(!decode_cb_.is_null());
   DCHECK(buffer);
 
@@ -342,7 +332,7 @@ bool FFmpegVideoDecoder::FFmpegDecode(
   }
 
   *video_frame = static_cast<VideoFrame*>(av_frame_->opaque);
-  (*video_frame)->SetTimestamp(
+  (*video_frame)->set_timestamp(
       base::TimeDelta::FromMicroseconds(av_frame_->reordered_opaque));
 
   return true;
@@ -353,7 +343,7 @@ void FFmpegVideoDecoder::ReleaseFFmpegResources() {
   av_frame_.reset();
 }
 
-bool FFmpegVideoDecoder::ConfigureDecoder() {
+bool FFmpegVideoDecoder::ConfigureDecoder(bool low_delay) {
   // Release existing decoder resources if necessary.
   ReleaseFFmpegResources();
 
@@ -365,6 +355,7 @@ bool FFmpegVideoDecoder::ConfigureDecoder() {
   // for damaged macroblocks, and set our error detection sensitivity.
   codec_context_->error_concealment = FF_EC_GUESS_MVS | FF_EC_DEBLOCK;
   codec_context_->thread_count = GetThreadCount(codec_context_->codec_id);
+  codec_context_->thread_type = low_delay ? FF_THREAD_SLICE : FF_THREAD_FRAME;
   codec_context_->opaque = this;
   codec_context_->flags |= CODEC_FLAG_EMU_EDGE;
   codec_context_->get_buffer = GetVideoBufferImpl;
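
Note on the hunks above: frames are now allocated at FFmpeg's coded size (which can exceed the visible size) so the decoder never reads or writes past the buffer, and Initialize() gains a low_delay flag that selects slice threading instead of frame threading. The standalone C++ sketch below is not part of the patch; Size and ComputeCodedSize are hypothetical stand-ins for gfx::Size and the inline std::max() logic in GetVideoBuffer(), and it assumes codec_context->lowres is zero, as the new DCHECK requires.

#include <algorithm>

// Hypothetical stand-in for gfx::Size, just for this sketch.
struct Size {
  int width;
  int height;
};

// Mirrors the allocation-size logic added to GetVideoBuffer(): the frame must
// be at least as large as the codec's coded dimensions, which may be padded
// beyond the visible width/height (e.g. up to macroblock boundaries).
Size ComputeCodedSize(const Size& visible, int coded_width, int coded_height) {
  return { std::max(visible.width, coded_width),
           std::max(visible.height, coded_height) };
}

For a 1080p H.264 stream, for example, the coded height is typically padded to 1088, so the allocation becomes 1920x1088 while the visible rect passed to IsValidConfig()/CreateFrame() stays 1920x1080.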