#include "media/base/limits.h"
#include "media/video/picture.h"
#include "ui/gl/android/scoped_java_surface.h"
+#include "ui/gl/android/surface_texture.h"
#include "ui/gl/gl_bindings.h"
namespace content {
// Helper macros for dealing with failure. If |result| evaluates false, emit
// |log| to ERROR, register |error| with the decoder, and return.
-#define RETURN_ON_FAILURE(result, log, error) \
- do { \
- if (!(result)) { \
- DLOG(ERROR) << log; \
- base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind( \
- &AndroidVideoDecodeAccelerator::NotifyError, \
- base::AsWeakPtr(this), error)); \
- state_ = ERROR; \
- return; \
- } \
+#define RETURN_ON_FAILURE(result, log, error) \
+ do { \
+ if (!(result)) { \
+ DLOG(ERROR) << log; \
+ base::MessageLoop::current()->PostTask( \
+ FROM_HERE, \
+ base::Bind(&AndroidVideoDecodeAccelerator::NotifyError, \
+ weak_this_factory_.GetWeakPtr(), \
+ error)); \
+ state_ = ERROR; \
+ return; \
+ } \
} while (0)
// Max number of bitstreams notified to the client with
// NotifyEndOfBitstreamBuffer() before getting output from the bitstream.
enum { kMaxBitstreamsNotifiedInAdvance = 32 };
-// static
+// Because MediaCodec is thread-hostile (must be poked on a single thread) and
+// has no callback mechanism (b/11990118), we must drive it by polling for
+// complete frames (and available input buffers, when the codec is fully
+// saturated). This function defines the polling delay. The value used is an
+// arbitrary choice that trades off CPU utilization (spinning) against latency.
+// Mirrors android_video_encode_accelerator.cc:EncodePollDelay().
static inline const base::TimeDelta DecodePollDelay() {
+ // An alternative to this polling scheme could be to dedicate a new thread
+ // (instead of using the ChildThread) to run the MediaCodec, and make that
+ // thread use the timeout-based flavor of MediaCodec's dequeue methods when it
+ // believes the codec should complete "soon" (e.g. waiting for an input
+ // buffer, or waiting for a picture when it knows enough complete input
+ // pictures have been fed to saturate any internal buffering). This is
+ // speculative and it's unclear that this would be a win (nor that there's a
+ // reasonably device-agnostic way to fill in the "believes" above).
return base::TimeDelta::FromMilliseconds(10);
}
}
AndroidVideoDecodeAccelerator::AndroidVideoDecodeAccelerator(
- media::VideoDecodeAccelerator::Client* client,
const base::WeakPtr<gpu::gles2::GLES2Decoder> decoder,
const base::Callback<bool(void)>& make_context_current)
- : client_(client),
+ : client_(NULL),
make_context_current_(make_context_current),
codec_(media::kCodecH264),
state_(NO_ERROR),
surface_texture_id_(0),
picturebuffers_requested_(false),
- gl_decoder_(decoder) {
-}
+ gl_decoder_(decoder),
+ weak_this_factory_(this) {}
AndroidVideoDecodeAccelerator::~AndroidVideoDecodeAccelerator() {
DCHECK(thread_checker_.CalledOnValidThread());
}
-bool AndroidVideoDecodeAccelerator::Initialize(
- media::VideoCodecProfile profile) {
+bool AndroidVideoDecodeAccelerator::Initialize(media::VideoCodecProfile profile,
+ Client* client) {
DCHECK(!media_codec_);
DCHECK(thread_checker_.CalledOnValidThread());
- if (!media::MediaCodecBridge::IsAvailable())
- return false;
+ client_ = client;
- if (profile == media::VP8PROFILE_MAIN) {
+ if (profile == media::VP8PROFILE_ANY) {
codec_ = media::kCodecVP8;
} else {
// TODO(dwkang): enable H264 once b/8125974 is fixed.
}
// Only consider using MediaCodec if it's likely backed by hardware.
- if (media::VideoCodecBridge::IsKnownUnaccelerated(codec_))
+ if (media::VideoCodecBridge::IsKnownUnaccelerated(
+ codec_, media::MEDIA_CODEC_DECODER)) {
return false;
+ }
if (!make_context_current_.Run()) {
LOG(ERROR) << "Failed to make this decoder's GL context current.";
gl_decoder_->RestoreTextureUnitBindings(0);
gl_decoder_->RestoreActiveTexture();
- surface_texture_ = new gfx::SurfaceTexture(surface_texture_id_);
+ surface_texture_ = gfx::SurfaceTexture::Create(surface_texture_id_);
if (!ConfigureMediaCodec()) {
LOG(ERROR) << "Failed to create MediaCodec instance.";
return false;
}
- base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
- &AndroidVideoDecodeAccelerator::NotifyInitializeDone,
- base::AsWeakPtr(this)));
return true;
}
void AndroidVideoDecodeAccelerator::DoIOTask() {
+ DCHECK(thread_checker_.CalledOnValidThread());
if (state_ == ERROR) {
return;
}
}
void AndroidVideoDecodeAccelerator::QueueInput() {
+ DCHECK(thread_checker_.CalledOnValidThread());
if (bitstreams_notified_in_advance_.size() > kMaxBitstreamsNotifiedInAdvance)
return;
if (pending_bitstream_buffers_.empty())
base::Time queued_time = pending_bitstream_buffers_.front().second;
UMA_HISTOGRAM_TIMES("Media.AVDA.InputQueueTime",
base::Time::Now() - queued_time);
- media::BitstreamBuffer& bitstream_buffer =
+ media::BitstreamBuffer bitstream_buffer =
pending_bitstream_buffers_.front().first;
+ pending_bitstream_buffers_.pop();
if (bitstream_buffer.id() == -1) {
media_codec_->QueueEOS(input_buf_index);
- pending_bitstream_buffers_.pop();
return;
}
RETURN_ON_FAILURE(status == media::MEDIA_CODEC_OK,
"Failed to QueueInputBuffer: " << status,
PLATFORM_FAILURE);
- pending_bitstream_buffers_.pop();
// We would like to call NotifyEndOfBitstreamBuffer() only when no more
// decoded output will be returned from this bitstream buffer. However, the
// MediaCodec API does not tell us when a queued input buffer has been fully
// consumed, so we notify in advance in order to keep the client feeding us
// bitstreams, and throttle it via |bitstreams_notified_in_advance_|.
// TODO(dwkang): check if there is a way to remove this workaround.
- base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
- &AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer,
- base::AsWeakPtr(this), bitstream_buffer.id()));
+ base::MessageLoop::current()->PostTask(
+ FROM_HERE,
+ base::Bind(&AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer,
+ weak_this_factory_.GetWeakPtr(),
+ bitstream_buffer.id()));
bitstreams_notified_in_advance_.push_back(bitstream_buffer.id());
}
void AndroidVideoDecodeAccelerator::DequeueOutput() {
+ DCHECK(thread_checker_.CalledOnValidThread());
if (picturebuffers_requested_ && output_picture_buffers_.empty())
return;
size_t size = 0;
media::MediaCodecStatus status = media_codec_->DequeueOutputBuffer(
- NoWaitTimeOut(), &buf_index, &offset, &size, ×tamp, &eos);
+ NoWaitTimeOut(), &buf_index, &offset, &size, ×tamp, &eos, NULL);
switch (status) {
case media::MEDIA_CODEC_DEQUEUE_OUTPUT_AGAIN_LATER:
case media::MEDIA_CODEC_ERROR:
if (!picturebuffers_requested_) {
picturebuffers_requested_ = true;
size_ = gfx::Size(width, height);
- base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
- &AndroidVideoDecodeAccelerator::RequestPictureBuffers,
- base::AsWeakPtr(this)));
+ base::MessageLoop::current()->PostTask(
+ FROM_HERE,
+ base::Bind(&AndroidVideoDecodeAccelerator::RequestPictureBuffers,
+ weak_this_factory_.GetWeakPtr()));
} else {
// Dynamic resolution change support is not specified by the Android
// platform at and before JB-MR1, so it's not possible to smoothly
}
} while (buf_index < 0);
+ // This ignores the emitted ByteBuffer and instead relies on rendering to the
+ // codec's SurfaceTexture and then copying from that texture to the client's
+ // PictureBuffer's texture. This means that each picture's data is written
+ // three times: once to the ByteBuffer, once to the SurfaceTexture, and once
+ // to the client's texture. It would be nicer to either:
+ // 1) Render directly to the client's texture from MediaCodec (one write); or
+ // 2) Upload the ByteBuffer to the client's texture (two writes).
+ // Unfortunately neither is possible:
+ // 1) MediaCodec's use of SurfaceTexture is a singleton, and the texture
+ // written to can't change during the codec's lifetime. b/11990461
+ // 2) The ByteBuffer is likely to contain the pixels in a vendor-specific,
+ // opaque/non-standard format. It's not possible to negotiate the decoder
+ // to emit a specific colorspace, even using HW CSC. b/10706245
+ // So, we live with these two extra copies per picture :(
media_codec_->ReleaseOutputBuffer(buf_index, true);
if (eos) {
- base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
- &AndroidVideoDecodeAccelerator::NotifyFlushDone,
- base::AsWeakPtr(this)));
+ base::MessageLoop::current()->PostTask(
+ FROM_HERE,
+ base::Bind(&AndroidVideoDecodeAccelerator::NotifyFlushDone,
+ weak_this_factory_.GetWeakPtr()));
} else {
int64 bitstream_buffer_id = timestamp.InMicroseconds();
SendCurrentSurfaceToClient(static_cast<int32>(bitstream_buffer_id));
// attached.
// 2. SurfaceTexture requires us to apply a transform matrix when we show
// the texture.
- copier_->DoCopyTexture(gl_decoder_.get(), GL_TEXTURE_EXTERNAL_OES,
- GL_TEXTURE_2D, surface_texture_id_,
- picture_buffer_texture_id, 0, size_.width(),
- size_.height(), false, false, false);
-
- base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
- &AndroidVideoDecodeAccelerator::NotifyPictureReady,
- base::AsWeakPtr(this), media::Picture(picture_buffer_id, bitstream_id)));
+ // TODO(hkuang): get the StreamTexture transform matrix in GPU process
+ // instead of using default matrix crbug.com/226218.
+ const static GLfloat default_matrix[16] = {1.0f, 0.0f, 0.0f, 0.0f,
+ 0.0f, 1.0f, 0.0f, 0.0f,
+ 0.0f, 0.0f, 1.0f, 0.0f,
+ 0.0f, 0.0f, 0.0f, 1.0f};
+ copier_->DoCopyTextureWithTransform(gl_decoder_.get(),
+ GL_TEXTURE_EXTERNAL_OES,
+ surface_texture_id_,
+ picture_buffer_texture_id,
+ 0,
+ size_.width(),
+ size_.height(),
+ false,
+ false,
+ false,
+ default_matrix);
+
+ base::MessageLoop::current()->PostTask(
+ FROM_HERE,
+ base::Bind(
+ &AndroidVideoDecodeAccelerator::NotifyPictureReady,
+ weak_this_factory_.GetWeakPtr(),
+ media::Picture(picture_buffer_id, bitstream_id, gfx::Rect(size_))));
}
void AndroidVideoDecodeAccelerator::Decode(
const media::BitstreamBuffer& bitstream_buffer) {
DCHECK(thread_checker_.CalledOnValidThread());
if (bitstream_buffer.id() != -1 && bitstream_buffer.size() == 0) {
- base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
- &AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer,
- base::AsWeakPtr(this), bitstream_buffer.id()));
+ base::MessageLoop::current()->PostTask(
+ FROM_HERE,
+ base::Bind(&AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer,
+ weak_this_factory_.GetWeakPtr(),
+ bitstream_buffer.id()));
return;
}
}
bool AndroidVideoDecodeAccelerator::ConfigureMediaCodec() {
+ DCHECK(thread_checker_.CalledOnValidThread());
DCHECK(surface_texture_.get());
- media_codec_.reset(media::VideoCodecBridge::Create(codec_, false));
-
- if (!media_codec_)
- return false;
gfx::ScopedJavaSurface surface(surface_texture_.get());
+
// Pass a dummy 320x240 canvas size and let the codec signal the real size
// when it's known from the bitstream.
- if (!media_codec_->Start(
- codec_, gfx::Size(320, 240), surface.j_surface().obj(), NULL)) {
+ media_codec_.reset(media::VideoCodecBridge::CreateDecoder(
+ codec_, false, gfx::Size(320, 240), surface.j_surface().obj(), NULL));
+ if (!media_codec_)
return false;
- }
+
io_timer_.Start(FROM_HERE,
DecodePollDelay(),
this,
&AndroidVideoDecodeAccelerator::DoIOTask);
- return media_codec_->GetOutputBuffers();
+ return true;
}
void AndroidVideoDecodeAccelerator::Reset() {
pending_bitstream_buffers_.pop();
if (bitstream_buffer_id != -1) {
- base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
- &AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer,
- base::AsWeakPtr(this), bitstream_buffer_id));
+ base::MessageLoop::current()->PostTask(
+ FROM_HERE,
+ base::Bind(&AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer,
+ weak_this_factory_.GetWeakPtr(),
+ bitstream_buffer_id));
}
}
bitstreams_notified_in_advance_.clear();
ConfigureMediaCodec();
state_ = NO_ERROR;
- base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
- &AndroidVideoDecodeAccelerator::NotifyResetDone, base::AsWeakPtr(this)));
+ base::MessageLoop::current()->PostTask(
+ FROM_HERE,
+ base::Bind(&AndroidVideoDecodeAccelerator::NotifyResetDone,
+ weak_this_factory_.GetWeakPtr()));
}
void AndroidVideoDecodeAccelerator::Destroy() {
DCHECK(thread_checker_.CalledOnValidThread());
+ weak_this_factory_.InvalidateWeakPtrs();
if (media_codec_) {
io_timer_.Stop();
media_codec_->Stop();
delete this;
}
-void AndroidVideoDecodeAccelerator::NotifyInitializeDone() {
- client_->NotifyInitializeDone();
+bool AndroidVideoDecodeAccelerator::CanDecodeOnIOThread() {
+ return false;
}
void AndroidVideoDecodeAccelerator::RequestPictureBuffers() {