+ // Codec direction constants: a codec either decodes or encodes.
private static final int MEDIA_CODEC_DECODER = 0;
private static final int MEDIA_CODEC_ENCODER = 1;
+ // Max adaptive playback size to be supplied to the decoder.
+ private static final int MAX_ADAPTIVE_PLAYBACK_WIDTH = 1920;
+ private static final int MAX_ADAPTIVE_PLAYBACK_HEIGHT = 1080;
+
// After a flush(), dequeueOutputBuffer() can often produce empty presentation timestamps
// for several frames. As a result, the player may find that the time does not increase
// after decoding a frame. To detect this, we check whether the presentation timestamps remain
// non-decreasing for the remaining frames.
private static final long MAX_PRESENTATION_TIMESTAMP_SHIFT_US = 100000;
+ // TODO(qinmin): Use MediaFormat constants when part of the public API.
+ // Crop-rectangle keys of the output format; used to derive the visible
+ // output size in getOutputWidth()/getOutputHeight().
+ private static final String KEY_CROP_LEFT = "crop-left";
+ private static final String KEY_CROP_RIGHT = "crop-right";
+ private static final String KEY_CROP_BOTTOM = "crop-bottom";
+ private static final String KEY_CROP_TOP = "crop-top";
+
private ByteBuffer[] mInputBuffers;
private ByteBuffer[] mOutputBuffers;
private AudioTrack mAudioTrack;
+ // True right after construction and after a flush; see flush().
private boolean mFlushed;
private long mLastPresentationTimeUs;
+ // Mime type supplied when this bridge was created.
+ private String mMime;
+ // Whether the underlying codec supports adaptive playback; gates the
+ // max-size hints in configureVideo() and isAdaptivePlaybackSupported().
+ private boolean mAdaptivePlaybackSupported;
+ // Holds the result of a dequeueInputBuffer() call; values are read from
+ // native code via the @CalledByNative accessors below.
private static class DequeueInputResult {
private final int mStatus;
}
@CalledByNative("DequeueInputResult")
- private int status() { return mStatus; }
+ private int status() {
+ return mStatus;
+ }
@CalledByNative("DequeueInputResult")
- private int index() { return mIndex; }
+ private int index() {
+ return mIndex;
+ }
}
/**
}
+ // CodecInfo accessors; read from native code.
@CalledByNative("CodecInfo")
- private String codecType() { return mCodecType; }
+ private String codecType() {
+ return mCodecType;
+ }
@CalledByNative("CodecInfo")
- private String codecName() { return mCodecName; }
+ private String codecName() {
+ return mCodecName;
+ }
@CalledByNative("CodecInfo")
- private int direction() { return mDirection; }
+ private int direction() {
+ return mDirection;
+ }
}
+ // Holds the result of a dequeueOutputBuffer() call (status, buffer index,
+ // flags, offset, presentation timestamp, byte count); read from native code
+ // via the @CalledByNative accessors below.
private static class DequeueOutputResult {
}
@CalledByNative("DequeueOutputResult")
- private int status() { return mStatus; }
+ private int status() {
+ return mStatus;
+ }
@CalledByNative("DequeueOutputResult")
- private int index() { return mIndex; }
+ private int index() {
+ return mIndex;
+ }
@CalledByNative("DequeueOutputResult")
- private int flags() { return mFlags; }
+ private int flags() {
+ return mFlags;
+ }
@CalledByNative("DequeueOutputResult")
- private int offset() { return mOffset; }
+ private int offset() {
+ return mOffset;
+ }
@CalledByNative("DequeueOutputResult")
- private long presentationTimeMicroseconds() { return mPresentationTimeMicroseconds; }
+ private long presentationTimeMicroseconds() {
+ return mPresentationTimeMicroseconds;
+ }
@CalledByNative("DequeueOutputResult")
- private int numBytes() { return mNumBytes; }
+ private int numBytes() {
+ return mNumBytes;
+ }
}
/**
* Get a list of supported android codec mimes.
*/
+ @SuppressWarnings("deprecation")
@CalledByNative
private static CodecInfo[] getCodecsInfo() {
// Return the first (highest-priority) codec for each MIME type.
for (int i = 0; i < count; ++i) {
MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
int direction =
- info.isEncoder() ? MEDIA_CODEC_ENCODER : MEDIA_CODEC_DECODER;
+ info.isEncoder() ? MEDIA_CODEC_ENCODER : MEDIA_CODEC_DECODER;
String codecString = info.getName();
String[] supportedTypes = info.getSupportedTypes();
for (int j = 0; j < supportedTypes.length; ++j) {
Map<String, CodecInfo> map = info.isEncoder() ? encoderInfoMap : decoderInfoMap;
if (!map.containsKey(supportedTypes[j])) {
map.put(supportedTypes[j], new CodecInfo(
- supportedTypes[j], codecString, direction));
+ supportedTypes[j], codecString, direction));
}
}
}
+ // Encoders are listed before decoders in the returned array.
ArrayList<CodecInfo> codecInfos = new ArrayList<CodecInfo>(
- decoderInfoMap.size() + encoderInfoMap.size());
+ decoderInfoMap.size() + encoderInfoMap.size());
codecInfos.addAll(encoderInfoMap.values());
codecInfos.addAll(decoderInfoMap.values());
return codecInfos.toArray(new CodecInfo[codecInfos.size()]);
}
- private static String getSecureDecoderNameForMime(String mime) {
+ /**
+ * Get the name of the default android codec for |mime| and |direction|.
+ * Only attempted on JELLY_BEAN_MR2 and later; returns an empty string if
+ * the SDK is too old or codec creation fails.
+ */
+ @SuppressWarnings("deprecation")
+ @CalledByNative
+ private static String getDefaultCodecName(String mime, int direction) {
+ String codecName = "";
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
+ try {
+ MediaCodec mediaCodec = null;
+ if (direction == MEDIA_CODEC_ENCODER) {
+ mediaCodec = MediaCodec.createEncoderByType(mime);
+ } else {
+ mediaCodec = MediaCodec.createDecoderByType(mime);
+ }
+ // The codec is created only to query its name, then released.
+ codecName = mediaCodec.getName();
+ mediaCodec.release();
+ } catch (Exception e) {
+ Log.w(TAG, "getDefaultCodecName: Failed to create MediaCodec: " +
+ mime + ", direction: " + direction, e);
+ }
+ }
+ return codecName;
+ }
+
+
+ /**
+ * Get a list of encoder supported color formats for specified mime type.
+ * Returns null if no encoder supporting |mime| is found.
+ */
+ @SuppressWarnings("deprecation")
+ @CalledByNative
+ private static int[] getEncoderColorFormatsForMime(String mime) {
+ int count = MediaCodecList.getCodecCount();
+ for (int i = 0; i < count; ++i) {
+ MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
+ // Only encoders are relevant here.
+ if (!info.isEncoder()) {
+ continue;
+ }
+
+ String[] supportedTypes = info.getSupportedTypes();
+ for (int j = 0; j < supportedTypes.length; ++j) {
+ if (!supportedTypes[j].equalsIgnoreCase(mime)) {
+ continue;
+ }
+
+ MediaCodecInfo.CodecCapabilities capabilities = info.getCapabilitiesForType(mime);
+ return capabilities.colorFormats;
+ }
+ }
+ return null;
+ }
+
+
+ @SuppressWarnings("deprecation")
+ /**
+ * Get the name of the first decoder supporting |mime|, or null if none.
+ */
+ private static String getDecoderNameForMime(String mime) {
int count = MediaCodecList.getCodecCount();
for (int i = 0; i < count; ++i) {
MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
+ // Skip encoders: callers need a decoder name (e.g. to derive the
+ // ".secure" decoder in createMediaCodecBridge()).
+ if (info.isEncoder()) {
+ continue;
+ }
String[] supportedTypes = info.getSupportedTypes();
for (int j = 0; j < supportedTypes.length; ++j) {
if (supportedTypes[j].equalsIgnoreCase(mime)) {
- return info.getName() + ".secure";
+ return info.getName();
}
}
}
return null;
}
+ /**
+ * @param mediaCodec Codec to wrap; must be non-null.
+ * @param mime Mime type the codec was created for.
+ * @param adaptivePlaybackSupported Whether the codec supports adaptive playback.
+ */
- private MediaCodecBridge(MediaCodec mediaCodec) {
+ private MediaCodecBridge(
+ MediaCodec mediaCodec, String mime, boolean adaptivePlaybackSupported) {
assert mediaCodec != null;
mMediaCodec = mediaCodec;
+ mMime = mime;
mLastPresentationTimeUs = 0;
mFlushed = true;
+ mAdaptivePlaybackSupported = adaptivePlaybackSupported;
}
@CalledByNative
return null;
}
MediaCodec mediaCodec = null;
+ // Assume no adaptive playback support until verified below.
+ boolean adaptivePlaybackSupported = false;
try {
// |isSecure| only applies to video decoders.
if (mime.startsWith("video") && isSecure && direction == MEDIA_CODEC_DECODER) {
- mediaCodec = MediaCodec.createByCodecName(getSecureDecoderNameForMime(mime));
+ String decoderName = getDecoderNameForMime(mime);
+ if (decoderName == null) {
+ return null;
+ }
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
+ // To work around an issue that we cannot get the codec info from the secure
+ // decoder, create an insecure decoder first so that we can query its codec
+ // info. http://b/15587335.
+ MediaCodec insecureCodec = MediaCodec.createByCodecName(decoderName);
+ adaptivePlaybackSupported = codecSupportsAdaptivePlayback(insecureCodec, mime);
+ insecureCodec.release();
+ }
+ // The secure variant is named by appending ".secure" to the decoder name.
+ mediaCodec = MediaCodec.createByCodecName(decoderName + ".secure");
} else {
if (direction == MEDIA_CODEC_ENCODER) {
mediaCodec = MediaCodec.createEncoderByType(mime);
} else {
mediaCodec = MediaCodec.createDecoderByType(mime);
adaptivePlaybackSupported = codecSupportsAdaptivePlayback(mediaCodec, mime);
}
}
} catch (Exception e) {
if (mediaCodec == null) {
return null;
}
-
- return new MediaCodecBridge(mediaCodec);
+ return new MediaCodecBridge(mediaCodec, mime, adaptivePlaybackSupported);
}
@CalledByNative
private void release() {
- mMediaCodec.stop();
- mMediaCodec.release();
+ try {
+ mMediaCodec.release();
+ } catch (IllegalStateException e) {
+ // The MediaCodec is stuck in a wrong state, possibly due to losing
+ // the surface.
+ Log.e(TAG, "Cannot release media codec", e);
+ }
mMediaCodec = null;
+ // Also free the audio track owned by this bridge, if any.
if (mAudioTrack != null) {
mAudioTrack.release();
}
}
+ @SuppressWarnings("deprecation")
@CalledByNative
private boolean start() {
try {
try {
mFlushed = true;
if (mAudioTrack != null) {
+ // Need to call pause() here, or otherwise flush() is a no-op.
+ mAudioTrack.pause();
mAudioTrack.flush();
}
mMediaCodec.flush();
}
}
+ // True if |format| contains all four crop-rect keys; in that case the
+ // visible size is derived from the crop rectangle (see getOutputWidth()
+ // and getOutputHeight()).
+ private boolean outputFormatHasCropValues(MediaFormat format) {
+ return format.containsKey(KEY_CROP_RIGHT) && format.containsKey(KEY_CROP_LEFT)
+ && format.containsKey(KEY_CROP_BOTTOM) && format.containsKey(KEY_CROP_TOP);
+ }
+
@CalledByNative
private int getOutputHeight() {
- return mMediaCodec.getOutputFormat().getInteger(MediaFormat.KEY_HEIGHT);
+ MediaFormat format = mMediaCodec.getOutputFormat();
+ // Crop values are inclusive bounds, hence the +1 when computing the height.
+ return outputFormatHasCropValues(format)
+ ? format.getInteger(KEY_CROP_BOTTOM) - format.getInteger(KEY_CROP_TOP) + 1
+ : format.getInteger(MediaFormat.KEY_HEIGHT);
}
@CalledByNative
private int getOutputWidth() {
- return mMediaCodec.getOutputFormat().getInteger(MediaFormat.KEY_WIDTH);
+ MediaFormat format = mMediaCodec.getOutputFormat();
+ // Crop values are inclusive bounds, hence the +1 when computing the width.
+ return outputFormatHasCropValues(format)
+ ? format.getInteger(KEY_CROP_RIGHT) - format.getInteger(KEY_CROP_LEFT) + 1
+ : format.getInteger(MediaFormat.KEY_WIDTH);
}
@CalledByNative
return mOutputBuffers != null ? mOutputBuffers[0].capacity() : -1;
}
+ @SuppressWarnings("deprecation")
@CalledByNative
private boolean getOutputBuffers() {
try {
+ // Returns output buffer |index| to the codec; |render| is forwarded to
+ // MediaCodec.releaseOutputBuffer(). Errors are logged, not rethrown.
private void releaseOutputBuffer(int index, boolean render) {
try {
mMediaCodec.releaseOutputBuffer(index, render);
- } catch(IllegalStateException e) {
+ } catch (IllegalStateException e) {
// TODO(qinmin): May need to report the error to the caller. crbug.com/356498.
Log.e(TAG, "Failed to release output buffer", e);
}
}
+ @SuppressWarnings("deprecation")
@CalledByNative
private DequeueOutputResult dequeueOutputBuffer(long timeoutUs) {
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
private boolean configureVideo(MediaFormat format, Surface surface, MediaCrypto crypto,
int flags) {
try {
+ if (mAdaptivePlaybackSupported) {
+ // Supply the max adaptive playback size to the decoder
+ // (see MAX_ADAPTIVE_PLAYBACK_WIDTH/HEIGHT).
+ format.setInteger(MediaFormat.KEY_MAX_WIDTH, MAX_ADAPTIVE_PLAYBACK_WIDTH);
+ format.setInteger(MediaFormat.KEY_MAX_HEIGHT, MAX_ADAPTIVE_PLAYBACK_HEIGHT);
+ }
mMediaCodec.configure(format, surface, crypto, flags);
return true;
} catch (IllegalStateException e) {
}
@CalledByNative
+ // Returns whether adaptive playback is supported for a |width| x |height|
+ // video, i.e. the codec reported support and the size is within the max
+ // bounds supplied in configureVideo().
+ private boolean isAdaptivePlaybackSupported(int width, int height) {
+ if (!mAdaptivePlaybackSupported) {
+ return false;
+ }
+ return width <= MAX_ADAPTIVE_PLAYBACK_WIDTH && height <= MAX_ADAPTIVE_PLAYBACK_HEIGHT;
+ }
+
+ // Returns true if |mediaCodec| is a decoder advertising
+ // FEATURE_AdaptivePlayback for |mime|. Always returns false before KITKAT.
+ private static boolean codecSupportsAdaptivePlayback(MediaCodec mediaCodec, String mime) {
+ if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT || mediaCodec == null) {
+ return false;
+ }
+ try {
+ MediaCodecInfo info = mediaCodec.getCodecInfo();
+ // Only decoders are considered.
+ if (info.isEncoder()) {
+ return false;
+ }
+ MediaCodecInfo.CodecCapabilities capabilities = info.getCapabilitiesForType(mime);
+ return (capabilities != null) && capabilities.isFeatureSupported(
+ MediaCodecInfo.CodecCapabilities.FEATURE_AdaptivePlayback);
+ } catch (IllegalArgumentException e) {
+ Log.e(TAG, "Cannot retrieve codec information", e);
+ }
+ return false;
+ }
+
+ @CalledByNative
private static void setCodecSpecificData(MediaFormat format, int index, byte[] bytes) {
String name = null;
if (index == 0) {
return false;
}
+ /**
+ * Play the audio buffer that is passed in.
+ *
+ * @param buf Audio buffer to be rendered.
+ * @return The number of frames that have already been consumed by the
+ * hardware. This number resets to 0 after each flush call.
+ */
@CalledByNative
- private void playOutputBuffer(byte[] buf) {
- if (mAudioTrack != null) {
- if (AudioTrack.PLAYSTATE_PLAYING != mAudioTrack.getPlayState()) {
- mAudioTrack.play();
- }
- int size = mAudioTrack.write(buf, 0, buf.length);
- if (buf.length != size) {
- Log.i(TAG, "Failed to send all data to audio output, expected size: " +
- buf.length + ", actual size: " + size);
- }
+ private long playOutputBuffer(byte[] buf) {
+ if (mAudioTrack == null) {
+ return 0;
}
- }
+ if (AudioTrack.PLAYSTATE_PLAYING != mAudioTrack.getPlayState()) {
+ mAudioTrack.play();
+ }
+ int size = mAudioTrack.write(buf, 0, buf.length);
+ // NOTE(review): write() can also return a negative error code, which
+ // would land in this log path as well — confirm that is intended.
+ if (buf.length != size) {
+ Log.i(TAG, "Failed to send all data to audio output, expected size: " +
+ buf.length + ", actual size: " + size);
+ }
+ // TODO(qinmin): Returning the head position allows us to estimate
+ // the current presentation time in native code. However, it is
+ // better to use AudioTrack.getCurrentTimestamp() to get the last
+ // known time when a frame is played. However, we will need to
+ // convert the java nano time to C++ timestamp.
+ // If the stream runs too long, getPlaybackHeadPosition() could
+ // overflow. AudioTimestampHelper in MediaSourcePlayer has the same
+ // issue. See http://crbug.com/358801.
+ return mAudioTrack.getPlaybackHeadPosition();
+ }
+
+
+ @SuppressWarnings("deprecation")
@CalledByNative
private void setVolume(double volume) {
if (mAudioTrack != null) {