// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.media;

import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaCrypto;
import android.media.MediaFormat;
import android.os.Build;
import android.os.Bundle;
import android.util.Log;
import android.view.Surface;

import org.chromium.base.CalledByNative;
import org.chromium.base.JNINamespace;

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;

/**
 * A wrapper of the MediaCodec class to facilitate exception capturing and
 * audio rendering.
 */
@JNINamespace("media")
class MediaCodecBridge {
    private static final String TAG = "MediaCodecBridge";
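
    // Rough sketch of the call order expected from the native side (see
    // media_codec_bridge.h/.cc for the authoritative sequence): create() ->
    // configureVideo()/configureAudio() -> start() -> repeated
    // dequeueInputBuffer()/queueInputBuffer() and dequeueOutputBuffer()/
    // releaseOutputBuffer() calls -> flush()/stop()/release().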

    // Error codes for MediaCodecBridge. Keep these values in sync with
    // MediaCodecStatus in media_codec_bridge.h.
    private static final int MEDIA_CODEC_OK = 0;
    private static final int MEDIA_CODEC_DEQUEUE_INPUT_AGAIN_LATER = 1;
    private static final int MEDIA_CODEC_DEQUEUE_OUTPUT_AGAIN_LATER = 2;
    private static final int MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED = 3;
    private static final int MEDIA_CODEC_OUTPUT_FORMAT_CHANGED = 4;
    private static final int MEDIA_CODEC_INPUT_END_OF_STREAM = 5;
    private static final int MEDIA_CODEC_OUTPUT_END_OF_STREAM = 6;
    private static final int MEDIA_CODEC_NO_KEY = 7;
    private static final int MEDIA_CODEC_STOPPED = 8;
    private static final int MEDIA_CODEC_ERROR = 9;

    // Codec direction. Keep this in sync with media_codec_bridge.h.
    private static final int MEDIA_CODEC_DECODER = 0;
    private static final int MEDIA_CODEC_ENCODER = 1;

    // Max adaptive playback size to be supplied to the decoder.
    private static final int MAX_ADAPTIVE_PLAYBACK_WIDTH = 1920;
    private static final int MAX_ADAPTIVE_PLAYBACK_HEIGHT = 1080;

    // After a flush(), dequeueOutputBuffer() can often produce empty presentation timestamps
    // for several frames. As a result, the player may find that the time does not increase
    // after decoding a frame. To detect this, we check whether the presentation timestamp from
    // dequeueOutputBuffer() is larger than input_timestamp - MAX_PRESENTATION_TIMESTAMP_SHIFT_US
    // after a flush, and we force the presentation timestamps returned by dequeueOutputBuffer()
    // to be non-decreasing for the remaining frames.
    private static final long MAX_PRESENTATION_TIMESTAMP_SHIFT_US = 100000;

    private ByteBuffer[] mInputBuffers;
    private ByteBuffer[] mOutputBuffers;

    private MediaCodec mMediaCodec;
    private AudioTrack mAudioTrack;
    private boolean mFlushed;
    private long mLastPresentationTimeUs;
    private String mMime;
    private boolean mAdaptivePlaybackSupported;

    private static class DequeueInputResult {
        private final int mStatus;
        private final int mIndex;

        private DequeueInputResult(int status, int index) {
            mStatus = status;
            mIndex = index;
        }

        @CalledByNative("DequeueInputResult")
        private int status() { return mStatus; }

        @CalledByNative("DequeueInputResult")
        private int index() { return mIndex; }
    }

    /**
     * This class represents supported Android codec information.
     */
    private static class CodecInfo {
        private final String mCodecType;  // e.g. "video/x-vnd.on2.vp8".
        private final String mCodecName;  // e.g. "OMX.google.vp8.decoder".
        private final int mDirection;

        private CodecInfo(String codecType, String codecName, int direction) {
            mCodecType = codecType;
            mCodecName = codecName;
            mDirection = direction;
        }

        @CalledByNative("CodecInfo")
        private String codecType() { return mCodecType; }

        @CalledByNative("CodecInfo")
        private String codecName() { return mCodecName; }

        @CalledByNative("CodecInfo")
        private int direction() { return mDirection; }
    }

    private static class DequeueOutputResult {
        private final int mStatus;
        private final int mIndex;
        private final int mFlags;
        private final int mOffset;
        private final long mPresentationTimeMicroseconds;
        private final int mNumBytes;

        private DequeueOutputResult(int status, int index, int flags, int offset,
                long presentationTimeMicroseconds, int numBytes) {
            mStatus = status;
            mIndex = index;
            mFlags = flags;
            mOffset = offset;
            mPresentationTimeMicroseconds = presentationTimeMicroseconds;
            mNumBytes = numBytes;
        }

        @CalledByNative("DequeueOutputResult")
        private int status() { return mStatus; }

        @CalledByNative("DequeueOutputResult")
        private int index() { return mIndex; }

        @CalledByNative("DequeueOutputResult")
        private int flags() { return mFlags; }

        @CalledByNative("DequeueOutputResult")
        private int offset() { return mOffset; }

        @CalledByNative("DequeueOutputResult")
        private long presentationTimeMicroseconds() { return mPresentationTimeMicroseconds; }

        @CalledByNative("DequeueOutputResult")
        private int numBytes() { return mNumBytes; }
    }

    /**
     * Get a list of supported Android codec MIME types.
     */
    @SuppressWarnings("deprecation")
    @CalledByNative
    private static CodecInfo[] getCodecsInfo() {
        // Return the first (highest-priority) codec for each MIME type.
        Map<String, CodecInfo> encoderInfoMap = new HashMap<String, CodecInfo>();
        Map<String, CodecInfo> decoderInfoMap = new HashMap<String, CodecInfo>();
        int count = MediaCodecList.getCodecCount();
        for (int i = 0; i < count; ++i) {
            MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
            int direction = info.isEncoder() ? MEDIA_CODEC_ENCODER : MEDIA_CODEC_DECODER;
            String codecString = info.getName();
            String[] supportedTypes = info.getSupportedTypes();
            for (int j = 0; j < supportedTypes.length; ++j) {
                Map<String, CodecInfo> map = info.isEncoder() ? encoderInfoMap : decoderInfoMap;
                if (!map.containsKey(supportedTypes[j])) {
                    map.put(supportedTypes[j], new CodecInfo(
                            supportedTypes[j], codecString, direction));
                }
            }
        }
        ArrayList<CodecInfo> codecInfos = new ArrayList<CodecInfo>(
                decoderInfoMap.size() + encoderInfoMap.size());
        codecInfos.addAll(encoderInfoMap.values());
        codecInfos.addAll(decoderInfoMap.values());
        return codecInfos.toArray(new CodecInfo[codecInfos.size()]);
    }

    /**
     * Get the name of the default Android codec for the given MIME type and direction.
     */
    @SuppressWarnings("deprecation")
    @CalledByNative
    private static String getDefaultCodecName(String mime, int direction) {
        String codecName = "";
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
            try {
                MediaCodec mediaCodec = null;
                if (direction == MEDIA_CODEC_ENCODER) {
                    mediaCodec = MediaCodec.createEncoderByType(mime);
                } else {
                    mediaCodec = MediaCodec.createDecoderByType(mime);
                }
                codecName = mediaCodec.getName();
                mediaCodec.release();
            } catch (Exception e) {
                Log.w(TAG, "getDefaultCodecName: Failed to create MediaCodec: " +
                        mime + ", direction: " + direction, e);
            }
        }
        return codecName;
    }
    @SuppressWarnings("deprecation")
    private static String getDecoderNameForMime(String mime) {
        int count = MediaCodecList.getCodecCount();
        for (int i = 0; i < count; ++i) {
            MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
            if (info.isEncoder()) {
                continue;
            }

            String[] supportedTypes = info.getSupportedTypes();
            for (int j = 0; j < supportedTypes.length; ++j) {
                if (supportedTypes[j].equalsIgnoreCase(mime)) {
                    return info.getName();
                }
            }
        }

        return null;
    }

    private MediaCodecBridge(
            MediaCodec mediaCodec, String mime, boolean adaptivePlaybackSupported) {
        assert mediaCodec != null;
        mMediaCodec = mediaCodec;
        mMime = mime;
        mLastPresentationTimeUs = 0;
        mFlushed = true;
        mAdaptivePlaybackSupported = adaptivePlaybackSupported;
    }
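
    /**
     * Creates a MediaCodecBridge for the given MIME type, direction and security
     * requirement. Returns null if the codec cannot be created, or if a secure
     * codec is requested on a pre-JBMR2 device.
     */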
    @CalledByNative
    private static MediaCodecBridge create(String mime, boolean isSecure, int direction) {
        // Creation of ".secure" codecs sometimes crashes instead of throwing exceptions
        // on pre-JBMR2 devices.
        if (isSecure && Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR2) {
            return null;
        }
        MediaCodec mediaCodec = null;
        boolean adaptivePlaybackSupported = false;
        try {
            // |isSecure| only applies to video decoders.
            if (mime.startsWith("video") && isSecure && direction == MEDIA_CODEC_DECODER) {
                String decoderName = getDecoderNameForMime(mime);
                if (decoderName == null) {
                    return null;
                }
                if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
                    // To work around an issue that we cannot get the codec info from the secure
                    // decoder, create an insecure decoder first so that we can query its codec
                    // info. http://b/15587335.
                    MediaCodec insecureCodec = MediaCodec.createByCodecName(decoderName);
                    adaptivePlaybackSupported = codecSupportsAdaptivePlayback(insecureCodec, mime);
                    insecureCodec.release();
                }
                mediaCodec = MediaCodec.createByCodecName(decoderName + ".secure");
            } else {
                if (direction == MEDIA_CODEC_ENCODER) {
                    mediaCodec = MediaCodec.createEncoderByType(mime);
                } else {
                    mediaCodec = MediaCodec.createDecoderByType(mime);
                    adaptivePlaybackSupported = codecSupportsAdaptivePlayback(mediaCodec, mime);
                }
            }
        } catch (Exception e) {
            Log.e(TAG, "Failed to create MediaCodec: " + mime + ", isSecure: "
                    + isSecure + ", direction: " + direction, e);
        }

        if (mediaCodec == null) {
            return null;
        }
        return new MediaCodecBridge(mediaCodec, mime, adaptivePlaybackSupported);
    }

    @CalledByNative
    private void release() {
        try {
            mMediaCodec.release();
        } catch (IllegalStateException e) {
            // The MediaCodec is stuck in a wrong state, possibly due to losing
            // the surface.
            Log.e(TAG, "Cannot release media codec", e);
        }
        mMediaCodec = null;
        if (mAudioTrack != null) {
            mAudioTrack.release();
        }
    }

    @SuppressWarnings("deprecation")
    @CalledByNative
    private boolean start() {
        try {
            mMediaCodec.start();
            mInputBuffers = mMediaCodec.getInputBuffers();
        } catch (IllegalStateException e) {
            Log.e(TAG, "Cannot start the media codec", e);
            return false;
        }
        return true;
    }
    @CalledByNative
    private DequeueInputResult dequeueInputBuffer(long timeoutUs) {
        int status = MEDIA_CODEC_ERROR;
        int index = -1;
        try {
            int indexOrStatus = mMediaCodec.dequeueInputBuffer(timeoutUs);
            if (indexOrStatus >= 0) { // index!
                status = MEDIA_CODEC_OK;
                index = indexOrStatus;
            } else if (indexOrStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                Log.e(TAG, "dequeueInputBuffer: MediaCodec.INFO_TRY_AGAIN_LATER");
                status = MEDIA_CODEC_DEQUEUE_INPUT_AGAIN_LATER;
            } else {
                Log.e(TAG, "Unexpected index_or_status: " + indexOrStatus);
                assert false;
            }
        } catch (Exception e) {
            Log.e(TAG, "Failed to dequeue input buffer", e);
        }
        return new DequeueInputResult(status, index);
    }
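
    /**
     * Flushes the codec and, if audio is being rendered, pauses and flushes the
     * AudioTrack. Returns MEDIA_CODEC_OK on success, MEDIA_CODEC_ERROR otherwise.
     */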
    @CalledByNative
    private int flush() {
        try {
            mFlushed = true;
            if (mAudioTrack != null) {
                // Need to call pause() here, or otherwise flush() is a no-op.
                mAudioTrack.pause();
                mAudioTrack.flush();
            }
            mMediaCodec.flush();
        } catch (IllegalStateException e) {
            Log.e(TAG, "Failed to flush MediaCodec", e);
            return MEDIA_CODEC_ERROR;
        }
        return MEDIA_CODEC_OK;
    }

    @CalledByNative
    private void stop() {
        mMediaCodec.stop();
        if (mAudioTrack != null) {
            mAudioTrack.pause();
        }
    }

    @CalledByNative
    private int getOutputHeight() {
        return mMediaCodec.getOutputFormat().getInteger(MediaFormat.KEY_HEIGHT);
    }

    @CalledByNative
    private int getOutputWidth() {
        return mMediaCodec.getOutputFormat().getInteger(MediaFormat.KEY_WIDTH);
    }

    @CalledByNative
    private ByteBuffer getInputBuffer(int index) {
        return mInputBuffers[index];
    }

    @CalledByNative
    private ByteBuffer getOutputBuffer(int index) {
        return mOutputBuffers[index];
    }

    @CalledByNative
    private int getInputBuffersCount() {
        return mInputBuffers.length;
    }

    @CalledByNative
    private int getOutputBuffersCount() {
        return mOutputBuffers != null ? mOutputBuffers.length : -1;
    }

    @CalledByNative
    private int getOutputBuffersCapacity() {
        return mOutputBuffers != null ? mOutputBuffers[0].capacity() : -1;
    }

    @SuppressWarnings("deprecation")
    @CalledByNative
    private boolean getOutputBuffers() {
        try {
            mOutputBuffers = mMediaCodec.getOutputBuffers();
        } catch (IllegalStateException e) {
            Log.e(TAG, "Cannot get output buffers", e);
            return false;
        }
        return true;
    }

    @CalledByNative
    private int queueInputBuffer(
            int index, int offset, int size, long presentationTimeUs, int flags) {
        resetLastPresentationTimeIfNeeded(presentationTimeUs);
        try {
            mMediaCodec.queueInputBuffer(index, offset, size, presentationTimeUs, flags);
        } catch (Exception e) {
            Log.e(TAG, "Failed to queue input buffer", e);
            return MEDIA_CODEC_ERROR;
        }
        return MEDIA_CODEC_OK;
    }
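
    /**
     * Requests a new target bitrate from a video encoder via
     * MediaCodec.PARAMETER_KEY_VIDEO_BITRATE; only meaningful for encoders.
     */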
    @CalledByNative
    private void setVideoBitrate(int bps) {
        Bundle b = new Bundle();
        b.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, bps);
        mMediaCodec.setParameters(b);
    }

    @CalledByNative
    private void requestKeyFrameSoon() {
        Bundle b = new Bundle();
        b.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
        mMediaCodec.setParameters(b);
    }

    @CalledByNative
    private int queueSecureInputBuffer(
            int index, int offset, byte[] iv, byte[] keyId, int[] numBytesOfClearData,
            int[] numBytesOfEncryptedData, int numSubSamples, long presentationTimeUs) {
        resetLastPresentationTimeIfNeeded(presentationTimeUs);
        try {
            MediaCodec.CryptoInfo cryptoInfo = new MediaCodec.CryptoInfo();
            cryptoInfo.set(numSubSamples, numBytesOfClearData, numBytesOfEncryptedData,
                    keyId, iv, MediaCodec.CRYPTO_MODE_AES_CTR);
            mMediaCodec.queueSecureInputBuffer(index, offset, cryptoInfo, presentationTimeUs, 0);
        } catch (MediaCodec.CryptoException e) {
            Log.e(TAG, "Failed to queue secure input buffer", e);
            if (e.getErrorCode() == MediaCodec.CryptoException.ERROR_NO_KEY) {
                Log.e(TAG, "MediaCodec.CryptoException.ERROR_NO_KEY");
                return MEDIA_CODEC_NO_KEY;
            }
            Log.e(TAG, "MediaCodec.CryptoException with error code " + e.getErrorCode());
            return MEDIA_CODEC_ERROR;
        } catch (IllegalStateException e) {
            Log.e(TAG, "Failed to queue secure input buffer", e);
            return MEDIA_CODEC_ERROR;
        }
        return MEDIA_CODEC_OK;
    }

    @CalledByNative
    private void releaseOutputBuffer(int index, boolean render) {
        try {
            mMediaCodec.releaseOutputBuffer(index, render);
        } catch (IllegalStateException e) {
            // TODO(qinmin): May need to report the error to the caller. crbug.com/356498.
            Log.e(TAG, "Failed to release output buffer", e);
        }
    }
    @SuppressWarnings("deprecation")
    @CalledByNative
    private DequeueOutputResult dequeueOutputBuffer(long timeoutUs) {
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int status = MEDIA_CODEC_ERROR;
        int index = -1;
        try {
            int indexOrStatus = mMediaCodec.dequeueOutputBuffer(info, timeoutUs);
            if (info.presentationTimeUs < mLastPresentationTimeUs) {
                // TODO(qinmin): return a special code through DequeueOutputResult
                // to notify the native code that the frame has a wrong presentation
                // timestamp and should be skipped.
                info.presentationTimeUs = mLastPresentationTimeUs;
            }
            mLastPresentationTimeUs = info.presentationTimeUs;

            if (indexOrStatus >= 0) { // index!
                status = MEDIA_CODEC_OK;
                index = indexOrStatus;
            } else if (indexOrStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                status = MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED;
            } else if (indexOrStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                status = MEDIA_CODEC_OUTPUT_FORMAT_CHANGED;
            } else if (indexOrStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                status = MEDIA_CODEC_DEQUEUE_OUTPUT_AGAIN_LATER;
            } else {
                Log.e(TAG, "Unexpected index_or_status: " + indexOrStatus);
                assert false;
            }
        } catch (IllegalStateException e) {
            Log.e(TAG, "Failed to dequeue output buffer", e);
        }

        return new DequeueOutputResult(
                status, index, info.flags, info.offset, info.presentationTimeUs, info.size);
    }

    @CalledByNative
    private boolean configureVideo(MediaFormat format, Surface surface, MediaCrypto crypto,
            int flags) {
        try {
            if (mAdaptivePlaybackSupported) {
                format.setInteger(MediaFormat.KEY_MAX_WIDTH, MAX_ADAPTIVE_PLAYBACK_WIDTH);
                format.setInteger(MediaFormat.KEY_MAX_HEIGHT, MAX_ADAPTIVE_PLAYBACK_HEIGHT);
            }
            mMediaCodec.configure(format, surface, crypto, flags);
            return true;
        } catch (IllegalStateException e) {
            Log.e(TAG, "Cannot configure the video codec", e);
        }
        return false;
    }

    @CalledByNative
    private static MediaFormat createAudioFormat(String mime, int sampleRate, int channelCount) {
        return MediaFormat.createAudioFormat(mime, sampleRate, channelCount);
    }

    @CalledByNative
    private static MediaFormat createVideoDecoderFormat(String mime, int width, int height) {
        return MediaFormat.createVideoFormat(mime, width, height);
    }

    @CalledByNative
    private static MediaFormat createVideoEncoderFormat(String mime, int width, int height,
            int bitRate, int frameRate, int iFrameInterval, int colorFormat) {
        MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
        format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, frameRate);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, iFrameInterval);
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
        return format;
    }

    @CalledByNative
    private boolean isAdaptivePlaybackSupported(int width, int height) {
        if (!mAdaptivePlaybackSupported)
            return false;
        return width <= MAX_ADAPTIVE_PLAYBACK_WIDTH && height <= MAX_ADAPTIVE_PLAYBACK_HEIGHT;
    }
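
    /**
     * Returns true if the given decoder advertises
     * CodecCapabilities.FEATURE_AdaptivePlayback for |mime| (KitKat and above only).
     */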
    private static boolean codecSupportsAdaptivePlayback(MediaCodec mediaCodec, String mime) {
        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT || mediaCodec == null) {
            return false;
        }
        try {
            MediaCodecInfo info = mediaCodec.getCodecInfo();
            if (info.isEncoder()) {
                return false;
            }
            MediaCodecInfo.CodecCapabilities capabilities = info.getCapabilitiesForType(mime);
            return (capabilities != null) && capabilities.isFeatureSupported(
                    MediaCodecInfo.CodecCapabilities.FEATURE_AdaptivePlayback);
        } catch (IllegalArgumentException e) {
            Log.e(TAG, "Cannot retrieve codec information", e);
        }
        return false;
    }

    @CalledByNative
    private static void setCodecSpecificData(MediaFormat format, int index, byte[] bytes) {
        String name = null;
        if (index == 0) {
            name = "csd-0";
        } else if (index == 1) {
            name = "csd-1";
        }
        if (name != null) {
            format.setByteBuffer(name, ByteBuffer.wrap(bytes));
        }
    }

    @CalledByNative
    private static void setFrameHasADTSHeader(MediaFormat format) {
        format.setInteger(MediaFormat.KEY_IS_ADTS, 1);
    }
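
    /**
     * Configures the codec for audio decoding and, if |playAudio| is true, creates
     * the AudioTrack used to render the decoded PCM output.
     */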
    @CalledByNative
    private boolean configureAudio(MediaFormat format, MediaCrypto crypto, int flags,
            boolean playAudio) {
        try {
            mMediaCodec.configure(format, null, crypto, flags);
            if (playAudio) {
                int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
                int channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
                int channelConfig = getAudioFormat(channelCount);
                // Using 16bit PCM for output. Keep this value in sync with
                // kBytesPerAudioOutputSample in media_codec_bridge.cc.
                int minBufferSize = AudioTrack.getMinBufferSize(sampleRate, channelConfig,
                        AudioFormat.ENCODING_PCM_16BIT);
                mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, channelConfig,
                        AudioFormat.ENCODING_PCM_16BIT, minBufferSize, AudioTrack.MODE_STREAM);
                if (mAudioTrack.getState() == AudioTrack.STATE_UNINITIALIZED) {
                    mAudioTrack = null;
                    return false;
                }
            }
            return true;
        } catch (IllegalStateException e) {
            Log.e(TAG, "Cannot configure the audio codec", e);
        }
        return false;
    }

    /**
     * Play the audio buffer that is passed in.
     *
     * @param buf Audio buffer to be rendered.
     * @return The number of frames that have already been consumed by the
     * hardware. This number resets to 0 after each flush call.
     */
    @CalledByNative
    private long playOutputBuffer(byte[] buf) {
        if (mAudioTrack == null) {
            return 0;
        }

        if (AudioTrack.PLAYSTATE_PLAYING != mAudioTrack.getPlayState()) {
            mAudioTrack.play();
        }
        int size = mAudioTrack.write(buf, 0, buf.length);
        if (buf.length != size) {
            Log.i(TAG, "Failed to send all data to audio output, expected size: " +
                    buf.length + ", actual size: " + size);
        }
        // TODO(qinmin): Returning the head position allows us to estimate
        // the current presentation time in native code. It would be better to use
        // AudioTrack.getTimestamp() to get the last known time when a frame was
        // played, but that requires converting the Java nano time to a C++
        // timestamp. Also, if the stream runs too long, getPlaybackHeadPosition()
        // could overflow. AudioTimestampHelper in MediaSourcePlayer has the same
        // issue. See http://crbug.com/358801.
        return mAudioTrack.getPlaybackHeadPosition();
    }

    @SuppressWarnings("deprecation")
    @CalledByNative
    private void setVolume(double volume) {
        if (mAudioTrack != null) {
            mAudioTrack.setStereoVolume((float) volume, (float) volume);
        }
    }

    private void resetLastPresentationTimeIfNeeded(long presentationTimeUs) {
        if (mFlushed) {
            mLastPresentationTimeUs =
                    Math.max(presentationTimeUs - MAX_PRESENTATION_TIMESTAMP_SHIFT_US, 0);
            mFlushed = false;
        }
    }
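
    /**
     * Maps a channel count to the corresponding AudioFormat channel configuration
     * (1 = mono, 2 = stereo, 4 = quad, 6 = 5.1, 8 = 7.1); any other count falls
     * back to CHANNEL_OUT_DEFAULT.
     */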
    private int getAudioFormat(int channelCount) {
        switch (channelCount) {
            case 1:
                return AudioFormat.CHANNEL_OUT_MONO;
            case 2:
                return AudioFormat.CHANNEL_OUT_STEREO;
            case 4:
                return AudioFormat.CHANNEL_OUT_QUAD;
            case 6:
                return AudioFormat.CHANNEL_OUT_5POINT1;
            case 8:
                return AudioFormat.CHANNEL_OUT_7POINT1;
            default:
                return AudioFormat.CHANNEL_OUT_DEFAULT;
        }
    }
}