/*
 * libjingle
 * Copyright 2014, Google Inc.
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions are met:
8 * 1. Redistributions of source code must retain the above copyright notice,
9 * this list of conditions and the following disclaimer.
10 * 2. Redistributions in binary form must reproduce the above copyright notice,
11 * this list of conditions and the following disclaimer in the documentation
12 * and/or other materials provided with the distribution.
13 * 3. The name of the author may not be used to endorse or promote products
14 * derived from this software without specific prior written permission.
16 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
19 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
20 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
21 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
22 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
23 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
24 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
30 import android.graphics.SurfaceTexture;
31 import android.media.MediaCodec;
32 import android.media.MediaCodecInfo;
33 import android.media.MediaCodecInfo.CodecCapabilities;
34 import android.media.MediaCodecList;
35 import android.media.MediaFormat;
36 import android.opengl.EGL14;
37 import android.opengl.EGLConfig;
38 import android.opengl.EGLContext;
39 import android.opengl.EGLDisplay;
40 import android.opengl.EGLSurface;
41 import android.opengl.GLES11Ext;
42 import android.opengl.GLES20;
43 import android.os.Build;
44 import android.os.Bundle;
45 import android.util.Log;
46 import android.view.Surface;
48 import java.nio.ByteBuffer;
50 // Java-side of peerconnection_jni.cc:MediaCodecVideoDecoder.
51 // This class is an implementation detail of the Java PeerConnection API.
// MediaCodec is thread-hostile so this class must be operated on a single
// thread.
54 class MediaCodecVideoDecoder {
55 // This class is constructed, operated, and destroyed by its C++ incarnation,
56 // so the class and its methods have non-public visibility. The API this
57 // class exposes aims to mimic the webrtc::VideoDecoder API as closely as
// possible to minimize the amount of translation work necessary.
60 private static final String TAG = "MediaCodecVideoDecoder";
62 private static final int DEQUEUE_INPUT_TIMEOUT = 500000; // 500 ms timeout.
63 private Thread mediaCodecThread;
64 private MediaCodec mediaCodec;
65 private ByteBuffer[] inputBuffers;
66 private ByteBuffer[] outputBuffers;
67 private static final String VP8_MIME_TYPE = "video/x-vnd.on2.vp8";
68 // List of supported HW VP8 decoders.
69 private static final String[] supportedHwCodecPrefixes =
70 {"OMX.qcom.", "OMX.Nvidia." };
71 // NV12 color format supported by QCOM codec, but not declared in MediaCodec -
72 // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
73 private static final int
74 COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
75 // Allowable color formats supported by codec - in order of preference.
76 private static final int[] supportedColorList = {
77 CodecCapabilities.COLOR_FormatYUV420Planar,
78 CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
79 CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
80 COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m
82 private int colorFormat;
86 private int sliceHeight;
87 private boolean useSurface;
88 private int textureID = -1;
89 private SurfaceTexture surfaceTexture = null;
90 private Surface surface = null;
91 private float[] stMatrix = new float[16];
92 private EGLDisplay eglDisplay = EGL14.EGL_NO_DISPLAY;
93 private EGLContext eglContext = EGL14.EGL_NO_CONTEXT;
94 private EGLSurface eglSurface = EGL14.EGL_NO_SURFACE;
97 private MediaCodecVideoDecoder() { }
99 // Helper struct for findVp8HwDecoder() below.
100 private static class DecoderProperties {
101 public DecoderProperties(String codecName, int colorFormat) {
102 this.codecName = codecName;
103 this.colorFormat = colorFormat;
105 public final String codecName; // OpenMax component name for VP8 codec.
106 public final int colorFormat; // Color format supported by codec.
109 private static DecoderProperties findVp8HwDecoder() {
110 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT)
111 return null; // MediaCodec.setParameters is missing.
113 for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
114 MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
115 if (info.isEncoder()) {
119 for (String mimeType : info.getSupportedTypes()) {
120 if (mimeType.equals(VP8_MIME_TYPE)) {
121 name = info.getName();
126 continue; // No VP8 support in this codec; try the next one.
128 Log.d(TAG, "Found candidate decoder " + name);
130 // Check if this is supported HW decoder.
131 boolean supportedCodec = false;
132 for (String hwCodecPrefix : supportedHwCodecPrefixes) {
133 if (name.startsWith(hwCodecPrefix)) {
134 supportedCodec = true;
138 if (!supportedCodec) {
142 // Check if codec supports either yuv420 or nv12.
143 CodecCapabilities capabilities =
144 info.getCapabilitiesForType(VP8_MIME_TYPE);
145 for (int colorFormat : capabilities.colorFormats) {
146 Log.d(TAG, " Color: 0x" + Integer.toHexString(colorFormat));
148 for (int supportedColorFormat : supportedColorList) {
149 for (int codecColorFormat : capabilities.colorFormats) {
150 if (codecColorFormat == supportedColorFormat) {
151 // Found supported HW VP8 decoder.
152 Log.d(TAG, "Found target decoder " + name +
153 ". Color: 0x" + Integer.toHexString(codecColorFormat));
154 return new DecoderProperties(name, codecColorFormat);
159 return null; // No HW VP8 decoder.
162 private static boolean isPlatformSupported() {
163 return findVp8HwDecoder() != null;
166 private void checkOnMediaCodecThread() {
167 if (mediaCodecThread.getId() != Thread.currentThread().getId()) {
168 throw new RuntimeException(
169 "MediaCodecVideoDecoder previously operated on " + mediaCodecThread +
170 " but is now called on " + Thread.currentThread());
174 private void checkEglError(String msg) {
176 if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
177 Log.e(TAG, msg + ": EGL Error: 0x" + Integer.toHexString(error));
178 throw new RuntimeException(
179 msg + ": EGL error: 0x" + Integer.toHexString(error));
183 private void checkGlError(String msg) {
185 if ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
186 Log.e(TAG, msg + ": GL Error: 0x" + Integer.toHexString(error));
187 throw new RuntimeException(
188 msg + ": GL Error: 0x " + Integer.toHexString(error));
192 private void eglSetup(EGLContext sharedContext, int width, int height) {
193 Log.d(TAG, "EGL setup");
194 if (sharedContext == null) {
195 sharedContext = EGL14.EGL_NO_CONTEXT;
197 eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
198 if (eglDisplay == EGL14.EGL_NO_DISPLAY) {
199 throw new RuntimeException("Unable to get EGL14 display");
201 int[] version = new int[2];
202 if (!EGL14.eglInitialize(eglDisplay, version, 0, version, 1)) {
203 throw new RuntimeException("Unable to initialize EGL14");
206 // Configure EGL for pbuffer and OpenGL ES 2.0.
208 EGL14.EGL_RED_SIZE, 8,
209 EGL14.EGL_GREEN_SIZE, 8,
210 EGL14.EGL_BLUE_SIZE, 8,
211 EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
212 EGL14.EGL_SURFACE_TYPE, EGL14.EGL_PBUFFER_BIT,
215 EGLConfig[] configs = new EGLConfig[1];
216 int[] numConfigs = new int[1];
217 if (!EGL14.eglChooseConfig(eglDisplay, attribList, 0, configs, 0,
218 configs.length, numConfigs, 0)) {
219 throw new RuntimeException("Unable to find RGB888 EGL config");
222 // Configure context for OpenGL ES 2.0.
223 int[] attrib_list = {
224 EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
227 eglContext = EGL14.eglCreateContext(eglDisplay, configs[0], sharedContext,
229 checkEglError("eglCreateContext");
230 if (eglContext == null) {
231 throw new RuntimeException("Null EGL context");
234 // Create a pbuffer surface.
235 int[] surfaceAttribs = {
236 EGL14.EGL_WIDTH, width,
237 EGL14.EGL_HEIGHT, height,
240 eglSurface = EGL14.eglCreatePbufferSurface(eglDisplay, configs[0],
242 checkEglError("eglCreatePbufferSurface");
243 if (eglSurface == null) {
244 throw new RuntimeException("EGL surface was null");
248 private void eglRelease() {
249 Log.d(TAG, "EGL release");
250 if (eglDisplay != EGL14.EGL_NO_DISPLAY) {
251 EGL14.eglDestroySurface(eglDisplay, eglSurface);
252 EGL14.eglDestroyContext(eglDisplay, eglContext);
253 EGL14.eglReleaseThread();
254 EGL14.eglTerminate(eglDisplay);
256 eglDisplay = EGL14.EGL_NO_DISPLAY;
257 eglContext = EGL14.EGL_NO_CONTEXT;
258 eglSurface = EGL14.EGL_NO_SURFACE;
262 private void makeCurrent() {
263 if (!EGL14.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
264 throw new RuntimeException("eglMakeCurrent failed");
268 private boolean initDecode(int width, int height, boolean useSurface,
269 EGLContext sharedContext) {
270 if (mediaCodecThread != null) {
271 throw new RuntimeException("Forgot to release()?");
273 if (useSurface && sharedContext == null) {
274 throw new RuntimeException("No shared EGL context.");
276 DecoderProperties properties = findVp8HwDecoder();
277 if (properties == null) {
278 throw new RuntimeException("Cannot find HW VP8 decoder");
280 Log.d(TAG, "Java initDecode: " + width + " x " + height +
281 ". Color: 0x" + Integer.toHexString(properties.colorFormat) +
282 ". Use Surface: " + useSurface );
283 if (sharedContext != null) {
284 Log.d(TAG, "Decoder shared EGL Context: " + sharedContext);
286 mediaCodecThread = Thread.currentThread();
288 Surface decodeSurface = null;
290 this.height = height;
291 this.useSurface = useSurface;
293 sliceHeight = height;
296 // Create shared EGL context.
297 eglSetup(sharedContext, width, height);
300 // Create output surface
301 int[] textures = new int[1];
302 GLES20.glGenTextures(1, textures, 0);
303 checkGlError("glGenTextures");
304 textureID = textures[0];
305 GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureID);
306 checkGlError("glBindTexture mTextureID");
308 GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
309 GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
310 GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
311 GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
312 GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
313 GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
314 GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
315 GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
316 checkGlError("glTexParameter");
317 Log.d(TAG, "Video decoder TextureID = " + textureID);
318 surfaceTexture = new SurfaceTexture(textureID);
319 surface = new Surface(surfaceTexture);
320 decodeSurface = surface;
324 MediaFormat.createVideoFormat(VP8_MIME_TYPE, width, height);
326 format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
328 Log.d(TAG, " Format: " + format);
329 mediaCodec = MediaCodec.createByCodecName(properties.codecName);
330 if (mediaCodec == null) {
333 mediaCodec.configure(format, decodeSurface, null, 0);
335 colorFormat = properties.colorFormat;
336 outputBuffers = mediaCodec.getOutputBuffers();
337 inputBuffers = mediaCodec.getInputBuffers();
338 Log.d(TAG, "Input buffers: " + inputBuffers.length +
339 ". Output buffers: " + outputBuffers.length);
341 } catch (IllegalStateException e) {
342 Log.e(TAG, "initDecode failed", e);
347 private void release() {
348 Log.d(TAG, "Java releaseDecoder");
349 checkOnMediaCodecThread();
352 mediaCodec.release();
353 } catch (IllegalStateException e) {
354 Log.e(TAG, "release failed", e);
357 mediaCodecThread = null;
361 surfaceTexture = null;
362 if (textureID >= 0) {
363 int[] textures = new int[1];
364 textures[0] = textureID;
365 Log.d(TAG, "Delete video decoder TextureID " + textureID);
366 GLES20.glDeleteTextures(1, textures, 0);
367 checkGlError("glDeleteTextures");
373 // Dequeue an input buffer and return its index, -1 if no input buffer is
374 // available, or -2 if the codec is no longer operative.
375 private int dequeueInputBuffer() {
376 checkOnMediaCodecThread();
378 return mediaCodec.dequeueInputBuffer(DEQUEUE_INPUT_TIMEOUT);
379 } catch (IllegalStateException e) {
380 Log.e(TAG, "dequeueIntputBuffer failed", e);
385 private boolean queueInputBuffer(
386 int inputBufferIndex, int size, long timestampUs) {
387 checkOnMediaCodecThread();
389 inputBuffers[inputBufferIndex].position(0);
390 inputBuffers[inputBufferIndex].limit(size);
391 mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, timestampUs, 0);
394 catch (IllegalStateException e) {
395 Log.e(TAG, "decode failed", e);
400 // Helper struct for dequeueOutputBuffer() below.
401 private static class DecoderOutputBufferInfo {
402 public DecoderOutputBufferInfo(
403 int index, int offset, int size, long presentationTimestampUs) {
405 this.offset = offset;
407 this.presentationTimestampUs = presentationTimestampUs;
410 private final int index;
411 private final int offset;
412 private final int size;
413 private final long presentationTimestampUs;
416 // Dequeue and return an output buffer index, -1 if no output
417 // buffer available or -2 if error happened.
418 private DecoderOutputBufferInfo dequeueOutputBuffer(int dequeueTimeoutUs) {
419 checkOnMediaCodecThread();
421 MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
422 int result = mediaCodec.dequeueOutputBuffer(info, dequeueTimeoutUs);
423 while (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED ||
424 result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
425 if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
426 outputBuffers = mediaCodec.getOutputBuffers();
427 Log.d(TAG, "Output buffers changed: " + outputBuffers.length);
428 } else if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
429 MediaFormat format = mediaCodec.getOutputFormat();
430 Log.d(TAG, "Format changed: " + format.toString());
431 width = format.getInteger(MediaFormat.KEY_WIDTH);
432 height = format.getInteger(MediaFormat.KEY_HEIGHT);
433 if (!useSurface && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
434 colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
435 Log.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat));
436 // Check if new color space is supported.
437 boolean validColorFormat = false;
438 for (int supportedColorFormat : supportedColorList) {
439 if (colorFormat == supportedColorFormat) {
440 validColorFormat = true;
444 if (!validColorFormat) {
445 Log.e(TAG, "Non supported color format");
446 return new DecoderOutputBufferInfo(-1, 0, 0, -1);
449 if (format.containsKey("stride")) {
450 stride = format.getInteger("stride");
452 if (format.containsKey("slice-height")) {
453 sliceHeight = format.getInteger("slice-height");
455 Log.d(TAG, "Frame stride and slice height: "
456 + stride + " x " + sliceHeight);
457 stride = Math.max(width, stride);
458 sliceHeight = Math.max(height, sliceHeight);
460 result = mediaCodec.dequeueOutputBuffer(info, dequeueTimeoutUs);
463 return new DecoderOutputBufferInfo(result, info.offset, info.size,
464 info.presentationTimeUs);
467 } catch (IllegalStateException e) {
468 Log.e(TAG, "dequeueOutputBuffer failed", e);
469 return new DecoderOutputBufferInfo(-1, 0, 0, -1);
473 // Release a dequeued output buffer back to the codec for re-use. Return
474 // false if the codec is no longer operable.
475 private boolean releaseOutputBuffer(int index, boolean render) {
476 checkOnMediaCodecThread();
481 mediaCodec.releaseOutputBuffer(index, render);
483 } catch (IllegalStateException e) {
484 Log.e(TAG, "releaseOutputBuffer failed", e);