Upstream version 10.39.225.0
[platform/framework/web/crosswalk.git] src/media/base/android/java/src/org/chromium/media/VideoCapture.java
index 446d50e..84f29dd 100644
@@ -1,4 +1,4 @@
-// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Copyright 2014 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
@@ -7,266 +7,213 @@ package org.chromium.media;
 import android.content.Context;
 import android.graphics.ImageFormat;
 import android.graphics.SurfaceTexture;
-import android.graphics.SurfaceTexture.OnFrameAvailableListener;
-import android.hardware.Camera;
-import android.hardware.Camera.PreviewCallback;
 import android.opengl.GLES20;
 import android.util.Log;
 import android.view.Surface;
 import android.view.WindowManager;
 
-import java.io.IOException;
-import java.util.concurrent.locks.ReentrantLock;
-import java.util.Iterator;
-import java.util.List;
-
 import org.chromium.base.CalledByNative;
 import org.chromium.base.JNINamespace;
 
+import java.io.IOException;
+import java.util.List;
+import java.util.concurrent.locks.ReentrantLock;
+
+/**
+ * Video Capture Device base class to interface to native Chromium.
+ */
 @JNINamespace("media")
-public class VideoCapture implements PreviewCallback, OnFrameAvailableListener {
-    static class CaptureCapability {
-        public int mWidth = 0;
-        public int mHeight = 0;
-        public int mDesiredFps = 0;
-    }
+@SuppressWarnings("deprecation")
+public abstract class VideoCapture implements android.hardware.Camera.PreviewCallback {
+
+    protected static class CaptureFormat {
+        int mWidth;
+        int mHeight;
+        final int mFramerate;
+        final int mPixelFormat;
+
+        public CaptureFormat(
+                int width, int height, int framerate, int pixelformat) {
+            mWidth = width;
+            mHeight = height;
+            mFramerate = framerate;
+            mPixelFormat = pixelformat;
+        }
 
-    // Some devices with OS older than JELLY_BEAN don't support YV12 format correctly.
-    // Some devices don't support YV12 format correctly even with JELLY_BEAN or newer OS.
-    // To work around the issues on those devices, we'd have to request NV21.
-    // This is a temporary hack till device manufacturers fix the problem or
-    // we don't need to support those devices any more.
-    private static class DeviceImageFormatHack {
-        private static final String[] sBUGGY_DEVICE_LIST = {
-            "SAMSUNG-SGH-I747",
-            "ODROID-U2",
-        };
-
-        static int getImageFormat() {
-            if (android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.JELLY_BEAN) {
-                return ImageFormat.NV21;
-            }
+        public int getWidth() {
+            return mWidth;
+        }
 
-            for (String buggyDevice : sBUGGY_DEVICE_LIST) {
-                if (buggyDevice.contentEquals(android.os.Build.MODEL)) {
-                    return ImageFormat.NV21;
-                }
-            }
-            return ImageFormat.YV12;
+        public int getHeight() {
+            return mHeight;
+        }
+
+        public int getFramerate() {
+            return mFramerate;
+        }
+
+        public int getPixelFormat() {
+            return mPixelFormat;
         }
     }
 
-    private Camera mCamera;
-    public ReentrantLock mPreviewBufferLock = new ReentrantLock();
-    private int mImageFormat = ImageFormat.YV12;
-    private byte[] mColorPlane = null;
-    private Context mContext = null;
+    protected android.hardware.Camera mCamera;
+    protected CaptureFormat mCaptureFormat = null;
+    // Lock to mutually exclude execution of onPreviewFrame() and {start/stop}Capture().
+    protected ReentrantLock mPreviewBufferLock = new ReentrantLock();
+    protected Context mContext = null;
     // True when native code has started capture.
-    private boolean mIsRunning = false;
+    protected boolean mIsRunning = false;
 
-    private static final int NUM_CAPTURE_BUFFERS = 3;
-    private int mExpectedFrameSize = 0;
-    private int mId = 0;
+    protected int mId;
     // Native callback context variable.
-    private int mNativeVideoCaptureDeviceAndroid = 0;
-    private int[] mGlTextures = null;
-    private SurfaceTexture mSurfaceTexture = null;
-    private static final int GL_TEXTURE_EXTERNAL_OES = 0x8D65;
-
-    private int mCameraOrientation = 0;
-    private int mCameraFacing = 0;
-    private int mDeviceOrientation = 0;
-
-    CaptureCapability mCurrentCapability = null;
+    protected long mNativeVideoCaptureDeviceAndroid;
+    protected int[] mGlTextures = null;
+    protected SurfaceTexture mSurfaceTexture = null;
+    protected static final int GL_TEXTURE_EXTERNAL_OES = 0x8D65;
+
+    protected int mCameraOrientation;
+    protected int mCameraFacing;
+    protected int mDeviceOrientation;
     private static final String TAG = "VideoCapture";
 
-    @CalledByNative
-    public static VideoCapture createVideoCapture(
-            Context context, int id, int nativeVideoCaptureDeviceAndroid) {
-        return new VideoCapture(context, id, nativeVideoCaptureDeviceAndroid);
-    }
-
-    public VideoCapture(
-            Context context, int id, int nativeVideoCaptureDeviceAndroid) {
+    VideoCapture(Context context,
+                 int id,
+                 long nativeVideoCaptureDeviceAndroid) {
         mContext = context;
         mId = id;
         mNativeVideoCaptureDeviceAndroid = nativeVideoCaptureDeviceAndroid;
     }
 
-    // Returns true on success, false otherwise.
     @CalledByNative
-    public boolean allocate(int width, int height, int frameRate) {
-        Log.d(TAG, "allocate: requested width=" + width +
-              ", height=" + height + ", frameRate=" + frameRate);
+    boolean allocate(int width, int height, int frameRate) {
+        Log.d(TAG, "allocate: requested (" + width + "x" + height + ")@" +
+                frameRate + "fps");
         try {
-            mCamera = Camera.open(mId);
+            mCamera = android.hardware.Camera.open(mId);
         } catch (RuntimeException ex) {
-            Log.e(TAG, "allocate:Camera.open: " + ex);
+            Log.e(TAG, "allocate: Camera.open: " + ex);
             return false;
         }
 
-        try {
-            Camera.CameraInfo camera_info = new Camera.CameraInfo();
-            Camera.getCameraInfo(mId, camera_info);
-            mCameraOrientation = camera_info.orientation;
-            mCameraFacing = camera_info.facing;
-            mDeviceOrientation = getDeviceOrientation();
-            Log.d(TAG, "allocate: device orientation=" + mDeviceOrientation +
-                  ", camera orientation=" + mCameraOrientation +
-                  ", facing=" + mCameraFacing);
-
-            Camera.Parameters parameters = mCamera.getParameters();
-
-            // Calculate fps.
-            List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange();
-            if (listFpsRange == null || listFpsRange.size() == 0) {
-                Log.e(TAG, "allocate: no fps range found");
-                return false;
-            }
-            int frameRateInMs = frameRate * 1000;
-            Iterator itFpsRange = listFpsRange.iterator();
-            int[] fpsRange = (int[])itFpsRange.next();
-            // Use the first range as default.
-            int fpsMin = fpsRange[0];
-            int fpsMax = fpsRange[1];
-            int newFrameRate = (fpsMin + 999) / 1000;
-            while (itFpsRange.hasNext()) {
-                fpsRange = (int[])itFpsRange.next();
-                if (fpsRange[0] <= frameRateInMs &&
-                    frameRateInMs <= fpsRange[1]) {
-                    fpsMin = fpsRange[0];
-                    fpsMax = fpsRange[1];
-                    newFrameRate = frameRate;
-                    break;
-                }
-            }
-            frameRate = newFrameRate;
-            Log.d(TAG, "allocate: fps set to " + frameRate);
-
-            mCurrentCapability = new CaptureCapability();
-            mCurrentCapability.mDesiredFps = frameRate;
-
-            // Calculate size.
-            List<Camera.Size> listCameraSize =
-                    parameters.getSupportedPreviewSizes();
-            int minDiff = Integer.MAX_VALUE;
-            int matchedWidth = width;
-            int matchedHeight = height;
-            Iterator itCameraSize = listCameraSize.iterator();
-            while (itCameraSize.hasNext()) {
-                Camera.Size size = (Camera.Size)itCameraSize.next();
-                int diff = Math.abs(size.width - width) +
-                           Math.abs(size.height - height);
-                Log.d(TAG, "allocate: support resolution (" +
-                      size.width + ", " + size.height + "), diff=" + diff);
-                // TODO(wjia): Remove this hack (forcing width to be multiple
-                // of 32) by supporting stride in video frame buffer.
-                // Right now, VideoCaptureController requires compact YV12
-                // (i.e., with no padding).
-                if (diff < minDiff && (size.width % 32 == 0)) {
-                    minDiff = diff;
-                    matchedWidth = size.width;
-                    matchedHeight = size.height;
-                }
-            }
-            if (minDiff == Integer.MAX_VALUE) {
-                Log.e(TAG, "allocate: can not find a resolution whose width " +
-                           "is multiple of 32");
-                return false;
+        android.hardware.Camera.CameraInfo cameraInfo = getCameraInfo(mId);
+        if (cameraInfo == null) {
+            mCamera.release();
+            mCamera = null;
+            return false;
+        }
+
+        mCameraOrientation = cameraInfo.orientation;
+        mCameraFacing = cameraInfo.facing;
+        mDeviceOrientation = getDeviceOrientation();
+        Log.d(TAG, "allocate: orientation dev=" + mDeviceOrientation +
+                  ", cam=" + mCameraOrientation + ", facing=" + mCameraFacing);
+
+        android.hardware.Camera.Parameters parameters = getCameraParameters(mCamera);
+        if (parameters == null) {
+            mCamera = null;
+            return false;
+        }
+
+        // getSupportedPreviewFpsRange() returns a List with at least one element,
+        // but when the camera is in a bad state it can return null.
+        List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange();
+        if (listFpsRange == null || listFpsRange.size() == 0) {
+            Log.e(TAG, "allocate: no fps range found");
+            return false;
+        }
+        int frameRateInMs = frameRate * 1000;
+        // Use the first range as default.
+        int[] fpsMinMax = listFpsRange.get(0);
+        int newFrameRate = (fpsMinMax[0] + 999) / 1000;
+        for (int[] fpsRange : listFpsRange) {
+            if (fpsRange[0] <= frameRateInMs && frameRateInMs <= fpsRange[1]) {
+                fpsMinMax = fpsRange;
+                newFrameRate = frameRate;
+                break;
             }
-            mCurrentCapability.mWidth = matchedWidth;
-            mCurrentCapability.mHeight = matchedHeight;
-            Log.d(TAG, "allocate: matched width=" + matchedWidth +
-                  ", height=" + matchedHeight);
-
-            calculateImageFormat(matchedWidth, matchedHeight);
-
-            if (parameters.isVideoStabilizationSupported()){
-                Log.d(TAG, "Image stabilization supported, currently: "
-                      + parameters.getVideoStabilization() + ", setting it.");
-                parameters.setVideoStabilization(true);
-            } else {
-                Log.d(TAG, "Image stabilization not supported.");
+        }
+        frameRate = newFrameRate;
+        Log.d(TAG, "allocate: fps set to " + frameRate);
+
+        // Calculate size.
+        List<android.hardware.Camera.Size> listCameraSize =
+                parameters.getSupportedPreviewSizes();
+        int minDiff = Integer.MAX_VALUE;
+        int matchedWidth = width;
+        int matchedHeight = height;
+        for (android.hardware.Camera.Size size : listCameraSize) {
+            int diff = Math.abs(size.width - width) +
+                       Math.abs(size.height - height);
+            Log.d(TAG, "allocate: supported (" +
+                  size.width + ", " + size.height + "), diff=" + diff);
+            // TODO(wjia): Remove this hack (forcing width to be multiple
+            // of 32) by supporting stride in video frame buffer.
+            // Right now, VideoCaptureController requires compact YV12
+            // (i.e., with no padding).
+            if (diff < minDiff && (size.width % 32 == 0)) {
+                minDiff = diff;
+                matchedWidth = size.width;
+                matchedHeight = size.height;
             }
+        }
+        if (minDiff == Integer.MAX_VALUE) {
+            Log.e(TAG, "allocate: cannot find a multiple-of-32 resolution");
+            return false;
+        }
+        Log.d(TAG, "allocate: matched (" + matchedWidth + "x" + matchedHeight + ")");
+
+        if (parameters.isVideoStabilizationSupported()) {
+            Log.d(TAG, "Image stabilization supported, currently: " +
+                  parameters.getVideoStabilization() + ", setting it.");
+            parameters.setVideoStabilization(true);
+        } else {
+            Log.d(TAG, "Image stabilization not supported.");
+        }
 
-            parameters.setPreviewSize(matchedWidth, matchedHeight);
-            parameters.setPreviewFormat(mImageFormat);
-            parameters.setPreviewFpsRange(fpsMin, fpsMax);
+        setCaptureParameters(matchedWidth, matchedHeight, frameRate, parameters);
+        parameters.setPreviewSize(mCaptureFormat.mWidth,
+                                  mCaptureFormat.mHeight);
+        parameters.setPreviewFpsRange(fpsMinMax[0], fpsMinMax[1]);
+        parameters.setPreviewFormat(mCaptureFormat.mPixelFormat);
+        try {
             mCamera.setParameters(parameters);
+        } catch (RuntimeException ex) {
+            Log.e(TAG, "setParameters: " + ex);
+            return false;
+        }
 
-            // Set SurfaceTexture.
-            mGlTextures = new int[1];
-            // Generate one texture pointer and bind it as an external texture.
-            GLES20.glGenTextures(1, mGlTextures, 0);
-            GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mGlTextures[0]);
-            // No mip-mapping with camera source.
-            GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES,
-                    GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
-            GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES,
-                    GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
-            // Clamp to edge is only option.
-            GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
-                    GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
-            GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
-                    GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
-
-            mSurfaceTexture = new SurfaceTexture(mGlTextures[0]);
-            mSurfaceTexture.setOnFrameAvailableListener(null);
-
+        // Set SurfaceTexture. The camera capture pipeline needs a SurfaceTexture
+        // even if it is not going to be used.
+        mGlTextures = new int[1];
+        // Generate one texture pointer and bind it as an external texture.
+        GLES20.glGenTextures(1, mGlTextures, 0);
+        GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mGlTextures[0]);
+        // No mip-mapping with camera source.
+        GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES,
+                GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
+        GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES,
+                GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
+        // Clamp to edge is only option.
+        GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
+                GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
+        GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
+                GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
+
+        mSurfaceTexture = new SurfaceTexture(mGlTextures[0]);
+        mSurfaceTexture.setOnFrameAvailableListener(null);
+        try {
             mCamera.setPreviewTexture(mSurfaceTexture);
-
-            int bufSize = matchedWidth * matchedHeight *
-                          ImageFormat.getBitsPerPixel(mImageFormat) / 8;
-            for (int i = 0; i < NUM_CAPTURE_BUFFERS; i++) {
-                byte[] buffer = new byte[bufSize];
-                mCamera.addCallbackBuffer(buffer);
-            }
-            mExpectedFrameSize = bufSize;
         } catch (IOException ex) {
             Log.e(TAG, "allocate: " + ex);
             return false;
         }
 
+        allocateBuffers();
         return true;
     }
 
     @CalledByNative
-    public int queryWidth() {
-        return mCurrentCapability.mWidth;
-    }
-
-    @CalledByNative
-    public int queryHeight() {
-        return mCurrentCapability.mHeight;
-    }
-
-    @CalledByNative
-    public int queryFrameRate() {
-        return mCurrentCapability.mDesiredFps;
-    }
-
-    @CalledByNative
-    public int getColorspace() {
-        switch (mImageFormat){
-        case ImageFormat.YV12:
-            return AndroidImageFormatList.ANDROID_IMAGEFORMAT_YV12;
-        case ImageFormat.NV21:
-            return AndroidImageFormatList.ANDROID_IMAGEFORMAT_NV21;
-        case ImageFormat.YUY2:
-            return AndroidImageFormatList.ANDROID_IMAGEFORMAT_YUY2;
-        case ImageFormat.NV16:
-            return AndroidImageFormatList.ANDROID_IMAGEFORMAT_NV16;
-        case ImageFormat.JPEG:
-            return AndroidImageFormatList.ANDROID_IMAGEFORMAT_JPEG;
-        case ImageFormat.RGB_565:
-            return AndroidImageFormatList.ANDROID_IMAGEFORMAT_RGB_565;
-        case ImageFormat.UNKNOWN:
-        default:
-            return AndroidImageFormatList.ANDROID_IMAGEFORMAT_UNKNOWN;
-        }
-    }
-
-    @CalledByNative
     public int startCapture() {
         if (mCamera == null) {
             Log.e(TAG, "startCapture: camera is null");
@@ -282,8 +229,13 @@ public class VideoCapture implements PreviewCallback, OnFrameAvailableListener {
         } finally {
             mPreviewBufferLock.unlock();
         }
-        mCamera.setPreviewCallbackWithBuffer(this);
-        mCamera.startPreview();
+        setPreviewCallback(this);
+        try {
+            mCamera.startPreview();
+        } catch (RuntimeException ex) {
+            Log.e(TAG, "startCapture: Camera.startPreview: " + ex);
+            return -1;
+        }
         return 0;
     }
 
@@ -305,7 +257,7 @@ public class VideoCapture implements PreviewCallback, OnFrameAvailableListener {
         }
 
         mCamera.stopPreview();
-        mCamera.setPreviewCallbackWithBuffer(null);
+        setPreviewCallback(null);
         return 0;
     }
 
@@ -319,7 +271,7 @@ public class VideoCapture implements PreviewCallback, OnFrameAvailableListener {
             mCamera.setPreviewTexture(null);
             if (mGlTextures != null)
                 GLES20.glDeleteTextures(1, mGlTextures, 0);
-            mCurrentCapability = null;
+            mCaptureFormat = null;
             mCamera.release();
             mCamera = null;
         } catch (IOException ex) {
@@ -328,99 +280,54 @@ public class VideoCapture implements PreviewCallback, OnFrameAvailableListener {
         }
     }
 
-    @Override
-    public void onPreviewFrame(byte[] data, Camera camera) {
-        mPreviewBufferLock.lock();
-        try {
-            if (!mIsRunning) {
-                return;
-            }
-            if (data.length == mExpectedFrameSize) {
-                int rotation = getDeviceOrientation();
-                if (rotation != mDeviceOrientation) {
-                    mDeviceOrientation = rotation;
-                    Log.d(TAG,
-                          "onPreviewFrame: device orientation=" +
-                          mDeviceOrientation + ", camera orientation=" +
-                          mCameraOrientation);
-                }
-                boolean flipVertical = false;
-                boolean flipHorizontal = false;
-                if (mCameraFacing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
-                    rotation = (mCameraOrientation + rotation) % 360;
-                    rotation = (360 - rotation) % 360;
-                    flipHorizontal = (rotation == 270 || rotation == 90);
-                    flipVertical = flipHorizontal;
-                } else {
-                    rotation = (mCameraOrientation - rotation + 360) % 360;
-                }
-                nativeOnFrameAvailable(mNativeVideoCaptureDeviceAndroid,
-                        data, mExpectedFrameSize,
-                        rotation, flipVertical, flipHorizontal);
-            }
-        } finally {
-            mPreviewBufferLock.unlock();
-            if (camera != null) {
-                camera.addCallbackBuffer(data);
-            }
-        }
-    }
-
-    // TODO(wjia): investigate whether reading from texture could give better
-    // performance and frame rate.
-    @Override
-    public void onFrameAvailable(SurfaceTexture surfaceTexture) { }
+    // Local hook to allow derived classes to fill in the capture format and
+    // modify camera parameters as they see fit.
+    abstract void setCaptureParameters(
+            int width,
+            int height,
+            int frameRate,
+            android.hardware.Camera.Parameters cameraParameters);
 
-    private static class ChromiumCameraInfo {
-        private final int mId;
-        private final Camera.CameraInfo mCameraInfo;
+    // Local hook to allow derived classes to configure and plug in capture
+    // buffers as needed.
+    abstract void allocateBuffers();
 
-        private ChromiumCameraInfo(int index) {
-            mId = index;
-            mCameraInfo = new Camera.CameraInfo();
-            Camera.getCameraInfo(index, mCameraInfo);
-        }
-
-        @CalledByNative("ChromiumCameraInfo")
-        private static int getNumberOfCameras() {
-            return Camera.getNumberOfCameras();
-        }
+    // Local method to be overridden with the particular setPreviewCallback() variant
+    // used by each implementation.
+    abstract void setPreviewCallback(android.hardware.Camera.PreviewCallback cb);
 
-        @CalledByNative("ChromiumCameraInfo")
-        private static ChromiumCameraInfo getAt(int index) {
-            return new ChromiumCameraInfo(index);
-        }
+    @CalledByNative
+    public int queryWidth() {
+        return mCaptureFormat.mWidth;
+    }
 
-        @CalledByNative("ChromiumCameraInfo")
-        private int getId() {
-            return mId;
-        }
+    @CalledByNative
+    public int queryHeight() {
+        return mCaptureFormat.mHeight;
+    }
 
-        @CalledByNative("ChromiumCameraInfo")
-        private String getDeviceName() {
-            return  "camera " + mId + ", facing " +
-                    (mCameraInfo.facing ==
-                     Camera.CameraInfo.CAMERA_FACING_FRONT ? "front" : "back");
-        }
+    @CalledByNative
+    public int queryFrameRate() {
+        return mCaptureFormat.mFramerate;
+    }
 
-        @CalledByNative("ChromiumCameraInfo")
-        private int getOrientation() {
-            return mCameraInfo.orientation;
+    @CalledByNative
+    public int getColorspace() {
+        switch (mCaptureFormat.mPixelFormat) {
+            case ImageFormat.YV12:
+                return AndroidImageFormatList.ANDROID_IMAGEFORMAT_YV12;
+            case ImageFormat.NV21:
+                return AndroidImageFormatList.ANDROID_IMAGEFORMAT_NV21;
+            case ImageFormat.UNKNOWN:
+            default:
+                return AndroidImageFormatList.ANDROID_IMAGEFORMAT_UNKNOWN;
         }
     }
 
-    private native void nativeOnFrameAvailable(
-            int nativeVideoCaptureDeviceAndroid,
-            byte[] data,
-            int length,
-            int rotation,
-            boolean flipVertical,
-            boolean flipHorizontal);
-
-    private int getDeviceOrientation() {
+    protected int getDeviceOrientation() {
         int orientation = 0;
         if (mContext != null) {
-            WindowManager wm = (WindowManager)mContext.getSystemService(
+            WindowManager wm = (WindowManager) mContext.getSystemService(
                     Context.WINDOW_SERVICE);
             switch(wm.getDefaultDisplay().getRotation()) {
                 case Surface.ROTATION_90:
@@ -441,7 +348,34 @@ public class VideoCapture implements PreviewCallback, OnFrameAvailableListener {
         return orientation;
     }
 
-    private void calculateImageFormat(int width, int height) {
-        mImageFormat = DeviceImageFormatHack.getImageFormat();
+    // Method for VideoCapture implementations to call back native code.
+    public native void nativeOnFrameAvailable(
+            long nativeVideoCaptureDeviceAndroid,
+            byte[] data,
+            int length,
+            int rotation);
+
+    protected static android.hardware.Camera.Parameters getCameraParameters(
+            android.hardware.Camera camera) {
+        android.hardware.Camera.Parameters parameters;
+        try {
+            parameters = camera.getParameters();
+        } catch (RuntimeException ex) {
+            Log.e(TAG, "getCameraParameters: android.hardware.Camera.getParameters: " + ex);
+            camera.release();
+            return null;
+        }
+        return parameters;
+    }
+
+    private android.hardware.Camera.CameraInfo getCameraInfo(int id) {
+        android.hardware.Camera.CameraInfo cameraInfo = new android.hardware.Camera.CameraInfo();
+        try {
+            android.hardware.Camera.getCameraInfo(id, cameraInfo);
+        } catch (RuntimeException ex) {
+            Log.e(TAG, "getCameraInfo: android.hardware.Camera.getCameraInfo: " + ex);
+            return null;
+        }
+        return cameraInfo;
     }
 }
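
For context on how the refactored class is meant to be used: VideoCapture now delegates format selection, buffer management and preview-callback wiring to subclasses via setCaptureParameters(), allocateBuffers() and setPreviewCallback(). The sketch below shows one way a derived class could fill in those hooks. It is a hypothetical example only: the class name, the buffer count and the YV12 choice are assumptions reconstructed from the removed code, not the concrete implementations that accompany this change.

package org.chromium.media;

import android.content.Context;
import android.graphics.ImageFormat;

// Illustrative subclass; not part of the actual patch.
class ExampleBufferedVideoCapture extends VideoCapture {
    private static final int NUM_CAPTURE_BUFFERS = 3;  // assumed, as in the pre-refactor code
    private int mExpectedFrameSize;

    ExampleBufferedVideoCapture(Context context, int id, long nativeVideoCaptureDeviceAndroid) {
        super(context, id, nativeVideoCaptureDeviceAndroid);
    }

    @Override
    void setCaptureParameters(int width, int height, int frameRate,
            android.hardware.Camera.Parameters cameraParameters) {
        // Record the negotiated format; allocate() applies it to the camera afterwards.
        mCaptureFormat = new CaptureFormat(width, height, frameRate, ImageFormat.YV12);
    }

    @Override
    void allocateBuffers() {
        // One uncompressed frame per callback buffer, sized for the chosen pixel format.
        mExpectedFrameSize = mCaptureFormat.getWidth() * mCaptureFormat.getHeight()
                * ImageFormat.getBitsPerPixel(mCaptureFormat.getPixelFormat()) / 8;
        for (int i = 0; i < NUM_CAPTURE_BUFFERS; i++) {
            mCamera.addCallbackBuffer(new byte[mExpectedFrameSize]);
        }
    }

    @Override
    void setPreviewCallback(android.hardware.Camera.PreviewCallback cb) {
        mCamera.setPreviewCallbackWithBuffer(cb);
    }

    @Override
    public void onPreviewFrame(byte[] data, android.hardware.Camera camera) {
        mPreviewBufferLock.lock();
        try {
            if (!mIsRunning || data.length != mExpectedFrameSize) return;
            nativeOnFrameAvailable(mNativeVideoCaptureDeviceAndroid, data,
                    mExpectedFrameSize, getFrameRotation());
        } finally {
            mPreviewBufferLock.unlock();
            if (camera != null) camera.addCallbackBuffer(data);  // recycle the buffer
        }
    }

    // Mirrors the rotation math of the pre-refactor onPreviewFrame().
    private int getFrameRotation() {
        int rotation = getDeviceOrientation();
        if (mCameraFacing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) {
            rotation = (360 - (mCameraOrientation + rotation) % 360) % 360;
        } else {
            rotation = (mCameraOrientation - rotation + 360) % 360;
        }
        return rotation;
    }
}

Because this sketch uses setPreviewCallbackWithBuffer(), frames are delivered only into the pre-allocated buffers; the callback must hand each buffer back via addCallbackBuffer(), otherwise the preview stalls once the small buffer pool is exhausted.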