// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
5 package org.chromium.media;
7 import android.content.Context;
8 import android.graphics.ImageFormat;
9 import android.graphics.SurfaceTexture;
10 import android.hardware.Camera;
11 import android.hardware.Camera.PreviewCallback;
12 import android.hardware.Camera.Size;
13 import android.opengl.GLES20;
14 import android.util.Log;
15 import android.view.Surface;
16 import android.view.WindowManager;
18 import org.chromium.base.CalledByNative;
19 import org.chromium.base.JNINamespace;
21 import java.io.IOException;
22 import java.util.ArrayList;
23 import java.util.List;
24 import java.util.concurrent.locks.ReentrantLock;
/** This class implements the listener interface for receiving copies of preview
 * frames from the camera, plus a series of methods to manipulate camera and its
 * capture from the C++ side. Objects of this class are created via
 * createVideoCapture() and are explicitly owned by the creator. All methods
 * are invoked by this owner, including the callback OnPreviewFrame().
 **/
32 @JNINamespace("media")
33 public class VideoCapture implements PreviewCallback {
34 static class CaptureFormat {
36 int width, int height, int framerate, int pixelformat) {
39 mFramerate = framerate;
40 mPixelFormat = pixelformat;
44 public final int mFramerate;
45 public final int mPixelFormat;
46 @CalledByNative("CaptureFormat")
47 public int getWidth() {
50 @CalledByNative("CaptureFormat")
51 public int getHeight() {
54 @CalledByNative("CaptureFormat")
55 public int getFramerate() {
58 @CalledByNative("CaptureFormat")
59 public int getPixelFormat() {
64 // Some devices don't support YV12 format correctly, even with JELLY_BEAN or
65 // newer OS. To work around the issues on those devices, we have to request
66 // NV21. Some other devices have troubles with certain capture resolutions
67 // under a given one: for those, the resolution is swapped with a known
68 // good. Both are supposed to be temporary hacks.
69 private static class BuggyDeviceHack {
70 private static class IdAndSizes {
71 IdAndSizes(String model, String device, int minWidth, int minHeight) {
75 mMinHeight = minHeight;
77 public final String mModel;
78 public final String mDevice;
79 public final int mMinWidth;
80 public final int mMinHeight;
82 private static final IdAndSizes s_CAPTURESIZE_BUGGY_DEVICE_LIST[] = {
83 new IdAndSizes("Nexus 7", "flo", 640, 480)
86 private static final String[] s_COLORSPACE_BUGGY_DEVICE_LIST = {
91 static void applyMinDimensions(CaptureFormat format) {
92 // NOTE: this can discard requested aspect ratio considerations.
93 for (IdAndSizes buggyDevice : s_CAPTURESIZE_BUGGY_DEVICE_LIST) {
94 if (buggyDevice.mModel.contentEquals(android.os.Build.MODEL) &&
95 buggyDevice.mDevice.contentEquals(android.os.Build.DEVICE)) {
96 format.mWidth = (buggyDevice.mMinWidth > format.mWidth)
97 ? buggyDevice.mMinWidth
99 format.mHeight = (buggyDevice.mMinHeight > format.mHeight)
100 ? buggyDevice.mMinHeight
106 static int getImageFormat() {
107 if (android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.JELLY_BEAN) {
108 return ImageFormat.NV21;
111 for (String buggyDevice : s_COLORSPACE_BUGGY_DEVICE_LIST) {
112 if (buggyDevice.contentEquals(android.os.Build.MODEL)) {
113 return ImageFormat.NV21;
116 return ImageFormat.YV12;
120 private Camera mCamera;
121 public ReentrantLock mPreviewBufferLock = new ReentrantLock();
122 private Context mContext = null;
123 // True when native code has started capture.
124 private boolean mIsRunning = false;
126 private static final int NUM_CAPTURE_BUFFERS = 3;
127 private int mExpectedFrameSize = 0;
129 // Native callback context variable.
130 private long mNativeVideoCaptureDeviceAndroid = 0;
131 private int[] mGlTextures = null;
132 private SurfaceTexture mSurfaceTexture = null;
133 private static final int GL_TEXTURE_EXTERNAL_OES = 0x8D65;
135 private int mCameraOrientation = 0;
136 private int mCameraFacing = 0;
137 private int mDeviceOrientation = 0;
139 CaptureFormat mCaptureFormat = null;
140 private static final String TAG = "VideoCapture";
143 public static VideoCapture createVideoCapture(
144 Context context, int id, long nativeVideoCaptureDeviceAndroid) {
145 return new VideoCapture(context, id, nativeVideoCaptureDeviceAndroid);
149 public static CaptureFormat[] getDeviceSupportedFormats(int id) {
152 camera = Camera.open(id);
153 } catch (RuntimeException ex) {
154 Log.e(TAG, "Camera.open: " + ex);
157 Camera.Parameters parameters = camera.getParameters();
159 ArrayList<CaptureFormat> formatList = new ArrayList<CaptureFormat>();
160 // getSupportedPreview{Formats,FpsRange,PreviewSizes}() returns Lists
161 // with at least one element, but when the camera is in bad state, they
162 // can return null pointers; in that case we use a 0 entry, so we can
163 // retrieve as much information as possible.
164 List<Integer> pixelFormats = parameters.getSupportedPreviewFormats();
165 if (pixelFormats == null) {
166 pixelFormats = new ArrayList<Integer>();
168 if (pixelFormats.size() == 0) {
169 pixelFormats.add(ImageFormat.UNKNOWN);
171 for (Integer previewFormat : pixelFormats) {
173 AndroidImageFormatList.ANDROID_IMAGEFORMAT_UNKNOWN;
174 if (previewFormat == ImageFormat.YV12) {
175 pixelFormat = AndroidImageFormatList.ANDROID_IMAGEFORMAT_YV12;
176 } else if (previewFormat == ImageFormat.NV21) {
180 List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange();
181 if (listFpsRange == null) {
182 listFpsRange = new ArrayList<int[]>();
184 if (listFpsRange.size() == 0) {
185 listFpsRange.add(new int[] {0, 0});
187 for (int[] fpsRange : listFpsRange) {
188 List<Camera.Size> supportedSizes =
189 parameters.getSupportedPreviewSizes();
190 if (supportedSizes == null) {
191 supportedSizes = new ArrayList<Camera.Size>();
193 if (supportedSizes.size() == 0) {
194 supportedSizes.add(camera.new Size(0, 0));
196 for (Camera.Size size : supportedSizes) {
197 formatList.add(new CaptureFormat(size.width, size.height,
198 (fpsRange[0] + 999 ) / 1000, pixelFormat));
203 return formatList.toArray(new CaptureFormat[formatList.size()]);
207 Context context, int id, long nativeVideoCaptureDeviceAndroid) {
210 mNativeVideoCaptureDeviceAndroid = nativeVideoCaptureDeviceAndroid;
213 // Returns true on success, false otherwise.
215 public boolean allocate(int width, int height, int frameRate) {
216 Log.d(TAG, "allocate: requested (" + width + "x" + height + ")@" +
219 mCamera = Camera.open(mId);
220 } catch (RuntimeException ex) {
221 Log.e(TAG, "allocate: Camera.open: " + ex);
225 Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
226 Camera.getCameraInfo(mId, cameraInfo);
227 mCameraOrientation = cameraInfo.orientation;
228 mCameraFacing = cameraInfo.facing;
229 mDeviceOrientation = getDeviceOrientation();
230 Log.d(TAG, "allocate: orientation dev=" + mDeviceOrientation +
231 ", cam=" + mCameraOrientation + ", facing=" + mCameraFacing);
233 Camera.Parameters parameters = mCamera.getParameters();
235 // getSupportedPreviewFpsRange() returns a List with at least one
236 // element, but when camera is in bad state, it can return null pointer.
237 List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange();
238 if (listFpsRange == null || listFpsRange.size() == 0) {
239 Log.e(TAG, "allocate: no fps range found");
242 int frameRateInMs = frameRate * 1000;
243 // Use the first range as default.
244 int[] fpsMinMax = listFpsRange.get(0);
245 int newFrameRate = (fpsMinMax[0] + 999) / 1000;
246 for (int[] fpsRange : listFpsRange) {
247 if (fpsRange[0] <= frameRateInMs && frameRateInMs <= fpsRange[1]) {
248 fpsMinMax = fpsRange;
249 newFrameRate = frameRate;
253 frameRate = newFrameRate;
254 Log.d(TAG, "allocate: fps set to " + frameRate);
257 List<Camera.Size> listCameraSize =
258 parameters.getSupportedPreviewSizes();
259 int minDiff = Integer.MAX_VALUE;
260 int matchedWidth = width;
261 int matchedHeight = height;
262 for (Camera.Size size : listCameraSize) {
263 int diff = Math.abs(size.width - width) +
264 Math.abs(size.height - height);
265 Log.d(TAG, "allocate: supported (" +
266 size.width + ", " + size.height + "), diff=" + diff);
267 // TODO(wjia): Remove this hack (forcing width to be multiple
268 // of 32) by supporting stride in video frame buffer.
269 // Right now, VideoCaptureController requires compact YV12
270 // (i.e., with no padding).
271 if (diff < minDiff && (size.width % 32 == 0)) {
273 matchedWidth = size.width;
274 matchedHeight = size.height;
277 if (minDiff == Integer.MAX_VALUE) {
278 Log.e(TAG, "allocate: can not find a multiple-of-32 resolution");
282 mCaptureFormat = new CaptureFormat(
283 matchedWidth, matchedHeight, frameRate,
284 BuggyDeviceHack.getImageFormat());
285 // Hack to avoid certain capture resolutions under a minimum one,
286 // see http://crbug.com/305294
287 BuggyDeviceHack.applyMinDimensions(mCaptureFormat);
288 Log.d(TAG, "allocate: matched (" + mCaptureFormat.mWidth + "x" +
289 mCaptureFormat.mHeight + ")");
291 if (parameters.isVideoStabilizationSupported()) {
292 Log.d(TAG, "Image stabilization supported, currently: "
293 + parameters.getVideoStabilization() + ", setting it.");
294 parameters.setVideoStabilization(true);
296 Log.d(TAG, "Image stabilization not supported.");
298 parameters.setPreviewSize(mCaptureFormat.mWidth,
299 mCaptureFormat.mHeight);
300 parameters.setPreviewFormat(mCaptureFormat.mPixelFormat);
301 parameters.setPreviewFpsRange(fpsMinMax[0], fpsMinMax[1]);
302 mCamera.setParameters(parameters);
304 // Set SurfaceTexture. Android Capture needs a SurfaceTexture even if
305 // it is not going to be used.
306 mGlTextures = new int[1];
307 // Generate one texture pointer and bind it as an external texture.
308 GLES20.glGenTextures(1, mGlTextures, 0);
309 GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mGlTextures[0]);
310 // No mip-mapping with camera source.
311 GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES,
312 GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
313 GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES,
314 GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
315 // Clamp to edge is only option.
316 GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
317 GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
318 GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
319 GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
321 mSurfaceTexture = new SurfaceTexture(mGlTextures[0]);
322 mSurfaceTexture.setOnFrameAvailableListener(null);
325 mCamera.setPreviewTexture(mSurfaceTexture);
326 } catch (IOException ex) {
327 Log.e(TAG, "allocate: " + ex);
331 int bufSize = mCaptureFormat.mWidth *
332 mCaptureFormat.mHeight *
333 ImageFormat.getBitsPerPixel(
334 mCaptureFormat.mPixelFormat) / 8;
335 for (int i = 0; i < NUM_CAPTURE_BUFFERS; i++) {
336 byte[] buffer = new byte[bufSize];
337 mCamera.addCallbackBuffer(buffer);
339 mExpectedFrameSize = bufSize;
345 public int queryWidth() {
346 return mCaptureFormat.mWidth;
350 public int queryHeight() {
351 return mCaptureFormat.mHeight;
355 public int queryFrameRate() {
356 return mCaptureFormat.mFramerate;
360 public int getColorspace() {
361 switch (mCaptureFormat.mPixelFormat) {
362 case ImageFormat.YV12:
363 return AndroidImageFormatList.ANDROID_IMAGEFORMAT_YV12;
364 case ImageFormat.NV21:
365 return AndroidImageFormatList.ANDROID_IMAGEFORMAT_NV21;
366 case ImageFormat.UNKNOWN:
368 return AndroidImageFormatList.ANDROID_IMAGEFORMAT_UNKNOWN;
373 public int startCapture() {
374 if (mCamera == null) {
375 Log.e(TAG, "startCapture: camera is null");
379 mPreviewBufferLock.lock();
386 mPreviewBufferLock.unlock();
388 mCamera.setPreviewCallbackWithBuffer(this);
389 mCamera.startPreview();
394 public int stopCapture() {
395 if (mCamera == null) {
396 Log.e(TAG, "stopCapture: camera is null");
400 mPreviewBufferLock.lock();
407 mPreviewBufferLock.unlock();
410 mCamera.stopPreview();
411 mCamera.setPreviewCallbackWithBuffer(null);
416 public void deallocate() {
422 mCamera.setPreviewTexture(null);
423 if (mGlTextures != null)
424 GLES20.glDeleteTextures(1, mGlTextures, 0);
425 mCaptureFormat = null;
428 } catch (IOException ex) {
429 Log.e(TAG, "deallocate: failed to deallocate camera, " + ex);
435 public void onPreviewFrame(byte[] data, Camera camera) {
436 mPreviewBufferLock.lock();
441 if (data.length == mExpectedFrameSize) {
442 int rotation = getDeviceOrientation();
443 if (rotation != mDeviceOrientation) {
444 mDeviceOrientation = rotation;
446 "onPreviewFrame: device orientation=" +
447 mDeviceOrientation + ", camera orientation=" +
450 if (mCameraFacing == Camera.CameraInfo.CAMERA_FACING_BACK) {
451 rotation = 360 - rotation;
453 rotation = (mCameraOrientation + rotation) % 360;
454 nativeOnFrameAvailable(mNativeVideoCaptureDeviceAndroid,
455 data, mExpectedFrameSize, rotation);
458 mPreviewBufferLock.unlock();
459 if (camera != null) {
460 camera.addCallbackBuffer(data);
    // TODO(wjia): investigate whether reading from texture could give better
    // performance and frame rate, using onFrameAvailable().
468 private static class ChromiumCameraInfo {
469 private final int mId;
470 private final Camera.CameraInfo mCameraInfo;
472 private ChromiumCameraInfo(int index) {
474 mCameraInfo = new Camera.CameraInfo();
475 Camera.getCameraInfo(index, mCameraInfo);
478 @CalledByNative("ChromiumCameraInfo")
479 private static int getNumberOfCameras() {
480 return Camera.getNumberOfCameras();
483 @CalledByNative("ChromiumCameraInfo")
484 private static ChromiumCameraInfo getAt(int index) {
485 return new ChromiumCameraInfo(index);
488 @CalledByNative("ChromiumCameraInfo")
489 private int getId() {
493 @CalledByNative("ChromiumCameraInfo")
494 private String getDeviceName() {
495 return "camera " + mId + ", facing " +
496 (mCameraInfo.facing ==
497 Camera.CameraInfo.CAMERA_FACING_FRONT ? "front" : "back");
500 @CalledByNative("ChromiumCameraInfo")
501 private int getOrientation() {
502 return mCameraInfo.orientation;
506 private native void nativeOnFrameAvailable(
507 long nativeVideoCaptureDeviceAndroid,
512 private int getDeviceOrientation() {
514 if (mContext != null) {
515 WindowManager wm = (WindowManager) mContext.getSystemService(
516 Context.WINDOW_SERVICE);
517 switch(wm.getDefaultDisplay().getRotation()) {
518 case Surface.ROTATION_90:
521 case Surface.ROTATION_180:
524 case Surface.ROTATION_270:
527 case Surface.ROTATION_0: