1 // Copyright (c) 2013 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 package org.chromium.media;
7 import android.content.Context;
8 import android.graphics.ImageFormat;
9 import android.graphics.SurfaceTexture;
10 import android.graphics.SurfaceTexture.OnFrameAvailableListener;
11 import android.hardware.Camera;
12 import android.hardware.Camera.PreviewCallback;
13 import android.opengl.GLES20;
14 import android.util.Log;
15 import android.view.Surface;
16 import android.view.WindowManager;
18 import java.io.IOException;
19 import java.util.concurrent.locks.ReentrantLock;
20 import java.util.Iterator;
21 import java.util.List;
23 import org.chromium.base.CalledByNative;
24 import org.chromium.base.JNINamespace;
26 @JNINamespace("media")
// Camera wrapper driven by native code: native allocates, starts/stops capture,
// and receives frames through nativeOnFrameAvailable (see onPreviewFrame below).
27 public class VideoCapture implements PreviewCallback, OnFrameAvailableListener {
// Simple holder for the negotiated capture configuration.
28 static class CaptureCapability {
// Preview width/height in pixels, as matched against supported sizes.
29 public int mWidth = 0;
30 public int mHeight = 0;
// Frame rate in fps that allocate() settled on.
31 public int mDesiredFps = 0;
34 // Some devices with OS older than JELLY_BEAN don't support YV12 format correctly.
35 // Some devices don't support YV12 format correctly even with JELLY_BEAN or newer OS.
36 // To work around the issues on those devices, we'd have to request NV21.
37 // This is a temporary hack till device manufacturers fix the problem or
38 // we don't need to support those devices any more.
39 private static class DeviceImageFormatHack {
// Build.MODEL strings of devices known to mishandle YV12 (entries not
// visible in this view of the file).
40 private static final String[] sBUGGY_DEVICE_LIST = {
// Picks the preview pixel format to request: NV21 on pre-JELLY_BEAN builds
// and on the buggy-device list above, YV12 otherwise.
45 static int getImageFormat() {
46 if (android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.JELLY_BEAN) {
47 return ImageFormat.NV21;
50 for (String buggyDevice : sBUGGY_DEVICE_LIST) {
51 if (buggyDevice.contentEquals(android.os.Build.MODEL)) {
52 return ImageFormat.NV21;
55 return ImageFormat.YV12;
// Platform camera; opened in allocate(), null before that.
59 private Camera mCamera;
// Serializes onPreviewFrame (camera callback thread) against start/stopCapture.
60 public ReentrantLock mPreviewBufferLock = new ReentrantLock();
// Preview pixel format; recomputed in calculateImageFormat().
61 private int mImageFormat = ImageFormat.YV12;
// Not referenced in the visible portion of this file — purpose unconfirmed.
62 private byte[] mColorPlane = null;
// Used to query the WindowManager for the current display rotation.
63 private Context mContext = null;
64 // True when native code has started capture.
65 private boolean mIsRunning = false;
// Number of preview buffers handed to the camera via addCallbackBuffer().
67 private static final int NUM_CAPTURE_BUFFERS = 3;
// Byte size of one full preview frame; frames of any other size are ignored.
68 private int mExpectedFrameSize = 0;
70 // Native callback context variable.
71 private int mNativeVideoCaptureDeviceAndroid = 0;
// GL texture backing mSurfaceTexture; the camera needs a preview target even
// though frames are delivered through the byte[] callback path.
72 private int[] mGlTextures = null;
73 private SurfaceTexture mSurfaceTexture = null;
// GL_TEXTURE_EXTERNAL_OES, not exposed by the GLES20 class on older SDKs.
74 private static final int GL_TEXTURE_EXTERNAL_OES = 0x8D65;
// Sensor orientation and facing as reported by Camera.getCameraInfo.
76 private int mCameraOrientation = 0;
77 private int mCameraFacing = 0;
// Last observed display rotation in degrees; refreshed on each preview frame.
78 private int mDeviceOrientation = 0;
// Capture configuration negotiated by allocate(); null until then.
80 CaptureCapability mCurrentCapability = null;
81 private static final String TAG = "VideoCapture";
// Factory: builds a VideoCapture bound to camera |id| and the native peer
// pointer used for frame delivery (presumably invoked from native code).
84 public static VideoCapture createVideoCapture(
85 Context context, int id, int nativeVideoCaptureDeviceAndroid) {
86 return new VideoCapture(context, id, nativeVideoCaptureDeviceAndroid);
90 Context context, int id, int nativeVideoCaptureDeviceAndroid) {
// Stash the native peer pointer passed back in nativeOnFrameAvailable().
93 mNativeVideoCaptureDeviceAndroid = nativeVideoCaptureDeviceAndroid;
96 // Returns true on success, false otherwise.
// Opens camera mId and configures preview: picks the fps range containing the
// requested rate (falling back to the first range), chooses the closest
// supported resolution whose width is a multiple of 32, requests the pixel
// format from calculateImageFormat(), attaches a SurfaceTexture as the
// required preview target, and queues NUM_CAPTURE_BUFFERS callback buffers.
98 public boolean allocate(int width, int height, int frameRate) {
99 Log.d(TAG, "allocate: requested width=" + width +
100 ", height=" + height + ", frameRate=" + frameRate);
102 mCamera = Camera.open(mId);
103 } catch (RuntimeException ex) {
104 Log.e(TAG, "allocate:Camera.open: " + ex);
// Record sensor orientation/facing and current display rotation for the
// per-frame rotation math in onPreviewFrame().
109 Camera.CameraInfo camera_info = new Camera.CameraInfo();
110 Camera.getCameraInfo(mId, camera_info);
111 mCameraOrientation = camera_info.orientation;
112 mCameraFacing = camera_info.facing;
113 mDeviceOrientation = getDeviceOrientation();
114 Log.d(TAG, "allocate: device orientation=" + mDeviceOrientation +
115 ", camera orientation=" + mCameraOrientation +
116 ", facing=" + mCameraFacing);
118 Camera.Parameters parameters = mCamera.getParameters();
// Fps ranges from the camera are expressed in millihertz.
121 List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange();
122 if (listFpsRange == null || listFpsRange.size() == 0) {
123 Log.e(TAG, "allocate: no fps range found");
126 int frameRateInMs = frameRate * 1000;
127 Iterator itFpsRange = listFpsRange.iterator();
128 int[] fpsRange = (int[])itFpsRange.next();
129 // Use the first range as default.
130 int fpsMin = fpsRange[0];
131 int fpsMax = fpsRange[1];
// Default rate: the first range's minimum converted to fps, rounded up.
132 int newFrameRate = (fpsMin + 999) / 1000;
133 while (itFpsRange.hasNext()) {
134 fpsRange = (int[])itFpsRange.next();
135 if (fpsRange[0] <= frameRateInMs &&
136 frameRateInMs <= fpsRange[1]) {
137 fpsMin = fpsRange[0];
138 fpsMax = fpsRange[1];
139 newFrameRate = frameRate;
143 frameRate = newFrameRate;
144 Log.d(TAG, "allocate: fps set to " + frameRate);
146 mCurrentCapability = new CaptureCapability();
147 mCurrentCapability.mDesiredFps = frameRate;
// Pick the supported preview size minimizing |dw| + |dh| vs. the request.
150 List<Camera.Size> listCameraSize =
151 parameters.getSupportedPreviewSizes();
152 int minDiff = Integer.MAX_VALUE;
153 int matchedWidth = width;
154 int matchedHeight = height;
155 Iterator itCameraSize = listCameraSize.iterator();
156 while (itCameraSize.hasNext()) {
157 Camera.Size size = (Camera.Size)itCameraSize.next();
158 int diff = Math.abs(size.width - width) +
159 Math.abs(size.height - height);
160 Log.d(TAG, "allocate: support resolution (" +
161 size.width + ", " + size.height + "), diff=" + diff);
162 // TODO(wjia): Remove this hack (forcing width to be multiple
163 // of 32) by supporting stride in video frame buffer.
164 // Right now, VideoCaptureController requires compact YV12
165 // (i.e., with no padding).
166 if (diff < minDiff && (size.width % 32 == 0)) {
168 matchedWidth = size.width;
169 matchedHeight = size.height;
172 if (minDiff == Integer.MAX_VALUE) {
173 Log.e(TAG, "allocate: can not find a resolution whose width " +
174 "is multiple of 32");
177 mCurrentCapability.mWidth = matchedWidth;
178 mCurrentCapability.mHeight = matchedHeight;
179 Log.d(TAG, "allocate: matched width=" + matchedWidth +
180 ", height=" + matchedHeight);
// Chooses YV12 or NV21 depending on OS version / device model.
182 calculateImageFormat(matchedWidth, matchedHeight);
184 if (parameters.isVideoStabilizationSupported()){
185 Log.d(TAG, "Image stabilization supported, currently: "
186 + parameters.getVideoStabilization() + ", setting it.");
187 parameters.setVideoStabilization(true);
189 Log.d(TAG, "Image stabilization not supported.");
192 parameters.setPreviewSize(matchedWidth, matchedHeight);
193 parameters.setPreviewFormat(mImageFormat);
194 parameters.setPreviewFpsRange(fpsMin, fpsMax);
195 mCamera.setParameters(parameters);
197 // Set SurfaceTexture.
198 mGlTextures = new int[1];
199 // Generate one texture pointer and bind it as an external texture.
200 GLES20.glGenTextures(1, mGlTextures, 0);
201 GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mGlTextures[0]);
202 // No mip-mapping with camera source.
203 GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES,
204 GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
205 GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES,
206 GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
207 // Clamp to edge is only option.
208 GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
209 GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
210 GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
211 GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
// The camera requires a preview target even though frames arrive through the
// byte[] callback path; no frame-available listener is needed.
213 mSurfaceTexture = new SurfaceTexture(mGlTextures[0]);
214 mSurfaceTexture.setOnFrameAvailableListener(null);
216 mCamera.setPreviewTexture(mSurfaceTexture);
// Pre-allocate callback buffers, each large enough for one full frame.
218 int bufSize = matchedWidth * matchedHeight *
219 ImageFormat.getBitsPerPixel(mImageFormat) / 8;
220 for (int i = 0; i < NUM_CAPTURE_BUFFERS; i++) {
221 byte[] buffer = new byte[bufSize];
222 mCamera.addCallbackBuffer(buffer);
224 mExpectedFrameSize = bufSize;
225 } catch (IOException ex) {
226 Log.e(TAG, "allocate: " + ex);
// Width of the preview size negotiated by allocate().
234 public int queryWidth() {
235 return mCurrentCapability.mWidth;
// Height of the preview size negotiated by allocate().
239 public int queryHeight() {
240 return mCurrentCapability.mHeight;
// Frame rate (fps) that allocate() settled on.
244 public int queryFrameRate() {
245 return mCurrentCapability.mDesiredFps;
// Maps the Android ImageFormat in use to the media-layer colorspace enum
// consumed on the native side.
249 public int getColorspace() {
250 switch (mImageFormat){
251 case ImageFormat.YV12:
252 return AndroidImageFormatList.ANDROID_IMAGEFORMAT_YV12;
253 case ImageFormat.NV21:
254 return AndroidImageFormatList.ANDROID_IMAGEFORMAT_NV21;
255 case ImageFormat.YUY2:
256 return AndroidImageFormatList.ANDROID_IMAGEFORMAT_YUY2;
257 case ImageFormat.NV16:
258 return AndroidImageFormatList.ANDROID_IMAGEFORMAT_NV16;
259 case ImageFormat.JPEG:
260 return AndroidImageFormatList.ANDROID_IMAGEFORMAT_JPEG;
261 case ImageFormat.RGB_565:
262 return AndroidImageFormatList.ANDROID_IMAGEFORMAT_RGB_565;
263 case ImageFormat.UNKNOWN:
265 return AndroidImageFormatList.ANDROID_IMAGEFORMAT_UNKNOWN;
// Starts preview delivery into the queued callback buffers. Requires a prior
// successful allocate(); fails if the camera was never opened.
270 public int startCapture() {
271 if (mCamera == null) {
272 Log.e(TAG, "startCapture: camera is null");
// Same lock onPreviewFrame takes on the camera callback thread.
276 mPreviewBufferLock.lock();
283 mPreviewBufferLock.unlock();
// With-buffer variant: frames arrive only in buffers we queued ourselves.
285 mCamera.setPreviewCallbackWithBuffer(this);
286 mCamera.startPreview();
// Stops preview and detaches the frame callback. Safe ordering: the running
// flag is updated under mPreviewBufferLock before the camera is stopped.
291 public int stopCapture() {
292 if (mCamera == null) {
293 Log.e(TAG, "stopCapture: camera is null");
297 mPreviewBufferLock.lock();
304 mPreviewBufferLock.unlock();
307 mCamera.stopPreview();
// Clearing the callback stops further onPreviewFrame invocations.
308 mCamera.setPreviewCallbackWithBuffer(null);
// Releases preview/GL resources. The IOException that setPreviewTexture can
// throw is logged and swallowed — deallocation is best-effort.
313 public void deallocate() {
319 mCamera.setPreviewTexture(null);
320 if (mGlTextures != null)
321 GLES20.glDeleteTextures(1, mGlTextures, 0);
322 mCurrentCapability = null;
325 } catch (IOException ex) {
326 Log.e(TAG, "deallocate: failed to deallocate camera, " + ex);
// Camera callback, invoked on the camera's callback thread once per preview
// frame. Forwards complete frames to native together with the rotation (and
// flips) needed to present them upright, then recycles the buffer.
332 public void onPreviewFrame(byte[] data, Camera camera) {
333 mPreviewBufferLock.lock();
// Drop frames whose size doesn't match the negotiated preview geometry.
338 if (data.length == mExpectedFrameSize) {
339 int rotation = getDeviceOrientation();
340 if (rotation != mDeviceOrientation) {
341 mDeviceOrientation = rotation;
343 "onPreviewFrame: device orientation=" +
344 mDeviceOrientation + ", camera orientation=" +
347 boolean flipVertical = false;
348 boolean flipHorizontal = false;
349 if (mCameraFacing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
// Front camera: combine sensor and display rotation, then invert; the
// flips at 90/270 appear to compensate for sensor mirroring — confirm
// against the native consumer.
350 rotation = (mCameraOrientation + rotation) % 360;
351 rotation = (360 - rotation) % 360;
352 flipHorizontal = (rotation == 270 || rotation == 90);
353 flipVertical = flipHorizontal;
// Back camera: plain difference of sensor and display rotation.
355 rotation = (mCameraOrientation - rotation + 360) % 360;
357 nativeOnFrameAvailable(mNativeVideoCaptureDeviceAndroid,
358 data, mExpectedFrameSize,
359 rotation, flipVertical, flipHorizontal);
362 mPreviewBufferLock.unlock();
// Hand the buffer back to the camera so it can be refilled.
363 if (camera != null) {
364 camera.addCallbackBuffer(data);
369 // TODO(wjia): investigate whether reading from texture could give better
370 // performance and frame rate.
// Intentionally empty: frames are consumed via onPreviewFrame, not the texture.
372 public void onFrameAvailable(SurfaceTexture surfaceTexture) { }
// Camera enumeration helper exposed to native code via @CalledByNative.
374 private static class ChromiumCameraInfo {
375 private final int mId;
376 private final Camera.CameraInfo mCameraInfo;
378 private ChromiumCameraInfo(int index) {
380 mCameraInfo = new Camera.CameraInfo();
381 Camera.getCameraInfo(index, mCameraInfo);
// Total number of cameras on the device.
384 @CalledByNative("ChromiumCameraInfo")
385 private static int getNumberOfCameras() {
386 return Camera.getNumberOfCameras();
// Info wrapper for the camera at |index|.
389 @CalledByNative("ChromiumCameraInfo")
390 private static ChromiumCameraInfo getAt(int index) {
391 return new ChromiumCameraInfo(index);
394 @CalledByNative("ChromiumCameraInfo")
395 private int getId() {
// Human-readable name, e.g. "camera 0, facing back".
399 @CalledByNative("ChromiumCameraInfo")
400 private String getDeviceName() {
401 return "camera " + mId + ", facing " +
402 (mCameraInfo.facing ==
403 Camera.CameraInfo.CAMERA_FACING_FRONT ? "front" : "back");
// Sensor rotation relative to the device's natural orientation (per
// Camera.CameraInfo.orientation semantics).
406 @CalledByNative("ChromiumCameraInfo")
407 private int getOrientation() {
408 return mCameraInfo.orientation;
// JNI hook into the native VideoCaptureDeviceAndroid peer: delivers one
// frame's bytes plus rotation/flip hints. (Some parameters of this
// declaration are not visible in this view of the file.)
412 private native void nativeOnFrameAvailable(
413 int nativeVideoCaptureDeviceAndroid,
417 boolean flipVertical,
418 boolean flipHorizontal);
// Returns the current display rotation mapped to degrees (the per-case
// return values are not visible in this view). Falls back when mContext
// is null.
420 private int getDeviceOrientation() {
422 if (mContext != null) {
423 WindowManager wm = (WindowManager)mContext.getSystemService(
424 Context.WINDOW_SERVICE);
425 switch(wm.getDefaultDisplay().getRotation()) {
426 case Surface.ROTATION_90:
429 case Surface.ROTATION_180:
432 case Surface.ROTATION_270:
435 case Surface.ROTATION_0:
// Selects the preview pixel format via DeviceImageFormatHack. The width and
// height parameters appear unused in the visible body — TODO confirm against
// the full file.
444 private void calculateImageFormat(int width, int height) {
445 mImageFormat = DeviceImageFormatHack.getImageFormat();