/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */
package org.webrtc.videoengine;

import java.io.IOException;
import java.util.concurrent.Exchanger;

import android.content.Context;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.Camera.PreviewCallback;
import android.hardware.Camera;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.os.Handler;
import android.os.Looper;
import android.os.SystemClock;
import android.util.Log;
import android.view.OrientationEventListener;
import android.view.SurfaceHolder.Callback;
import android.view.SurfaceHolder;
// Wrapper for android Camera, with support for direct local preview rendering.
// Threading notes: this class is called from ViE C++ code, and from Camera &
// SurfaceHolder Java callbacks.  Since these calls happen on different threads,
// the entry points to this class are all synchronized.  This shouldn't present
// a performance bottleneck because only onPreviewFrame() is called more than
// once (and is called serially on a single thread), so the lock should be
// uncontended.  Note that each of these synchronized methods must check
// |camera| for null to account for having possibly waited for stopCapture() to
// complete.
40 public class VideoCaptureAndroid implements PreviewCallback, Callback {
41 private final static String TAG = "WEBRTC-JC";
43 private static SurfaceHolder localPreview;
44 private Camera camera; // Only non-null while capturing.
45 private CameraThread cameraThread;
46 private Handler cameraThreadHandler;
48 private final Camera.CameraInfo info;
49 private final OrientationEventListener orientationListener;
50 private boolean orientationListenerEnabled;
51 private final long native_capturer; // |VideoCaptureAndroid*| in C++.
52 private SurfaceTexture cameraSurfaceTexture;
53 private int[] cameraGlTextures = null;
54 // Arbitrary queue depth. Higher number means more memory allocated & held,
55 // lower number means more sensitivity to processing time in the client (and
56 // potentially stalling the capturer if it runs out of buffers to write to).
57 private final int numCaptureBuffers = 3;
58 private double averageDurationMs;
59 private long lastCaptureTimeMs;
60 private int frameCount;
62 // Requests future capturers to send their frames to |localPreview| directly.
63 public static void setLocalPreview(SurfaceHolder localPreview) {
64 // It is a gross hack that this is a class-static. Doing it right would
65 // mean plumbing this through the C++ API and using it from
66 // webrtc/examples/android/media_demo's MediaEngine class.
67 VideoCaptureAndroid.localPreview = localPreview;
70 public VideoCaptureAndroid(int id, long native_capturer) {
72 this.native_capturer = native_capturer;
73 this.info = new Camera.CameraInfo();
74 Camera.getCameraInfo(id, info);
76 // Must be the last thing in the ctor since we pass a reference to |this|!
77 final VideoCaptureAndroid self = this;
78 orientationListener = new OrientationEventListener(GetContext()) {
79 @Override public void onOrientationChanged(int degrees) {
80 if (!self.orientationListenerEnabled) {
83 if (degrees == OrientationEventListener.ORIENTATION_UNKNOWN) {
86 if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
87 degrees = (info.orientation - degrees + 360) % 360;
88 } else { // back-facing
89 degrees = (info.orientation + degrees) % 360;
91 self.OnOrientationChanged(self.native_capturer, degrees);
94 // Don't add any code here; see the comment above |self| above!
97 // Return the global application context.
98 private static native Context GetContext();
99 // Request frame rotation post-capture.
100 private native void OnOrientationChanged(long captureObject, int degrees);
102 private class CameraThread extends Thread {
103 private Exchanger<Handler> handlerExchanger;
104 public CameraThread(Exchanger<Handler> handlerExchanger) {
105 this.handlerExchanger = handlerExchanger;
108 @Override public void run() {
110 exchange(handlerExchanger, new Handler());
115 // Called by native code. Returns true if capturer is started.
117 // Note that this actually opens the camera, and Camera callbacks run on the
118 // thread that calls open(), so this is done on the CameraThread. Since ViE
119 // API needs a synchronous success return value we wait for the result.
120 private synchronized boolean startCapture(
121 final int width, final int height,
122 final int min_mfps, final int max_mfps) {
123 Log.d(TAG, "startCapture: " + width + "x" + height + "@" +
124 min_mfps + ":" + max_mfps);
125 if (cameraThread != null || cameraThreadHandler != null) {
126 throw new RuntimeException("Camera thread already started!");
128 Exchanger<Handler> handlerExchanger = new Exchanger<Handler>();
129 cameraThread = new CameraThread(handlerExchanger);
130 cameraThread.start();
131 cameraThreadHandler = exchange(handlerExchanger, null);
133 final Exchanger<Boolean> result = new Exchanger<Boolean>();
134 cameraThreadHandler.post(new Runnable() {
135 @Override public void run() {
136 startCaptureOnCameraThread(width, height, min_mfps, max_mfps, result);
139 boolean startResult = exchange(result, false); // |false| is a dummy value.
140 orientationListenerEnabled = true;
141 orientationListener.enable();
145 private void startCaptureOnCameraThread(
146 int width, int height, int min_mfps, int max_mfps,
147 Exchanger<Boolean> result) {
148 Throwable error = null;
150 camera = Camera.open(id);
152 if (localPreview != null) {
153 localPreview.addCallback(this);
154 if (localPreview.getSurface() != null &&
155 localPreview.getSurface().isValid()) {
156 camera.setPreviewDisplay(localPreview);
159 // No local renderer (we only care about onPreviewFrame() buffers, not a
160 // directly-displayed UI element). Camera won't capture without
161 // setPreview{Texture,Display}, so we create a SurfaceTexture and hand
162 // it over to Camera, but never listen for frame-ready callbacks,
163 // and never call updateTexImage on it.
165 cameraGlTextures = new int[1];
166 // Generate one texture pointer and bind it as an external texture.
167 GLES20.glGenTextures(1, cameraGlTextures, 0);
168 GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
169 cameraGlTextures[0]);
170 GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
171 GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
172 GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
173 GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
174 GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
175 GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
176 GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
177 GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
179 cameraSurfaceTexture = new SurfaceTexture(cameraGlTextures[0]);
180 cameraSurfaceTexture.setOnFrameAvailableListener(null);
181 camera.setPreviewTexture(cameraSurfaceTexture);
182 } catch (IOException e) {
183 throw new RuntimeException(e);
187 Camera.Parameters parameters = camera.getParameters();
188 Log.d(TAG, "isVideoStabilizationSupported: " +
189 parameters.isVideoStabilizationSupported());
190 if (parameters.isVideoStabilizationSupported()) {
191 parameters.setVideoStabilization(true);
193 parameters.setPreviewSize(width, height);
194 parameters.setPreviewFpsRange(min_mfps, max_mfps);
195 int format = ImageFormat.NV21;
196 parameters.setPreviewFormat(format);
197 camera.setParameters(parameters);
198 int bufSize = width * height * ImageFormat.getBitsPerPixel(format) / 8;
199 for (int i = 0; i < numCaptureBuffers; i++) {
200 camera.addCallbackBuffer(new byte[bufSize]);
202 camera.setPreviewCallbackWithBuffer(this);
204 averageDurationMs = 1000 / max_mfps;
205 camera.startPreview();
206 exchange(result, true);
208 } catch (IOException e) {
210 } catch (RuntimeException e) {
213 Log.e(TAG, "startCapture failed", error);
214 if (camera != null) {
215 Exchanger<Boolean> resultDropper = new Exchanger<Boolean>();
216 stopCaptureOnCameraThread(resultDropper);
217 exchange(resultDropper, false);
219 exchange(result, false);
223 // Called by native code. Returns true when camera is known to be stopped.
224 private synchronized boolean stopCapture() {
225 Log.d(TAG, "stopCapture");
226 orientationListener.disable();
227 orientationListenerEnabled = false;
228 final Exchanger<Boolean> result = new Exchanger<Boolean>();
229 cameraThreadHandler.post(new Runnable() {
230 @Override public void run() {
231 stopCaptureOnCameraThread(result);
234 boolean status = exchange(result, false); // |false| is a dummy value here.
237 } catch (InterruptedException e) {
238 throw new RuntimeException(e);
240 cameraThreadHandler = null;
242 Log.d(TAG, "stopCapture done");
246 private void stopCaptureOnCameraThread(
247 Exchanger<Boolean> result) {
248 if (camera == null) {
249 throw new RuntimeException("Camera is already stopped!");
251 Throwable error = null;
253 camera.stopPreview();
254 camera.setPreviewCallbackWithBuffer(null);
255 if (localPreview != null) {
256 localPreview.removeCallback(this);
257 camera.setPreviewDisplay(null);
259 camera.setPreviewTexture(null);
260 cameraSurfaceTexture = null;
261 if (cameraGlTextures != null) {
262 GLES20.glDeleteTextures(1, cameraGlTextures, 0);
263 cameraGlTextures = null;
268 exchange(result, true);
269 Looper.myLooper().quit();
271 } catch (IOException e) {
273 } catch (RuntimeException e) {
276 Log.e(TAG, "Failed to stop camera", error);
277 exchange(result, false);
278 Looper.myLooper().quit();
282 private native void ProvideCameraFrame(
283 byte[] data, int length, long timeStamp, long captureObject);
285 // Called on cameraThread so must not "synchronized".
287 public void onPreviewFrame(byte[] data, Camera callbackCamera) {
288 if (Thread.currentThread() != cameraThread) {
289 throw new RuntimeException("Camera callback not on camera thread?!?");
291 if (camera == null) {
294 if (camera != callbackCamera) {
295 throw new RuntimeException("Unexpected camera in callback!");
298 long captureTimeMs = SystemClock.elapsedRealtime();
299 if (frameCount > 1) {
300 double durationMs = captureTimeMs - lastCaptureTimeMs;
301 averageDurationMs = 0.9 * averageDurationMs + 0.1 * durationMs;
302 if ((frameCount % 30) == 0) {
303 Log.d(TAG, "Camera TS " + captureTimeMs +
304 ". Duration: " + (int)durationMs + " ms. FPS: " +
305 (int) (1000 / averageDurationMs + 0.5));
308 lastCaptureTimeMs = captureTimeMs;
309 ProvideCameraFrame(data, data.length, captureTimeMs, native_capturer);
310 camera.addCallbackBuffer(data);
313 // Sets the rotation of the preview render window.
314 // Does not affect the captured video image.
315 // Called by native code.
316 private synchronized void setPreviewRotation(final int rotation) {
317 if (camera == null || cameraThreadHandler == null) {
320 final Exchanger<IOException> result = new Exchanger<IOException>();
321 cameraThreadHandler.post(new Runnable() {
322 @Override public void run() {
323 setPreviewRotationOnCameraThread(rotation, result);
326 // Use the exchanger below to block this function until
327 // setPreviewRotationOnCameraThread() completes, holding the synchronized
328 // lock for the duration. The exchanged value itself is ignored.
329 exchange(result, null);
332 private void setPreviewRotationOnCameraThread(
333 int rotation, Exchanger<IOException> result) {
334 Log.v(TAG, "setPreviewRotation:" + rotation);
336 int resultRotation = 0;
337 if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
338 // This is a front facing camera. SetDisplayOrientation will flip
339 // the image horizontally before doing the rotation.
340 resultRotation = ( 360 - rotation ) % 360; // Compensate for the mirror.
342 // Back-facing camera.
343 resultRotation = rotation;
345 camera.setDisplayOrientation(resultRotation);
346 exchange(result, null);
350 public synchronized void surfaceChanged(
351 SurfaceHolder holder, int format, int width, int height) {
352 Log.d(TAG, "VideoCaptureAndroid::surfaceChanged ignored: " +
353 format + ": " + width + "x" + height);
357 public synchronized void surfaceCreated(final SurfaceHolder holder) {
358 Log.d(TAG, "VideoCaptureAndroid::surfaceCreated");
359 if (camera == null || cameraThreadHandler == null) {
362 final Exchanger<IOException> result = new Exchanger<IOException>();
363 cameraThreadHandler.post(new Runnable() {
364 @Override public void run() {
365 setPreviewDisplayOnCameraThread(holder, result);
368 IOException e = exchange(result, null); // |null| is a dummy value here.
370 throw new RuntimeException(e);
375 public synchronized void surfaceDestroyed(SurfaceHolder holder) {
376 Log.d(TAG, "VideoCaptureAndroid::surfaceDestroyed");
377 if (camera == null || cameraThreadHandler == null) {
380 final Exchanger<IOException> result = new Exchanger<IOException>();
381 cameraThreadHandler.post(new Runnable() {
382 @Override public void run() {
383 setPreviewDisplayOnCameraThread(null, result);
386 IOException e = exchange(result, null); // |null| is a dummy value here.
388 throw new RuntimeException(e);
392 private void setPreviewDisplayOnCameraThread(
393 SurfaceHolder holder, Exchanger<IOException> result) {
395 camera.setPreviewDisplay(holder);
396 } catch (IOException e) {
400 exchange(result, null);
404 // Exchanges |value| with |exchanger|, converting InterruptedExceptions to
405 // RuntimeExceptions (since we expect never to see these).
406 private static <T> T exchange(Exchanger<T> exchanger, T value) {
408 return exchanger.exchange(value);
409 } catch (InterruptedException e) {
410 throw new RuntimeException(e);