Index: webrtc/api/java/android/org/webrtc/VideoCapturerAndroid2.java |
diff --git a/webrtc/api/java/android/org/webrtc/VideoCapturerAndroid2.java b/webrtc/api/java/android/org/webrtc/VideoCapturerAndroid2.java |
new file mode 100644 |
index 0000000000000000000000000000000000000000..d65357b45f65af026882e6fdc6e63f5bfde1b792 |
--- /dev/null |
+++ b/webrtc/api/java/android/org/webrtc/VideoCapturerAndroid2.java |
@@ -0,0 +1,686 @@ |
+/* |
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved. |
+ * |
+ * Use of this source code is governed by a BSD-style license |
+ * that can be found in the LICENSE file in the root of the source |
+ * tree. An additional intellectual property rights grant can be found |
+ * in the file PATENTS. All contributing project authors may |
+ * be found in the AUTHORS file in the root of the source tree. |
+ */ |
+ |
+package org.webrtc; |
+ |
+import android.annotation.TargetApi; |
+import android.content.Context; |
+import android.graphics.SurfaceTexture; |
+ |
+import android.hardware.camera2.CameraAccessException; |
+import android.hardware.camera2.CameraCaptureSession; |
+import android.hardware.camera2.CameraCharacteristics; |
+import android.hardware.camera2.CameraDevice; |
+import android.hardware.camera2.CameraManager; |
+import android.hardware.camera2.CameraMetadata; |
+import android.hardware.camera2.CaptureFailure; |
+import android.hardware.camera2.CaptureRequest; |
+import android.hardware.camera2.TotalCaptureResult; |
+ |
+import android.hardware.camera2.params.StreamConfigurationMap; |
+import android.os.Build; |
+import android.os.Handler; |
+import android.os.SystemClock; |
+import android.util.Range; |
+import android.util.Size;
+import android.view.Surface;
+import android.view.WindowManager;
+ |
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat; |
+import org.webrtc.Logging; |
+ |
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections; |
+import java.util.HashMap; |
+import java.util.HashSet; |
+import java.util.IdentityHashMap; |
+import java.util.List; |
+import java.util.Map; |
+import java.util.Set; |
+import java.util.concurrent.CountDownLatch; |
+import java.util.concurrent.TimeUnit; |
+ |
+@TargetApi(21) |
+public class VideoCapturerAndroid2 implements |
+ CameraVideoCapturer, |
+ SurfaceTextureHelper.OnTextureFrameAvailableListener { |
+  private static final String TAG = "VideoCapturerAndroid2";
+  // In the Camera2 API, starting a camera is inherently asynchronous, and this state is
+  // represented with 'STARTING'.
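+  // Transitions: IDLE -> STARTING in startCapture(), STARTING -> RUNNING once the capture session
+  // is configured, and back to IDLE when the camera device reports onClosed() or startup fails.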
+ private static enum CameraState { IDLE, STARTING, RUNNING } |
+ |
+ private final CameraManager cameraManager; |
+ private final CameraEventsHandler eventsHandler; |
+ private final Object handlerLock = new Object(); |
+ // |cameraThreadHandler| must be synchronized on |handlerLock| when not on the camera thread, |
+ // or when modifying the reference. Use maybePostOnCameraThread() instead of posting directly to |
+  // the handler - this way all callbacks with a specified token can be removed at once.
+ private Handler cameraThreadHandler; |
+ |
+ private String cameraId; |
+ private CameraDevice cameraDevice; |
+ private CameraCaptureSession captureSession; |
+ private boolean isFrontCamera; |
+ private int cameraOrientation; |
+ private CaptureFormat captureFormat; |
+ // Factor to convert between Android framerates and CaptureFormat.FramerateRange. It will be |
+ // either 1 or 1000. |
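+  // For example, a device that reports the range [15, 30] gets factor 1000 and the range is
+  // stored internally as [15000, 30000]; a device that already reports [15000, 30000] gets 1.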
+ private int fpsUnitFactor; |
+ private CameraStatistics cameraStatistics; |
+ private boolean firstFrameReported; |
+ |
+ private final Object pendingCameraSwitchLock = new Object(); |
+ private volatile boolean pendingCameraSwitch; |
+ |
+ private final Object cameraStateLock = new Object(); |
+ private CameraState cameraState = CameraState.IDLE; |
+ |
+ private Context applicationContext; |
+ private CapturerObserver capturerObserver; |
+ private SurfaceTextureHelper surfaceTextureHelper; |
+ private Surface surface; |
+ |
+ // Remember the requested format in case we want to switch cameras. |
+ private int requestedWidth; |
+ private int requestedHeight; |
+ private int requestedFramerate; |
+ |
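+  // Typical usage (a sketch - the camera id, format, and the CameraEventsHandler/CapturerObserver
+  // implementations are application-specific and only illustrative here):
+  //   VideoCapturerAndroid2 capturer = new VideoCapturerAndroid2(context, "0", eventsHandler);
+  //   capturer.startCapture(1280, 720, 30, surfaceTextureHelper, context, capturerObserver);
+  //   ...
+  //   capturer.stopCapture();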
+ public VideoCapturerAndroid2( |
+ Context context, String cameraId, CameraEventsHandler eventsHandler) { |
+ Logging.d(TAG, "VideoCapturerAndroid2 ctor, camera id: " + cameraId); |
+ this.cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE); |
+ try { |
+ final String[] cameraIds = cameraManager.getCameraIdList(); |
+ if (!Arrays.asList(cameraIds).contains(cameraId)) { |
+ throw new IllegalArgumentException( |
+ "Camera id: " + cameraId + " does not match any known camera device:"); |
+ } |
+ if (cameraManager.getCameraCharacteristics(cameraId).get( |
+ CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL) |
+ == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) { |
+ throw new IllegalArgumentException("Camera id: " + cameraId + " only has legacy support."); |
+ } |
+ } catch (CameraAccessException e) { |
+ throw new RuntimeException("Camera access exception: " + e); |
+ } |
+ setCameraId(cameraId); |
+ this.eventsHandler = eventsHandler; |
+ } |
+ |
+ private void checkIsOnCameraThread() { |
+ if (cameraThreadHandler == null) { |
+ throw new IllegalStateException("Camera is closed."); |
+ } |
+ if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) { |
+ throw new IllegalStateException("Wrong thread"); |
+ } |
+ } |
+ |
+ private void setCameraId(String cameraId) { |
+ if (cameraThreadHandler != null) { |
+ throw new RuntimeException("Changing camera id on running camera."); |
+ } |
+ final CameraCharacteristics characteristics; |
+ try { |
+ characteristics = cameraManager.getCameraCharacteristics(cameraId); |
+ } catch (CameraAccessException e) { |
+ throw new RuntimeException("Camera access exception: " + e); |
+ } |
+ this.cameraId = cameraId; |
+ isFrontCamera = characteristics.get(CameraCharacteristics.LENS_FACING) |
+ == CameraMetadata.LENS_FACING_FRONT; |
+ /* |
+ * Clockwise angle through which the output image needs to be rotated to be upright on the |
+ * device screen in its native orientation. |
+ * Also defines the direction of rolling shutter readout, which is from top to bottom in the |
+ * sensor's coordinate system. |
+ * Units: Degrees of clockwise rotation; always a multiple of 90 |
+ */ |
+ cameraOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION); |
+ } |
+ |
+ private void onError2(String errorDescription) { |
+ checkIsOnCameraThread(); |
+ Logging.e(TAG, errorDescription); |
+ synchronized (cameraStateLock) { |
+ switch (cameraState) { |
+ case STARTING: |
+ capturerObserver.onCapturerStarted(false /* success */); |
+ // fall through |
+ case RUNNING: |
+ if (eventsHandler != null) { |
+ eventsHandler.onCameraError(errorDescription); |
+ } |
+ break; |
+ case IDLE: |
+ throw new IllegalStateException("onError() called while in IDLE state."); |
+ default: |
+ throw new RuntimeException("Unknown camera state: " + cameraState); |
+ } |
+ } |
+ closeAndRelease(); |
+ } |
+ |
+ private void closeAndRelease() { |
+ checkIsOnCameraThread(); |
+ Logging.d(TAG, "Close and release."); |
+ |
+ synchronized (handlerLock) { |
+ // Remove all pending Runnables posted from |this|. |
+ cameraThreadHandler.removeCallbacksAndMessages(this /* token */); |
+ // Set handler to null so that no more Runnables can be posted. |
+ cameraThreadHandler = null; |
+ } |
+ applicationContext = null; |
+ capturerObserver = null; |
+ if (cameraStatistics != null) { |
+ cameraStatistics.release(); |
+ cameraStatistics = null; |
+ } |
+ if (surfaceTextureHelper != null) { |
+ surfaceTextureHelper.stopListening(); |
+ surfaceTextureHelper = null; |
+ } |
+ if (captureSession != null) { |
+ captureSession.close(); |
+ captureSession = null; |
+ } |
+ if (surface != null) { |
+ surface.release(); |
+ surface = null; |
+ } |
+ if (cameraDevice != null) { |
+ // TODO(magjed): Timeout for onClosed(). cameraState must be set. |
+ cameraDevice.close(); |
+ cameraDevice = null; |
+ } |
+ } |
+ |
+  // Note that this actually opens the camera device. The Camera2 callbacks are delivered on the
+  // handler passed to openCamera(), which here is |cameraThreadHandler|, i.e. the camera thread.
+ @Override |
+ public void startCapture( |
+ final int requestedWidth, final int requestedHeight, final int requestedFramerate, |
+ final SurfaceTextureHelper surfaceTextureHelper, final Context applicationContext, |
+ final CapturerObserver capturerObserver) { |
+ Logging.d(TAG, "startCapture requested: " + requestedWidth + "x" + requestedHeight |
+ + "@" + requestedFramerate); |
+ if (surfaceTextureHelper == null) { |
+ throw new IllegalArgumentException("surfaceTextureHelper not set."); |
+ } |
+ if (applicationContext == null) { |
+ throw new IllegalArgumentException("applicationContext not set."); |
+ } |
+ if (capturerObserver == null) { |
+ throw new IllegalArgumentException("capturerObserver not set."); |
+ } |
+ synchronized (cameraStateLock) { |
+ if (cameraState != CameraState.IDLE) { |
+ throw new IllegalStateException("Unexpected camera state for startCapture: " + cameraState); |
+ } |
+ cameraState = CameraState.STARTING; |
+ } |
+ synchronized (handlerLock) { |
+ this.cameraThreadHandler = surfaceTextureHelper.getHandler(); |
+ this.surfaceTextureHelper = surfaceTextureHelper; |
+ this.applicationContext = applicationContext; |
+ this.capturerObserver = capturerObserver; |
+ this.firstFrameReported = false; |
+ |
+ // Remember the requested format in case we want to switch cameras. |
+ this.requestedWidth = requestedWidth; |
+ this.requestedHeight = requestedHeight; |
+ this.requestedFramerate = requestedFramerate; |
+ |
+ CameraCharacteristics cameraCharacteristics = null; |
+ try { |
+ cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId); |
+ } catch (CameraAccessException e) { |
+ Logging.e(TAG, "getCameraCharacteristics(): " + e); |
+ // TODO(magjed); |
+ } |
+ |
+ final StreamConfigurationMap streamMap = |
+ cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); |
+ final Size[] supportedSizes = streamMap.getOutputSizes(SurfaceTexture.class); |
+ final Range<Integer>[] supportedFpsRanges = |
+ cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES); |
+ if (supportedSizes == null || supportedFpsRanges == null || supportedSizes.length == 0 |
+ || supportedFpsRanges.length == 0) { |
+ Logging.e(TAG, "No supported camera output sizes"); |
+ // TODO(magjed). |
+ } |
+ |
+ final CaptureFormat.Size bestSize = CameraEnumerationAndroid.getClosestSupportedSize( |
+ Camera2Enumerator.convertSizes(supportedSizes), requestedWidth, requestedHeight); |
+ |
+      // Some LEGACY camera implementations use fps rates that are multiplied by 1000. Make sure
+      // all values are multiplied by 1000 for consistency.
+ this.fpsUnitFactor = (supportedFpsRanges[0].getUpper() > 1000) ? 1 : 1000; |
+ |
+ final CaptureFormat.FramerateRange bestFpsRange = |
+ CameraEnumerationAndroid.getClosestSupportedFramerate( |
+ Camera2Enumerator.convertFramerates(supportedFpsRanges, fpsUnitFactor), |
+ requestedFramerate); |
+ |
+ this.captureFormat = new CaptureFormat(bestSize, bestFpsRange); |
+ Logging.d(TAG, "Using capture format: " + captureFormat); |
+ |
+ try { |
+ Logging.d(TAG, "Opening camera " + cameraId); |
+ if (eventsHandler != null) { |
+ int cameraIndex = -1; |
+ try { |
+ cameraIndex = Integer.parseInt(cameraId); |
+ } catch (NumberFormatException e) { |
+ Logging.d(TAG, "External camera with non-int identifier: " + cameraId); |
+ } |
+ eventsHandler.onCameraOpening(cameraIndex); |
+ } |
+ cameraManager.openCamera(cameraId, new CameraStateCallback(), cameraThreadHandler); |
+ } catch (CameraAccessException e) { |
+ Logging.e(TAG, "Failed to open camera.", e); |
+ capturerObserver.onCapturerStarted(false /* success */); |
+ if (eventsHandler != null) { |
+ eventsHandler.onCameraError("Failed to open camera: " + e); |
+ } |
+ synchronized (cameraStateLock) { |
+ cameraState = CameraState.IDLE; |
+ cameraStateLock.notifyAll(); |
+ } |
+ } |
+ } |
+ } |
+ |
+ final class CameraStateCallback extends CameraDevice.StateCallback { |
+ private String getErrorDescription(int errorCode) { |
+ switch (errorCode) { |
+ case CameraDevice.StateCallback.ERROR_CAMERA_DEVICE: |
+ return "Camera device has encountered a fatal error."; |
+ case CameraDevice.StateCallback.ERROR_CAMERA_DISABLED: |
+ return "Camera device could not be opened due to a device policy."; |
+ case CameraDevice.StateCallback.ERROR_CAMERA_IN_USE: |
+ return "Camera device is in use already."; |
+ case CameraDevice.StateCallback.ERROR_CAMERA_SERVICE: |
+ return "Camera service has encountered a fatal error."; |
+ case CameraDevice.StateCallback.ERROR_MAX_CAMERAS_IN_USE: |
+ return "Camera device could not be opened because" |
+ + " there are too many other open camera devices."; |
+ default: |
+ return "Unknown camera error: " + errorCode; |
+ } |
+ } |
+ |
+ @Override |
+ public void onDisconnected(CameraDevice camera) { |
+ checkIsOnCameraThread(); |
+ cameraDevice = camera; |
+ onError2("Camera disconnected."); |
+ } |
+ |
+ @Override |
+ public void onError(CameraDevice camera, int errorCode) { |
+ checkIsOnCameraThread(); |
+ cameraDevice = camera; |
+ onError2(getErrorDescription(errorCode)); |
+ } |
+ |
+ @Override |
+ public void onOpened(CameraDevice camera) { |
+ checkIsOnCameraThread(); |
+ Logging.d(TAG, "Camera opened."); |
+ synchronized (cameraStateLock) { |
+ if (cameraState != CameraState.STARTING) { |
+ throw new IllegalStateException("Unexpected state when camera opened: " + cameraState); |
+ } |
+ } |
+ |
+ cameraDevice = camera; |
+ final SurfaceTexture surfaceTexture = surfaceTextureHelper.getSurfaceTexture(); |
+      // |captureFormat.size| was picked from the sizes reported by
+      // StreamConfigurationMap.getOutputSizes(SurfaceTexture.class), so it is a supported size.
+ surfaceTexture.setDefaultBufferSize(captureFormat.size.width, captureFormat.size.height); |
+ surface = new Surface(surfaceTexture); |
+ try { |
+ camera.createCaptureSession( |
+ Arrays.asList(surface), new CaptureSessionCallback(), cameraThreadHandler); |
+ } catch (CameraAccessException e) { |
+ onError2("Failed to create capture session. " + e); |
+ } |
+ } |
+ |
+ @Override |
+ public void onClosed(CameraDevice camera) { |
+ Logging.d(TAG, "Camera device closed."); |
+ if (cameraThreadHandler != null) { |
+ throw new IllegalStateException("Camera thread handler should be null when closing."); |
+ } |
+ if (eventsHandler != null) { |
+ eventsHandler.onCameraClosed(); |
+ } |
+ synchronized (cameraStateLock) { |
+ cameraState = CameraState.IDLE; |
+ cameraStateLock.notifyAll(); |
+ } |
+ } |
+ } |
+ |
+ final class CaptureSessionCallback extends CameraCaptureSession.StateCallback { |
+ @Override |
+ public void onConfigureFailed(CameraCaptureSession session) { |
+ checkIsOnCameraThread(); |
+ captureSession = session; |
+ onError2("Failed to configure capture session."); |
+ } |
+ |
+ @Override |
+ public void onConfigured(CameraCaptureSession session) { |
+ checkIsOnCameraThread(); |
+ Logging.d(TAG, "Camera capture session configured."); |
+ captureSession = session; |
+ try { |
+ /* |
+ * The viable options for video capture requests are: |
+ * TEMPLATE_PREVIEW: High frame rate is given priority over the highest-quality |
+ * post-processing. |
+ * TEMPLATE_RECORD: Stable frame rate is used, and post-processing is set for recording |
+ * quality. |
+ */ |
+ final CaptureRequest.Builder captureRequestBuilder = |
+ cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD); |
+ // Set auto exposure fps range. |
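+        // captureFormat.framerate is stored with |fpsUnitFactor| applied, so divide it back out
+        // to get the unit this camera reports its ranges in.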
+ captureRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, new Range<Integer>( |
+ captureFormat.framerate.min / fpsUnitFactor, |
+ captureFormat.framerate.max / fpsUnitFactor)); |
+ captureRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON); |
+ captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false); |
+ |
+ captureRequestBuilder.addTarget(surface); |
+ session.setRepeatingRequest( |
+ captureRequestBuilder.build(), new CameraCaptureCallback(), cameraThreadHandler); |
+ } catch (CameraAccessException e) { |
+ onError2("Failed to start capture request. " + e); |
+ return; |
+ } |
+ |
+ Logging.d(TAG, "Camera device successfully started."); |
+ surfaceTextureHelper.startListening(VideoCapturerAndroid2.this); |
+ capturerObserver.onCapturerStarted(true /* success */); |
+ cameraStatistics = new CameraStatistics(surfaceTextureHelper, eventsHandler); |
+ synchronized (cameraStateLock) { |
+ if (cameraState == CameraState.STARTING) { |
+ cameraState = CameraState.RUNNING; |
+ cameraStateLock.notifyAll(); |
+ } else { |
+ throw new IllegalStateException("Unexpected camera state when starting: " + cameraState); |
+ } |
+ } |
+ } |
+ } |
+ |
+ final class CameraCaptureCallback extends CameraCaptureSession.CaptureCallback { |
+ static final int MAX_CONSECUTIVE_CAMERA_CAPTURE_FAILURES = 10; |
+ int consecutiveCameraCaptureFailures; |
+ |
+ @Override |
+ public void onCaptureFailed( |
+ CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) { |
+ checkIsOnCameraThread(); |
+ ++consecutiveCameraCaptureFailures; |
+ if (consecutiveCameraCaptureFailures > MAX_CONSECUTIVE_CAMERA_CAPTURE_FAILURES) { |
+ onError2("Capture failed " + consecutiveCameraCaptureFailures + " consecutive times."); |
+ } |
+ } |
+ |
+ @Override |
+ public void onCaptureCompleted( |
+ CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) { |
+ checkIsOnCameraThread(); |
+ consecutiveCameraCaptureFailures = 0; |
+ } |
+ } |
+ |
+ // Switch camera to the next valid camera id. This can only be called while |
+ // the camera is running. |
+ @Override |
+ public void switchCamera(final CameraSwitchHandler switchEventsHandler) { |
+ final String[] cameraIds; |
+ try { |
+ cameraIds = cameraManager.getCameraIdList(); |
+ } catch (CameraAccessException e) { |
+ if (switchEventsHandler != null) { |
+ switchEventsHandler.onCameraSwitchError("Could not get camera ids: " + e); |
+ } |
+ return; |
+ } |
+ if (cameraIds.length < 2) { |
+ if (switchEventsHandler != null) { |
+ switchEventsHandler.onCameraSwitchError("No camera to switch to."); |
+ } |
+ return; |
+ } |
+    // Do not handle multiple camera switch requests, to avoid blocking the camera thread with a
+    // queue of switch requests.
+ synchronized (pendingCameraSwitchLock) { |
+ if (pendingCameraSwitch) { |
+ Logging.w(TAG, "Ignoring camera switch request."); |
+ if (switchEventsHandler != null) { |
+ switchEventsHandler.onCameraSwitchError("Pending camera switch already in progress."); |
+ } |
+ return; |
+ } |
+ pendingCameraSwitch = true; |
+ } |
+ // Check that camera is running before trying to switch. |
+ synchronized (cameraStateLock) { |
+ if (cameraState == CameraState.IDLE) { |
+ Logging.e(TAG, "Calling swithCamera() on stopped camera."); |
+ if (switchEventsHandler != null) { |
+ switchEventsHandler.onCameraSwitchError("Camera is stopped."); |
+ } |
+ return; |
+ } |
+ } |
+ // Calculate new camera index and camera id. |
+ final int currentCameraIndex = Arrays.asList(cameraIds).indexOf(cameraId); |
+ if (currentCameraIndex == -1) { |
+ Logging.e(TAG, "Couldn't find current camera id " + cameraId + " in list of camera ids: " |
+ + Arrays.toString(cameraIds)); |
+ } |
+ final int newCameraIndex = (currentCameraIndex + 1) % cameraIds.length; |
+ final String newCameraId = cameraIds[newCameraIndex]; |
+ |
+ // Remember parameters. |
+ final SurfaceTextureHelper surfaceTextureHelper = this.surfaceTextureHelper; |
+ final Context applicationContext = this.applicationContext; |
+ final CapturerObserver capturerObserver = this.capturerObserver; |
+ |
+ // Make the switch. |
+ stopCapture(); |
+ // TODO(magjed): Handle failure if camera does not exist anymore? |
+ // TODO(magjed): Synchronize with changeCaptureFormat. |
+ setCameraId(newCameraId); |
+ startCapture(requestedWidth, requestedHeight, requestedFramerate, surfaceTextureHelper, |
+ applicationContext, capturerObserver); |
+    // TODO(magjed): Should we wait until the camera has actually started before calling
+    // onCameraSwitchDone()?
+ if (switchEventsHandler != null) { |
+ switchEventsHandler.onCameraSwitchDone(isFrontCamera); |
+ } |
+ synchronized (pendingCameraSwitchLock) { |
+ pendingCameraSwitch = false; |
+ } |
+ } |
+ |
+  // Requests a new output format from the video capturer. Frames captured by the camera will be
+  // scaled and/or dropped by the video capturer. It does not matter if width and height are
+  // flipped, i.e. |width| = 640, |height| = 480 produces the same result as |width| = 480,
+  // |height| = 640.
+ // TODO(magjed/perkj): Document what this function does. Change name? |
+ @Override |
+ public void onOutputFormatRequest(final int width, final int height, final int framerate) { |
+ maybePostOnCameraThread(new Runnable() { |
+ @Override |
+ public void run() { |
+ if (cameraDevice == null) { |
+ Logging.e(TAG, "Calling onOutputFormatRequest() on stopped camera."); |
+ return; |
+ } |
+ Logging.d(TAG, |
+ "onOutputFormatRequestOnCameraThread: " + width + "x" + height + "@" + framerate); |
+ capturerObserver.onOutputFormatRequest(width, height, framerate); |
+ } |
+ }); |
+ } |
+ |
+ // Reconfigure the camera to capture in a new format. This should only be called while the camera |
+ // is running. |
+ @Override |
+ public void changeCaptureFormat(final int width, final int height, final int framerate) { |
+ // Remember parameters. |
+ final SurfaceTextureHelper surfaceTextureHelper = this.surfaceTextureHelper; |
+ final Context applicationContext = this.applicationContext; |
+ final CapturerObserver capturerObserver = this.capturerObserver; |
+ // Make the switch. |
+ stopCapture(); |
+ // TODO(magjed): Just recreate session. |
+ // TODO(magjed): Handle failure if camera does not exist anymore? |
+ // TODO(magjed): Synchronize cameraId |
+ startCapture(width, height, framerate, |
+ surfaceTextureHelper, applicationContext, capturerObserver); |
+ } |
+ |
+ @Override |
+ public List<CaptureFormat> getSupportedFormats() { |
+    // TODO(magjed): Synchronize with switchCamera().
+ // return Camera2Enumerator.getSupportedFormats(cameraId, cameraManager); |
+ return new ArrayList<CaptureFormat>(); |
+ } |
+ |
+ // Dispose the SurfaceTextureHelper. This needs to be done manually, otherwise the |
+ // SurfaceTextureHelper thread and resources will not be garbage collected. |
+ @Override |
+ public void dispose() { |
+ synchronized (cameraStateLock) { |
+ if (cameraState != CameraState.IDLE) { |
+ throw new IllegalStateException("Unexpected camera state for dispose: " + cameraState); |
+ } |
+ } |
+ } |
+ |
+ // Blocks until camera is known to be stopped. |
+ @Override |
+ public void stopCapture() { |
+ Logging.d(TAG, "stopCapture"); |
+ synchronized (cameraStateLock) { |
+ if (cameraState != CameraState.STARTING && cameraState != CameraState.RUNNING) { |
+ throw new IllegalStateException("stopCapture called for already stopped camera."); |
+ } |
+ } |
+ final boolean didPost = maybePostOnCameraThread(new Runnable() { |
+ @Override |
+ public void run() { |
+ Logging.d(TAG, "stopCaptureOnCameraThread"); |
+        // With the Camera2 API, capture is started asynchronously. Wait for a pending start
+        // request to complete before stopping, so the camera is not torn down mid-start.
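+        // The state leaves STARTING (and |cameraStateLock| is notified) when
+        // CaptureSessionCallback.onConfigured() runs, when opening the camera fails in
+        // startCapture(), or when the device reports onClosed() after an error.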
+ synchronized (cameraStateLock) { |
+ while (cameraState == CameraState.STARTING) { |
+ // TODO(magjed): Timeout? |
+ ThreadUtils.waitUninterruptibly(cameraStateLock); |
+ } |
+ } |
+ // Stop capture. |
+ closeAndRelease(); |
+ } |
+ }); |
+ if (!didPost) { |
+ Logging.e(TAG, "Calling stopCapture() for already stopped camera."); |
+ return; |
+ } |
+ // Block until camera is stopped. |
+ synchronized (cameraStateLock) { |
+ while (cameraState != CameraState.IDLE) { |
+        // TODO(magjed): Timeout?
+ ThreadUtils.waitUninterruptibly(cameraStateLock); |
+ } |
+ } |
+ Logging.d(TAG, "stopCapture done"); |
+ } |
+ |
+ private boolean maybePostOnCameraThread(Runnable runnable) { |
+ return maybePostDelayedOnCameraThread(0 /* delayMs */, runnable); |
+ } |
+ |
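+  // postAtTime() is used with |this| as the token so that closeAndRelease() can cancel every
+  // pending runnable at once via removeCallbacksAndMessages(this).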
+ private boolean maybePostDelayedOnCameraThread(int delayMs, Runnable runnable) { |
+ synchronized (handlerLock) { |
+ return cameraThreadHandler != null |
+ && cameraThreadHandler.postAtTime( |
+ runnable, this /* token */, SystemClock.uptimeMillis() + delayMs); |
+ } |
+ } |
+ |
+ private int getDeviceOrientation() { |
+ int orientation = 0; |
+ |
+ WindowManager wm = (WindowManager) applicationContext.getSystemService( |
+ Context.WINDOW_SERVICE); |
+ switch(wm.getDefaultDisplay().getRotation()) { |
+ case Surface.ROTATION_90: |
+ orientation = 90; |
+ break; |
+ case Surface.ROTATION_180: |
+ orientation = 180; |
+ break; |
+ case Surface.ROTATION_270: |
+ orientation = 270; |
+ break; |
+ case Surface.ROTATION_0: |
+ default: |
+ orientation = 0; |
+ break; |
+ } |
+ return orientation; |
+ } |
+ |
+ @Override |
+ public void onTextureFrameAvailable( |
+ int oesTextureId, float[] transformMatrix, long timestampNs) { |
+ if (cameraThreadHandler == null) { |
+ throw new IllegalStateException("onTextureFrameAvailable() called after stopCapture()."); |
+ } |
+ checkIsOnCameraThread(); |
+ if (eventsHandler != null && !firstFrameReported) { |
+ eventsHandler.onFirstFrameAvailable(); |
+ firstFrameReported = true; |
+ } |
+ |
+ int rotation; |
+ if (isFrontCamera) { |
+ // Undo the mirror that the OS "helps" us with. |
+ // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int) |
+ rotation = cameraOrientation + getDeviceOrientation(); |
+ transformMatrix = |
+ RendererCommon.multiplyMatrices(transformMatrix, RendererCommon.horizontalFlipMatrix()); |
+ } else { |
+ rotation = cameraOrientation - getDeviceOrientation(); |
+ } |
+ // Make sure |rotation| is between 0 and 360. |
+ rotation = (360 + rotation % 360) % 360; |
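+    // E.g. a back camera with |cameraOrientation| 90 on a device rotated to ROTATION_90 ends up
+    // with rotation (90 - 90) = 0, i.e. the frame is reported as upright.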
+ |
+ // Undo camera orientation - we report it as rotation instead. |
+ transformMatrix = RendererCommon.rotateTextureMatrix(transformMatrix, -cameraOrientation); |
+ |
+ cameraStatistics.addFrame(); |
+ capturerObserver.onTextureFrameCaptured(captureFormat.size.width, captureFormat.size.height, |
+ oesTextureId, transformMatrix, rotation, timestampNs); |
+ } |
+} |