Chromium Code Reviews| Index: webrtc/api/android/java/src/org/webrtc/Camera1Session.java |
| diff --git a/webrtc/api/android/java/src/org/webrtc/Camera1Session.java b/webrtc/api/android/java/src/org/webrtc/Camera1Session.java |
| new file mode 100644 |
| index 0000000000000000000000000000000000000000..940370d111f2378cec860043fd7d42acd8b79ab4 |
| --- /dev/null |
| +++ b/webrtc/api/android/java/src/org/webrtc/Camera1Session.java |
| @@ -0,0 +1,352 @@ |
| +/* |
| + * Copyright 2016 The WebRTC project authors. All Rights Reserved. |
| + * |
| + * Use of this source code is governed by a BSD-style license |
| + * that can be found in the LICENSE file in the root of the source |
| + * tree. An additional intellectual property rights grant can be found |
| + * in the file PATENTS. All contributing project authors may |
| + * be found in the AUTHORS file in the root of the source tree. |
| + */ |
| + |
| +package org.webrtc; |
| + |
| +import org.webrtc.CameraEnumerationAndroid.CaptureFormat; |
| + |
| +import android.content.Context; |
| +import android.os.Handler; |
| +import android.os.SystemClock; |
| +import android.view.Surface; |
| +import android.view.WindowManager; |
| + |
| +import java.io.IOException; |
| +import java.nio.ByteBuffer; |
| +import java.util.HashSet; |
| +import java.util.List; |
| +import java.util.Set; |
| +import java.util.concurrent.CountDownLatch; |
| +import java.util.concurrent.TimeUnit; |
| + |
@SuppressWarnings("deprecation")
public class Camera1Session implements CameraSession {
  private static final String TAG = "Camera1Session";
  // Number of preallocated preview buffers used on the byte-buffer capture path.
  private static final int NUMBER_OF_CAPTURE_BUFFERS = 3;

  private static enum SessionState { RUNNING, STOPPED };

  // Handler bound to the thread this session was created on. All camera
  // operations and |state| transitions happen on that thread.
  private final Handler cameraThreadHandler;
  private final CameraVideoCapturer.CameraEventsHandler eventsHandler;
  // True: frames are delivered as OES textures; false: as NV21-style byte buffers.
  private final boolean captureToTexture;
  private final Context applicationContext;
  private final CameraVideoCapturer.CapturerObserver capturerObserver;
  private final SurfaceTextureHelper surfaceTextureHelper;
  private final int cameraId;
  // Requested capture dimensions/framerate; the actual format is |captureFormat|.
  private final int width;
  private final int height;
  private final int framerate;
  private final android.hardware.Camera camera;
  private final android.hardware.Camera.CameraInfo info;
  private final CaptureFormat captureFormat;
  private final CameraVideoCapturer.CameraStatistics cameraStatistics;

  // Accessed only on the camera thread (startCapturing and the posted stop runnable).
  private SessionState state;
  // Set once the first frame has been reported via eventsHandler.onFirstFrameAvailable().
  private boolean firstFrameReported = false;
| + public static void create( |
| + final CreateSessionCallback callback, |
| + final CameraVideoCapturer.CameraEventsHandler eventsHandler, final boolean captureToTexture, |
| + final Context applicationContext, final CameraVideoCapturer.CapturerObserver capturerObserver, |
| + final SurfaceTextureHelper surfaceTextureHelper, |
| + final int cameraId, final int width, final int height, final int framerate) { |
| + Logging.d(TAG, "Open camera " + cameraId); |
| + eventsHandler.onCameraOpening(cameraId); |
| + |
| + final android.hardware.Camera camera; |
| + try { |
| + camera = android.hardware.Camera.open(cameraId); |
| + } catch (RuntimeException e) { |
| + callback.onFailure(e.getMessage()); |
| + capturerObserver.onCapturerStarted(false); |
|
magjed_webrtc
2016/08/11 12:57:13
I guess this call could be moved into CameraCaptur
sakal
2016/08/15 09:11:58
Yes, it is very arbitrary. The reason I would like
|
| + return; |
| + } |
| + |
| + try { |
| + camera.setPreviewTexture(surfaceTextureHelper.getSurfaceTexture()); |
| + } catch (IOException e) { |
| + camera.release(); |
| + callback.onFailure(e.getMessage()); |
| + capturerObserver.onCapturerStarted(false); |
| + return; |
| + } |
| + |
| + final android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo(); |
| + android.hardware.Camera.getCameraInfo(cameraId, info); |
| + |
| + camera.setErrorCallback(new android.hardware.Camera.ErrorCallback() { |
| + @Override |
| + public void onError(int error, android.hardware.Camera camera) { |
| + String errorMessage; |
| + if (error == android.hardware.Camera.CAMERA_ERROR_SERVER_DIED) { |
| + errorMessage = "Camera server died!"; |
| + } else { |
| + errorMessage = "Camera error: " + error; |
| + } |
| + Logging.e(TAG, errorMessage); |
| + eventsHandler.onCameraError(errorMessage); |
| + } |
| + }); |
| + |
| + final android.hardware.Camera.Parameters parameters = camera.getParameters(); |
| + final CaptureFormat captureFormat = findClosestCaptureFormat( |
| + parameters, width, height, framerate); |
| + final Size pictureSize = findClosestPictureSize(parameters, width, height); |
| + |
| + updateCameraParameters(camera, parameters, captureFormat, captureToTexture); |
| + |
| + // Initialize the capture buffers. |
| + if (!captureToTexture) { |
| + final int frameSize = captureFormat.frameSize(); |
| + for (int i = 0; i < NUMBER_OF_CAPTURE_BUFFERS; ++i) { |
| + final ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize); |
| + camera.addCallbackBuffer(buffer.array()); |
| + } |
| + } |
| + |
| + // Calculate orientation manually and send it as CVO insted. |
| + camera.setDisplayOrientation(0 /* degrees */); |
| + |
| + callback.onDone(new Camera1Session( |
| + eventsHandler, captureToTexture, applicationContext, |
| + capturerObserver, surfaceTextureHelper, cameraId, width, height, framerate, |
| + camera, info, captureFormat)); |
| + } |
| + |
| + private static void updateCameraParameters(android.hardware.Camera camera, |
| + android.hardware.Camera.Parameters parameters, CaptureFormat captureFormat, |
| + boolean captureToTexture) { |
| + final List<String> focusModes = parameters.getSupportedFocusModes(); |
| + |
| + parameters.setPreviewFpsRange(captureFormat.framerate.min, captureFormat.framerate.max); |
| + parameters.setPreviewSize(captureFormat.width, captureFormat.height); |
| + if (!captureToTexture) { |
| + parameters.setPreviewFormat(captureFormat.imageFormat); |
| + } |
| + |
| + if (parameters.isVideoStabilizationSupported()) { |
| + parameters.setVideoStabilization(true); |
| + } |
| + if (focusModes.contains(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) { |
| + parameters.setFocusMode(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO); |
| + } |
| + camera.setParameters(parameters); |
| + } |
| + |
| + private static CaptureFormat findClosestCaptureFormat( |
| + android.hardware.Camera.Parameters parameters, int width, int height, int framerate) { |
| + // Find closest supported format for |width| x |height| @ |framerate|. |
| + final List<CaptureFormat.FramerateRange> supportedFramerates = |
| + Camera1Enumerator.convertFramerates(parameters.getSupportedPreviewFpsRange()); |
| + Logging.d(TAG, "Available fps ranges: " + supportedFramerates); |
| + |
| + final CaptureFormat.FramerateRange fpsRange = |
| + CameraEnumerationAndroid.getClosestSupportedFramerateRange( |
| + supportedFramerates, framerate); |
| + |
| + final Size previewSize = CameraEnumerationAndroid.getClosestSupportedSize( |
| + Camera1Enumerator.convertSizes(parameters.getSupportedPreviewSizes()), |
| + width, height); |
| + |
| + return new CaptureFormat(previewSize.width, previewSize.height, fpsRange); |
| + } |
| + |
| + private static Size findClosestPictureSize(android.hardware.Camera.Parameters parameters, |
| + int width, int height) { |
| + return CameraEnumerationAndroid.getClosestSupportedSize( |
| + Camera1Enumerator.convertSizes(parameters.getSupportedPictureSizes()), |
| + width, height); |
| + } |
| + |
  /**
   * Private constructor; instances are created via the static create() factory,
   * which has already opened and configured |camera|. Binds the session to the
   * calling thread (via a new Handler) and immediately starts capturing.
   */
  private Camera1Session(
      CameraVideoCapturer.CameraEventsHandler eventsHandler, boolean captureToTexture,
      Context applicationContext, CameraVideoCapturer.CapturerObserver capturerObserver,
      SurfaceTextureHelper surfaceTextureHelper,
      int cameraId, int width, int height, int framerate,
      android.hardware.Camera camera, android.hardware.Camera.CameraInfo info,
      CaptureFormat captureFormat) {
    Logging.d(TAG, "Create new camera1 session on camera " + cameraId);

    // Bound to the current thread's looper; stop() posts its work here.
    this.cameraThreadHandler = new Handler();
    this.eventsHandler = eventsHandler;
    this.captureToTexture = captureToTexture;
    this.applicationContext = applicationContext;
    this.capturerObserver = capturerObserver;
    this.surfaceTextureHelper = surfaceTextureHelper;
    this.cameraId = cameraId;
    this.width = width;
    this.height = height;
    this.framerate = framerate;
    this.camera = camera;
    this.info = info;
    this.captureFormat = captureFormat;
    cameraStatistics = new CameraVideoCapturer.CameraStatistics(
        surfaceTextureHelper, eventsHandler);

    // Starts the preview right away; the session is RUNNING once this returns.
    startCapturing();
  }
| + |
| + @Override |
| + public void stop() { |
| + Logging.d(TAG, "Stop camera1 session on camera " + cameraId); |
| + final CountDownLatch stopLatch = new CountDownLatch(1); |
| + |
| + cameraThreadHandler.post(new Runnable() { |
| + @Override |
| + public void run() { |
| + if (state != SessionState.STOPPED) { |
| + state = SessionState.STOPPED; |
| + capturerObserver.onCapturerStopped(); |
| + stopLatch.countDown(); |
| + stopInternal(); |
| + } |
| + } |
| + }); |
| + |
| + ThreadUtils.awaitUninterruptibly(stopLatch); |
| + } |
| + |
  // Transitions the session to RUNNING, registers the frame listener matching
  // the capture mode, and starts the camera preview. Must run on the camera
  // thread. On startPreview failure the error is reported through eventsHandler
  // and the session is torn down.
  private void startCapturing() {
    Logging.d(TAG, "Start capturing");
    checkIsOnCameraThread();

    state = SessionState.RUNNING;
    capturerObserver.onCapturerStarted(true);

    // The listener must be installed before startPreview so no frames are missed.
    if (captureToTexture) {
      listenForTextureFrames();
    } else {
      listenForBytebufferFrames();
    }
    try {
      camera.startPreview();
    } catch (RuntimeException e) {
      // startPreview can throw at runtime (e.g. camera service failure);
      // report the error and release all resources.
      eventsHandler.onCameraError(e.getMessage());
      state = SessionState.STOPPED;
      stopInternal();
    }
  }
| + |
  // Releases all camera resources. Must run on the camera thread with |state|
  // already set to STOPPED so no further frames are forwarded.
  private void stopInternal() {
    Logging.d(TAG, "Stop internal");
    checkIsOnCameraThread();

    surfaceTextureHelper.stopListening();
    cameraStatistics.release();

    // Note: stopPreview or other driver code might deadlock. Deadlock in
    // android.hardware.Camera._stopPreview(Native Method) has been observed on
    // Nexus 5 (hammerhead), OS version LMY48I.
    // NOTE(review): rapid startCapture/stopCapture sequences may interact badly
    // with this teardown - flagged in review, to be addressed in a separate CL.
    camera.stopPreview();
    camera.release();
    eventsHandler.onCameraClosed();

    Logging.d(TAG, "Stop done");
  }
| + |
| + private void listenForTextureFrames() { |
| + surfaceTextureHelper.startListening(new SurfaceTextureHelper.OnTextureFrameAvailableListener() { |
| + @Override |
| + public void onTextureFrameAvailable( |
| + int oesTextureId, float[] transformMatrix, long timestampNs) { |
| + checkIsOnCameraThread(); |
| + |
| + if (state != SessionState.RUNNING) { |
| + Logging.d(TAG, "Texture frame captured but camera is no longer running."); |
| + return; |
| + } |
| + |
| + if (!firstFrameReported) { |
| + eventsHandler.onFirstFrameAvailable(); |
| + firstFrameReported = true; |
| + } |
| + |
| + int rotation = getFrameOrientation(); |
| + if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) { |
| + // Undo the mirror that the OS "helps" us with. |
| + // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int) |
| + transformMatrix = RendererCommon.multiplyMatrices( |
| + transformMatrix, RendererCommon.horizontalFlipMatrix()); |
| + } |
| + cameraStatistics.addFrame(); |
| + capturerObserver.onTextureFrameCaptured(captureFormat.width, captureFormat.height, |
| + oesTextureId, transformMatrix, rotation, timestampNs); |
| + } |
| + }); |
| + } |
| + |
| + private void listenForBytebufferFrames() { |
| + camera.setPreviewCallbackWithBuffer(new android.hardware.Camera.PreviewCallback() { |
| + @Override |
| + public void onPreviewFrame(byte[] data, android.hardware.Camera callbackCamera) { |
| + checkIsOnCameraThread(); |
| + |
| + if (callbackCamera != camera) { |
| + Logging.e(TAG, "Callback from a different camera. This should never happen."); |
| + return; |
| + } |
| + |
| + if (state != SessionState.RUNNING) { |
| + Logging.d(TAG, "Bytebuffer frame captured but camera is no longer running."); |
| + return; |
| + } |
| + |
| + final long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime()); |
| + |
| + if (!firstFrameReported) { |
| + eventsHandler.onFirstFrameAvailable(); |
| + firstFrameReported = true; |
| + } |
| + |
| + cameraStatistics.addFrame(); |
| + capturerObserver.onByteBufferFrameCaptured(data, captureFormat.width, captureFormat.height, |
| + getFrameOrientation(), captureTimeNs); |
| + camera.addCallbackBuffer(data); |
| + } |
| + }); |
| + } |
| + |
| + private int getDeviceOrientation() { |
| + int orientation = 0; |
| + |
| + WindowManager wm = (WindowManager) applicationContext.getSystemService( |
| + Context.WINDOW_SERVICE); |
| + switch(wm.getDefaultDisplay().getRotation()) { |
| + case Surface.ROTATION_90: |
| + orientation = 90; |
| + break; |
| + case Surface.ROTATION_180: |
| + orientation = 180; |
| + break; |
| + case Surface.ROTATION_270: |
| + orientation = 270; |
| + break; |
| + case Surface.ROTATION_0: |
| + default: |
| + orientation = 0; |
| + break; |
| + } |
| + return orientation; |
| + } |
| + |
| + private int getFrameOrientation() { |
| + int rotation = getDeviceOrientation(); |
| + if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK) { |
| + rotation = 360 - rotation; |
| + } |
| + return (info.orientation + rotation) % 360; |
| + } |
| + |
| + private void checkIsOnCameraThread() { |
| + if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) { |
| + throw new IllegalStateException("Wrong thread"); |
| + } |
| + } |
| +} |