Index: webrtc/api/android/java/src/org/webrtc/Camera1Session.java |
diff --git a/webrtc/api/android/java/src/org/webrtc/Camera2Session.java b/webrtc/api/android/java/src/org/webrtc/Camera1Session.java |
similarity index 27% |
copy from webrtc/api/android/java/src/org/webrtc/Camera2Session.java |
copy to webrtc/api/android/java/src/org/webrtc/Camera1Session.java |
index 213e41d88e7421e11d1a8289afa02cd35b827e61..d47134def21bf99fb4c3a58e18f67c7a6e5d219b 100644 |
--- a/webrtc/api/android/java/src/org/webrtc/Camera2Session.java |
+++ b/webrtc/api/android/java/src/org/webrtc/Camera1Session.java |
@@ -13,311 +13,175 @@ package org.webrtc; |
import org.webrtc.CameraEnumerationAndroid.CaptureFormat; |
import org.webrtc.Metrics.Histogram; |
-import android.annotation.TargetApi; |
import android.content.Context; |
-import android.graphics.SurfaceTexture; |
-import android.hardware.camera2.CameraAccessException; |
-import android.hardware.camera2.CameraCaptureSession; |
-import android.hardware.camera2.CameraCharacteristics; |
-import android.hardware.camera2.CameraDevice; |
-import android.hardware.camera2.CameraManager; |
-import android.hardware.camera2.CameraMetadata; |
-import android.hardware.camera2.CaptureFailure; |
-import android.hardware.camera2.CaptureRequest; |
import android.os.Handler; |
-import android.util.Range; |
+import android.os.SystemClock; |
import android.view.Surface; |
import android.view.WindowManager; |
-import java.util.Arrays; |
+import java.io.IOException; |
+import java.nio.ByteBuffer; |
import java.util.List; |
import java.util.concurrent.CountDownLatch; |
import java.util.concurrent.TimeUnit; |
-@TargetApi(21) |
-public class Camera2Session implements CameraSession { |
- private static final String TAG = "Camera2Session"; |
+@SuppressWarnings("deprecation") |
+public class Camera1Session implements CameraSession { |
+ private static final String TAG = "Camera1Session"; |
+ private static final int NUMBER_OF_CAPTURE_BUFFERS = 3; |
- private static final Histogram camera2StartTimeMsHistogram = |
- Histogram.createCounts("WebRTC.Android.Camera2.StartTimeMs", 1, 10000, 50); |
- private static final Histogram camera2StopTimeMsHistogram = |
- Histogram.createCounts("WebRTC.Android.Camera2.StopTimeMs", 1, 10000, 50); |
+ private static final Histogram camera1StartTimeMsHistogram = |
+ Histogram.createCounts("WebRTC.Android.Camera1.StartTimeMs", 1, 10000, 50); |
+ private static final Histogram camera1StopTimeMsHistogram = |
+ Histogram.createCounts("WebRTC.Android.Camera1.StopTimeMs", 1, 10000, 50); |
private static enum SessionState { RUNNING, STOPPED }; |
private final Handler cameraThreadHandler; |
- private final CreateSessionCallback callback; |
private final Events events; |
+ private final boolean captureToTexture; |
private final Context applicationContext; |
- private final CameraManager cameraManager; |
private final SurfaceTextureHelper surfaceTextureHelper; |
- private final String cameraId; |
+ private final int cameraId; |
private final int width; |
private final int height; |
private final int framerate; |
+ private final android.hardware.Camera camera; |
+ private final android.hardware.Camera.CameraInfo info; |
+ private final CaptureFormat captureFormat; |
+ // Used only for stats. Only used on the camera thread. |
+ private final long constructionTimeNs; // Construction time of this class. |
- // Initialized at start |
- private CameraCharacteristics cameraCharacteristics; |
- private int cameraOrientation; |
- private boolean isCameraFrontFacing; |
- private int fpsUnitFactor; |
- private CaptureFormat captureFormat; |
- |
- // Initialized when camera opens |
- private CameraDevice cameraDevice; |
- private Surface surface; |
- |
- // Initialized when capture session is created |
- private CameraCaptureSession captureSession; |
- |
- // State |
- private SessionState state = SessionState.RUNNING; |
+ private SessionState state; |
private boolean firstFrameReported = false; |
- // Used only for stats. Only used on the camera thread. |
- private final long constructionTimeNs; // Construction time of this class. |
+ public static void create( |
+ final CreateSessionCallback callback, final Events events, |
+ final boolean captureToTexture, final Context applicationContext, |
+ final SurfaceTextureHelper surfaceTextureHelper, |
+ final int cameraId, final int width, final int height, final int framerate) { |
+ final long constructionTimeNs = System.nanoTime(); |
+ Logging.d(TAG, "Open camera " + cameraId); |
+ events.onCameraOpening(); |
- private class CameraStateCallback extends CameraDevice.StateCallback { |
- private String getErrorDescription(int errorCode) { |
- switch (errorCode) { |
- case CameraDevice.StateCallback.ERROR_CAMERA_DEVICE: |
- return "Camera device has encountered a fatal error."; |
- case CameraDevice.StateCallback.ERROR_CAMERA_DISABLED: |
- return "Camera device could not be opened due to a device policy."; |
- case CameraDevice.StateCallback.ERROR_CAMERA_IN_USE: |
- return "Camera device is in use already."; |
- case CameraDevice.StateCallback.ERROR_CAMERA_SERVICE: |
- return "Camera service has encountered a fatal error."; |
- case CameraDevice.StateCallback.ERROR_MAX_CAMERAS_IN_USE: |
- return "Camera device could not be opened because" |
- + " there are too many other open camera devices."; |
- default: |
- return "Unknown camera error: " + errorCode; |
- } |
+ final android.hardware.Camera camera; |
+ try { |
+ camera = android.hardware.Camera.open(cameraId); |
+ } catch (RuntimeException e) { |
+ callback.onFailure(e.getMessage()); |
+ return; |
} |
- @Override |
- public void onDisconnected(CameraDevice camera) { |
- checkIsOnCameraThread(); |
- reportError("Camera disconnected."); |
+ try { |
+ camera.setPreviewTexture(surfaceTextureHelper.getSurfaceTexture()); |
+ } catch (IOException e) { |
+ camera.release(); |
+ callback.onFailure(e.getMessage()); |
+ return; |
} |
- @Override |
- public void onError(CameraDevice camera, int errorCode) { |
- checkIsOnCameraThread(); |
- reportError(getErrorDescription(errorCode)); |
- } |
+ final android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo(); |
+ android.hardware.Camera.getCameraInfo(cameraId, info); |
+ |
+ final android.hardware.Camera.Parameters parameters = camera.getParameters(); |
+ final CaptureFormat captureFormat = findClosestCaptureFormat( |
+ parameters, width, height, framerate); |
+ final Size pictureSize = findClosestPictureSize(parameters, width, height); |
- @Override |
- public void onOpened(CameraDevice camera) { |
- checkIsOnCameraThread(); |
- |
- Logging.d(TAG, "Camera opened."); |
- cameraDevice = camera; |
- |
- final SurfaceTexture surfaceTexture = surfaceTextureHelper.getSurfaceTexture(); |
- surfaceTexture.setDefaultBufferSize(captureFormat.width, captureFormat.height); |
- surface = new Surface(surfaceTexture); |
- try { |
- camera.createCaptureSession( |
- Arrays.asList(surface), new CaptureSessionCallback(), cameraThreadHandler); |
- } catch (CameraAccessException e) { |
- reportError("Failed to create capture session. " + e); |
- return; |
+ updateCameraParameters(camera, parameters, captureFormat, captureToTexture); |
+ |
+ // Initialize the capture buffers. |
+ if (!captureToTexture) { |
+ final int frameSize = captureFormat.frameSize(); |
+ for (int i = 0; i < NUMBER_OF_CAPTURE_BUFFERS; ++i) { |
+ final ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize); |
+ camera.addCallbackBuffer(buffer.array()); |
} |
} |
- @Override |
- public void onClosed(CameraDevice camera) { |
- checkIsOnCameraThread(); |
+    // Calculate orientation manually and send it as CVO instead. |
+ camera.setDisplayOrientation(0 /* degrees */); |
- Logging.d(TAG, "Camera device closed."); |
- events.onCameraClosed(Camera2Session.this); |
- } |
+ callback.onDone(new Camera1Session( |
+ events, captureToTexture, applicationContext, surfaceTextureHelper, |
+ cameraId, width, height, framerate, |
+ camera, info, captureFormat, constructionTimeNs)); |
} |
- private class CaptureSessionCallback extends CameraCaptureSession.StateCallback { |
- @Override |
- public void onConfigureFailed(CameraCaptureSession session) { |
- checkIsOnCameraThread(); |
- session.close(); |
- reportError("Failed to configure capture session."); |
- } |
+ private static void updateCameraParameters(android.hardware.Camera camera, |
+ android.hardware.Camera.Parameters parameters, CaptureFormat captureFormat, |
+ boolean captureToTexture) { |
+ final List<String> focusModes = parameters.getSupportedFocusModes(); |
- @Override |
- public void onConfigured(CameraCaptureSession session) { |
- checkIsOnCameraThread(); |
- Logging.d(TAG, "Camera capture session configured."); |
- captureSession = session; |
- try { |
- /* |
- * The viable options for video capture requests are: |
- * TEMPLATE_PREVIEW: High frame rate is given priority over the highest-quality |
- * post-processing. |
- * TEMPLATE_RECORD: Stable frame rate is used, and post-processing is set for recording |
- * quality. |
- */ |
- final CaptureRequest.Builder captureRequestBuilder = |
- cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD); |
- // Set auto exposure fps range. |
- captureRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, new Range<Integer>( |
- captureFormat.framerate.min / fpsUnitFactor, |
- captureFormat.framerate.max / fpsUnitFactor)); |
- captureRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE, |
- CaptureRequest.CONTROL_AE_MODE_ON); |
- captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false); |
- |
- captureRequestBuilder.addTarget(surface); |
- session.setRepeatingRequest( |
- captureRequestBuilder.build(), new CameraCaptureCallback(), cameraThreadHandler); |
- } catch (CameraAccessException e) { |
- reportError("Failed to start capture request. " + e); |
- return; |
- } |
- |
- surfaceTextureHelper.startListening( |
- new SurfaceTextureHelper.OnTextureFrameAvailableListener() { |
- @Override |
- public void onTextureFrameAvailable( |
- int oesTextureId, float[] transformMatrix, long timestampNs) { |
- checkIsOnCameraThread(); |
- |
- if (state != SessionState.RUNNING) { |
- Logging.d(TAG, "Texture frame captured but camera is no longer running."); |
- surfaceTextureHelper.returnTextureFrame(); |
- return; |
- } |
- |
- if (!firstFrameReported) { |
- firstFrameReported = true; |
- final int startTimeMs = |
- (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs); |
- camera2StartTimeMsHistogram.addSample(startTimeMs); |
- } |
- |
- int rotation = getFrameOrientation(); |
- if (isCameraFrontFacing) { |
- // Undo the mirror that the OS "helps" us with. |
- // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int) |
- transformMatrix = RendererCommon.multiplyMatrices( |
- transformMatrix, RendererCommon.horizontalFlipMatrix()); |
- } |
- |
- // Undo camera orientation - we report it as rotation instead. |
- transformMatrix = RendererCommon.rotateTextureMatrix( |
- transformMatrix, -cameraOrientation); |
- |
- events.onTextureFrameCaptured(Camera2Session.this, captureFormat.width, |
- captureFormat.height, oesTextureId, transformMatrix, rotation, timestampNs); |
- } |
- }); |
- Logging.d(TAG, "Camera device successfully started."); |
- callback.onDone(Camera2Session.this); |
+ parameters.setPreviewFpsRange(captureFormat.framerate.min, captureFormat.framerate.max); |
+ parameters.setPreviewSize(captureFormat.width, captureFormat.height); |
+ if (!captureToTexture) { |
+ parameters.setPreviewFormat(captureFormat.imageFormat); |
} |
- } |
- private class CameraCaptureCallback extends CameraCaptureSession.CaptureCallback { |
- @Override |
- public void onCaptureFailed( |
- CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) { |
- Logging.d(TAG, "Capture failed: " + failure); |
+ if (parameters.isVideoStabilizationSupported()) { |
+ parameters.setVideoStabilization(true); |
} |
+ if (focusModes.contains(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) { |
+ parameters.setFocusMode(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO); |
+ } |
+ camera.setParameters(parameters); |
} |
- public static void create( |
- CreateSessionCallback callback, Events events, |
- Context applicationContext, CameraManager cameraManager, |
- SurfaceTextureHelper surfaceTextureHelper, |
- String cameraId, int width, int height, int framerate) { |
- new Camera2Session( |
- callback, events, |
- applicationContext, cameraManager, |
- surfaceTextureHelper, |
- cameraId, width, height, framerate); |
+ private static CaptureFormat findClosestCaptureFormat( |
+ android.hardware.Camera.Parameters parameters, int width, int height, int framerate) { |
+ // Find closest supported format for |width| x |height| @ |framerate|. |
+ final List<CaptureFormat.FramerateRange> supportedFramerates = |
+ Camera1Enumerator.convertFramerates(parameters.getSupportedPreviewFpsRange()); |
+ Logging.d(TAG, "Available fps ranges: " + supportedFramerates); |
+ |
+ final CaptureFormat.FramerateRange fpsRange = |
+ CameraEnumerationAndroid.getClosestSupportedFramerateRange( |
+ supportedFramerates, framerate); |
+ |
+ final Size previewSize = CameraEnumerationAndroid.getClosestSupportedSize( |
+ Camera1Enumerator.convertSizes(parameters.getSupportedPreviewSizes()), |
+ width, height); |
+ |
+ return new CaptureFormat(previewSize.width, previewSize.height, fpsRange); |
} |
- private Camera2Session( |
- CreateSessionCallback callback, Events events, |
- Context applicationContext, CameraManager cameraManager, |
- SurfaceTextureHelper surfaceTextureHelper, |
- String cameraId, int width, int height, int framerate) { |
- Logging.d(TAG, "Create new camera2 session on camera " + cameraId); |
+ private static Size findClosestPictureSize(android.hardware.Camera.Parameters parameters, |
+ int width, int height) { |
+ return CameraEnumerationAndroid.getClosestSupportedSize( |
+ Camera1Enumerator.convertSizes(parameters.getSupportedPictureSizes()), |
+ width, height); |
+ } |
- constructionTimeNs = System.nanoTime(); |
+ private Camera1Session( |
+ Events events, boolean captureToTexture, |
+ Context applicationContext, SurfaceTextureHelper surfaceTextureHelper, |
+ int cameraId, int width, int height, int framerate, |
+ android.hardware.Camera camera, android.hardware.Camera.CameraInfo info, |
+ CaptureFormat captureFormat, long constructionTimeNs) { |
+ Logging.d(TAG, "Create new camera1 session on camera " + cameraId); |
this.cameraThreadHandler = new Handler(); |
- this.callback = callback; |
this.events = events; |
+ this.captureToTexture = captureToTexture; |
this.applicationContext = applicationContext; |
- this.cameraManager = cameraManager; |
this.surfaceTextureHelper = surfaceTextureHelper; |
this.cameraId = cameraId; |
this.width = width; |
this.height = height; |
this.framerate = framerate; |
+ this.camera = camera; |
+ this.info = info; |
+ this.captureFormat = captureFormat; |
+ this.constructionTimeNs = constructionTimeNs; |
- start(); |
- } |
- |
- private void start() { |
- checkIsOnCameraThread(); |
- Logging.d(TAG, "start"); |
- |
- try { |
- cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId); |
- } catch (final CameraAccessException e) { |
- reportError("getCameraCharacteristics(): " + e.getMessage()); |
- } |
- cameraOrientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION); |
- isCameraFrontFacing = cameraCharacteristics.get(CameraCharacteristics.LENS_FACING) |
- == CameraMetadata.LENS_FACING_FRONT; |
- |
- findCaptureFormat(); |
- openCamera(); |
- } |
- |
- private void findCaptureFormat() { |
- checkIsOnCameraThread(); |
- |
- Range<Integer>[] fpsRanges = |
- cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES); |
- fpsUnitFactor = Camera2Enumerator.getFpsUnitFactor(fpsRanges); |
- List<CaptureFormat.FramerateRange> framerateRanges = |
- Camera2Enumerator.convertFramerates(fpsRanges, fpsUnitFactor); |
- List<Size> sizes = Camera2Enumerator.getSupportedSizes(cameraCharacteristics); |
- |
- if (framerateRanges.isEmpty() || sizes.isEmpty()) { |
- reportError("No supported capture formats."); |
- } |
- |
- final CaptureFormat.FramerateRange bestFpsRange = |
- CameraEnumerationAndroid.getClosestSupportedFramerateRange( |
- framerateRanges, framerate); |
- |
- final Size bestSize = CameraEnumerationAndroid.getClosestSupportedSize( |
- sizes, width, height); |
- |
- captureFormat = new CaptureFormat(bestSize.width, bestSize.height, bestFpsRange); |
- Logging.d(TAG, "Using capture format: " + captureFormat); |
- } |
- |
- private void openCamera() { |
- checkIsOnCameraThread(); |
- |
- Logging.d(TAG, "Opening camera " + cameraId); |
- events.onCameraOpening(); |
- |
- try { |
- cameraManager.openCamera(cameraId, new CameraStateCallback(), cameraThreadHandler); |
- } catch (CameraAccessException e) { |
- reportError("Failed to open camera: " + e); |
- } |
+ startCapturing(); |
} |
@Override |
public void stop() { |
final long stopStartTime = System.nanoTime(); |
- Logging.d(TAG, "Stop camera2 session on camera " + cameraId); |
+ Logging.d(TAG, "Stop camera1 session on camera " + cameraId); |
if (Thread.currentThread() == cameraThreadHandler.getLooper().getThread()) { |
if (state != SessionState.STOPPED) { |
state = SessionState.STOPPED; |
@@ -328,7 +192,7 @@ public class Camera2Session implements CameraSession { |
stopInternal(); |
final int stopTimeMs = |
(int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime); |
- camera2StopTimeMsHistogram.addSample(stopTimeMs); |
+ camera1StopTimeMsHistogram.addSample(stopTimeMs); |
} |
}); |
} |
@@ -344,7 +208,7 @@ public class Camera2Session implements CameraSession { |
stopInternal(); |
final int stopTimeMs = |
(int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime); |
- camera2StopTimeMsHistogram.addSample(stopTimeMs); |
+ camera1StopTimeMsHistogram.addSample(stopTimeMs); |
} |
} |
}); |
@@ -353,40 +217,121 @@ public class Camera2Session implements CameraSession { |
} |
} |
+ private void startCapturing() { |
+ Logging.d(TAG, "Start capturing"); |
+ checkIsOnCameraThread(); |
+ |
+ state = SessionState.RUNNING; |
+ |
+ camera.setErrorCallback(new android.hardware.Camera.ErrorCallback() { |
+ @Override |
+ public void onError(int error, android.hardware.Camera camera) { |
+ String errorMessage; |
+ if (error == android.hardware.Camera.CAMERA_ERROR_SERVER_DIED) { |
+ errorMessage = "Camera server died!"; |
+ } else { |
+ errorMessage = "Camera error: " + error; |
+ } |
+ Logging.e(TAG, errorMessage); |
+ events.onCameraError(Camera1Session.this, errorMessage); |
[review thread — Rietveld inline comment on the preceding line]
magjed_webrtc, 2016/09/19 14:32:16: Can we stop before notifying the callback, i.e. mo[…text truncated in extraction]
sakal, 2016/09/20 07:11:59: Done.
|
+ state = SessionState.STOPPED; |
+ stopInternal(); |
+ } |
+ }); |
+ |
+ if (captureToTexture) { |
+ listenForTextureFrames(); |
+ } else { |
+ listenForBytebufferFrames(); |
+ } |
+ try { |
+ camera.startPreview(); |
+ } catch (RuntimeException e) { |
+ events.onCameraError(this, e.getMessage()); |
+ state = SessionState.STOPPED; |
+ stopInternal(); |
+ } |
+ } |
+ |
private void stopInternal() { |
Logging.d(TAG, "Stop internal"); |
checkIsOnCameraThread(); |
surfaceTextureHelper.stopListening(); |
- if (captureSession != null) { |
- captureSession.close(); |
- captureSession = null; |
- } |
- if (surface != null) { |
- surface.release(); |
- surface = null; |
- } |
- if (cameraDevice != null) { |
- cameraDevice.close(); |
- cameraDevice = null; |
- } |
+ // Note: stopPreview or other driver code might deadlock. Deadlock in |
+ // android.hardware.Camera._stopPreview(Native Method) has been observed on |
+ // Nexus 5 (hammerhead), OS version LMY48I. |
+ camera.stopPreview(); |
+ camera.release(); |
+ events.onCameraClosed(this); |
Logging.d(TAG, "Stop done"); |
} |
- private void reportError(String error) { |
- checkIsOnCameraThread(); |
- Logging.e(TAG, "Error: " + error); |
+ private void listenForTextureFrames() { |
+ surfaceTextureHelper.startListening(new SurfaceTextureHelper.OnTextureFrameAvailableListener() { |
+ @Override |
+ public void onTextureFrameAvailable( |
+ int oesTextureId, float[] transformMatrix, long timestampNs) { |
+ checkIsOnCameraThread(); |
+ |
+ if (state != SessionState.RUNNING) { |
+ Logging.d(TAG, "Texture frame captured but camera is no longer running."); |
+ surfaceTextureHelper.returnTextureFrame(); |
+ return; |
+ } |
- final boolean startFailure = (captureSession == null); |
- state = SessionState.STOPPED; |
- stopInternal(); |
- if (startFailure) { |
- callback.onFailure(error); |
- } else { |
- events.onCameraError(this, error); |
- } |
+ if (!firstFrameReported) { |
+ final int startTimeMs = |
+ (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs); |
+ camera1StartTimeMsHistogram.addSample(startTimeMs); |
+ firstFrameReported = true; |
+ } |
+ |
+ int rotation = getFrameOrientation(); |
+ if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) { |
+ // Undo the mirror that the OS "helps" us with. |
+ // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int) |
+ transformMatrix = RendererCommon.multiplyMatrices( |
+ transformMatrix, RendererCommon.horizontalFlipMatrix()); |
+ } |
+ events.onTextureFrameCaptured(Camera1Session.this, captureFormat.width, |
+ captureFormat.height, oesTextureId, transformMatrix, rotation, timestampNs); |
+ } |
+ }); |
+ } |
+ |
+ private void listenForBytebufferFrames() { |
+ camera.setPreviewCallbackWithBuffer(new android.hardware.Camera.PreviewCallback() { |
+ @Override |
+ public void onPreviewFrame(byte[] data, android.hardware.Camera callbackCamera) { |
+ checkIsOnCameraThread(); |
+ |
+ if (callbackCamera != camera) { |
+ Logging.e(TAG, "Callback from a different camera. This should never happen."); |
+ return; |
+ } |
+ |
+ if (state != SessionState.RUNNING) { |
+ Logging.d(TAG, "Bytebuffer frame captured but camera is no longer running."); |
+ return; |
+ } |
+ |
+ final long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime()); |
+ |
+ if (!firstFrameReported) { |
+ final int startTimeMs = |
+ (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs); |
+ camera1StartTimeMsHistogram.addSample(startTimeMs); |
+ firstFrameReported = true; |
+ } |
+ |
+ events.onByteBufferFrameCaptured(Camera1Session.this, data, captureFormat.width, |
+ captureFormat.height, getFrameOrientation(), captureTimeNs); |
+ camera.addCallbackBuffer(data); |
+ } |
+ }); |
} |
private int getDeviceOrientation() { |
@@ -414,10 +359,10 @@ public class Camera2Session implements CameraSession { |
private int getFrameOrientation() { |
int rotation = getDeviceOrientation(); |
- if (!isCameraFrontFacing) { |
+ if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK) { |
rotation = 360 - rotation; |
} |
- return (cameraOrientation + rotation) % 360; |
+ return (info.orientation + rotation) % 360; |
} |
private void checkIsOnCameraThread() { |