Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 /* | 1 /* |
| 2 * Copyright 2016 The WebRTC project authors. All Rights Reserved. | 2 * Copyright 2016 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| (...skipping 28 matching lines...) | |
| 39 private static final String TAG = "Camera2Session"; | 39 private static final String TAG = "Camera2Session"; |
| 40 | 40 |
| 41 private static final Histogram camera2StartTimeMsHistogram = | 41 private static final Histogram camera2StartTimeMsHistogram = |
| 42 Histogram.createCounts("WebRTC.Android.Camera2.StartTimeMs", 1, 10000, 50); | 42 Histogram.createCounts("WebRTC.Android.Camera2.StartTimeMs", 1, 10000, 50); |
| 43 private static final Histogram camera2StopTimeMsHistogram = | 43 private static final Histogram camera2StopTimeMsHistogram = |
| 44 Histogram.createCounts("WebRTC.Android.Camera2.StopTimeMs", 1, 10000, 50); | 44 Histogram.createCounts("WebRTC.Android.Camera2.StopTimeMs", 1, 10000, 50); |
| 45 | 45 |
| 46 private static enum SessionState { RUNNING, STOPPED }; | 46 private static enum SessionState { RUNNING, STOPPED }; |
| 47 | 47 |
| 48 private final Handler cameraThreadHandler; | 48 private final Handler cameraThreadHandler; |
| 49 private final CreateSessionCallback callback; | |
| 50 private final Events events; | |
| 51 private final Context applicationContext; | |
| 49 private final CameraManager cameraManager; | 52 private final CameraManager cameraManager; |
| 50 private final CreateSessionCallback callback; | |
| 51 private final CameraVideoCapturer.CameraEventsHandler eventsHandler; | |
| 52 private final Context applicationContext; | |
| 53 private final CameraVideoCapturer.CapturerObserver capturerObserver; | |
| 54 private final SurfaceTextureHelper surfaceTextureHelper; | 53 private final SurfaceTextureHelper surfaceTextureHelper; |
| 55 private final String cameraId; | 54 private final String cameraId; |
| 56 private final int width; | 55 private final int width; |
| 57 private final int height; | 56 private final int height; |
| 58 private final int framerate; | 57 private final int framerate; |
| 59 | 58 |
| 60 // Initialized at start | 59 // Initialized at start |
| 61 private CameraCharacteristics cameraCharacteristics; | 60 private CameraCharacteristics cameraCharacteristics; |
| 62 private int cameraOrientation; | 61 private int cameraOrientation; |
| 63 private boolean isCameraFrontFacing; | 62 private boolean isCameraFrontFacing; |
| 64 private int fpsUnitFactor; | 63 private int fpsUnitFactor; |
| 65 private CaptureFormat captureFormat; | 64 private CaptureFormat captureFormat; |
| 66 | 65 |
| 67 // Initialized when camera opens | 66 // Initialized when camera opens |
| 68 private CameraDevice cameraDevice; | 67 private CameraDevice cameraDevice; |
| 69 private Surface surface; | 68 private Surface surface; |
| 70 | 69 |
| 71 // Initialized when capture session is created | 70 // Initialized when capture session is created |
| 72 private CameraCaptureSession captureSession; | 71 private CameraCaptureSession captureSession; |
| 73 private CameraVideoCapturer.CameraStatistics cameraStatistics; | |
| 74 | 72 |
| 75 // State | 73 // State |
| 76 private SessionState state = SessionState.RUNNING; | 74 private SessionState state = SessionState.RUNNING; |
| 77 private boolean firstFrameReported = false; | 75 private boolean firstFrameReported = false; |
| 78 | 76 |
| 79 // Used only for stats. Only used on the camera thread. | 77 // Used only for stats. Only used on the camera thread. |
| 80 private final long constructionTimeNs; // Construction time of this class. | 78 private final long constructionTimeNs; // Construction time of this class. |
| 81 | 79 |
| 82 private class CameraStateCallback extends CameraDevice.StateCallback { | 80 private class CameraStateCallback extends CameraDevice.StateCallback { |
| 83 private String getErrorDescription(int errorCode) { | 81 private String getErrorDescription(int errorCode) { |
| (...skipping 43 matching lines...) | |
| 127 reportError("Failed to create capture session. " + e); | 125 reportError("Failed to create capture session. " + e); |
| 128 return; | 126 return; |
| 129 } | 127 } |
| 130 } | 128 } |
| 131 | 129 |
| 132 @Override | 130 @Override |
| 133 public void onClosed(CameraDevice camera) { | 131 public void onClosed(CameraDevice camera) { |
| 134 checkIsOnCameraThread(); | 132 checkIsOnCameraThread(); |
| 135 | 133 |
| 136 Logging.d(TAG, "Camera device closed."); | 134 Logging.d(TAG, "Camera device closed."); |
| 137 eventsHandler.onCameraClosed(); | 135 events.onCameraClosed(Camera2Session.this); |
| 138 } | 136 } |
| 139 } | 137 } |
| 140 | 138 |
| 141 private class CaptureSessionCallback extends CameraCaptureSession.StateCallback { | 139 private class CaptureSessionCallback extends CameraCaptureSession.StateCallback { |
| 142 @Override | 140 @Override |
| 143 public void onConfigureFailed(CameraCaptureSession session) { | 141 public void onConfigureFailed(CameraCaptureSession session) { |
| 144 checkIsOnCameraThread(); | 142 checkIsOnCameraThread(); |
| 145 session.close(); | 143 session.close(); |
| 146 reportError("Failed to configure capture session."); | 144 reportError("Failed to configure capture session."); |
| 147 } | 145 } |
| (...skipping 36 matching lines...) | |
| 184 int oesTextureId, float[] transformMatrix, long timestampNs) { | 182 int oesTextureId, float[] transformMatrix, long timestampNs) { |
| 185 checkIsOnCameraThread(); | 183 checkIsOnCameraThread(); |
| 186 | 184 |
| 187 if (state != SessionState.RUNNING) { | 185 if (state != SessionState.RUNNING) { |
| 188 Logging.d(TAG, "Texture frame captured but camera is no longer running."); | 186 Logging.d(TAG, "Texture frame captured but camera is no longer running."); |
| 189 surfaceTextureHelper.returnTextureFrame(); | 187 surfaceTextureHelper.returnTextureFrame(); |
| 190 return; | 188 return; |
| 191 } | 189 } |
| 192 | 190 |
| 193 if (!firstFrameReported) { | 191 if (!firstFrameReported) { |
| 194 eventsHandler.onFirstFrameAvailable(); | |
| 195 firstFrameReported = true; | 192 firstFrameReported = true; |
| 196 final int startTimeMs = | 193 final int startTimeMs = |
| 197 (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs); | 194 (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs); |
| 198 camera2StartTimeMsHistogram.addSample(startTimeMs); | 195 camera2StartTimeMsHistogram.addSample(startTimeMs); |
| 199 } | 196 } |
| 200 | 197 |
| 201 int rotation = getFrameOrientation(); | 198 int rotation = getFrameOrientation(); |
| 202 if (isCameraFrontFacing) { | 199 if (isCameraFrontFacing) { |
| 203 // Undo the mirror that the OS "helps" us with. | 200 // Undo the mirror that the OS "helps" us with. |
| 204 // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int) | 201 // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int) |
| 205 transformMatrix = RendererCommon.multiplyMatrices( | 202 transformMatrix = RendererCommon.multiplyMatrices( |
| 206 transformMatrix, RendererCommon.horizontalFlipMatrix()); | 203 transformMatrix, RendererCommon.horizontalFlipMatrix()); |
| 207 } | 204 } |
| 208 | 205 |
| 209 // Undo camera orientation - we report it as rotation instead. | 206 // Undo camera orientation - we report it as rotation instead. |
| 210 transformMatrix = RendererCommon.rotateTextureMatrix( | 207 transformMatrix = RendererCommon.rotateTextureMatrix( |
| 211 transformMatrix, -cameraOrientation); | 208 transformMatrix, -cameraOrientation); |
| 212 | 209 |
| 213 cameraStatistics.addFrame(); | 210 events.onTextureFrameCaptured(Camera2Session.this, captureFormat.width, |
| 214 capturerObserver.onTextureFrameCaptured(captureFormat.width, captureFormat.height, | 211 captureFormat.height, oesTextureId, transformMatrix, rotation, timestampNs); |
| 215 oesTextureId, transformMatrix, rotation, timestampNs); | |
| 216 } | 212 } |
| 217 }); | 213 }); |
| 218 capturerObserver.onCapturerStarted(true /* success */); | |
| 219 cameraStatistics = new CameraVideoCapturer.CameraStatistics( | |
| 220 surfaceTextureHelper, eventsHandler); | |
| 221 Logging.d(TAG, "Camera device successfully started."); | 214 Logging.d(TAG, "Camera device successfully started."); |
| 222 callback.onDone(Camera2Session.this); | 215 callback.onDone(Camera2Session.this); |
| 223 } | 216 } |
| 224 } | 217 } |
| 225 | 218 |
| 226 private class CameraCaptureCallback extends CameraCaptureSession.CaptureCallback { | 219 private class CameraCaptureCallback extends CameraCaptureSession.CaptureCallback { |
| 227 @Override | 220 @Override |
| 228 public void onCaptureFailed( | 221 public void onCaptureFailed( |
| 229 CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) { | 222 CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) { |
| 230 Logging.d(TAG, "Capture failed: " + failure); | 223 Logging.d(TAG, "Capture failed: " + failure); |
| 231 } | 224 } |
| 232 } | 225 } |
| 233 | 226 |
| 234 public static void create( | 227 public static void create( |
| 235 CameraManager cameraManager, CreateSessionCallback callback, | 228 CreateSessionCallback callback, Events events, |
| 236 CameraVideoCapturer.CameraEventsHandler eventsHandler, Context applicationContext, | 229 Context applicationContext, CameraManager cameraManager, |
| 237 CameraVideoCapturer.CapturerObserver capturerObserver, | |
| 238 SurfaceTextureHelper surfaceTextureHelper, | 230 SurfaceTextureHelper surfaceTextureHelper, |
| 239 String cameraId, int width, int height, int framerate) { | 231 String cameraId, int width, int height, int framerate) { |
| 240 new Camera2Session( | 232 new Camera2Session( |
| 241 cameraManager, callback, | 233 callback, events, |
| 242 eventsHandler, applicationContext, | 234 applicationContext, cameraManager, |
| 243 capturerObserver, | |
| 244 surfaceTextureHelper, | 235 surfaceTextureHelper, |
| 245 cameraId, width, height, framerate); | 236 cameraId, width, height, framerate); |
| 246 } | 237 } |
| 247 | 238 |
| 248 private Camera2Session( | 239 private Camera2Session( |
| 249 CameraManager cameraManager, CreateSessionCallback callback, | 240 CreateSessionCallback callback, Events events, |
| 250 CameraVideoCapturer.CameraEventsHandler eventsHandler, Context applicationContext, | 241 Context applicationContext, CameraManager cameraManager, |
| 251 CameraVideoCapturer.CapturerObserver capturerObserver, | |
| 252 SurfaceTextureHelper surfaceTextureHelper, | 242 SurfaceTextureHelper surfaceTextureHelper, |
| 253 String cameraId, int width, int height, int framerate) { | 243 String cameraId, int width, int height, int framerate) { |
| 254 Logging.d(TAG, "Create new camera2 session on camera " + cameraId); | 244 Logging.d(TAG, "Create new camera2 session on camera " + cameraId); |
| 255 | 245 |
| 256 constructionTimeNs = System.nanoTime(); | 246 constructionTimeNs = System.nanoTime(); |
| 257 | 247 |
| 258 this.cameraThreadHandler = new Handler(); | 248 this.cameraThreadHandler = new Handler(); |
| 249 this.callback = callback; | |
| 250 this.events = events; | |
| 251 this.applicationContext = applicationContext; | |
| 259 this.cameraManager = cameraManager; | 252 this.cameraManager = cameraManager; |
| 260 this.callback = callback; | |
| 261 this.eventsHandler = eventsHandler; | |
| 262 this.applicationContext = applicationContext; | |
| 263 this.capturerObserver = capturerObserver; | |
| 264 this.surfaceTextureHelper = surfaceTextureHelper; | 253 this.surfaceTextureHelper = surfaceTextureHelper; |
| 265 this.cameraId = cameraId; | 254 this.cameraId = cameraId; |
| 266 this.width = width; | 255 this.width = width; |
| 267 this.height = height; | 256 this.height = height; |
| 268 this.framerate = framerate; | 257 this.framerate = framerate; |
| 269 | 258 |
| 270 start(); | 259 start(); |
| 271 } | 260 } |
| 272 | 261 |
| 273 private void start() { | 262 private void start() { |
| (...skipping 35 matching lines...) | |
| 309 sizes, width, height); | 298 sizes, width, height); |
| 310 | 299 |
| 311 captureFormat = new CaptureFormat(bestSize.width, bestSize.height, bestFpsRange); | 300 captureFormat = new CaptureFormat(bestSize.width, bestSize.height, bestFpsRange); |
| 312 Logging.d(TAG, "Using capture format: " + captureFormat); | 301 Logging.d(TAG, "Using capture format: " + captureFormat); |
| 313 } | 302 } |
| 314 | 303 |
| 315 private void openCamera() { | 304 private void openCamera() { |
| 316 checkIsOnCameraThread(); | 305 checkIsOnCameraThread(); |
| 317 | 306 |
| 318 Logging.d(TAG, "Opening camera " + cameraId); | 307 Logging.d(TAG, "Opening camera " + cameraId); |
| 319 eventsHandler.onCameraOpening(cameraId); | 308 events.onCameraOpening(this); |
| 320 | 309 |
| 321 try { | 310 try { |
| 322 cameraManager.openCamera(cameraId, new CameraStateCallback(), cameraThreadHandler); | 311 cameraManager.openCamera(cameraId, new CameraStateCallback(), cameraThreadHandler); |
| 323 } catch (CameraAccessException e) { | 312 } catch (CameraAccessException e) { |
| 324 reportError("Failed to open camera: " + e); | 313 reportError("Failed to open camera: " + e); |
| 325 } | 314 } |
| 326 } | 315 } |
| 327 | 316 |
| 328 @Override | 317 @Override |
| 329 public void stop() { | 318 public void stop() { |
| 330 final long stopStartTime = System.nanoTime(); | 319 final long stopStartTime = System.nanoTime(); |
| 331 Logging.d(TAG, "Stop camera2 session on camera " + cameraId); | 320 Logging.d(TAG, "Stop camera2 session on camera " + cameraId); |
| 332 if (Thread.currentThread() == cameraThreadHandler.getLooper().getThread()) { | 321 if (Thread.currentThread() == cameraThreadHandler.getLooper().getThread()) { |
| 333 if (state != SessionState.STOPPED) { | 322 if (state != SessionState.STOPPED) { |
| 334 state = SessionState.STOPPED; | 323 state = SessionState.STOPPED; |
| 335 capturerObserver.onCapturerStopped(); | |
| 336 // Post the stopInternal to return earlier. | 324 // Post the stopInternal to return earlier. |
| 337 cameraThreadHandler.post(new Runnable() { | 325 cameraThreadHandler.post(new Runnable() { |
| 338 @Override | 326 @Override |
| 339 public void run() { | 327 public void run() { |
| 340 stopInternal(); | 328 stopInternal(); |
| 341 final int stopTimeMs = | 329 final int stopTimeMs = |
| 342 (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime); | 330 (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime); |
| 343 camera2StopTimeMsHistogram.addSample(stopTimeMs); | 331 camera2StopTimeMsHistogram.addSample(stopTimeMs); |
| 344 } | 332 } |
| 345 }); | 333 }); |
| 346 } | 334 } |
| 347 } else { | 335 } else { |
| 348 final CountDownLatch stopLatch = new CountDownLatch(1); | 336 final CountDownLatch stopLatch = new CountDownLatch(1); |
| 349 | 337 |
| 350 cameraThreadHandler.post(new Runnable() { | 338 cameraThreadHandler.post(new Runnable() { |
| 351 @Override | 339 @Override |
| 352 public void run() { | 340 public void run() { |
| 353 if (state != SessionState.STOPPED) { | 341 if (state != SessionState.STOPPED) { |
| 354 state = SessionState.STOPPED; | 342 state = SessionState.STOPPED; |
| 355 capturerObserver.onCapturerStopped(); | |
| 356 stopLatch.countDown(); | 343 stopLatch.countDown(); |
| 357 stopInternal(); | 344 stopInternal(); |
| 358 final int stopTimeMs = | 345 final int stopTimeMs = |
| 359 (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime); | 346 (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime); |
| 360 camera2StopTimeMsHistogram.addSample(stopTimeMs); | 347 camera2StopTimeMsHistogram.addSample(stopTimeMs); |
| 361 } | 348 } |
| 362 } | 349 } |
| 363 }); | 350 }); |
| 364 | 351 |
| 365 ThreadUtils.awaitUninterruptibly(stopLatch); | 352 ThreadUtils.awaitUninterruptibly(stopLatch); |
| 366 } | 353 } |
| 367 } | 354 } |
| 368 | 355 |
| 369 private void stopInternal() { | 356 private void stopInternal() { |
| 370 Logging.d(TAG, "Stop internal"); | 357 Logging.d(TAG, "Stop internal"); |
| 371 checkIsOnCameraThread(); | 358 checkIsOnCameraThread(); |
| 372 | 359 |
| 373 surfaceTextureHelper.stopListening(); | 360 surfaceTextureHelper.stopListening(); |
| 374 cameraStatistics.release(); | |
| 375 | 361 |
| 376 captureSession.close(); | 362 captureSession.close(); |
| 377 captureSession = null; | 363 captureSession = null; |
| 378 surface.release(); | 364 surface.release(); |
| 379 surface = null; | 365 surface = null; |
| 380 cameraDevice.close(); | 366 cameraDevice.close(); |
| 381 cameraDevice = null; | 367 cameraDevice = null; |
| 382 | 368 |
| 383 Logging.d(TAG, "Stop done"); | 369 Logging.d(TAG, "Stop done"); |
| 384 } | 370 } |
| 385 | 371 |
| 386 private void reportError(String error) { | 372 private void reportError(String error) { |
| 387 checkIsOnCameraThread(); | 373 checkIsOnCameraThread(); |
| 388 Logging.e(TAG, "Error: " + error); | 374 Logging.e(TAG, "Error: " + error); |
| 389 | 375 |
| 390 if (captureSession == null) { | 376 if (captureSession == null) { |
| 391 if (cameraDevice != null) { | 377 if (cameraDevice != null) { |
| 392 cameraDevice.close(); | 378 cameraDevice.close(); |
| 393 cameraDevice = null; | 379 cameraDevice = null; |
| 394 } | 380 } |
| 395 | 381 |
| 396 state = SessionState.STOPPED; | 382 state = SessionState.STOPPED; |
| 397 callback.onFailure(error); | 383 callback.onFailure(error); |
| 398 capturerObserver.onCapturerStarted(false /* success */); | |
| 399 } else { | 384 } else { |
| 400 eventsHandler.onCameraError(error); | 385 stop(); |

> magjed_webrtc 2016/09/16 12:59:19: I would like to flatten the number of function ind
> sakal 2016/09/16 13:25:16: Done.

| 386 events.onCameraError(this, error); | |
| 401 } | 387 } |
| 402 } | 388 } |
| 403 | 389 |
| 404 private int getDeviceOrientation() { | 390 private int getDeviceOrientation() { |
| 405 int orientation = 0; | 391 int orientation = 0; |
| 406 | 392 |
| 407 WindowManager wm = (WindowManager) applicationContext.getSystemService( | 393 WindowManager wm = (WindowManager) applicationContext.getSystemService( |
| 408 Context.WINDOW_SERVICE); | 394 Context.WINDOW_SERVICE); |
| 409 switch(wm.getDefaultDisplay().getRotation()) { | 395 switch(wm.getDefaultDisplay().getRotation()) { |
| 410 case Surface.ROTATION_90: | 396 case Surface.ROTATION_90: |
| (...skipping 20 matching lines...) | |
| 431 } | 417 } |
| 432 return (cameraOrientation + rotation) % 360; | 418 return (cameraOrientation + rotation) % 360; |
| 433 } | 419 } |
| 434 | 420 |
| 435 private void checkIsOnCameraThread() { | 421 private void checkIsOnCameraThread() { |
| 436 if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) { | 422 if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) { |
| 437 throw new IllegalStateException("Wrong thread"); | 423 throw new IllegalStateException("Wrong thread"); |
| 438 } | 424 } |
| 439 } | 425 } |
| 440 } | 426 } |
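
The NEW column replaces the session's direct use of `CameraVideoCapturer.CameraEventsHandler` and `CameraVideoCapturer.CapturerObserver` with two narrower callbacks, `CreateSessionCallback` and `Events`. Their definitions are not part of this file; the sketch below is inferred only from the call sites visible in the new code (`callback.onDone`/`onFailure`, `events.onCameraOpening`/`onCameraClosed`/`onCameraError`/`onTextureFrameCaptured`, and the `@Override` on `stop()`), so the enclosing `CameraSession` type, the nesting, and the exact signatures are assumptions rather than the reviewed definitions.

```java
// Sketch only: inferred from the call sites in the diff above, not copied from the
// actual CameraSession.java in this CL. Names and nesting are assumptions.
interface CameraSession {
  // Reports the outcome of Camera2Session.create(); invoked once on the camera thread.
  interface CreateSessionCallback {
    void onDone(CameraSession session);  // camera opened and capture session configured
    void onFailure(String error);        // creation failed before the session became usable
  }

  // Lifetime events of an already-created session, forwarded to the owning capturer.
  interface Events {
    void onCameraOpening(CameraSession session);
    void onCameraError(CameraSession session, String error);
    void onCameraClosed(CameraSession session);
    void onTextureFrameCaptured(CameraSession session, int width, int height,
        int oesTextureId, float[] transformMatrix, int rotation, long timestampNs);
  }

  // Camera2Session.stop() carries @Override, so the session interface presumably declares it.
  void stop();
}
```

Under this split, the plumbing deleted from the session in this CL — `capturerObserver.onCapturerStarted()`/`onCapturerStopped()`, `eventsHandler.onFirstFrameAvailable()`, and ownership of `CameraVideoCapturer.CameraStatistics` — presumably moves to whichever class implements `Events` (for example, the capturer that owns the session). A hypothetical caller would now construct a session with `Camera2Session.create(callback, events, applicationContext, cameraManager, surfaceTextureHelper, cameraId, width, height, framerate)`, matching the reordered parameter list in the new `create()`.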