| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. | 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| (...skipping 213 matching lines...) |
| 224 this.id = 0; | 224 this.id = 0; |
| 225 } else { | 225 } else { |
| 226 this.id = getCameraIndex(cameraName); | 226 this.id = getCameraIndex(cameraName); |
| 227 } | 227 } |
| 228 this.eventsHandler = eventsHandler; | 228 this.eventsHandler = eventsHandler; |
| 229 isCapturingToTexture = captureToTexture; | 229 isCapturingToTexture = captureToTexture; |
| 230 Logging.d(TAG, "VideoCapturerAndroid isCapturingToTexture : " + isCapturingToTexture); | 230 Logging.d(TAG, "VideoCapturerAndroid isCapturingToTexture : " + isCapturingToTexture); |
| 231 } | 231 } |
| 232 | 232 |
| 233 private void checkIsOnCameraThread() { | 233 private void checkIsOnCameraThread() { |
| 234 if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) { | 234 synchronized (handlerLock) { |
| 235 throw new IllegalStateException("Wrong thread"); | 235 if (cameraThreadHandler == null) { |
| 236 Logging.e(TAG, "Camera is stopped - can't check thread."); |
| 237 } else if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) { |
| 238 throw new IllegalStateException("Wrong thread"); |
| 239 } |
| 236 } | 240 } |
| 237 } | 241 } |
| 238 | 242 |
| 239 // Returns the camera index for camera with name |deviceName|, or throws IllegalArgumentException | 243 // Returns the camera index for camera with name |deviceName|, or throws IllegalArgumentException |
| 240 // if no such camera can be found. | 244 // if no such camera can be found. |
| 241 private static int getCameraIndex(String deviceName) { | 245 private static int getCameraIndex(String deviceName) { |
| 242 Logging.d(TAG, "getCameraIndex: " + deviceName); | 246 Logging.d(TAG, "getCameraIndex: " + deviceName); |
| 243 for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) { | 247 for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) { |
| 244 if (deviceName.equals(CameraEnumerationAndroid.getDeviceName(i))) { | 248 if (deviceName.equals(CameraEnumerationAndroid.getDeviceName(i))) { |
| 245 return i; | 249 return i; |
| (...skipping 59 matching lines...) |
| 305 if (eventsHandler != null) { | 309 if (eventsHandler != null) { |
| 306 eventsHandler.onCameraError("Could not post task to camera thread."); | 310 eventsHandler.onCameraError("Could not post task to camera thread."); |
| 307 } | 311 } |
| 308 } | 312 } |
| 309 } | 313 } |
| 310 } | 314 } |
| 311 | 315 |
| 312 private void startCaptureOnCameraThread( | 316 private void startCaptureOnCameraThread( |
| 313 final int width, final int height, final int framerate, final CapturerObserver frameObserver, | 317 final int width, final int height, final int framerate, final CapturerObserver frameObserver, |
| 314 final Context applicationContext) { | 318 final Context applicationContext) { |
| 315 Throwable error = null; | 319 synchronized (handlerLock) { |
| 316 checkIsOnCameraThread(); | 320 if (cameraThreadHandler == null) { |
| 321 Logging.e(TAG, "startCaptureOnCameraThread: Camera is stopped"); |
| 322 return; |
| 323 } else { |
| 324 checkIsOnCameraThread(); |
| 325 } |
| 326 } |
| 317 if (camera != null) { | 327 if (camera != null) { |
| 318 Logging.e(TAG, "startCaptureOnCameraThread: Camera has already been started."); | 328 Logging.e(TAG, "startCaptureOnCameraThread: Camera has already been started."); |
| 319 return; | 329 return; |
| 320 } | 330 } |
| 321 this.applicationContext = applicationContext; | 331 this.applicationContext = applicationContext; |
| 322 this.frameObserver = frameObserver; | 332 this.frameObserver = frameObserver; |
| 323 this.firstFrameReported = false; | 333 this.firstFrameReported = false; |
| 324 | 334 |
| 325 try { | 335 try { |
| 326 try { | 336 try { |
| (...skipping 14 matching lines...) |
| 341 @Override public void run() { | 351 @Override public void run() { |
| 342 startCaptureOnCameraThread(width, height, framerate, frameObserver, | 352 startCaptureOnCameraThread(width, height, framerate, frameObserver, |
| 343 applicationContext); | 353 applicationContext); |
| 344 } | 354 } |
| 345 }); | 355 }); |
| 346 return; | 356 return; |
| 347 } | 357 } |
| 348 throw e; | 358 throw e; |
| 349 } | 359 } |
| 350 | 360 |
| 351 try { | 361 camera.setPreviewTexture(surfaceHelper.getSurfaceTexture()); |
| 352 camera.setPreviewTexture(surfaceHelper.getSurfaceTexture()); | |
| 353 } catch (IOException e) { | |
| 354 Logging.e(TAG, "setPreviewTexture failed", error); | |
| 355 throw new RuntimeException(e); | |
| 356 } | |
| 357 | 362 |
| 358 Logging.d(TAG, "Camera orientation: " + info.orientation + | 363 Logging.d(TAG, "Camera orientation: " + info.orientation + |
| 359 " .Device orientation: " + getDeviceOrientation()); | 364 " .Device orientation: " + getDeviceOrientation()); |
| 360 camera.setErrorCallback(cameraErrorCallback); | 365 camera.setErrorCallback(cameraErrorCallback); |
| 361 startPreviewOnCameraThread(width, height, framerate); | 366 startPreviewOnCameraThread(width, height, framerate); |
| 362 frameObserver.onCapturerStarted(true); | 367 frameObserver.onCapturerStarted(true); |
| 363 if (isCapturingToTexture) { | 368 if (isCapturingToTexture) { |
| 364 surfaceHelper.startListening(this); | 369 surfaceHelper.startListening(this); |
| 365 } | 370 } |
| 366 | 371 |
| 367 // Start camera observer. | 372 // Start camera observer. |
| 368 cameraStatistics = new CameraStatistics(surfaceHelper, eventsHandler); | 373 cameraStatistics = new CameraStatistics(surfaceHelper, eventsHandler); |
| 369 return; | 374 } catch (IOException|RuntimeException e) { |
| 370 } catch (RuntimeException e) { | 375 Logging.e(TAG, "startCapture failed", e); |
| 371 error = e; | 376 // Make sure the camera is released. |
| 372 } | 377 stopCaptureOnCameraThread(true /* stopHandler */); |
| 373 Logging.e(TAG, "startCapture failed", error); | 378 frameObserver.onCapturerStarted(false); |
| 374 // Make sure the camera is released. | 379 if (eventsHandler != null) { |
| 375 stopCaptureOnCameraThread(true /* stopHandler */); | 380 eventsHandler.onCameraError("Camera can not be started."); |
| 376 frameObserver.onCapturerStarted(false); | 381 } |
| 377 if (eventsHandler != null) { | 382 } |
| 378 eventsHandler.onCameraError("Camera can not be started."); | |
| 379 } | |
| 380 return; | |
| 381 } | 383 } |
| 382 | 384 |
| 383 // (Re)start preview with the closest supported format to |width| x |height| @ |framerate|. | 385 // (Re)start preview with the closest supported format to |width| x |height| @ |framerate|. |
| 384 private void startPreviewOnCameraThread(int width, int height, int framerate) { | 386 private void startPreviewOnCameraThread(int width, int height, int framerate) { |
| 385 checkIsOnCameraThread(); | 387 synchronized (handlerLock) { |
| 388 if (cameraThreadHandler == null || camera == null) { |
| 389 Logging.e(TAG, "startPreviewOnCameraThread: Camera is stopped"); |
| 390 return; |
| 391 } else { |
| 392 checkIsOnCameraThread(); |
| 393 } |
| 394 } |
| 386 Logging.d( | 395 Logging.d( |
| 387 TAG, "startPreviewOnCameraThread requested: " + width + "x" + height + "@" + framerate); | 396 TAG, "startPreviewOnCameraThread requested: " + width + "x" + height + "@" + framerate); |
| 388 if (camera == null) { | |
| 389 Logging.e(TAG, "Calling startPreviewOnCameraThread on stopped camera."); | |
| 390 return; | |
| 391 } | |
| 392 | 397 |
| 393 requestedWidth = width; | 398 requestedWidth = width; |
| 394 requestedHeight = height; | 399 requestedHeight = height; |
| 395 requestedFramerate = framerate; | 400 requestedFramerate = framerate; |
| 396 | 401 |
| 397 // Find closest supported format for |width| x |height| @ |framerate|. | 402 // Find closest supported format for |width| x |height| @ |framerate|. |
| 398 final android.hardware.Camera.Parameters parameters = camera.getParameters(); | 403 final android.hardware.Camera.Parameters parameters = camera.getParameters(); |
| 399 final List<CaptureFormat.FramerateRange> supportedFramerates = | 404 final List<CaptureFormat.FramerateRange> supportedFramerates = |
| 400 CameraEnumerator.convertFramerates(parameters.getSupportedPreviewFpsRange()); | 405 CameraEnumerator.convertFramerates(parameters.getSupportedPreviewFpsRange()); |
| 401 Logging.d(TAG, "Available fps ranges: " + supportedFramerates); | 406 Logging.d(TAG, "Available fps ranges: " + supportedFramerates); |
| (...skipping 93 matching lines...) |
| 495 Logging.e(TAG, "Camera stop timeout"); | 500 Logging.e(TAG, "Camera stop timeout"); |
| 496 printStackTrace(); | 501 printStackTrace(); |
| 497 if (eventsHandler != null) { | 502 if (eventsHandler != null) { |
| 498 eventsHandler.onCameraError("Camera stop timeout"); | 503 eventsHandler.onCameraError("Camera stop timeout"); |
| 499 } | 504 } |
| 500 } | 505 } |
| 501 Logging.d(TAG, "stopCapture done"); | 506 Logging.d(TAG, "stopCapture done"); |
| 502 } | 507 } |
| 503 | 508 |
| 504 private void stopCaptureOnCameraThread(boolean stopHandler) { | 509 private void stopCaptureOnCameraThread(boolean stopHandler) { |
| 505 checkIsOnCameraThread(); | 510 synchronized (handlerLock) { |
| 511 if (cameraThreadHandler == null) { |
| 512 Logging.e(TAG, "stopCaptureOnCameraThread: Camera is stopped"); |
| 513 } else { |
| 514 checkIsOnCameraThread(); |
| 515 } |
| 516 } |
| 506 Logging.d(TAG, "stopCaptureOnCameraThread"); | 517 Logging.d(TAG, "stopCaptureOnCameraThread"); |
| 507 // Note that the camera might still not be started here if startCaptureOnCameraThread failed | 518 // Note that the camera might still not be started here if startCaptureOnCameraThread failed |
| 508 // and we posted a retry. | 519 // and we posted a retry. |
| 509 | 520 |
| 510 // Make sure onTextureFrameAvailable() is not called anymore. | 521 // Make sure onTextureFrameAvailable() is not called anymore. |
| 511 if (surfaceHelper != null) { | 522 if (surfaceHelper != null) { |
| 512 surfaceHelper.stopListening(); | 523 surfaceHelper.stopListening(); |
| 513 } | 524 } |
| 514 if (stopHandler) { | 525 if (stopHandler) { |
| 515 synchronized (handlerLock) { | 526 synchronized (handlerLock) { |
| 516 // Clear the cameraThreadHandler first, in case stopPreview or | 527 // Clear the cameraThreadHandler first, in case stopPreview or |
| 517 // other driver code deadlocks. Deadlock in | 528 // other driver code deadlocks. Deadlock in |
| 518 // android.hardware.Camera._stopPreview(Native Method) has | 529 // android.hardware.Camera._stopPreview(Native Method) has |
| 519 // been observed on Nexus 5 (hammerhead), OS version LMY48I. | 530 // been observed on Nexus 5 (hammerhead), OS version LMY48I. |
| 520 // The camera might post another one or two preview frames | 531 // The camera might post another one or two preview frames |
| 521 // before stopped, so we have to check for a null | 532 // before stopped, so we have to check for a null |
| 522 // cameraThreadHandler in our handler. Remove all pending | 533 // cameraThreadHandler in our handler. Remove all pending |
| 523 // Runnables posted from |this|. | 534 // Runnables posted from |this|. |
| 524 cameraThreadHandler.removeCallbacksAndMessages(this /* token */); | 535 if (cameraThreadHandler != null) { |
| 525 cameraThreadHandler = null; | 536 cameraThreadHandler.removeCallbacksAndMessages(this /* token */); |
| 537 cameraThreadHandler = null; |
| 538 } |
| 526 surfaceHelper = null; | 539 surfaceHelper = null; |
| 527 } | 540 } |
| 528 } | 541 } |
| 529 if (cameraStatistics != null) { | 542 if (cameraStatistics != null) { |
| 530 cameraStatistics.release(); | 543 cameraStatistics.release(); |
| 531 cameraStatistics = null; | 544 cameraStatistics = null; |
| 532 } | 545 } |
| 533 Logging.d(TAG, "Stop preview."); | 546 Logging.d(TAG, "Stop preview."); |
| 534 if (camera != null) { | 547 if (camera != null) { |
| 535 camera.stopPreview(); | 548 camera.stopPreview(); |
| 536 camera.setPreviewCallbackWithBuffer(null); | 549 camera.setPreviewCallbackWithBuffer(null); |
| 537 } | 550 } |
| 538 queuedBuffers.clear(); | 551 queuedBuffers.clear(); |
| 539 captureFormat = null; | 552 captureFormat = null; |
| 540 | 553 |
| 541 Logging.d(TAG, "Release camera."); | 554 Logging.d(TAG, "Release camera."); |
| 542 if (camera != null) { | 555 if (camera != null) { |
| 543 camera.release(); | 556 camera.release(); |
| 544 camera = null; | 557 camera = null; |
| 545 } | 558 } |
| 546 if (eventsHandler != null) { | 559 if (eventsHandler != null) { |
| 547 eventsHandler.onCameraClosed(); | 560 eventsHandler.onCameraClosed(); |
| 548 } | 561 } |
| 549 Logging.d(TAG, "stopCaptureOnCameraThread done"); | 562 Logging.d(TAG, "stopCaptureOnCameraThread done"); |
| 550 } | 563 } |
| 551 | 564 |
| 552 private void switchCameraOnCameraThread() { | 565 private void switchCameraOnCameraThread() { |
| 553 checkIsOnCameraThread(); | 566 synchronized (handlerLock) { |
| 567 if (cameraThreadHandler == null) { |
| 568 Logging.e(TAG, "switchCameraOnCameraThread: Camera is stopped"); |
| 569 return; |
| 570 } else { |
| 571 checkIsOnCameraThread(); |
| 572 } |
| 573 } |
| 554 Logging.d(TAG, "switchCameraOnCameraThread"); | 574 Logging.d(TAG, "switchCameraOnCameraThread"); |
| 555 stopCaptureOnCameraThread(false /* stopHandler */); | 575 stopCaptureOnCameraThread(false /* stopHandler */); |
| 556 synchronized (cameraIdLock) { | 576 synchronized (cameraIdLock) { |
| 557 id = (id + 1) % android.hardware.Camera.getNumberOfCameras(); | 577 id = (id + 1) % android.hardware.Camera.getNumberOfCameras(); |
| 558 } | 578 } |
| 559 startCaptureOnCameraThread(requestedWidth, requestedHeight, requestedFramerate, frameObserver, | 579 startCaptureOnCameraThread(requestedWidth, requestedHeight, requestedFramerate, frameObserver, |
| 560 applicationContext); | 580 applicationContext); |
| 561 Logging.d(TAG, "switchCameraOnCameraThread done"); | 581 Logging.d(TAG, "switchCameraOnCameraThread done"); |
| 562 } | 582 } |
| 563 | 583 |
| 564 private void onOutputFormatRequestOnCameraThread(int width, int height, int framerate) { | 584 private void onOutputFormatRequestOnCameraThread(int width, int height, int framerate) { |
| 565 checkIsOnCameraThread(); | 585 synchronized (handlerLock) { |
| 566 if (camera == null) { | 586 if (cameraThreadHandler == null || camera == null) { |
| 567 Logging.e(TAG, "Calling onOutputFormatRequest() on stopped camera."); | 587 Logging.e(TAG, "onOutputFormatRequestOnCameraThread: Camera is stopped"); |
| 568 return; | 588 return; |
| 589 } else { |
| 590 checkIsOnCameraThread(); |
| 591 } |
| 569 } | 592 } |
| 570 Logging.d(TAG, "onOutputFormatRequestOnCameraThread: " + width + "x" + height + | 593 Logging.d(TAG, "onOutputFormatRequestOnCameraThread: " + width + "x" + height + |
| 571 "@" + framerate); | 594 "@" + framerate); |
| 572 frameObserver.onOutputFormatRequest(width, height, framerate); | 595 frameObserver.onOutputFormatRequest(width, height, framerate); |
| 573 } | 596 } |
| 574 | 597 |
| 575 // Exposed for testing purposes only. | 598 // Exposed for testing purposes only. |
| 576 Handler getCameraThreadHandler() { | 599 Handler getCameraThreadHandler() { |
| 577 return cameraThreadHandler; | 600 return cameraThreadHandler; |
| 578 } | 601 } |
| (...skipping 25 matching lines...) |
| 604 int rotation = getDeviceOrientation(); | 627 int rotation = getDeviceOrientation(); |
| 605 if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK) { | 628 if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK) { |
| 606 rotation = 360 - rotation; | 629 rotation = 360 - rotation; |
| 607 } | 630 } |
| 608 return (info.orientation + rotation) % 360; | 631 return (info.orientation + rotation) % 360; |
| 609 } | 632 } |
| 610 | 633 |
| 611 // Called on cameraThread so must not "synchronized". | 634 // Called on cameraThread so must not "synchronized". |
| 612 @Override | 635 @Override |
| 613 public void onPreviewFrame(byte[] data, android.hardware.Camera callbackCamera) { | 636 public void onPreviewFrame(byte[] data, android.hardware.Camera callbackCamera) { |
| 614 if (cameraThreadHandler == null) { | 637 synchronized (handlerLock) { |
| 615 // The camera has been stopped. | 638 if (cameraThreadHandler == null) { |
| 616 return; | 639 Logging.e(TAG, "onPreviewFrame: Camera is stopped"); |
| 640 return; |
| 641 } else { |
| 642 checkIsOnCameraThread(); |
| 643 } |
| 617 } | 644 } |
| 618 checkIsOnCameraThread(); | |
| 619 if (!queuedBuffers.contains(data)) { | 645 if (!queuedBuffers.contains(data)) { |
| 620 // |data| is an old invalid buffer. | 646 // |data| is an old invalid buffer. |
| 621 return; | 647 return; |
| 622 } | 648 } |
| 623 if (camera != callbackCamera) { | 649 if (camera != callbackCamera) { |
| 624 throw new RuntimeException("Unexpected camera in callback!"); | 650 throw new RuntimeException("Unexpected camera in callback!"); |
| 625 } | 651 } |
| 626 | 652 |
| 627 final long captureTimeNs = | 653 final long captureTimeNs = |
| 628 TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime()); | 654 TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime()); |
| 629 | 655 |
| 630 if (eventsHandler != null && !firstFrameReported) { | 656 if (eventsHandler != null && !firstFrameReported) { |
| 631 eventsHandler.onFirstFrameAvailable(); | 657 eventsHandler.onFirstFrameAvailable(); |
| 632 firstFrameReported = true; | 658 firstFrameReported = true; |
| 633 } | 659 } |
| 634 | 660 |
| 635 cameraStatistics.addFrame(); | 661 cameraStatistics.addFrame(); |
| 636 frameObserver.onByteBufferFrameCaptured(data, captureFormat.width, captureFormat.height, | 662 frameObserver.onByteBufferFrameCaptured(data, captureFormat.width, captureFormat.height, |
| 637 getFrameOrientation(), captureTimeNs); | 663 getFrameOrientation(), captureTimeNs); |
| 638 camera.addCallbackBuffer(data); | 664 camera.addCallbackBuffer(data); |
| 639 } | 665 } |
| 640 | 666 |
| 641 @Override | 667 @Override |
| 642 public void onTextureFrameAvailable( | 668 public void onTextureFrameAvailable( |
| 643 int oesTextureId, float[] transformMatrix, long timestampNs) { | 669 int oesTextureId, float[] transformMatrix, long timestampNs) { |
| 644 | 670 synchronized (handlerLock) { |
| 645 checkIsOnCameraThread(); | 671 if (cameraThreadHandler == null) { |
| 672 Logging.e(TAG, "onTextureFrameAvailable: Camera is stopped"); |
| 673 surfaceHelper.returnTextureFrame(); |
| 674 return; |
| 675 } else { |
| 676 checkIsOnCameraThread(); |
| 677 } |
| 678 } |
| 646 if (eventsHandler != null && !firstFrameReported) { | 679 if (eventsHandler != null && !firstFrameReported) { |
| 647 eventsHandler.onFirstFrameAvailable(); | 680 eventsHandler.onFirstFrameAvailable(); |
| 648 firstFrameReported = true; | 681 firstFrameReported = true; |
| 649 } | 682 } |
| 650 | 683 |
| 651 int rotation = getFrameOrientation(); | 684 int rotation = getFrameOrientation(); |
| 652 if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) { | 685 if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) { |
| 653 // Undo the mirror that the OS "helps" us with. | 686 // Undo the mirror that the OS "helps" us with. |
| 654 // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int) | 687 // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int) |
| 655 transformMatrix = | 688 transformMatrix = |
| 656 RendererCommon.multiplyMatrices(transformMatrix, RendererCommon.horizontalFlipMatrix()); | 689 RendererCommon.multiplyMatrices(transformMatrix, RendererCommon.horizontalFlipMatrix()); |
| 657 } | 690 } |
| 658 cameraStatistics.addFrame(); | 691 cameraStatistics.addFrame(); |
| 659 frameObserver.onTextureFrameCaptured(captureFormat.width, captureFormat.height, oesTextureId, | 692 frameObserver.onTextureFrameCaptured(captureFormat.width, captureFormat.height, oesTextureId, |
| 660 transformMatrix, rotation, timestampNs); | 693 transformMatrix, rotation, timestampNs); |
| 661 } | 694 } |
| 662 } | 695 } |
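
For reference, the new code in the right-hand column repeats one guard at every camera-thread entry point: take handlerLock, bail out if cameraThreadHandler has already been cleared by stopCaptureOnCameraThread, otherwise assert the calling thread. The following is a condensed sketch of that pattern only, not the patch itself; the class and helper names (CameraThreadGuardSketch, isOnRunningCameraThread) are hypothetical, and only the fields visible in the diff (handlerLock, cameraThreadHandler, TAG) are assumed.

import android.os.Handler;
import org.webrtc.Logging;

// Sketch of the stop-safe thread check applied throughout the patch above.
class CameraThreadGuardSketch {
  private static final String TAG = "VideoCapturerAndroid";
  private final Object handlerLock = new Object();
  private Handler cameraThreadHandler;  // Cleared (set to null) once capture stops.

  // Hypothetical helper: returns false when capture has already stopped, so a
  // late callback or posted Runnable can simply return instead of touching
  // released camera state; otherwise enforces the camera-thread invariant.
  private boolean isOnRunningCameraThread(String caller) {
    synchronized (handlerLock) {
      if (cameraThreadHandler == null) {
        Logging.e(TAG, caller + ": Camera is stopped");
        return false;
      }
      if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
        throw new IllegalStateException("Wrong thread");
      }
      return true;
    }
  }
}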