OLD | NEW |
1 /* | 1 /* |
2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. | 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
11 package org.webrtc; | 11 package org.webrtc; |
12 | 12 |
13 import android.content.Context; | 13 import android.content.Context; |
| 14 import android.graphics.SurfaceTexture; |
| 15 import android.opengl.GLES11Ext; |
| 16 import android.opengl.GLES20; |
14 import android.os.Handler; | 17 import android.os.Handler; |
| 18 import android.os.HandlerThread; |
15 import android.os.SystemClock; | 19 import android.os.SystemClock; |
16 import android.view.Surface; | 20 import android.view.Surface; |
17 import android.view.WindowManager; | 21 import android.view.WindowManager; |
18 | 22 |
19 import org.webrtc.CameraEnumerationAndroid.CaptureFormat; | 23 import org.webrtc.CameraEnumerationAndroid.CaptureFormat; |
20 import org.webrtc.Logging; | 24 import org.webrtc.Logging; |
21 | 25 |
22 import java.io.IOException; | 26 import java.io.IOException; |
23 import java.nio.ByteBuffer; | 27 import java.nio.ByteBuffer; |
24 import java.util.HashSet; | 28 import java.util.HashSet; |
(...skipping 45 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
70 private final Object pendingCameraSwitchLock = new Object(); | 74 private final Object pendingCameraSwitchLock = new Object(); |
71 private volatile boolean pendingCameraSwitch; | 75 private volatile boolean pendingCameraSwitch; |
72 private CapturerObserver frameObserver = null; | 76 private CapturerObserver frameObserver = null; |
73 private final CameraEventsHandler eventsHandler; | 77 private final CameraEventsHandler eventsHandler; |
74 private boolean firstFrameReported; | 78 private boolean firstFrameReported; |
75 // Arbitrary queue depth. Higher number means more memory allocated & held, | 79 // Arbitrary queue depth. Higher number means more memory allocated & held, |
76 // lower number means more sensitivity to processing time in the client (and | 80 // lower number means more sensitivity to processing time in the client (and |
77 // potentially stalling the capturer if it runs out of buffers to write to). | 81 // potentially stalling the capturer if it runs out of buffers to write to). |
78 private static final int NUMBER_OF_CAPTURE_BUFFERS = 3; | 82 private static final int NUMBER_OF_CAPTURE_BUFFERS = 3; |
79 private final Set<byte[]> queuedBuffers = new HashSet<byte[]>(); | 83 private final Set<byte[]> queuedBuffers = new HashSet<byte[]>(); |
80 private final boolean isCapturingToTexture; | 84 private boolean isCapturingToTexture; |
81 private SurfaceTextureHelper surfaceHelper; | 85 private SurfaceTextureHelper surfaceHelper; |
| 86 // These dummy variables are used in case |surfaceHelper| is null. |
| 87 private int dummyTextureId; |
| 88 private SurfaceTexture dummySurfaceTexture; |
82 // The camera API can output one old frame after the camera has been switched
or the resolution | 89 // The camera API can output one old frame after the camera has been switched
or the resolution |
83 // has been changed. This flag is used for dropping the first frame after came
ra restart. | 90 // has been changed. This flag is used for dropping the first frame after came
ra restart. |
84 private boolean dropNextFrame = false; | 91 private boolean dropNextFrame = false; |
85 private final static int MAX_OPEN_CAMERA_ATTEMPTS = 3; | 92 private final static int MAX_OPEN_CAMERA_ATTEMPTS = 3; |
86 private final static int OPEN_CAMERA_DELAY_MS = 500; | 93 private final static int OPEN_CAMERA_DELAY_MS = 500; |
87 private int openCameraAttempts; | 94 private int openCameraAttempts; |
88 | 95 |
89 // Camera error callback. | 96 // Camera error callback. |
90 private final android.hardware.Camera.ErrorCallback cameraErrorCallback = | 97 private final android.hardware.Camera.ErrorCallback cameraErrorCallback = |
91 new android.hardware.Camera.ErrorCallback() { | 98 new android.hardware.Camera.ErrorCallback() { |
(...skipping 20 matching lines...) Expand all Loading... |
112 int cameraFramesCount = cameraStatistics.getAndResetFrameCount(); | 119 int cameraFramesCount = cameraStatistics.getAndResetFrameCount(); |
113 int cameraFps = (cameraFramesCount * 1000 + CAMERA_OBSERVER_PERIOD_MS / 2) | 120 int cameraFps = (cameraFramesCount * 1000 + CAMERA_OBSERVER_PERIOD_MS / 2) |
114 / CAMERA_OBSERVER_PERIOD_MS; | 121 / CAMERA_OBSERVER_PERIOD_MS; |
115 | 122 |
116 Logging.d(TAG, "Camera fps: " + cameraFps +"."); | 123 Logging.d(TAG, "Camera fps: " + cameraFps +"."); |
117 if (cameraFramesCount == 0) { | 124 if (cameraFramesCount == 0) { |
118 ++freezePeriodCount; | 125 ++freezePeriodCount; |
119 if (CAMERA_OBSERVER_PERIOD_MS * freezePeriodCount >= CAMERA_FREEZE_REPOR
T_TIMOUT_MS | 126 if (CAMERA_OBSERVER_PERIOD_MS * freezePeriodCount >= CAMERA_FREEZE_REPOR
T_TIMOUT_MS |
120 && eventsHandler != null) { | 127 && eventsHandler != null) { |
121 Logging.e(TAG, "Camera freezed."); | 128 Logging.e(TAG, "Camera freezed."); |
122 if (surfaceHelper.isTextureInUse()) { | 129 if (surfaceHelper != null && surfaceHelper.isTextureInUse()) { |
123 // This can only happen if we are capturing to textures. | 130 // This can only happen if we are capturing to textures. |
124 eventsHandler.onCameraFreezed("Camera failure. Client must return vi
deo buffers."); | 131 eventsHandler.onCameraFreezed("Camera failure. Client must return vi
deo buffers."); |
125 } else { | 132 } else { |
126 eventsHandler.onCameraFreezed("Camera failure."); | 133 eventsHandler.onCameraFreezed("Camera failure."); |
127 } | 134 } |
128 return; | 135 return; |
129 } | 136 } |
130 } else { | 137 } else { |
131 freezePeriodCount = 0; | 138 freezePeriodCount = 0; |
132 } | 139 } |
(...skipping 234 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
367 } | 374 } |
368 | 375 |
369 // Note that this actually opens the camera, and Camera callbacks run on the | 376 // Note that this actually opens the camera, and Camera callbacks run on the |
370 // thread that calls open(), so this is done on the CameraThread. | 377 // thread that calls open(), so this is done on the CameraThread. |
371 @Override | 378 @Override |
372 public void startCapture( | 379 public void startCapture( |
373 final int width, final int height, final int framerate, | 380 final int width, final int height, final int framerate, |
374 final SurfaceTextureHelper surfaceTextureHelper, final Context application
Context, | 381 final SurfaceTextureHelper surfaceTextureHelper, final Context application
Context, |
375 final CapturerObserver frameObserver) { | 382 final CapturerObserver frameObserver) { |
376 Logging.d(TAG, "startCapture requested: " + width + "x" + height + "@" + fra
merate); | 383 Logging.d(TAG, "startCapture requested: " + width + "x" + height + "@" + fra
merate); |
377 if (surfaceTextureHelper == null) { | |
378 throw new IllegalArgumentException("surfaceTextureHelper not set."); | |
379 } | |
380 if (applicationContext == null) { | 384 if (applicationContext == null) { |
381 throw new IllegalArgumentException("applicationContext not set."); | 385 throw new IllegalArgumentException("applicationContext not set."); |
382 } | 386 } |
383 if (frameObserver == null) { | 387 if (frameObserver == null) { |
384 throw new IllegalArgumentException("frameObserver not set."); | 388 throw new IllegalArgumentException("frameObserver not set."); |
385 } | 389 } |
386 synchronized (handlerLock) { | 390 synchronized (handlerLock) { |
387 if (this.cameraThreadHandler != null) { | 391 if (this.cameraThreadHandler != null) { |
388 throw new RuntimeException("Camera has already been started."); | 392 throw new RuntimeException("Camera has already been started."); |
389 } | 393 } |
390 this.cameraThreadHandler = surfaceTextureHelper.getHandler(); | 394 if (surfaceTextureHelper != null) { |
391 this.surfaceHelper = surfaceTextureHelper; | 395 this.cameraThreadHandler = surfaceTextureHelper.getHandler(); |
| 396 this.surfaceHelper = surfaceTextureHelper; |
| 397 } else { |
| 398 // No SurfaceTextureHelper - create own handler. |
| 399 if (isCapturingToTexture) { |
| 400 Logging.e(TAG, "No SurfaceTextureHelper - falling back to byte buffer
capture"); |
| 401 isCapturingToTexture = false; |
| 402 } |
| 403 this.surfaceHelper = null; |
| 404 final HandlerThread thread = new HandlerThread(TAG); |
| 405 thread.start(); |
| 406 this.cameraThreadHandler = new Handler(thread.getLooper()); |
| 407 } |
392 final boolean didPost = maybePostOnCameraThread(new Runnable() { | 408 final boolean didPost = maybePostOnCameraThread(new Runnable() { |
393 @Override | 409 @Override |
394 public void run() { | 410 public void run() { |
395 openCameraAttempts = 0; | 411 openCameraAttempts = 0; |
396 startCaptureOnCameraThread(width, height, framerate, frameObserver, | 412 startCaptureOnCameraThread(width, height, framerate, frameObserver, |
397 applicationContext); | 413 applicationContext); |
398 } | 414 } |
399 }); | 415 }); |
400 if (!didPost) { | 416 if (!didPost) { |
401 frameObserver.onCapturerStarted(false); | 417 frameObserver.onCapturerStarted(false); |
(...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
438 startCaptureOnCameraThread(width, height, framerate, frameObserver
, | 454 startCaptureOnCameraThread(width, height, framerate, frameObserver
, |
439 applicationContext); | 455 applicationContext); |
440 } | 456 } |
441 }); | 457 }); |
442 return; | 458 return; |
443 } | 459 } |
444 throw e; | 460 throw e; |
445 } | 461 } |
446 | 462 |
447 try { | 463 try { |
448 camera.setPreviewTexture(surfaceHelper.getSurfaceTexture()); | 464 if (surfaceHelper != null) { |
| 465 camera.setPreviewTexture(surfaceHelper.getSurfaceTexture()); |
| 466 } else { |
| 467 // No SurfaceTextureHelper - create own dummy SurfaceTexture. |
| 468 this.dummyTextureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTE
RNAL_OES); |
| 469 this.dummySurfaceTexture = new SurfaceTexture(dummyTextureId); |
| 470 camera.setPreviewTexture(dummySurfaceTexture); |
| 471 } |
449 } catch (IOException e) { | 472 } catch (IOException e) { |
450 Logging.e(TAG, "setPreviewTexture failed", error); | 473 Logging.e(TAG, "setPreviewTexture failed", error); |
451 throw new RuntimeException(e); | 474 throw new RuntimeException(e); |
452 } | 475 } |
453 | 476 |
454 Logging.d(TAG, "Camera orientation: " + info.orientation + | 477 Logging.d(TAG, "Camera orientation: " + info.orientation + |
455 " .Device orientation: " + getDeviceOrientation()); | 478 " .Device orientation: " + getDeviceOrientation()); |
456 camera.setErrorCallback(cameraErrorCallback); | 479 camera.setErrorCallback(cameraErrorCallback); |
457 startPreviewOnCameraThread(width, height, framerate); | 480 startPreviewOnCameraThread(width, height, framerate); |
458 frameObserver.onCapturerStarted(true); | 481 frameObserver.onCapturerStarted(true); |
459 if (isCapturingToTexture) { | 482 if (isCapturingToTexture && surfaceHelper != null) { |
460 surfaceHelper.startListening(this); | 483 surfaceHelper.startListening(this); |
461 } | 484 } |
462 | 485 |
463 // Start camera observer. | 486 // Start camera observer. |
464 maybePostDelayedOnCameraThread(CAMERA_OBSERVER_PERIOD_MS, cameraObserver); | 487 maybePostDelayedOnCameraThread(CAMERA_OBSERVER_PERIOD_MS, cameraObserver); |
465 return; | 488 return; |
466 } catch (RuntimeException e) { | 489 } catch (RuntimeException e) { |
467 error = e; | 490 error = e; |
468 } | 491 } |
469 Logging.e(TAG, "startCapture failed", error); | 492 Logging.e(TAG, "startCapture failed", error); |
470 if (camera != null) { | 493 if (camera != null) { |
471 // Make sure the camera is released. | 494 // Make sure the camera is released. |
472 stopCaptureOnCameraThread(); | 495 stopCaptureOnCameraThread(); |
473 } | 496 } |
474 synchronized (handlerLock) { | 497 synchronized (handlerLock) { |
475 // Remove all pending Runnables posted from |this|. | 498 // Remove all pending Runnables posted from |this|. |
476 cameraThreadHandler.removeCallbacksAndMessages(this /* token */); | 499 cameraThreadHandler.removeCallbacksAndMessages(this /* token */); |
| 500 releaseOwnHandlerAndDummySurfaceTexture(); |
477 cameraThreadHandler = null; | 501 cameraThreadHandler = null; |
478 } | 502 } |
479 frameObserver.onCapturerStarted(false); | 503 frameObserver.onCapturerStarted(false); |
480 if (eventsHandler != null) { | 504 if (eventsHandler != null) { |
481 eventsHandler.onCameraError("Camera can not be started."); | 505 eventsHandler.onCameraError("Camera can not be started."); |
482 } | 506 } |
483 return; | 507 return; |
484 } | 508 } |
485 | 509 |
  // Releases the resources this capturer created itself when no external
  // SurfaceTextureHelper was supplied to startCapture(): the private camera
  // HandlerThread and the dummy GL texture / SurfaceTexture used as the
  // preview sink. Must be called on the camera thread while holding
  // |handlerLock| (both call sites do so).
  private void releaseOwnHandlerAndDummySurfaceTexture() {
    if (surfaceHelper == null) {
      // We are using our own thread - quit it.
      // NOTE(review): this runs on that same looper thread; quit() is legal
      // here but drops any messages still in the queue - confirm no pending
      // camera work is expected at this point.
      cameraThreadHandler.getLooper().quit();
      cameraThreadHandler = null;
    }
    // Delete the dummy GL texture, if one was created in
    // startCaptureOnCameraThread(). 0 means "never allocated".
    if (dummyTextureId != 0) {
      GLES20.glDeleteTextures(1, new int[] {dummyTextureId}, 0);
      dummyTextureId = 0;
    }
    // Release the dummy SurfaceTexture that wrapped |dummyTextureId|.
    if (dummySurfaceTexture != null) {
      dummySurfaceTexture.release();
      dummySurfaceTexture = null;
    }
  }
| 525 |
486 // (Re)start preview with the closest supported format to |width| x |height| @
|framerate|. | 526 // (Re)start preview with the closest supported format to |width| x |height| @
|framerate|. |
487 private void startPreviewOnCameraThread(int width, int height, int framerate)
{ | 527 private void startPreviewOnCameraThread(int width, int height, int framerate)
{ |
488 checkIsOnCameraThread(); | 528 checkIsOnCameraThread(); |
489 Logging.d( | 529 Logging.d( |
490 TAG, "startPreviewOnCameraThread requested: " + width + "x" + height + "
@" + framerate); | 530 TAG, "startPreviewOnCameraThread requested: " + width + "x" + height + "
@" + framerate); |
491 if (camera == null) { | 531 if (camera == null) { |
492 Logging.e(TAG, "Calling startPreviewOnCameraThread on stopped camera."); | 532 Logging.e(TAG, "Calling startPreviewOnCameraThread on stopped camera."); |
493 return; | 533 return; |
494 } | 534 } |
495 | 535 |
(...skipping 81 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
  // Stops capture synchronously: posts the shutdown to the camera thread and
  // blocks up to CAMERA_STOP_TIMEOUT_MS for it to complete. On timeout the
  // error is logged and reported via |eventsHandler|, but this method still
  // returns normally.
  @Override
  public void stopCapture() throws InterruptedException {
    Logging.d(TAG, "stopCapture");
    // Latch used to block the calling thread until the camera thread has
    // finished tearing down.
    final CountDownLatch barrier = new CountDownLatch(1);
    final boolean didPost = maybePostOnCameraThread(new Runnable() {
      @Override public void run() {
        stopCaptureOnCameraThread();
        synchronized (handlerLock) {
          // Remove all pending Runnables posted from |this|.
          // NOTE(review): inside this anonymous Runnable, |this| is the
          // Runnable instance, not the enclosing capturer - verify it matches
          // the token that maybePostOnCameraThread() posts with, otherwise
          // this call removes nothing.
          cameraThreadHandler.removeCallbacksAndMessages(this /* token */);
          releaseOwnHandlerAndDummySurfaceTexture();
          cameraThreadHandler = null;
          surfaceHelper = null;
        }
        barrier.countDown();
      }
    });
    if (!didPost) {
      // Camera thread is already gone; nothing to stop.
      Logging.e(TAG, "Calling stopCapture() for already stopped camera.");
      return;
    }
    if (!barrier.await(CAMERA_STOP_TIMEOUT_MS, TimeUnit.MILLISECONDS)) {
      // Camera thread did not finish in time - report but do not throw.
      Logging.e(TAG, "Camera stop timeout");
      printStackTrace();
      if (eventsHandler != null) {
        eventsHandler.onCameraError("Camera stop timeout");
      }
    }
    Logging.d(TAG, "stopCapture done");
  }
606 | 647 |
607 private void stopCaptureOnCameraThread() { | 648 private void stopCaptureOnCameraThread() { |
608 checkIsOnCameraThread(); | 649 checkIsOnCameraThread(); |
609 Logging.d(TAG, "stopCaptureOnCameraThread"); | 650 Logging.d(TAG, "stopCaptureOnCameraThread"); |
610 | 651 |
611 // Make sure onTextureFrameAvailable() is not called anymore. | 652 // Make sure onTextureFrameAvailable() is not called anymore. |
612 surfaceHelper.stopListening(); | 653 if (surfaceHelper != null) { |
| 654 surfaceHelper.stopListening(); |
| 655 } |
613 cameraThreadHandler.removeCallbacks(cameraObserver); | 656 cameraThreadHandler.removeCallbacks(cameraObserver); |
614 cameraStatistics.getAndResetFrameCount(); | 657 cameraStatistics.getAndResetFrameCount(); |
615 Logging.d(TAG, "Stop preview."); | 658 Logging.d(TAG, "Stop preview."); |
616 camera.stopPreview(); | 659 camera.stopPreview(); |
617 camera.setPreviewCallbackWithBuffer(null); | 660 camera.setPreviewCallbackWithBuffer(null); |
618 queuedBuffers.clear(); | 661 queuedBuffers.clear(); |
619 captureFormat = null; | 662 captureFormat = null; |
620 | 663 |
621 Logging.d(TAG, "Release camera."); | 664 Logging.d(TAG, "Release camera."); |
622 camera.release(); | 665 camera.release(); |
(...skipping 116 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
739 // Undo the mirror that the OS "helps" us with. | 782 // Undo the mirror that the OS "helps" us with. |
740 // http://developer.android.com/reference/android/hardware/Camera.html#set
DisplayOrientation(int) | 783 // http://developer.android.com/reference/android/hardware/Camera.html#set
DisplayOrientation(int) |
741 transformMatrix = | 784 transformMatrix = |
742 RendererCommon.multiplyMatrices(transformMatrix, RendererCommon.horizo
ntalFlipMatrix()); | 785 RendererCommon.multiplyMatrices(transformMatrix, RendererCommon.horizo
ntalFlipMatrix()); |
743 } | 786 } |
744 cameraStatistics.addFrame(); | 787 cameraStatistics.addFrame(); |
745 frameObserver.onTextureFrameCaptured(captureFormat.width, captureFormat.heig
ht, oesTextureId, | 788 frameObserver.onTextureFrameCaptured(captureFormat.width, captureFormat.heig
ht, oesTextureId, |
746 transformMatrix, rotation, timestampNs); | 789 transformMatrix, rotation, timestampNs); |
747 } | 790 } |
748 } | 791 } |
OLD | NEW |