Chromium Code Reviews

Side by Side Diff: webrtc/api/java/android/org/webrtc/VideoCapturerAndroid.java

Issue 2111923003: Reland of Combine webrtc/api/java/android and webrtc/api/java/src. (Closed) Base URL: https://chromium.googlesource.com/external/webrtc.git@master
Patch Set: Created 4 years, 5 months ago
1 /*
2 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
3 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
11 package org.webrtc;
12
13 import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
14
15 import android.content.Context;
16 import android.os.Handler;
17 import android.os.SystemClock;
18 import android.view.Surface;
19 import android.view.WindowManager;
20
21 import java.io.IOException;
22 import java.nio.ByteBuffer;
23 import java.util.HashSet;
24 import java.util.List;
25 import java.util.Set;
26 import java.util.concurrent.CountDownLatch;
27 import java.util.concurrent.TimeUnit;
28
29 // Android specific implementation of VideoCapturer.
30 // An instance of this class can be created by an application using
31 // VideoCapturerAndroid.create();
32 // This class extends VideoCapturer with a method to easily switch between the
33 // front and back camera. It also provides methods for enumerating valid device
34 // names.
35 //
36 // Threading notes: this class is called from C++ code, Android Camera callbacks, and possibly
37 // arbitrary Java threads. All public entry points are thread safe, and delegate the work to the
38 // camera thread. The internal *OnCameraThread() methods must check |camera| for null to determine if
39 // the camera has been stopped.
40 // TODO(magjed): This class name is now confusing - rename to Camera1VideoCapturer.
41 @SuppressWarnings("deprecation")
42 public class VideoCapturerAndroid implements
43 CameraVideoCapturer,
44 android.hardware.Camera.PreviewCallback,
45 SurfaceTextureHelper.OnTextureFrameAvailableListener {
46 private final static String TAG = "VideoCapturerAndroid";
47 private static final int CAMERA_STOP_TIMEOUT_MS = 7000;
48
49 private android.hardware.Camera camera; // Only non-null while capturing.
50 private final Object handlerLock = new Object();
51 // |cameraThreadHandler| must be synchronized on |handlerLock| when not on the camera thread,
52 // or when modifying the reference. Use maybePostOnCameraThread() instead of posting directly to
53 // the handler - this way all callbacks with a specified token can be removed at once.
54 private Handler cameraThreadHandler;
55 private Context applicationContext;
56 // Synchronization lock for |id|.
57 private final Object cameraIdLock = new Object();
58 private int id;
59 private android.hardware.Camera.CameraInfo info;
60 private CameraStatistics cameraStatistics;
61 // Remember the requested format in case we want to switch cameras.
62 private int requestedWidth;
63 private int requestedHeight;
64 private int requestedFramerate;
65 // The capture format will be the closest supported format to the requested format.
66 private CaptureFormat captureFormat;
67 private final Object pendingCameraSwitchLock = new Object();
68 private volatile boolean pendingCameraSwitch;
69 private CapturerObserver frameObserver = null;
70 private final CameraEventsHandler eventsHandler;
71 private boolean firstFrameReported;
72 // Arbitrary queue depth. Higher number means more memory allocated & held,
73 // lower number means more sensitivity to processing time in the client (and
74 // potentially stalling the capturer if it runs out of buffers to write to).
75 private static final int NUMBER_OF_CAPTURE_BUFFERS = 3;
76 private final Set<byte[]> queuedBuffers = new HashSet<byte[]>();
77 private final boolean isCapturingToTexture;
78 private SurfaceTextureHelper surfaceHelper;
79 private final static int MAX_OPEN_CAMERA_ATTEMPTS = 3;
80 private final static int OPEN_CAMERA_DELAY_MS = 500;
81 private int openCameraAttempts;
82
83 // Camera error callback.
84 private final android.hardware.Camera.ErrorCallback cameraErrorCallback =
85 new android.hardware.Camera.ErrorCallback() {
86 @Override
87 public void onError(int error, android.hardware.Camera camera) {
88 String errorMessage;
89 if (error == android.hardware.Camera.CAMERA_ERROR_SERVER_DIED) {
90 errorMessage = "Camera server died!";
91 } else {
92 errorMessage = "Camera error: " + error;
93 }
94 Logging.e(TAG, errorMessage);
95 if (eventsHandler != null) {
96 eventsHandler.onCameraError(errorMessage);
97 }
98 }
99 };
100
101 public static VideoCapturerAndroid create(String name,
102 CameraEventsHandler eventsHandler) {
103 return VideoCapturerAndroid.create(name, eventsHandler, false /* captureToTexture */);
104 }
105
106 // Use ctor directly instead.
107 @Deprecated
108 public static VideoCapturerAndroid create(String name,
109 CameraEventsHandler eventsHandler, boolean captureToTexture) {
110 try {
111 return new VideoCapturerAndroid(name, eventsHandler, captureToTexture);
112 } catch (RuntimeException e) {
113 Logging.e(TAG, "Couldn't create camera.", e);
114 return null;
115 }
116 }
117
118 public void printStackTrace() {
119 Thread cameraThread = null;
120 synchronized (handlerLock) {
121 if (cameraThreadHandler != null) {
122 cameraThread = cameraThreadHandler.getLooper().getThread();
123 }
124 }
125 if (cameraThread != null) {
126 StackTraceElement[] cameraStackTraces = cameraThread.getStackTrace();
127 if (cameraStackTraces.length > 0) {
128 Logging.d(TAG, "VideoCapturerAndroid stack trace:");
129 for (StackTraceElement stackTrace : cameraStackTraces) {
130 Logging.d(TAG, stackTrace.toString());
131 }
132 }
133 }
134 }
135
136 // Switch camera to the next valid camera id. This can only be called while
137 // the camera is running.
138 @Override
139 public void switchCamera(final CameraSwitchHandler switchEventsHandler) {
140 if (android.hardware.Camera.getNumberOfCameras() < 2) {
141 if (switchEventsHandler != null) {
142 switchEventsHandler.onCameraSwitchError("No camera to switch to.");
143 }
144 return;
145 }
146 synchronized (pendingCameraSwitchLock) {
147 if (pendingCameraSwitch) {
148 // Do not handle multiple camera switch requests, to avoid blocking the
149 // camera thread by handling too many switch requests from a queue.
150 Logging.w(TAG, "Ignoring camera switch request.");
151 if (switchEventsHandler != null) {
152 switchEventsHandler.onCameraSwitchError("Pending camera switch already in progress.");
153 }
154 return;
155 }
156 pendingCameraSwitch = true;
157 }
158 final boolean didPost = maybePostOnCameraThread(new Runnable() {
159 @Override
160 public void run() {
161 switchCameraOnCameraThread();
162 synchronized (pendingCameraSwitchLock) {
163 pendingCameraSwitch = false;
164 }
165 if (switchEventsHandler != null) {
166 switchEventsHandler.onCameraSwitchDone(
167 info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT);
168 }
169 }
170 });
171 if (!didPost && switchEventsHandler != null) {
172 switchEventsHandler.onCameraSwitchError("Camera is stopped.");
173 }
174 }
175
176 // Requests a new output format from the video capturer. Captured frames
177 // by the camera will be scaled and/or dropped by the video capturer.
178 // It does not matter if width and height are flipped. I.e., |width| = 640, |height| = 480 produces
179 // the same result as |width| = 480, |height| = 640.
180 // TODO(magjed/perkj): Document what this function does. Change name?
181 @Override
182 public void onOutputFormatRequest(final int width, final int height, final int framerate) {
183 maybePostOnCameraThread(new Runnable() {
184 @Override public void run() {
185 onOutputFormatRequestOnCameraThread(width, height, framerate);
186 }
187 });
188 }
189
190 // Reconfigure the camera to capture in a new format. This should only be called while the camera
191 // is running.
192 @Override
193 public void changeCaptureFormat(final int width, final int height, final int framerate) {
194 maybePostOnCameraThread(new Runnable() {
195 @Override public void run() {
196 startPreviewOnCameraThread(width, height, framerate);
197 }
198 });
199 }
200
201 // Helper function to retrieve the current camera id synchronously. Note that the camera id might
202 // change at any point due to switchCamera() calls.
203 private int getCurrentCameraId() {
204 synchronized (cameraIdLock) {
205 return id;
206 }
207 }
208
209 @Override
210 public List<CaptureFormat> getSupportedFormats() {
211 return Camera1Enumerator.getSupportedFormats(getCurrentCameraId());
212 }
213
214 // Returns true if this VideoCapturer is set up to capture video frames to a SurfaceTexture.
215 public boolean isCapturingToTexture() {
216 return isCapturingToTexture;
217 }
218
219 public VideoCapturerAndroid(String cameraName, CameraEventsHandler eventsHandler,
220 boolean captureToTexture) {
221 if (android.hardware.Camera.getNumberOfCameras() == 0) {
222 throw new RuntimeException("No cameras available");
223 }
224 if (cameraName == null || cameraName.equals("")) {
225 this.id = 0;
226 } else {
227 this.id = Camera1Enumerator.getCameraIndex(cameraName);
228 }
229 this.eventsHandler = eventsHandler;
230 isCapturingToTexture = captureToTexture;
231 Logging.d(TAG, "VideoCapturerAndroid isCapturingToTexture : " + isCapturingToTexture);
232 }
233
234 private void checkIsOnCameraThread() {
235 synchronized (handlerLock) {
236 if (cameraThreadHandler == null) {
237 Logging.e(TAG, "Camera is stopped - can't check thread.");
238 } else if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
239 throw new IllegalStateException("Wrong thread");
240 }
241 }
242 }
243
244 private boolean maybePostOnCameraThread(Runnable runnable) {
245 return maybePostDelayedOnCameraThread(0 /* delayMs */, runnable);
246 }
247
248 private boolean maybePostDelayedOnCameraThread(int delayMs, Runnable runnable) {
249 synchronized (handlerLock) {
250 return cameraThreadHandler != null
251 && cameraThreadHandler.postAtTime(
252 runnable, this /* token */, SystemClock.uptimeMillis() + delayMs);
253 }
254 }
255
256 @Override
257 public void dispose() {
258 Logging.d(TAG, "dispose");
259 }
260
261 // Note that this actually opens the camera, and Camera callbacks run on the
262 // thread that calls open(), so this is done on the CameraThread.
263 @Override
264 public void startCapture(
265 final int width, final int height, final int framerate,
266 final SurfaceTextureHelper surfaceTextureHelper, final Context applicationContext,
267 final CapturerObserver frameObserver) {
268 Logging.d(TAG, "startCapture requested: " + width + "x" + height + "@" + framerate);
269 if (surfaceTextureHelper == null) {
270 frameObserver.onCapturerStarted(false /* success */);
271 if (eventsHandler != null) {
272 eventsHandler.onCameraError("No SurfaceTexture created.");
273 }
274 return;
275 }
276 if (applicationContext == null) {
277 throw new IllegalArgumentException("applicationContext not set.");
278 }
279 if (frameObserver == null) {
280 throw new IllegalArgumentException("frameObserver not set.");
281 }
282 synchronized (handlerLock) {
283 if (this.cameraThreadHandler != null) {
284 throw new RuntimeException("Camera has already been started.");
285 }
286 this.cameraThreadHandler = surfaceTextureHelper.getHandler();
287 this.surfaceHelper = surfaceTextureHelper;
288 final boolean didPost = maybePostOnCameraThread(new Runnable() {
289 @Override
290 public void run() {
291 openCameraAttempts = 0;
292 startCaptureOnCameraThread(width, height, framerate, frameObserver,
293 applicationContext);
294 }
295 });
296 if (!didPost) {
297 frameObserver.onCapturerStarted(false);
298 if (eventsHandler != null) {
299 eventsHandler.onCameraError("Could not post task to camera thread.");
300 }
301 }
302 }
303 }
304
305 private void startCaptureOnCameraThread(
306 final int width, final int height, final int framerate, final CapturerObserver frameObserver,
307 final Context applicationContext) {
308 synchronized (handlerLock) {
309 if (cameraThreadHandler == null) {
310 Logging.e(TAG, "startCaptureOnCameraThread: Camera is stopped");
311 return;
312 } else {
313 checkIsOnCameraThread();
314 }
315 }
316 if (camera != null) {
317 Logging.e(TAG, "startCaptureOnCameraThread: Camera has already been started.");
318 return;
319 }
320 this.applicationContext = applicationContext;
321 this.frameObserver = frameObserver;
322 this.firstFrameReported = false;
323
324 try {
325 try {
326 synchronized (cameraIdLock) {
327 Logging.d(TAG, "Opening camera " + id);
328 if (eventsHandler != null) {
329 eventsHandler.onCameraOpening(id);
330 }
331 camera = android.hardware.Camera.open(id);
332 info = new android.hardware.Camera.CameraInfo();
333 android.hardware.Camera.getCameraInfo(id, info);
334 }
335 } catch (RuntimeException e) {
336 openCameraAttempts++;
337 if (openCameraAttempts < MAX_OPEN_CAMERA_ATTEMPTS) {
338 Logging.e(TAG, "Camera.open failed, retrying", e);
339 maybePostDelayedOnCameraThread(OPEN_CAMERA_DELAY_MS, new Runnable() {
340 @Override public void run() {
341 startCaptureOnCameraThread(width, height, framerate, frameObserver,
342 applicationContext);
343 }
344 });
345 return;
346 }
347 throw e;
348 }
349
350 camera.setPreviewTexture(surfaceHelper.getSurfaceTexture());
351
352 Logging.d(TAG, "Camera orientation: " + info.orientation +
353 " .Device orientation: " + getDeviceOrientation());
354 camera.setErrorCallback(cameraErrorCallback);
355 startPreviewOnCameraThread(width, height, framerate);
356 frameObserver.onCapturerStarted(true);
357 if (isCapturingToTexture) {
358 surfaceHelper.startListening(this);
359 }
360
361 // Start camera observer.
362 cameraStatistics = new CameraStatistics(surfaceHelper, eventsHandler);
363 } catch (IOException|RuntimeException e) {
364 Logging.e(TAG, "startCapture failed", e);
365 // Make sure the camera is released.
366 stopCaptureOnCameraThread(true /* stopHandler */);
367 frameObserver.onCapturerStarted(false);
368 if (eventsHandler != null) {
369 eventsHandler.onCameraError("Camera can not be started.");
370 }
371 }
372 }
373
374 // (Re)start preview with the closest supported format to |width| x |height| @ |framerate|.
375 private void startPreviewOnCameraThread(int width, int height, int framerate) {
376 synchronized (handlerLock) {
377 if (cameraThreadHandler == null || camera == null) {
378 Logging.e(TAG, "startPreviewOnCameraThread: Camera is stopped");
379 return;
380 } else {
381 checkIsOnCameraThread();
382 }
383 }
384 Logging.d(
385 TAG, "startPreviewOnCameraThread requested: " + width + "x" + height + "@" + framerate);
386
387 requestedWidth = width;
388 requestedHeight = height;
389 requestedFramerate = framerate;
390
391 // Find closest supported format for |width| x |height| @ |framerate|.
392 final android.hardware.Camera.Parameters parameters = camera.getParameters();
393 final List<CaptureFormat.FramerateRange> supportedFramerates =
394 Camera1Enumerator.convertFramerates(parameters.getSupportedPreviewFpsRange());
395 Logging.d(TAG, "Available fps ranges: " + supportedFramerates);
396
397 final CaptureFormat.FramerateRange fpsRange =
398 CameraEnumerationAndroid.getClosestSupportedFramerateRange(supportedFramerates, framerate);
399
400 final Size previewSize = CameraEnumerationAndroid.getClosestSupportedSize(
401 Camera1Enumerator.convertSizes(parameters.getSupportedPreviewSizes()), width, height);
402
403 final CaptureFormat captureFormat =
404 new CaptureFormat(previewSize.width, previewSize.height, fpsRange);
405
406 // If we are already using this capture format, we don't need to do anything.
407 if (captureFormat.equals(this.captureFormat)) {
408 return;
409 }
410
411 // Update camera parameters.
412 Logging.d(TAG, "isVideoStabilizationSupported: " +
413 parameters.isVideoStabilizationSupported());
414 if (parameters.isVideoStabilizationSupported()) {
415 parameters.setVideoStabilization(true);
416 }
417 // Note: setRecordingHint(true) actually decreases the frame rate on N5.
418 // parameters.setRecordingHint(true);
419 if (captureFormat.framerate.max > 0) {
420 parameters.setPreviewFpsRange(captureFormat.framerate.min, captureFormat.framerate.max);
421 }
422 parameters.setPreviewSize(previewSize.width, previewSize.height);
423
424 if (!isCapturingToTexture) {
425 parameters.setPreviewFormat(captureFormat.imageFormat);
426 }
427 // Picture size is for taking pictures and not for preview/video, but we need to set it anyway
428 // as a workaround for an aspect ratio problem on Nexus 7.
429 final Size pictureSize = CameraEnumerationAndroid.getClosestSupportedSize(
430 Camera1Enumerator.convertSizes(parameters.getSupportedPictureSizes()), width, height);
431 parameters.setPictureSize(pictureSize.width, pictureSize.height);
432
433 // Temporarily stop preview if it's already running.
434 if (this.captureFormat != null) {
435 camera.stopPreview();
436 // Calling |setPreviewCallbackWithBuffer| with null should clear the internal camera buffer
437 // queue, but sometimes we receive a frame with the old resolution after this call anyway.
438 camera.setPreviewCallbackWithBuffer(null);
439 }
440
441 // (Re)start preview.
442 Logging.d(TAG, "Start capturing: " + captureFormat);
443 this.captureFormat = captureFormat;
444
445 List<String> focusModes = parameters.getSupportedFocusModes();
446 if (focusModes.contains(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
447 parameters.setFocusMode(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
448 }
449
450 camera.setParameters(parameters);
451 // Calculate orientation manually and send it as CVO instead.
452 camera.setDisplayOrientation(0 /* degrees */);
453 if (!isCapturingToTexture) {
454 queuedBuffers.clear();
455 final int frameSize = captureFormat.frameSize();
456 for (int i = 0; i < NUMBER_OF_CAPTURE_BUFFERS; ++i) {
457 final ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize);
458 queuedBuffers.add(buffer.array());
459 camera.addCallbackBuffer(buffer.array());
460 }
461 camera.setPreviewCallbackWithBuffer(this);
462 }
463 camera.startPreview();
464 }
465
466 // Blocks until camera is known to be stopped.
467 @Override
468 public void stopCapture() throws InterruptedException {
469 Logging.d(TAG, "stopCapture");
470 final CountDownLatch barrier = new CountDownLatch(1);
471 final boolean didPost = maybePostOnCameraThread(new Runnable() {
472 @Override public void run() {
473 stopCaptureOnCameraThread(true /* stopHandler */);
474 barrier.countDown();
475 }
476 });
477 if (!didPost) {
478 Logging.e(TAG, "Calling stopCapture() for already stopped camera.");
479 return;
480 }
481 if (!barrier.await(CAMERA_STOP_TIMEOUT_MS, TimeUnit.MILLISECONDS)) {
482 Logging.e(TAG, "Camera stop timeout");
483 printStackTrace();
484 if (eventsHandler != null) {
485 eventsHandler.onCameraError("Camera stop timeout");
486 }
487 }
488 Logging.d(TAG, "stopCapture done");
489 }
490
491 private void stopCaptureOnCameraThread(boolean stopHandler) {
492 synchronized (handlerLock) {
493 if (cameraThreadHandler == null) {
494 Logging.e(TAG, "stopCaptureOnCameraThread: Camera is stopped");
495 } else {
496 checkIsOnCameraThread();
497 }
498 }
499 Logging.d(TAG, "stopCaptureOnCameraThread");
500 // Note that the camera might still not be started here if startCaptureOnCameraThread failed
501 // and we posted a retry.
502
503 // Make sure onTextureFrameAvailable() is not called anymore.
504 if (surfaceHelper != null) {
505 surfaceHelper.stopListening();
506 }
507 if (stopHandler) {
508 synchronized (handlerLock) {
509 // Clear the cameraThreadHandler first, in case stopPreview or
510 // other driver code deadlocks. Deadlock in
511 // android.hardware.Camera._stopPreview(Native Method) has
512 // been observed on Nexus 5 (hammerhead), OS version LMY48I.
513 // The camera might post another one or two preview frames
514 // before stopped, so we have to check for a null
515 // cameraThreadHandler in our handler. Remove all pending
516 // Runnables posted from |this|.
517 if (cameraThreadHandler != null) {
518 cameraThreadHandler.removeCallbacksAndMessages(this /* token */);
519 cameraThreadHandler = null;
520 }
521 surfaceHelper = null;
522 }
523 }
524 if (cameraStatistics != null) {
525 cameraStatistics.release();
526 cameraStatistics = null;
527 }
528 Logging.d(TAG, "Stop preview.");
529 if (camera != null) {
530 camera.stopPreview();
531 camera.setPreviewCallbackWithBuffer(null);
532 }
533 queuedBuffers.clear();
534 captureFormat = null;
535
536 Logging.d(TAG, "Release camera.");
537 if (camera != null) {
538 camera.release();
539 camera = null;
540 }
541 if (eventsHandler != null) {
542 eventsHandler.onCameraClosed();
543 }
544 Logging.d(TAG, "stopCaptureOnCameraThread done");
545 }
546
547 private void switchCameraOnCameraThread() {
548 synchronized (handlerLock) {
549 if (cameraThreadHandler == null) {
550 Logging.e(TAG, "switchCameraOnCameraThread: Camera is stopped");
551 return;
552 } else {
553 checkIsOnCameraThread();
554 }
555 }
556 Logging.d(TAG, "switchCameraOnCameraThread");
557 stopCaptureOnCameraThread(false /* stopHandler */);
558 synchronized (cameraIdLock) {
559 id = (id + 1) % android.hardware.Camera.getNumberOfCameras();
560 }
561 startCaptureOnCameraThread(requestedWidth, requestedHeight, requestedFramerate, frameObserver,
562 applicationContext);
563 Logging.d(TAG, "switchCameraOnCameraThread done");
564 }
565
566 private void onOutputFormatRequestOnCameraThread(int width, int height, int framerate) {
567 synchronized (handlerLock) {
568 if (cameraThreadHandler == null || camera == null) {
569 Logging.e(TAG, "onOutputFormatRequestOnCameraThread: Camera is stopped");
570 return;
571 } else {
572 checkIsOnCameraThread();
573 }
574 }
575 Logging.d(TAG, "onOutputFormatRequestOnCameraThread: " + width + "x" + height +
576 "@" + framerate);
577 frameObserver.onOutputFormatRequest(width, height, framerate);
578 }
579
580 private int getDeviceOrientation() {
581 int orientation = 0;
582
583 WindowManager wm = (WindowManager) applicationContext.getSystemService(
584 Context.WINDOW_SERVICE);
585 switch(wm.getDefaultDisplay().getRotation()) {
586 case Surface.ROTATION_90:
587 orientation = 90;
588 break;
589 case Surface.ROTATION_180:
590 orientation = 180;
591 break;
592 case Surface.ROTATION_270:
593 orientation = 270;
594 break;
595 case Surface.ROTATION_0:
596 default:
597 orientation = 0;
598 break;
599 }
600 return orientation;
601 }
602
603 private int getFrameOrientation() {
604 int rotation = getDeviceOrientation();
605 if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK) {
606 rotation = 360 - rotation;
607 }
608 return (info.orientation + rotation) % 360;
609 }
610
611 // Called on the camera thread, so this must not be declared "synchronized".
612 @Override
613 public void onPreviewFrame(byte[] data, android.hardware.Camera callbackCamera) {
614 synchronized (handlerLock) {
615 if (cameraThreadHandler == null) {
616 Logging.e(TAG, "onPreviewFrame: Camera is stopped");
617 return;
618 } else {
619 checkIsOnCameraThread();
620 }
621 }
622 if (!queuedBuffers.contains(data)) {
623 // |data| is an old invalid buffer.
624 return;
625 }
626 if (camera != callbackCamera) {
627 throw new RuntimeException("Unexpected camera in callback!");
628 }
629
630 final long captureTimeNs =
631 TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
632
633 if (eventsHandler != null && !firstFrameReported) {
634 eventsHandler.onFirstFrameAvailable();
635 firstFrameReported = true;
636 }
637
638 cameraStatistics.addFrame();
639 frameObserver.onByteBufferFrameCaptured(data, captureFormat.width, captureFormat.height,
640 getFrameOrientation(), captureTimeNs);
641 camera.addCallbackBuffer(data);
642 }
643
644 @Override
645 public void onTextureFrameAvailable(
646 int oesTextureId, float[] transformMatrix, long timestampNs) {
647 synchronized (handlerLock) {
648 if (cameraThreadHandler == null) {
649 Logging.e(TAG, "onTextureFrameAvailable: Camera is stopped");
650 surfaceHelper.returnTextureFrame();
651 return;
652 } else {
653 checkIsOnCameraThread();
654 }
655 }
656 if (eventsHandler != null && !firstFrameReported) {
657 eventsHandler.onFirstFrameAvailable();
658 firstFrameReported = true;
659 }
660
661 int rotation = getFrameOrientation();
662 if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) {
663 // Undo the mirror that the OS "helps" us with.
664 // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
665 transformMatrix =
666 RendererCommon.multiplyMatrices(transformMatrix, RendererCommon.horizontalFlipMatrix());
667 }
668 cameraStatistics.addFrame();
669 frameObserver.onTextureFrameCaptured(captureFormat.width, captureFormat.height, oesTextureId,
670 transformMatrix, rotation, timestampNs);
671 }
672 }
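
For reviewers who want a quick picture of how the public entry points in this file fit together, here is a minimal usage sketch. It is not code from this CL: it is assumed to live in the org.webrtc package so the unqualified names resolve as in the file above, and SurfaceTextureHelper.create(), EglBase.Context, and SurfaceTextureHelper.dispose() are assumptions about the surrounding WebRTC Java API. In a real application the CapturerObserver is normally supplied by the media stack rather than implemented by hand.

package org.webrtc;

import android.content.Context;

class VideoCapturerAndroidUsageSketch {
  // Hypothetical helper illustrating the lifecycle; not part of this CL.
  void captureBriefly(Context appContext, EglBase.Context eglContext, CapturerObserver observer)
      throws InterruptedException {
    // create() returns null if construction fails (e.g. no cameras available);
    // an empty name selects camera id 0.
    VideoCapturerAndroid capturer = VideoCapturerAndroid.create("", null /* eventsHandler */);
    if (capturer == null) {
      return;
    }
    // The SurfaceTextureHelper's handler becomes the camera thread used by startCapture().
    // SurfaceTextureHelper.create(threadName, sharedContext) is assumed here, not defined in this CL.
    SurfaceTextureHelper helper = SurfaceTextureHelper.create("CameraThread", eglContext);
    capturer.startCapture(640, 480, 30, helper, appContext, observer);

    // Switch between front and back camera while capturing; a CameraSwitchHandler can be
    // passed instead of null to observe the result.
    capturer.switchCamera(null /* switchEventsHandler */);

    // stopCapture() blocks until the camera is released, or logs an error after the
    // 7 second CAMERA_STOP_TIMEOUT_MS.
    capturer.stopCapture();
    capturer.dispose();
    helper.dispose(); // Assumed cleanup call on SurfaceTextureHelper.
  }
}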