OLD | NEW |
---|---|
1 /* | 1 /* |
2 * Copyright 2016 The WebRTC project authors. All Rights Reserved. | 2 * Copyright 2016 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
11 package org.webrtc; | 11 package org.webrtc; |
12 | 12 |
13 import org.webrtc.CameraEnumerationAndroid.CaptureFormat; | 13 import org.webrtc.CameraEnumerationAndroid.CaptureFormat; |
14 import org.webrtc.Metrics.Histogram; | 14 import org.webrtc.Metrics.Histogram; |
15 | 15 |
16 import android.annotation.TargetApi; | |
17 import android.content.Context; | 16 import android.content.Context; |
18 import android.graphics.SurfaceTexture; | |
19 import android.hardware.camera2.CameraAccessException; | |
20 import android.hardware.camera2.CameraCaptureSession; | |
21 import android.hardware.camera2.CameraCharacteristics; | |
22 import android.hardware.camera2.CameraDevice; | |
23 import android.hardware.camera2.CameraManager; | |
24 import android.hardware.camera2.CameraMetadata; | |
25 import android.hardware.camera2.CaptureFailure; | |
26 import android.hardware.camera2.CaptureRequest; | |
27 import android.os.Handler; | 17 import android.os.Handler; |
28 import android.util.Range; | 18 import android.os.SystemClock; |
29 import android.view.Surface; | 19 import android.view.Surface; |
30 import android.view.WindowManager; | 20 import android.view.WindowManager; |
31 | 21 |
32 import java.util.Arrays; | 22 import java.io.IOException; |
23 import java.nio.ByteBuffer; | |
33 import java.util.List; | 24 import java.util.List; |
34 import java.util.concurrent.CountDownLatch; | 25 import java.util.concurrent.CountDownLatch; |
35 import java.util.concurrent.TimeUnit; | 26 import java.util.concurrent.TimeUnit; |
36 | 27 |
37 @TargetApi(21) | 28 @SuppressWarnings("deprecation") |
38 public class Camera2Session implements CameraSession { | 29 public class Camera1Session implements CameraSession { |
39 private static final String TAG = "Camera2Session"; | 30 private static final String TAG = "Camera1Session"; |
31 private static final int NUMBER_OF_CAPTURE_BUFFERS = 3; | |
40 | 32 |
41 private static final Histogram camera2StartTimeMsHistogram = | 33 private static final Histogram camera1StartTimeMsHistogram = |
42 Histogram.createCounts("WebRTC.Android.Camera2.StartTimeMs", 1, 10000, 50) ; | 34 Histogram.createCounts("WebRTC.Android.Camera1.StartTimeMs", 1, 10000, 50) ; |
43 private static final Histogram camera2StopTimeMsHistogram = | 35 private static final Histogram camera1StopTimeMsHistogram = |
44 Histogram.createCounts("WebRTC.Android.Camera2.StopTimeMs", 1, 10000, 50); | 36 Histogram.createCounts("WebRTC.Android.Camera1.StopTimeMs", 1, 10000, 50); |
45 | 37 |
46 private static enum SessionState { RUNNING, STOPPED }; | 38 private static enum SessionState { RUNNING, STOPPED }; |
47 | 39 |
48 private final Handler cameraThreadHandler; | 40 private final Handler cameraThreadHandler; |
49 private final CreateSessionCallback callback; | |
50 private final Events events; | 41 private final Events events; |
42 private final boolean captureToTexture; | |
51 private final Context applicationContext; | 43 private final Context applicationContext; |
52 private final CameraManager cameraManager; | |
53 private final SurfaceTextureHelper surfaceTextureHelper; | 44 private final SurfaceTextureHelper surfaceTextureHelper; |
54 private final String cameraId; | 45 private final int cameraId; |
55 private final int width; | 46 private final int width; |
56 private final int height; | 47 private final int height; |
57 private final int framerate; | 48 private final int framerate; |
58 | 49 private final android.hardware.Camera camera; |
59 // Initialized at start | 50 private final android.hardware.Camera.CameraInfo info; |
60 private CameraCharacteristics cameraCharacteristics; | 51 private final CaptureFormat captureFormat; |
61 private int cameraOrientation; | |
62 private boolean isCameraFrontFacing; | |
63 private int fpsUnitFactor; | |
64 private CaptureFormat captureFormat; | |
65 | |
66 // Initialized when camera opens | |
67 private CameraDevice cameraDevice; | |
68 private Surface surface; | |
69 | |
70 // Initialized when capture session is created | |
71 private CameraCaptureSession captureSession; | |
72 | |
73 // State | |
74 private SessionState state = SessionState.RUNNING; | |
75 private boolean firstFrameReported = false; | |
76 | |
77 // Used only for stats. Only used on the camera thread. | 52 // Used only for stats. Only used on the camera thread. |
78 private final long constructionTimeNs; // Construction time of this class. | 53 private final long constructionTimeNs; // Construction time of this class. |
79 | 54 |
80 private class CameraStateCallback extends CameraDevice.StateCallback { | 55 private SessionState state; |
81 private String getErrorDescription(int errorCode) { | 56 private boolean firstFrameReported = false; |
82 switch (errorCode) { | 57 |
83 case CameraDevice.StateCallback.ERROR_CAMERA_DEVICE: | 58 public static void create( |
84 return "Camera device has encountered a fatal error."; | 59 final CreateSessionCallback callback, final Events events, |
85 case CameraDevice.StateCallback.ERROR_CAMERA_DISABLED: | 60 final boolean captureToTexture, final Context applicationContext, |
86 return "Camera device could not be opened due to a device policy."; | 61 final SurfaceTextureHelper surfaceTextureHelper, |
87 case CameraDevice.StateCallback.ERROR_CAMERA_IN_USE: | 62 final int cameraId, final int width, final int height, final int framerate ) { |
88 return "Camera device is in use already."; | 63 final long constructionTimeNs = System.nanoTime(); |
89 case CameraDevice.StateCallback.ERROR_CAMERA_SERVICE: | 64 Logging.d(TAG, "Open camera " + cameraId); |
90 return "Camera service has encountered a fatal error."; | 65 events.onCameraOpening(); |
91 case CameraDevice.StateCallback.ERROR_MAX_CAMERAS_IN_USE: | 66 |
92 return "Camera device could not be opened because" | 67 final android.hardware.Camera camera; |
93 + " there are too many other open camera devices."; | 68 try { |
94 default: | 69 camera = android.hardware.Camera.open(cameraId); |
95 return "Unknown camera error: " + errorCode; | 70 } catch (RuntimeException e) { |
71 callback.onFailure(e.getMessage()); | |
72 return; | |
73 } | |
74 | |
75 try { | |
76 camera.setPreviewTexture(surfaceTextureHelper.getSurfaceTexture()); | |
77 } catch (IOException e) { | |
78 camera.release(); | |
79 callback.onFailure(e.getMessage()); | |
80 return; | |
81 } | |
82 | |
83 final android.hardware.Camera.CameraInfo info = new android.hardware.Camera. CameraInfo(); | |
84 android.hardware.Camera.getCameraInfo(cameraId, info); | |
85 | |
86 final android.hardware.Camera.Parameters parameters = camera.getParameters() ; | |
87 final CaptureFormat captureFormat = findClosestCaptureFormat( | |
88 parameters, width, height, framerate); | |
89 final Size pictureSize = findClosestPictureSize(parameters, width, height); | |
90 | |
91 updateCameraParameters(camera, parameters, captureFormat, captureToTexture); | |
92 | |
93 // Initialize the capture buffers. | |
94 if (!captureToTexture) { | |
95 final int frameSize = captureFormat.frameSize(); | |
96 for (int i = 0; i < NUMBER_OF_CAPTURE_BUFFERS; ++i) { | |
97 final ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize); | |
98 camera.addCallbackBuffer(buffer.array()); | |
96 } | 99 } |
97 } | 100 } |
98 | 101 |
99 @Override | 102 // Calculate orientation manually and send it as CVO instead. |
100 public void onDisconnected(CameraDevice camera) { | 103 camera.setDisplayOrientation(0 /* degrees */); |
101 checkIsOnCameraThread(); | 104 |
102 reportError("Camera disconnected."); | 105 callback.onDone(new Camera1Session( |
106 events, captureToTexture, applicationContext, surfaceTextureHelper, | |
107 cameraId, width, height, framerate, | |
108 camera, info, captureFormat, constructionTimeNs)); | |
109 } | |
110 | |
111 private static void updateCameraParameters(android.hardware.Camera camera, | |
112 android.hardware.Camera.Parameters parameters, CaptureFormat captureFormat , | |
113 boolean captureToTexture) { | |
114 final List<String> focusModes = parameters.getSupportedFocusModes(); | |
115 | |
116 parameters.setPreviewFpsRange(captureFormat.framerate.min, captureFormat.fra merate.max); | |
117 parameters.setPreviewSize(captureFormat.width, captureFormat.height); | |
118 if (!captureToTexture) { | |
119 parameters.setPreviewFormat(captureFormat.imageFormat); | |
103 } | 120 } |
104 | 121 |
105 @Override | 122 if (parameters.isVideoStabilizationSupported()) { |
106 public void onError(CameraDevice camera, int errorCode) { | 123 parameters.setVideoStabilization(true); |
107 checkIsOnCameraThread(); | |
108 reportError(getErrorDescription(errorCode)); | |
109 } | 124 } |
110 | 125 if (focusModes.contains(android.hardware.Camera.Parameters.FOCUS_MODE_CONTIN UOUS_VIDEO)) { |
111 @Override | 126 parameters.setFocusMode(android.hardware.Camera.Parameters.FOCUS_MODE_CONT INUOUS_VIDEO); |
112 public void onOpened(CameraDevice camera) { | |
113 checkIsOnCameraThread(); | |
114 | |
115 Logging.d(TAG, "Camera opened."); | |
116 cameraDevice = camera; | |
117 | |
118 final SurfaceTexture surfaceTexture = surfaceTextureHelper.getSurfaceTextu re(); | |
119 surfaceTexture.setDefaultBufferSize(captureFormat.width, captureFormat.hei ght); | |
120 surface = new Surface(surfaceTexture); | |
121 try { | |
122 camera.createCaptureSession( | |
123 Arrays.asList(surface), new CaptureSessionCallback(), cameraThreadHa ndler); | |
124 } catch (CameraAccessException e) { | |
125 reportError("Failed to create capture session. " + e); | |
126 return; | |
127 } | |
128 } | 127 } |
129 | 128 camera.setParameters(parameters); |
130 @Override | |
131 public void onClosed(CameraDevice camera) { | |
132 checkIsOnCameraThread(); | |
133 | |
134 Logging.d(TAG, "Camera device closed."); | |
135 events.onCameraClosed(Camera2Session.this); | |
136 } | |
137 } | 129 } |
138 | 130 |
139 private class CaptureSessionCallback extends CameraCaptureSession.StateCallbac k { | 131 private static CaptureFormat findClosestCaptureFormat( |
140 @Override | 132 android.hardware.Camera.Parameters parameters, int width, int height, int framerate) { |
141 public void onConfigureFailed(CameraCaptureSession session) { | 133 // Find closest supported format for |width| x |height| @ |framerate|. |
142 checkIsOnCameraThread(); | 134 final List<CaptureFormat.FramerateRange> supportedFramerates = |
143 session.close(); | 135 Camera1Enumerator.convertFramerates(parameters.getSupportedPreviewFpsRan ge()); |
144 reportError("Failed to configure capture session."); | 136 Logging.d(TAG, "Available fps ranges: " + supportedFramerates); |
145 } | |
146 | 137 |
147 @Override | 138 final CaptureFormat.FramerateRange fpsRange = |
148 public void onConfigured(CameraCaptureSession session) { | 139 CameraEnumerationAndroid.getClosestSupportedFramerateRange( |
149 checkIsOnCameraThread(); | 140 supportedFramerates, framerate); |
150 Logging.d(TAG, "Camera capture session configured."); | |
151 captureSession = session; | |
152 try { | |
153 /* | |
154 * The viable options for video capture requests are: | |
155 * TEMPLATE_PREVIEW: High frame rate is given priority over the highest- quality | |
156 * post-processing. | |
157 * TEMPLATE_RECORD: Stable frame rate is used, and post-processing is se t for recording | |
158 * quality. | |
159 */ | |
160 final CaptureRequest.Builder captureRequestBuilder = | |
161 cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD); | |
162 // Set auto exposure fps range. | |
163 captureRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, ne w Range<Integer>( | |
164 captureFormat.framerate.min / fpsUnitFactor, | |
165 captureFormat.framerate.max / fpsUnitFactor)); | |
166 captureRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE, | |
167 CaptureRequest.CONTROL_AE_MODE_ON); | |
168 captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false); | |
169 | 141 |
170 captureRequestBuilder.addTarget(surface); | 142 final Size previewSize = CameraEnumerationAndroid.getClosestSupportedSize( |
171 session.setRepeatingRequest( | 143 Camera1Enumerator.convertSizes(parameters.getSupportedPreviewSizes()), |
172 captureRequestBuilder.build(), new CameraCaptureCallback(), cameraTh readHandler); | 144 width, height); |
173 } catch (CameraAccessException e) { | |
174 reportError("Failed to start capture request. " + e); | |
175 return; | |
176 } | |
177 | 145 |
178 surfaceTextureHelper.startListening( | 146 return new CaptureFormat(previewSize.width, previewSize.height, fpsRange); |
179 new SurfaceTextureHelper.OnTextureFrameAvailableListener() { | |
180 @Override | |
181 public void onTextureFrameAvailable( | |
182 int oesTextureId, float[] transformMatrix, long timestampNs) { | |
183 checkIsOnCameraThread(); | |
184 | |
185 if (state != SessionState.RUNNING) { | |
186 Logging.d(TAG, "Texture frame captured but camera is no longer r unning."); | |
187 surfaceTextureHelper.returnTextureFrame(); | |
188 return; | |
189 } | |
190 | |
191 if (!firstFrameReported) { | |
192 firstFrameReported = true; | |
193 final int startTimeMs = | |
194 (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - cons tructionTimeNs); | |
195 camera2StartTimeMsHistogram.addSample(startTimeMs); | |
196 } | |
197 | |
198 int rotation = getFrameOrientation(); | |
199 if (isCameraFrontFacing) { | |
200 // Undo the mirror that the OS "helps" us with. | |
201 // http://developer.android.com/reference/android/hardware/Camer a.html#setDisplayOrientation(int) | |
202 transformMatrix = RendererCommon.multiplyMatrices( | |
203 transformMatrix, RendererCommon.horizontalFlipMatrix()); | |
204 } | |
205 | |
206 // Undo camera orientation - we report it as rotation instead. | |
207 transformMatrix = RendererCommon.rotateTextureMatrix( | |
208 transformMatrix, -cameraOrientation); | |
209 | |
210 events.onTextureFrameCaptured(Camera2Session.this, captureFormat.w idth, | |
211 captureFormat.height, oesTextureId, transformMatrix, rotation, timestampNs); | |
212 } | |
213 }); | |
214 Logging.d(TAG, "Camera device successfully started."); | |
215 callback.onDone(Camera2Session.this); | |
216 } | |
217 } | 147 } |
218 | 148 |
219 private class CameraCaptureCallback extends CameraCaptureSession.CaptureCallba ck { | 149 private static Size findClosestPictureSize(android.hardware.Camera.Parameters parameters, |
220 @Override | 150 int width, int height) { |
221 public void onCaptureFailed( | 151 return CameraEnumerationAndroid.getClosestSupportedSize( |
222 CameraCaptureSession session, CaptureRequest request, CaptureFailure fai lure) { | 152 Camera1Enumerator.convertSizes(parameters.getSupportedPictureSizes()), |
223 Logging.d(TAG, "Capture failed: " + failure); | 153 width, height); |
224 } | |
225 } | 154 } |
226 | 155 |
227 public static void create( | 156 private Camera1Session( |
228 CreateSessionCallback callback, Events events, | 157 Events events, boolean captureToTexture, |
229 Context applicationContext, CameraManager cameraManager, | 158 Context applicationContext, SurfaceTextureHelper surfaceTextureHelper, |
230 SurfaceTextureHelper surfaceTextureHelper, | 159 int cameraId, int width, int height, int framerate, |
231 String cameraId, int width, int height, int framerate) { | 160 android.hardware.Camera camera, android.hardware.Camera.CameraInfo info, |
232 new Camera2Session( | 161 CaptureFormat captureFormat, long constructionTimeNs) { |
233 callback, events, | 162 Logging.d(TAG, "Create new camera1 session on camera " + cameraId); |
234 applicationContext, cameraManager, | |
235 surfaceTextureHelper, | |
236 cameraId, width, height, framerate); | |
237 } | |
238 | |
239 private Camera2Session( | |
240 CreateSessionCallback callback, Events events, | |
241 Context applicationContext, CameraManager cameraManager, | |
242 SurfaceTextureHelper surfaceTextureHelper, | |
243 String cameraId, int width, int height, int framerate) { | |
244 Logging.d(TAG, "Create new camera2 session on camera " + cameraId); | |
245 | |
246 constructionTimeNs = System.nanoTime(); | |
247 | 163 |
248 this.cameraThreadHandler = new Handler(); | 164 this.cameraThreadHandler = new Handler(); |
249 this.callback = callback; | |
250 this.events = events; | 165 this.events = events; |
166 this.captureToTexture = captureToTexture; | |
251 this.applicationContext = applicationContext; | 167 this.applicationContext = applicationContext; |
252 this.cameraManager = cameraManager; | |
253 this.surfaceTextureHelper = surfaceTextureHelper; | 168 this.surfaceTextureHelper = surfaceTextureHelper; |
254 this.cameraId = cameraId; | 169 this.cameraId = cameraId; |
255 this.width = width; | 170 this.width = width; |
256 this.height = height; | 171 this.height = height; |
257 this.framerate = framerate; | 172 this.framerate = framerate; |
173 this.camera = camera; | |
174 this.info = info; | |
175 this.captureFormat = captureFormat; | |
176 this.constructionTimeNs = constructionTimeNs; | |
258 | 177 |
259 start(); | 178 startCapturing(); |
260 } | |
261 | |
262 private void start() { | |
263 checkIsOnCameraThread(); | |
264 Logging.d(TAG, "start"); | |
265 | |
266 try { | |
267 cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId); | |
268 } catch (final CameraAccessException e) { | |
269 reportError("getCameraCharacteristics(): " + e.getMessage()); | |
270 } | |
271 cameraOrientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_O RIENTATION); | |
272 isCameraFrontFacing = cameraCharacteristics.get(CameraCharacteristics.LENS_F ACING) | |
273 == CameraMetadata.LENS_FACING_FRONT; | |
274 | |
275 findCaptureFormat(); | |
276 openCamera(); | |
277 } | |
278 | |
279 private void findCaptureFormat() { | |
280 checkIsOnCameraThread(); | |
281 | |
282 Range<Integer>[] fpsRanges = | |
283 cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TAR GET_FPS_RANGES); | |
284 fpsUnitFactor = Camera2Enumerator.getFpsUnitFactor(fpsRanges); | |
285 List<CaptureFormat.FramerateRange> framerateRanges = | |
286 Camera2Enumerator.convertFramerates(fpsRanges, fpsUnitFactor); | |
287 List<Size> sizes = Camera2Enumerator.getSupportedSizes(cameraCharacteristics ); | |
288 | |
289 if (framerateRanges.isEmpty() || sizes.isEmpty()) { | |
290 reportError("No supported capture formats."); | |
291 } | |
292 | |
293 final CaptureFormat.FramerateRange bestFpsRange = | |
294 CameraEnumerationAndroid.getClosestSupportedFramerateRange( | |
295 framerateRanges, framerate); | |
296 | |
297 final Size bestSize = CameraEnumerationAndroid.getClosestSupportedSize( | |
298 sizes, width, height); | |
299 | |
300 captureFormat = new CaptureFormat(bestSize.width, bestSize.height, bestFpsRa nge); | |
301 Logging.d(TAG, "Using capture format: " + captureFormat); | |
302 } | |
303 | |
304 private void openCamera() { | |
305 checkIsOnCameraThread(); | |
306 | |
307 Logging.d(TAG, "Opening camera " + cameraId); | |
308 events.onCameraOpening(); | |
309 | |
310 try { | |
311 cameraManager.openCamera(cameraId, new CameraStateCallback(), cameraThread Handler); | |
312 } catch (CameraAccessException e) { | |
313 reportError("Failed to open camera: " + e); | |
314 } | |
315 } | 179 } |
316 | 180 |
317 @Override | 181 @Override |
318 public void stop() { | 182 public void stop() { |
319 final long stopStartTime = System.nanoTime(); | 183 final long stopStartTime = System.nanoTime(); |
320 Logging.d(TAG, "Stop camera2 session on camera " + cameraId); | 184 Logging.d(TAG, "Stop camera1 session on camera " + cameraId); |
321 if (Thread.currentThread() == cameraThreadHandler.getLooper().getThread()) { | 185 if (Thread.currentThread() == cameraThreadHandler.getLooper().getThread()) { |
322 if (state != SessionState.STOPPED) { | 186 if (state != SessionState.STOPPED) { |
323 state = SessionState.STOPPED; | 187 state = SessionState.STOPPED; |
324 // Post the stopInternal to return earlier. | 188 // Post the stopInternal to return earlier. |
325 cameraThreadHandler.post(new Runnable() { | 189 cameraThreadHandler.post(new Runnable() { |
326 @Override | 190 @Override |
327 public void run() { | 191 public void run() { |
328 stopInternal(); | 192 stopInternal(); |
329 final int stopTimeMs = | 193 final int stopTimeMs = |
330 (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStar tTime); | 194 (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStar tTime); |
331 camera2StopTimeMsHistogram.addSample(stopTimeMs); | 195 camera1StopTimeMsHistogram.addSample(stopTimeMs); |
332 } | 196 } |
333 }); | 197 }); |
334 } | 198 } |
335 } else { | 199 } else { |
336 final CountDownLatch stopLatch = new CountDownLatch(1); | 200 final CountDownLatch stopLatch = new CountDownLatch(1); |
337 | 201 |
338 cameraThreadHandler.post(new Runnable() { | 202 cameraThreadHandler.post(new Runnable() { |
339 @Override | 203 @Override |
340 public void run() { | 204 public void run() { |
341 if (state != SessionState.STOPPED) { | 205 if (state != SessionState.STOPPED) { |
342 state = SessionState.STOPPED; | 206 state = SessionState.STOPPED; |
343 stopLatch.countDown(); | 207 stopLatch.countDown(); |
344 stopInternal(); | 208 stopInternal(); |
345 final int stopTimeMs = | 209 final int stopTimeMs = |
346 (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStar tTime); | 210 (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStar tTime); |
347 camera2StopTimeMsHistogram.addSample(stopTimeMs); | 211 camera1StopTimeMsHistogram.addSample(stopTimeMs); |
348 } | 212 } |
349 } | 213 } |
350 }); | 214 }); |
351 | 215 |
352 ThreadUtils.awaitUninterruptibly(stopLatch); | 216 ThreadUtils.awaitUninterruptibly(stopLatch); |
353 } | 217 } |
354 } | 218 } |
355 | 219 |
220 private void startCapturing() { | |
221 Logging.d(TAG, "Start capturing"); | |
222 checkIsOnCameraThread(); | |
223 | |
224 state = SessionState.RUNNING; | |
225 | |
226 camera.setErrorCallback(new android.hardware.Camera.ErrorCallback() { | |
227 @Override | |
228 public void onError(int error, android.hardware.Camera camera) { | |
229 String errorMessage; | |
230 if (error == android.hardware.Camera.CAMERA_ERROR_SERVER_DIED) { | |
231 errorMessage = "Camera server died!"; | |
232 } else { | |
233 errorMessage = "Camera error: " + error; | |
234 } | |
235 Logging.e(TAG, errorMessage); | |
236 events.onCameraError(Camera1Session.this, errorMessage); | |
magjed_webrtc
2016/09/19 14:32:16
Can we stop before notifying the callback, i.e. mo
sakal
2016/09/20 07:11:59
Done.
| |
237 state = SessionState.STOPPED; | |
238 stopInternal(); | |
239 } | |
240 }); | |
241 | |
242 if (captureToTexture) { | |
243 listenForTextureFrames(); | |
244 } else { | |
245 listenForBytebufferFrames(); | |
246 } | |
247 try { | |
248 camera.startPreview(); | |
249 } catch (RuntimeException e) { | |
250 events.onCameraError(this, e.getMessage()); | |
251 state = SessionState.STOPPED; | |
252 stopInternal(); | |
253 } | |
254 } | |
255 | |
356 private void stopInternal() { | 256 private void stopInternal() { |
357 Logging.d(TAG, "Stop internal"); | 257 Logging.d(TAG, "Stop internal"); |
358 checkIsOnCameraThread(); | 258 checkIsOnCameraThread(); |
359 | 259 |
360 surfaceTextureHelper.stopListening(); | 260 surfaceTextureHelper.stopListening(); |
361 | 261 |
362 if (captureSession != null) { | 262 // Note: stopPreview or other driver code might deadlock. Deadlock in |
363 captureSession.close(); | 263 // android.hardware.Camera._stopPreview(Native Method) has been observed on |
364 captureSession = null; | 264 // Nexus 5 (hammerhead), OS version LMY48I. |
365 } | 265 camera.stopPreview(); |
366 if (surface != null) { | 266 camera.release(); |
367 surface.release(); | 267 events.onCameraClosed(this); |
368 surface = null; | |
369 } | |
370 if (cameraDevice != null) { | |
371 cameraDevice.close(); | |
372 cameraDevice = null; | |
373 } | |
374 | 268 |
375 Logging.d(TAG, "Stop done"); | 269 Logging.d(TAG, "Stop done"); |
376 } | 270 } |
377 | 271 |
378 private void reportError(String error) { | 272 private void listenForTextureFrames() { |
379 checkIsOnCameraThread(); | 273 surfaceTextureHelper.startListening(new SurfaceTextureHelper.OnTextureFrameA vailableListener() { |
380 Logging.e(TAG, "Error: " + error); | 274 @Override |
275 public void onTextureFrameAvailable( | |
276 int oesTextureId, float[] transformMatrix, long timestampNs) { | |
277 checkIsOnCameraThread(); | |
381 | 278 |
382 final boolean startFailure = (captureSession == null); | 279 if (state != SessionState.RUNNING) { |
383 state = SessionState.STOPPED; | 280 Logging.d(TAG, "Texture frame captured but camera is no longer running ."); |
384 stopInternal(); | 281 surfaceTextureHelper.returnTextureFrame(); |
385 if (startFailure) { | 282 return; |
386 callback.onFailure(error); | 283 } |
387 } else { | 284 |
388 events.onCameraError(this, error); | 285 if (!firstFrameReported) { |
389 } | 286 final int startTimeMs = |
287 (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructi onTimeNs); | |
288 camera1StartTimeMsHistogram.addSample(startTimeMs); | |
289 firstFrameReported = true; | |
290 } | |
291 | |
292 int rotation = getFrameOrientation(); | |
293 if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRON T) { | |
294 // Undo the mirror that the OS "helps" us with. | |
295 // http://developer.android.com/reference/android/hardware/Camera.html #setDisplayOrientation(int) | |
296 transformMatrix = RendererCommon.multiplyMatrices( | |
297 transformMatrix, RendererCommon.horizontalFlipMatrix()); | |
298 } | |
299 events.onTextureFrameCaptured(Camera1Session.this, captureFormat.width, | |
300 captureFormat.height, oesTextureId, transformMatrix, rotation, times tampNs); | |
301 } | |
302 }); | |
303 } | |
304 | |
305 private void listenForBytebufferFrames() { | |
306 camera.setPreviewCallbackWithBuffer(new android.hardware.Camera.PreviewCallb ack() { | |
307 @Override | |
308 public void onPreviewFrame(byte[] data, android.hardware.Camera callbackCa mera) { | |
309 checkIsOnCameraThread(); | |
310 | |
311 if (callbackCamera != camera) { | |
312 Logging.e(TAG, "Callback from a different camera. This should never ha ppen."); | |
313 return; | |
314 } | |
315 | |
316 if (state != SessionState.RUNNING) { | |
317 Logging.d(TAG, "Bytebuffer frame captured but camera is no longer runn ing."); | |
318 return; | |
319 } | |
320 | |
321 final long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.ela psedRealtime()); | |
322 | |
323 if (!firstFrameReported) { | |
324 final int startTimeMs = | |
325 (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructi onTimeNs); | |
326 camera1StartTimeMsHistogram.addSample(startTimeMs); | |
327 firstFrameReported = true; | |
328 } | |
329 | |
330 events.onByteBufferFrameCaptured(Camera1Session.this, data, captureForma t.width, | |
331 captureFormat.height, getFrameOrientation(), captureTimeNs); | |
332 camera.addCallbackBuffer(data); | |
333 } | |
334 }); | |
390 } | 335 } |
391 | 336 |
392 private int getDeviceOrientation() { | 337 private int getDeviceOrientation() { |
393 int orientation = 0; | 338 int orientation = 0; |
394 | 339 |
395 WindowManager wm = (WindowManager) applicationContext.getSystemService( | 340 WindowManager wm = (WindowManager) applicationContext.getSystemService( |
396 Context.WINDOW_SERVICE); | 341 Context.WINDOW_SERVICE); |
397 switch(wm.getDefaultDisplay().getRotation()) { | 342 switch(wm.getDefaultDisplay().getRotation()) { |
398 case Surface.ROTATION_90: | 343 case Surface.ROTATION_90: |
399 orientation = 90; | 344 orientation = 90; |
400 break; | 345 break; |
401 case Surface.ROTATION_180: | 346 case Surface.ROTATION_180: |
402 orientation = 180; | 347 orientation = 180; |
403 break; | 348 break; |
404 case Surface.ROTATION_270: | 349 case Surface.ROTATION_270: |
405 orientation = 270; | 350 orientation = 270; |
406 break; | 351 break; |
407 case Surface.ROTATION_0: | 352 case Surface.ROTATION_0: |
408 default: | 353 default: |
409 orientation = 0; | 354 orientation = 0; |
410 break; | 355 break; |
411 } | 356 } |
412 return orientation; | 357 return orientation; |
413 } | 358 } |
414 | 359 |
415 private int getFrameOrientation() { | 360 private int getFrameOrientation() { |
416 int rotation = getDeviceOrientation(); | 361 int rotation = getDeviceOrientation(); |
417 if (!isCameraFrontFacing) { | 362 if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK) { |
418 rotation = 360 - rotation; | 363 rotation = 360 - rotation; |
419 } | 364 } |
420 return (cameraOrientation + rotation) % 360; | 365 return (info.orientation + rotation) % 360; |
421 } | 366 } |
422 | 367 |
423 private void checkIsOnCameraThread() { | 368 private void checkIsOnCameraThread() { |
424 if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) { | 369 if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) { |
425 throw new IllegalStateException("Wrong thread"); | 370 throw new IllegalStateException("Wrong thread"); |
426 } | 371 } |
427 } | 372 } |
428 } | 373 } |
OLD | NEW |