/*
 * Copyright 2016 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import org.webrtc.CameraEnumerationAndroid.CaptureFormat;

import android.content.Context;
import android.os.Handler;
import android.os.SystemClock;
import android.view.Surface;
import android.view.WindowManager;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

@SuppressWarnings("deprecation")
public class Camera1Session implements CameraSession {
  private static final String TAG = "Camera1Session";
  private static final int NUMBER_OF_CAPTURE_BUFFERS = 3;

  private enum SessionState { RUNNING, STOPPED }

  private final Handler cameraThreadHandler;
  private final CameraVideoCapturer.CameraEventsHandler eventsHandler;
  private final boolean captureToTexture;
  private final Context applicationContext;
  private final CameraVideoCapturer.CapturerObserver capturerObserver;
  private final SurfaceTextureHelper surfaceTextureHelper;
  private final int cameraId;
  private final int width;
  private final int height;
  private final int framerate;
  private final android.hardware.Camera camera;
  private final android.hardware.Camera.CameraInfo info;
  private final CaptureFormat captureFormat;
  private final CameraVideoCapturer.CameraStatistics cameraStatistics;

  private SessionState state;
  private boolean firstFrameReported = false;

  public static void create(
      final CreateSessionCallback callback,
      final CameraVideoCapturer.CameraEventsHandler eventsHandler, final boolean captureToTexture,
      final Context applicationContext, final CameraVideoCapturer.CapturerObserver capturerObserver,
      final SurfaceTextureHelper surfaceTextureHelper,
      final int cameraId, final int width, final int height, final int framerate) {
    Logging.d(TAG, "Open camera " + cameraId);
    eventsHandler.onCameraOpening(cameraId);

    final android.hardware.Camera camera;
    try {
      camera = android.hardware.Camera.open(cameraId);
    } catch (RuntimeException e) {
      callback.onFailure(e.getMessage());
      capturerObserver.onCapturerStarted(false);
[magjed_webrtc, 2016/08/11 12:57:13] I guess this call could be moved into CameraCaptur…
[sakal, 2016/08/15 09:11:58] Yes, it is very arbitrary. The reason I would like…
      return;
    }

    try {
      camera.setPreviewTexture(surfaceTextureHelper.getSurfaceTexture());
    } catch (IOException e) {
      camera.release();
      callback.onFailure(e.getMessage());
      capturerObserver.onCapturerStarted(false);
      return;
    }

    final android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
    android.hardware.Camera.getCameraInfo(cameraId, info);

    camera.setErrorCallback(new android.hardware.Camera.ErrorCallback() {
      @Override
      public void onError(int error, android.hardware.Camera camera) {
        String errorMessage;
        if (error == android.hardware.Camera.CAMERA_ERROR_SERVER_DIED) {
          errorMessage = "Camera server died!";
        } else {
          errorMessage = "Camera error: " + error;
        }
        Logging.e(TAG, errorMessage);
        eventsHandler.onCameraError(errorMessage);
      }
    });

    final android.hardware.Camera.Parameters parameters = camera.getParameters();
    final CaptureFormat captureFormat = findClosestCaptureFormat(
        parameters, width, height, framerate);
    final Size pictureSize = findClosestPictureSize(parameters, width, height);

    updateCameraParameters(camera, parameters, captureFormat, pictureSize, captureToTexture);

    // Initialize the capture buffers.
    if (!captureToTexture) {
      final int frameSize = captureFormat.frameSize();
      for (int i = 0; i < NUMBER_OF_CAPTURE_BUFFERS; ++i) {
        final ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize);
        camera.addCallbackBuffer(buffer.array());
      }
    }
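
    // Note: calling array() on a direct ByteBuffer relies on Android-specific
    // behavior; on Android, direct buffers are backed by an accessible byte[],
    // which is what addCallbackBuffer() needs. Several buffers are used,
    // presumably so the camera can fill the next frame while an earlier one is
    // still being delivered.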

    // Calculate orientation manually and send it as CVO (Coordination of
    // Video Orientation) instead.
    camera.setDisplayOrientation(0 /* degrees */);

    callback.onDone(new Camera1Session(
        eventsHandler, captureToTexture, applicationContext,
        capturerObserver, surfaceTextureHelper, cameraId, width, height, framerate,
        camera, info, captureFormat));
  }
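
  // Illustrative usage sketch, not part of this file: create() is asynchronous
  // and hands the session back through the callback once the camera is open
  // and configured. The caller-side objects below are assumed, and
  // CreateSessionCallback is assumed to declare the onDone/onFailure methods
  // invoked by create() above.
  //
  //   Camera1Session.create(new CreateSessionCallback() {
  //     @Override
  //     public void onDone(CameraSession session) {
  //       // Keep the session; call session.stop() to tear it down.
  //     }
  //     @Override
  //     public void onFailure(String error) {
  //       // Opening or configuring the camera failed.
  //     }
  //   }, eventsHandler, /* captureToTexture */ true, applicationContext,
  //       capturerObserver, surfaceTextureHelper,
  //       /* cameraId */ 0, /* width */ 1280, /* height */ 720, /* framerate */ 30);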

  private static void updateCameraParameters(android.hardware.Camera camera,
      android.hardware.Camera.Parameters parameters, CaptureFormat captureFormat,
      Size pictureSize, boolean captureToTexture) {
    final List<String> focusModes = parameters.getSupportedFocusModes();

    parameters.setPreviewFpsRange(captureFormat.framerate.min, captureFormat.framerate.max);
    parameters.setPreviewSize(captureFormat.width, captureFormat.height);
    parameters.setPictureSize(pictureSize.width, pictureSize.height);
    if (!captureToTexture) {
      parameters.setPreviewFormat(captureFormat.imageFormat);
    }

    if (parameters.isVideoStabilizationSupported()) {
      parameters.setVideoStabilization(true);
    }
    if (focusModes.contains(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
      parameters.setFocusMode(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
    }
    camera.setParameters(parameters);
  }
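
  // Note on units: setPreviewFpsRange() takes values scaled by 1000 (e.g.
  // 30000 for 30 fps), and CaptureFormat.FramerateRange is assumed to store
  // min/max in the same fps*1000 scale, so the values are passed through
  // unchanged.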

  private static CaptureFormat findClosestCaptureFormat(
      android.hardware.Camera.Parameters parameters, int width, int height, int framerate) {
    // Find closest supported format for |width| x |height| @ |framerate|.
    final List<CaptureFormat.FramerateRange> supportedFramerates =
        Camera1Enumerator.convertFramerates(parameters.getSupportedPreviewFpsRange());
    Logging.d(TAG, "Available fps ranges: " + supportedFramerates);

    final CaptureFormat.FramerateRange fpsRange =
        CameraEnumerationAndroid.getClosestSupportedFramerateRange(
            supportedFramerates, framerate);

    final Size previewSize = CameraEnumerationAndroid.getClosestSupportedSize(
        Camera1Enumerator.convertSizes(parameters.getSupportedPreviewSizes()),
        width, height);

    return new CaptureFormat(previewSize.width, previewSize.height, fpsRange);
  }

  private static Size findClosestPictureSize(android.hardware.Camera.Parameters parameters,
      int width, int height) {
    return CameraEnumerationAndroid.getClosestSupportedSize(
        Camera1Enumerator.convertSizes(parameters.getSupportedPictureSizes()),
        width, height);
  }

  private Camera1Session(
      CameraVideoCapturer.CameraEventsHandler eventsHandler, boolean captureToTexture,
      Context applicationContext, CameraVideoCapturer.CapturerObserver capturerObserver,
      SurfaceTextureHelper surfaceTextureHelper,
      int cameraId, int width, int height, int framerate,
      android.hardware.Camera camera, android.hardware.Camera.CameraInfo info,
      CaptureFormat captureFormat) {
    Logging.d(TAG, "Create new camera1 session on camera " + cameraId);

    this.cameraThreadHandler = new Handler();
    this.eventsHandler = eventsHandler;
    this.captureToTexture = captureToTexture;
    this.applicationContext = applicationContext;
    this.capturerObserver = capturerObserver;
    this.surfaceTextureHelper = surfaceTextureHelper;
    this.cameraId = cameraId;
    this.width = width;
    this.height = height;
    this.framerate = framerate;
    this.camera = camera;
    this.info = info;
    this.captureFormat = captureFormat;
    cameraStatistics = new CameraVideoCapturer.CameraStatistics(
        surfaceTextureHelper, eventsHandler);

    startCapturing();
  }

  @Override
  public void stop() {
    Logging.d(TAG, "Stop camera1 session on camera " + cameraId);
    final CountDownLatch stopLatch = new CountDownLatch(1);

    cameraThreadHandler.post(new Runnable() {
      @Override
      public void run() {
        if (state != SessionState.STOPPED) {
          state = SessionState.STOPPED;
          capturerObserver.onCapturerStopped();
          // Release the latch before stopInternal() so the calling thread is
          // not held up by slow (or deadlocking) camera driver calls.
          stopLatch.countDown();
          stopInternal();
        } else {
          // Already stopped; release the latch anyway so a repeated stop()
          // call cannot block forever.
          stopLatch.countDown();
        }
      }
    });

    ThreadUtils.awaitUninterruptibly(stopLatch);
  }

  private void startCapturing() {
    Logging.d(TAG, "Start capturing");
    checkIsOnCameraThread();

    state = SessionState.RUNNING;
    capturerObserver.onCapturerStarted(true);

    if (captureToTexture) {
      listenForTextureFrames();
    } else {
      listenForBytebufferFrames();
    }
    try {
      camera.startPreview();
    } catch (RuntimeException e) {
      eventsHandler.onCameraError(e.getMessage());
      state = SessionState.STOPPED;
      stopInternal();
    }
  }

  private void stopInternal() {
    Logging.d(TAG, "Stop internal");
    checkIsOnCameraThread();

    surfaceTextureHelper.stopListening();
    cameraStatistics.release();

    // Note: stopPreview or other driver code might deadlock. Deadlock in
    // android.hardware.Camera._stopPreview(Native Method) has been observed on
    // Nexus 5 (hammerhead), OS version LMY48I.
[magjed_webrtc, 2016/08/11 12:57:13] If the app calls startCapture and stopCapture and…
[sakal, 2016/08/15 09:11:57] I'd rather handle this issue in a separate CL.
    camera.stopPreview();
    camera.release();
    eventsHandler.onCameraClosed();

    Logging.d(TAG, "Stop done");
  }

  private void listenForTextureFrames() {
    surfaceTextureHelper.startListening(new SurfaceTextureHelper.OnTextureFrameAvailableListener() {
      @Override
      public void onTextureFrameAvailable(
          int oesTextureId, float[] transformMatrix, long timestampNs) {
        checkIsOnCameraThread();

        if (state != SessionState.RUNNING) {
          Logging.d(TAG, "Texture frame captured but camera is no longer running.");
          // Return the unused frame so the SurfaceTextureHelper does not stall
          // waiting for it.
          surfaceTextureHelper.returnTextureFrame();
          return;
        }

        if (!firstFrameReported) {
          eventsHandler.onFirstFrameAvailable();
          firstFrameReported = true;
        }

        int rotation = getFrameOrientation();
        if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) {
          // Undo the mirror that the OS "helps" us with.
          // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
          transformMatrix = RendererCommon.multiplyMatrices(
              transformMatrix, RendererCommon.horizontalFlipMatrix());
        }
        cameraStatistics.addFrame();
        capturerObserver.onTextureFrameCaptured(captureFormat.width, captureFormat.height,
            oesTextureId, transformMatrix, rotation, timestampNs);
      }
    });
  }
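
  // Frame ownership (assumption based on how SurfaceTextureHelper is used
  // here): texture frames are delivered one at a time, and the receiver of
  // onTextureFrameCaptured() is expected to hand each frame back via
  // surfaceTextureHelper.returnTextureFrame() when done, after which the
  // helper can deliver the next frame.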

  private void listenForBytebufferFrames() {
    camera.setPreviewCallbackWithBuffer(new android.hardware.Camera.PreviewCallback() {
      @Override
      public void onPreviewFrame(byte[] data, android.hardware.Camera callbackCamera) {
        checkIsOnCameraThread();

        if (callbackCamera != camera) {
          Logging.e(TAG, "Callback from a different camera. This should never happen.");
          return;
        }

        if (state != SessionState.RUNNING) {
          Logging.d(TAG, "Bytebuffer frame captured but camera is no longer running.");
          return;
        }

        final long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());

        if (!firstFrameReported) {
          eventsHandler.onFirstFrameAvailable();
          firstFrameReported = true;
        }

        cameraStatistics.addFrame();
        capturerObserver.onByteBufferFrameCaptured(data, captureFormat.width, captureFormat.height,
            getFrameOrientation(), captureTimeNs);
        camera.addCallbackBuffer(data);
      }
    });
  }
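
  // Buffer recycling: the byte[] passed to onPreviewFrame() is one of the
  // NUMBER_OF_CAPTURE_BUFFERS buffers registered in create(). Re-adding it
  // via addCallbackBuffer() right after the capture callback assumes that
  // onByteBufferFrameCaptured() is done with the data (e.g. has copied it)
  // by the time it returns.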

  private int getDeviceOrientation() {
    int orientation = 0;

    WindowManager wm = (WindowManager) applicationContext.getSystemService(
        Context.WINDOW_SERVICE);
    switch (wm.getDefaultDisplay().getRotation()) {
      case Surface.ROTATION_90:
        orientation = 90;
        break;
      case Surface.ROTATION_180:
        orientation = 180;
        break;
      case Surface.ROTATION_270:
        orientation = 270;
        break;
      case Surface.ROTATION_0:
      default:
        orientation = 0;
        break;
    }
    return orientation;
  }

  private int getFrameOrientation() {
    int rotation = getDeviceOrientation();
    if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK) {
      rotation = 360 - rotation;
    }
    return (info.orientation + rotation) % 360;
  }
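
  // Worked example of the math above: for a back-facing sensor mounted at
  // info.orientation == 90 with the device rotated to ROTATION_90
  // (getDeviceOrientation() == 90), rotation becomes 360 - 90 == 270 and the
  // frame orientation is (90 + 270) % 360 == 0, i.e. no extra rotation needs
  // to be signaled. For a front-facing camera the device rotation is used
  // as-is.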

  private void checkIsOnCameraThread() {
    if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
      throw new IllegalStateException("Wrong thread");
    }
  }
}