Chromium Code Reviews

Side by Side Diff: talk/app/webrtc/java/android/org/webrtc/VideoCapturerAndroid.java

Issue 1343163003: Partial revert of r9936. (Closed)
Base URL: https://chromium.googlesource.com/external/webrtc@master
Patch Set: Created 5 years, 3 months ago
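
Note on the patch: every hunk below follows the same pattern. The left column is the pre-patch code, which routes logging through the org.webrtc.Logging wrapper (apparently introduced by r9936, which this change partially reverts); the right column is the patched code, which calls android.util.Log directly, adds the android.util.Log import, and drops the org.webrtc.Logging import. Log tags and message strings are unchanged. A minimal sketch of the call-site shape after the revert (the class name and TAG value here are illustrative only, not copied from this file):

    import android.util.Log;

    // Hedged sketch: shows the post-revert logging pattern, not code from this CL.
    class LoggingRevertSketch {
      // Hypothetical tag; VideoCapturerAndroid defines its own TAG constant.
      private static final String TAG = "VideoCapturerAndroid";

      void reportCameraError(int error) {
        // Same message formatting as the diffed code; only the logger class differs.
        Log.e(TAG, "Camera error: " + error);
      }
    }
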
1 /* 1 /*
2 * libjingle 2 * libjingle
3 * Copyright 2015 Google Inc. 3 * Copyright 2015 Google Inc.
4 * 4 *
5 * Redistribution and use in source and binary forms, with or without 5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions are met: 6 * modification, are permitted provided that the following conditions are met:
7 * 7 *
8 * 1. Redistributions of source code must retain the above copyright notice, 8 * 1. Redistributions of source code must retain the above copyright notice,
9 * this list of conditions and the following disclaimer. 9 * this list of conditions and the following disclaimer.
10 * 2. Redistributions in binary form must reproduce the above copyright notice, 10 * 2. Redistributions in binary form must reproduce the above copyright notice,
(...skipping 18 matching lines...)
29 29
30 import android.content.Context; 30 import android.content.Context;
31 import android.graphics.SurfaceTexture; 31 import android.graphics.SurfaceTexture;
32 import android.hardware.Camera; 32 import android.hardware.Camera;
33 import android.hardware.Camera.PreviewCallback; 33 import android.hardware.Camera.PreviewCallback;
34 import android.opengl.GLES11Ext; 34 import android.opengl.GLES11Ext;
35 import android.opengl.GLES20; 35 import android.opengl.GLES20;
36 import android.os.Handler; 36 import android.os.Handler;
37 import android.os.Looper; 37 import android.os.Looper;
38 import android.os.SystemClock; 38 import android.os.SystemClock;
39 import android.util.Log;
39 import android.view.Surface; 40 import android.view.Surface;
40 import android.view.WindowManager; 41 import android.view.WindowManager;
41 42
42 import org.json.JSONException; 43 import org.json.JSONException;
43
44 import org.webrtc.CameraEnumerationAndroid.CaptureFormat; 44 import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
45 import org.webrtc.Logging;
46 45
47 import java.io.IOException; 46 import java.io.IOException;
48 import java.nio.ByteBuffer; 47 import java.nio.ByteBuffer;
49 import java.util.ArrayList; 48 import java.util.ArrayList;
50 import java.util.HashMap; 49 import java.util.HashMap;
51 import java.util.IdentityHashMap; 50 import java.util.IdentityHashMap;
52 import java.util.List; 51 import java.util.List;
53 import java.util.Map; 52 import java.util.Map;
54 import java.util.concurrent.Exchanger; 53 import java.util.concurrent.Exchanger;
55 import java.util.concurrent.TimeUnit; 54 import java.util.concurrent.TimeUnit;
(...skipping 43 matching lines...)
99 private final Camera.ErrorCallback cameraErrorCallback = 98 private final Camera.ErrorCallback cameraErrorCallback =
100 new Camera.ErrorCallback() { 99 new Camera.ErrorCallback() {
101 @Override 100 @Override
102 public void onError(int error, Camera camera) { 101 public void onError(int error, Camera camera) {
103 String errorMessage; 102 String errorMessage;
104 if (error == android.hardware.Camera.CAMERA_ERROR_SERVER_DIED) { 103 if (error == android.hardware.Camera.CAMERA_ERROR_SERVER_DIED) {
105 errorMessage = "Camera server died!"; 104 errorMessage = "Camera server died!";
106 } else { 105 } else {
107 errorMessage = "Camera error: " + error; 106 errorMessage = "Camera error: " + error;
108 } 107 }
109 Logging.e(TAG, errorMessage); 108 Log.e(TAG, errorMessage);
110 if (errorHandler != null) { 109 if (errorHandler != null) {
111 errorHandler.onCameraError(errorMessage); 110 errorHandler.onCameraError(errorMessage);
112 } 111 }
113 } 112 }
114 }; 113 };
115 114
116 // Camera observer - monitors camera framerate and amount of available 115 // Camera observer - monitors camera framerate and amount of available
117 // camera buffers. Observer is excecuted on camera thread. 116 // camera buffers. Observer is excecuted on camera thread.
118 private final Runnable cameraObserver = new Runnable() { 117 private final Runnable cameraObserver = new Runnable() {
119 @Override 118 @Override
120 public void run() { 119 public void run() {
121 int cameraFps = (cameraFramesCount * 1000 + CAMERA_OBSERVER_PERIOD_MS / 2) 120 int cameraFps = (cameraFramesCount * 1000 + CAMERA_OBSERVER_PERIOD_MS / 2)
122 / CAMERA_OBSERVER_PERIOD_MS; 121 / CAMERA_OBSERVER_PERIOD_MS;
123 double averageCaptureBuffersCount = 0; 122 double averageCaptureBuffersCount = 0;
124 if (cameraFramesCount > 0) { 123 if (cameraFramesCount > 0) {
125 averageCaptureBuffersCount = 124 averageCaptureBuffersCount =
126 (double)captureBuffersCount / cameraFramesCount; 125 (double)captureBuffersCount / cameraFramesCount;
127 } 126 }
128 Logging.d(TAG, "Camera fps: " + cameraFps + ". CaptureBuffers: " + 127 Log.d(TAG, "Camera fps: " + cameraFps + ". CaptureBuffers: " +
129 String.format("%.1f", averageCaptureBuffersCount) + 128 String.format("%.1f", averageCaptureBuffersCount) +
130 ". Pending buffers: " + videoBuffers.pendingFramesTimeStamps()); 129 ". Pending buffers: " + videoBuffers.pendingFramesTimeStamps());
131 if (cameraFramesCount == 0) { 130 if (cameraFramesCount == 0) {
132 Logging.e(TAG, "Camera freezed."); 131 Log.e(TAG, "Camera freezed.");
133 if (errorHandler != null) { 132 if (errorHandler != null) {
134 errorHandler.onCameraError("Camera failure."); 133 errorHandler.onCameraError("Camera failure.");
135 } 134 }
136 } else { 135 } else {
137 cameraFramesCount = 0; 136 cameraFramesCount = 0;
138 captureBuffersCount = 0; 137 captureBuffersCount = 0;
139 if (cameraThreadHandler != null) { 138 if (cameraThreadHandler != null) {
140 cameraThreadHandler.postDelayed(this, CAMERA_OBSERVER_PERIOD_MS); 139 cameraThreadHandler.postDelayed(this, CAMERA_OBSERVER_PERIOD_MS);
141 } 140 }
142 } 141 }
(...skipping 19 matching lines...)
162 161
163 // Switch camera to the next valid camera id. This can only be called while 162 // Switch camera to the next valid camera id. This can only be called while
164 // the camera is running. 163 // the camera is running.
165 // Returns true on success. False if the next camera does not support the 164 // Returns true on success. False if the next camera does not support the
166 // current resolution. 165 // current resolution.
167 public synchronized boolean switchCamera(final Runnable switchDoneEvent) { 166 public synchronized boolean switchCamera(final Runnable switchDoneEvent) {
168 if (Camera.getNumberOfCameras() < 2 ) 167 if (Camera.getNumberOfCameras() < 2 )
169 return false; 168 return false;
170 169
171 if (cameraThreadHandler == null) { 170 if (cameraThreadHandler == null) {
172 Logging.e(TAG, "Calling switchCamera() for stopped camera."); 171 Log.e(TAG, "Calling switchCamera() for stopped camera.");
173 return false; 172 return false;
174 } 173 }
175 if (pendingCameraSwitch) { 174 if (pendingCameraSwitch) {
176 // Do not handle multiple camera switch request to avoid blocking 175 // Do not handle multiple camera switch request to avoid blocking
177 // camera thread by handling too many switch request from a queue. 176 // camera thread by handling too many switch request from a queue.
178 Logging.w(TAG, "Ignoring camera switch request."); 177 Log.w(TAG, "Ignoring camera switch request.");
179 return false; 178 return false;
180 } 179 }
181 180
182 pendingCameraSwitch = true; 181 pendingCameraSwitch = true;
183 id = (id + 1) % Camera.getNumberOfCameras(); 182 id = (id + 1) % Camera.getNumberOfCameras();
184 cameraThreadHandler.post(new Runnable() { 183 cameraThreadHandler.post(new Runnable() {
185 @Override public void run() { 184 @Override public void run() {
186 switchCameraOnCameraThread(switchDoneEvent); 185 switchCameraOnCameraThread(switchDoneEvent);
187 } 186 }
188 }); 187 });
189 return true; 188 return true;
190 } 189 }
191 190
192 // Requests a new output format from the video capturer. Captured frames 191 // Requests a new output format from the video capturer. Captured frames
193 // by the camera will be scaled/or dropped by the video capturer. 192 // by the camera will be scaled/or dropped by the video capturer.
194 public synchronized void onOutputFormatRequest( 193 public synchronized void onOutputFormatRequest(
195 final int width, final int height, final int fps) { 194 final int width, final int height, final int fps) {
196 if (cameraThreadHandler == null) { 195 if (cameraThreadHandler == null) {
197 Logging.e(TAG, "Calling onOutputFormatRequest() for already stopped camera."); 196 Log.e(TAG, "Calling onOutputFormatRequest() for already stopped camera.");
198 return; 197 return;
199 } 198 }
200 cameraThreadHandler.post(new Runnable() { 199 cameraThreadHandler.post(new Runnable() {
201 @Override public void run() { 200 @Override public void run() {
202 onOutputFormatRequestOnCameraThread(width, height, fps); 201 onOutputFormatRequestOnCameraThread(width, height, fps);
203 } 202 }
204 }); 203 });
205 } 204 }
206 205
207 // Reconfigure the camera to capture in a new format. This should only be called while the camera 206 // Reconfigure the camera to capture in a new format. This should only be called while the camera
208 // is running. 207 // is running.
209 public synchronized void changeCaptureFormat( 208 public synchronized void changeCaptureFormat(
210 final int width, final int height, final int framerate) { 209 final int width, final int height, final int framerate) {
211 if (cameraThreadHandler == null) { 210 if (cameraThreadHandler == null) {
212 Logging.e(TAG, "Calling changeCaptureFormat() for already stopped camera."); 211 Log.e(TAG, "Calling changeCaptureFormat() for already stopped camera.");
213 return; 212 return;
214 } 213 }
215 cameraThreadHandler.post(new Runnable() { 214 cameraThreadHandler.post(new Runnable() {
216 @Override public void run() { 215 @Override public void run() {
217 startPreviewOnCameraThread(width, height, framerate); 216 startPreviewOnCameraThread(width, height, framerate);
218 } 217 }
219 }); 218 });
220 } 219 }
221 220
222 public synchronized List<CaptureFormat> getSupportedFormats() { 221 public synchronized List<CaptureFormat> getSupportedFormats() {
223 return CameraEnumerationAndroid.getSupportedFormats(id); 222 return CameraEnumerationAndroid.getSupportedFormats(id);
224 } 223 }
225 224
226 // Return a list of timestamps for the frames that have been sent out, but not returned yet. 225 // Return a list of timestamps for the frames that have been sent out, but not returned yet.
227 // Useful for logging and testing. 226 // Useful for logging and testing.
228 public String pendingFramesTimeStamps() { 227 public String pendingFramesTimeStamps() {
229 return videoBuffers.pendingFramesTimeStamps(); 228 return videoBuffers.pendingFramesTimeStamps();
230 } 229 }
231 230
232 private VideoCapturerAndroid() { 231 private VideoCapturerAndroid() {
233 Logging.d(TAG, "VideoCapturerAndroid"); 232 Log.d(TAG, "VideoCapturerAndroid");
234 } 233 }
235 234
236 // Called by native code. 235 // Called by native code.
237 // Initializes local variables for the camera named |deviceName|. If |deviceName| is empty, the 236 // Initializes local variables for the camera named |deviceName|. If |deviceName| is empty, the
238 // first available device is used in order to be compatible with the generic VideoCapturer class. 237 // first available device is used in order to be compatible with the generic VideoCapturer class.
239 synchronized boolean init(String deviceName) { 238 synchronized boolean init(String deviceName) {
240 Logging.d(TAG, "init: " + deviceName); 239 Log.d(TAG, "init: " + deviceName);
241 if (deviceName == null) 240 if (deviceName == null)
242 return false; 241 return false;
243 242
244 boolean foundDevice = false; 243 boolean foundDevice = false;
245 if (deviceName.isEmpty()) { 244 if (deviceName.isEmpty()) {
246 this.id = 0; 245 this.id = 0;
247 foundDevice = true; 246 foundDevice = true;
248 } else { 247 } else {
249 for (int i = 0; i < Camera.getNumberOfCameras(); ++i) { 248 for (int i = 0; i < Camera.getNumberOfCameras(); ++i) {
250 String existing_device = CameraEnumerationAndroid.getDeviceName(i); 249 String existing_device = CameraEnumerationAndroid.getDeviceName(i);
(...skipping 24 matching lines...)
275 } 274 }
276 275
277 // Called by native code. Returns true if capturer is started. 276 // Called by native code. Returns true if capturer is started.
278 // 277 //
279 // Note that this actually opens the camera, and Camera callbacks run on the 278 // Note that this actually opens the camera, and Camera callbacks run on the
280 // thread that calls open(), so this is done on the CameraThread. Since the 279 // thread that calls open(), so this is done on the CameraThread. Since the
281 // API needs a synchronous success return value we wait for the result. 280 // API needs a synchronous success return value we wait for the result.
282 synchronized void startCapture( 281 synchronized void startCapture(
283 final int width, final int height, final int framerate, 282 final int width, final int height, final int framerate,
284 final Context applicationContext, final CapturerObserver frameObserver) { 283 final Context applicationContext, final CapturerObserver frameObserver) {
285 Logging.d(TAG, "startCapture requested: " + width + "x" + height 284 Log.d(TAG, "startCapture requested: " + width + "x" + height
286 + "@" + framerate); 285 + "@" + framerate);
287 if (applicationContext == null) { 286 if (applicationContext == null) {
288 throw new RuntimeException("applicationContext not set."); 287 throw new RuntimeException("applicationContext not set.");
289 } 288 }
290 if (frameObserver == null) { 289 if (frameObserver == null) {
291 throw new RuntimeException("frameObserver not set."); 290 throw new RuntimeException("frameObserver not set.");
292 } 291 }
293 if (cameraThreadHandler != null) { 292 if (cameraThreadHandler != null) {
294 throw new RuntimeException("Camera has already been started."); 293 throw new RuntimeException("Camera has already been started.");
295 } 294 }
(...skipping 10 matching lines...)
306 }); 305 });
307 } 306 }
308 307
309 private void startCaptureOnCameraThread( 308 private void startCaptureOnCameraThread(
310 int width, int height, int framerate, CapturerObserver frameObserver, 309 int width, int height, int framerate, CapturerObserver frameObserver,
311 Context applicationContext) { 310 Context applicationContext) {
312 Throwable error = null; 311 Throwable error = null;
313 this.applicationContext = applicationContext; 312 this.applicationContext = applicationContext;
314 this.frameObserver = frameObserver; 313 this.frameObserver = frameObserver;
315 try { 314 try {
316 Logging.d(TAG, "Opening camera " + id); 315 Log.d(TAG, "Opening camera " + id);
317 camera = Camera.open(id); 316 camera = Camera.open(id);
318 info = new Camera.CameraInfo(); 317 info = new Camera.CameraInfo();
319 Camera.getCameraInfo(id, info); 318 Camera.getCameraInfo(id, info);
320 // No local renderer (we only care about onPreviewFrame() buffers, not a 319 // No local renderer (we only care about onPreviewFrame() buffers, not a
321 // directly-displayed UI element). Camera won't capture without 320 // directly-displayed UI element). Camera won't capture without
322 // setPreview{Texture,Display}, so we create a SurfaceTexture and hand 321 // setPreview{Texture,Display}, so we create a SurfaceTexture and hand
323 // it over to Camera, but never listen for frame-ready callbacks, 322 // it over to Camera, but never listen for frame-ready callbacks,
324 // and never call updateTexImage on it. 323 // and never call updateTexImage on it.
325 try { 324 try {
326 cameraGlTexture = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES); 325 cameraGlTexture = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
327 cameraSurfaceTexture = new SurfaceTexture(cameraGlTexture); 326 cameraSurfaceTexture = new SurfaceTexture(cameraGlTexture);
328 cameraSurfaceTexture.setOnFrameAvailableListener(null); 327 cameraSurfaceTexture.setOnFrameAvailableListener(null);
329 328
330 camera.setPreviewTexture(cameraSurfaceTexture); 329 camera.setPreviewTexture(cameraSurfaceTexture);
331 } catch (IOException e) { 330 } catch (IOException e) {
332 Logging.e(TAG, "setPreviewTexture failed", error); 331 Log.e(TAG, "setPreviewTexture failed", error);
333 throw new RuntimeException(e); 332 throw new RuntimeException(e);
334 } 333 }
335 334
336 Logging.d(TAG, "Camera orientation: " + info.orientation + 335 Log.d(TAG, "Camera orientation: " + info.orientation +
337 " .Device orientation: " + getDeviceOrientation()); 336 " .Device orientation: " + getDeviceOrientation());
338 camera.setErrorCallback(cameraErrorCallback); 337 camera.setErrorCallback(cameraErrorCallback);
339 startPreviewOnCameraThread(width, height, framerate); 338 startPreviewOnCameraThread(width, height, framerate);
340 frameObserver.OnCapturerStarted(true); 339 frameObserver.OnCapturerStarted(true);
341 340
342 // Start camera observer. 341 // Start camera observer.
343 cameraFramesCount = 0; 342 cameraFramesCount = 0;
344 captureBuffersCount = 0; 343 captureBuffersCount = 0;
345 cameraThreadHandler.postDelayed(cameraObserver, CAMERA_OBSERVER_PERIOD_MS); 344 cameraThreadHandler.postDelayed(cameraObserver, CAMERA_OBSERVER_PERIOD_MS);
346 return; 345 return;
347 } catch (RuntimeException e) { 346 } catch (RuntimeException e) {
348 error = e; 347 error = e;
349 } 348 }
350 Logging.e(TAG, "startCapture failed", error); 349 Log.e(TAG, "startCapture failed", error);
351 stopCaptureOnCameraThread(); 350 stopCaptureOnCameraThread();
352 cameraThreadHandler = null; 351 cameraThreadHandler = null;
353 frameObserver.OnCapturerStarted(false); 352 frameObserver.OnCapturerStarted(false);
354 if (errorHandler != null) { 353 if (errorHandler != null) {
355 errorHandler.onCameraError("Camera can not be started."); 354 errorHandler.onCameraError("Camera can not be started.");
356 } 355 }
357 return; 356 return;
358 } 357 }
359 358
360 // (Re)start preview with the closest supported format to |width| x |height| @ |framerate|. 359 // (Re)start preview with the closest supported format to |width| x |height| @ |framerate|.
361 private void startPreviewOnCameraThread(int width, int height, int framerate) { 360 private void startPreviewOnCameraThread(int width, int height, int framerate) {
362 Logging.d( 361 Log.d(
363 TAG, "startPreviewOnCameraThread requested: " + width + "x" + height + "@" + framerate); 362 TAG, "startPreviewOnCameraThread requested: " + width + "x" + height + "@" + framerate);
364 if (camera == null) { 363 if (camera == null) {
365 Logging.e(TAG, "Calling startPreviewOnCameraThread on stopped camera."); 364 Log.e(TAG, "Calling startPreviewOnCameraThread on stopped camera.");
366 return; 365 return;
367 } 366 }
368 367
369 requestedWidth = width; 368 requestedWidth = width;
370 requestedHeight = height; 369 requestedHeight = height;
371 requestedFramerate = framerate; 370 requestedFramerate = framerate;
372 371
373 // Find closest supported format for |width| x |height| @ |framerate|. 372 // Find closest supported format for |width| x |height| @ |framerate|.
374 final Camera.Parameters parameters = camera.getParameters(); 373 final Camera.Parameters parameters = camera.getParameters();
375 final int[] range = CameraEnumerationAndroid.getFramerateRange(parameters, framerate * 1000); 374 final int[] range = CameraEnumerationAndroid.getFramerateRange(parameters, framerate * 1000);
376 final Camera.Size previewSize = CameraEnumerationAndroid.getClosestSupportedSize( 375 final Camera.Size previewSize = CameraEnumerationAndroid.getClosestSupportedSize(
377 parameters.getSupportedPreviewSizes(), width, height); 376 parameters.getSupportedPreviewSizes(), width, height);
378 final CaptureFormat captureFormat = new CaptureFormat( 377 final CaptureFormat captureFormat = new CaptureFormat(
379 previewSize.width, previewSize.height, 378 previewSize.width, previewSize.height,
380 range[Camera.Parameters.PREVIEW_FPS_MIN_INDEX], 379 range[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
381 range[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]); 380 range[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
382 381
383 // Check if we are already using this capture format, then we don't need to do anything. 382 // Check if we are already using this capture format, then we don't need to do anything.
384 if (captureFormat.equals(this.captureFormat)) { 383 if (captureFormat.equals(this.captureFormat)) {
385 return; 384 return;
386 } 385 }
387 386
388 // Update camera parameters. 387 // Update camera parameters.
389 Logging.d(TAG, "isVideoStabilizationSupported: " + 388 Log.d(TAG, "isVideoStabilizationSupported: " +
390 parameters.isVideoStabilizationSupported()); 389 parameters.isVideoStabilizationSupported());
391 if (parameters.isVideoStabilizationSupported()) { 390 if (parameters.isVideoStabilizationSupported()) {
392 parameters.setVideoStabilization(true); 391 parameters.setVideoStabilization(true);
393 } 392 }
394 // Note: setRecordingHint(true) actually decrease frame rate on N5. 393 // Note: setRecordingHint(true) actually decrease frame rate on N5.
395 // parameters.setRecordingHint(true); 394 // parameters.setRecordingHint(true);
396 if (captureFormat.maxFramerate > 0) { 395 if (captureFormat.maxFramerate > 0) {
397 parameters.setPreviewFpsRange(captureFormat.minFramerate, captureFormat.maxFramerate); 396 parameters.setPreviewFpsRange(captureFormat.minFramerate, captureFormat.maxFramerate);
398 } 397 }
399 parameters.setPreviewSize(captureFormat.width, captureFormat.height); 398 parameters.setPreviewSize(captureFormat.width, captureFormat.height);
400 parameters.setPreviewFormat(captureFormat.imageFormat); 399 parameters.setPreviewFormat(captureFormat.imageFormat);
401 // Picture size is for taking pictures and not for preview/video, but we need to set it anyway 400 // Picture size is for taking pictures and not for preview/video, but we need to set it anyway
402 // as a workaround for an aspect ratio problem on Nexus 7. 401 // as a workaround for an aspect ratio problem on Nexus 7.
403 final Camera.Size pictureSize = CameraEnumerationAndroid.getClosestSupportedSize( 402 final Camera.Size pictureSize = CameraEnumerationAndroid.getClosestSupportedSize(
404 parameters.getSupportedPictureSizes(), width, height); 403 parameters.getSupportedPictureSizes(), width, height);
405 parameters.setPictureSize(pictureSize.width, pictureSize.height); 404 parameters.setPictureSize(pictureSize.width, pictureSize.height);
406 405
407 // Temporarily stop preview if it's already running. 406 // Temporarily stop preview if it's already running.
408 if (this.captureFormat != null) { 407 if (this.captureFormat != null) {
409 camera.stopPreview(); 408 camera.stopPreview();
410 // Calling |setPreviewCallbackWithBuffer| with null should clear the internal camera buffer 409 // Calling |setPreviewCallbackWithBuffer| with null should clear the internal camera buffer
411 // queue, but sometimes we receive a frame with the old resolution after this call anyway. 410 // queue, but sometimes we receive a frame with the old resolution after this call anyway.
412 camera.setPreviewCallbackWithBuffer(null); 411 camera.setPreviewCallbackWithBuffer(null);
413 } 412 }
414 413
415 // (Re)start preview. 414 // (Re)start preview.
416 Logging.d(TAG, "Start capturing: " + captureFormat); 415 Log.d(TAG, "Start capturing: " + captureFormat);
417 this.captureFormat = captureFormat; 416 this.captureFormat = captureFormat;
418 camera.setParameters(parameters); 417 camera.setParameters(parameters);
419 videoBuffers.queueCameraBuffers(captureFormat.frameSize(), camera); 418 videoBuffers.queueCameraBuffers(captureFormat.frameSize(), camera);
420 camera.setPreviewCallbackWithBuffer(this); 419 camera.setPreviewCallbackWithBuffer(this);
421 camera.startPreview(); 420 camera.startPreview();
422 } 421 }
423 422
424 // Called by native code. Returns true when camera is known to be stopped. 423 // Called by native code. Returns true when camera is known to be stopped.
425 synchronized void stopCapture() throws InterruptedException { 424 synchronized void stopCapture() throws InterruptedException {
426 if (cameraThreadHandler == null) { 425 if (cameraThreadHandler == null) {
427 Logging.e(TAG, "Calling stopCapture() for already stopped camera."); 426 Log.e(TAG, "Calling stopCapture() for already stopped camera.");
428 return; 427 return;
429 } 428 }
430 Logging.d(TAG, "stopCapture"); 429 Log.d(TAG, "stopCapture");
431 cameraThreadHandler.post(new Runnable() { 430 cameraThreadHandler.post(new Runnable() {
432 @Override public void run() { 431 @Override public void run() {
433 stopCaptureOnCameraThread(); 432 stopCaptureOnCameraThread();
434 } 433 }
435 }); 434 });
436 cameraThread.join(); 435 cameraThread.join();
437 cameraThreadHandler = null; 436 cameraThreadHandler = null;
438 Logging.d(TAG, "stopCapture done"); 437 Log.d(TAG, "stopCapture done");
439 } 438 }
440 439
441 private void stopCaptureOnCameraThread() { 440 private void stopCaptureOnCameraThread() {
442 doStopCaptureOnCameraThread(); 441 doStopCaptureOnCameraThread();
443 Looper.myLooper().quit(); 442 Looper.myLooper().quit();
444 return; 443 return;
445 } 444 }
446 445
447 private void doStopCaptureOnCameraThread() { 446 private void doStopCaptureOnCameraThread() {
448 Logging.d(TAG, "stopCaptureOnCameraThread"); 447 Log.d(TAG, "stopCaptureOnCameraThread");
449 if (camera == null) { 448 if (camera == null) {
450 return; 449 return;
451 } 450 }
452 try { 451 try {
453 cameraThreadHandler.removeCallbacks(cameraObserver); 452 cameraThreadHandler.removeCallbacks(cameraObserver);
454 Logging.d(TAG, "Stop preview."); 453 Log.d(TAG, "Stop preview.");
455 camera.stopPreview(); 454 camera.stopPreview();
456 camera.setPreviewCallbackWithBuffer(null); 455 camera.setPreviewCallbackWithBuffer(null);
457 videoBuffers.stopReturnBuffersToCamera(); 456 videoBuffers.stopReturnBuffersToCamera();
458 captureFormat = null; 457 captureFormat = null;
459 458
460 camera.setPreviewTexture(null); 459 camera.setPreviewTexture(null);
461 cameraSurfaceTexture = null; 460 cameraSurfaceTexture = null;
462 if (cameraGlTexture != 0) { 461 if (cameraGlTexture != 0) {
463 GLES20.glDeleteTextures(1, new int[] {cameraGlTexture}, 0); 462 GLES20.glDeleteTextures(1, new int[] {cameraGlTexture}, 0);
464 cameraGlTexture = 0; 463 cameraGlTexture = 0;
465 } 464 }
466 Logging.d(TAG, "Release camera."); 465 Log.d(TAG, "Release camera.");
467 camera.release(); 466 camera.release();
468 camera = null; 467 camera = null;
469 } catch (IOException e) { 468 } catch (IOException e) {
470 Logging.e(TAG, "Failed to stop camera", e); 469 Log.e(TAG, "Failed to stop camera", e);
471 } 470 }
472 } 471 }
473 472
474 private void switchCameraOnCameraThread(Runnable switchDoneEvent) { 473 private void switchCameraOnCameraThread(Runnable switchDoneEvent) {
475 Logging.d(TAG, "switchCameraOnCameraThread"); 474 Log.d(TAG, "switchCameraOnCameraThread");
476 475
477 doStopCaptureOnCameraThread(); 476 doStopCaptureOnCameraThread();
478 startCaptureOnCameraThread(requestedWidth, requestedHeight, requestedFramerate, frameObserver, 477 startCaptureOnCameraThread(requestedWidth, requestedHeight, requestedFramerate, frameObserver,
479 applicationContext); 478 applicationContext);
480 pendingCameraSwitch = false; 479 pendingCameraSwitch = false;
481 Logging.d(TAG, "switchCameraOnCameraThread done"); 480 Log.d(TAG, "switchCameraOnCameraThread done");
482 if (switchDoneEvent != null) { 481 if (switchDoneEvent != null) {
483 switchDoneEvent.run(); 482 switchDoneEvent.run();
484 } 483 }
485 } 484 }
486 485
487 private void onOutputFormatRequestOnCameraThread( 486 private void onOutputFormatRequestOnCameraThread(
488 int width, int height, int fps) { 487 int width, int height, int fps) {
489 if (camera == null) { 488 if (camera == null) {
490 return; 489 return;
491 } 490 }
492 Logging.d(TAG, "onOutputFormatRequestOnCameraThread: " + width + "x" + height + 491 Log.d(TAG, "onOutputFormatRequestOnCameraThread: " + width + "x" + height +
493 "@" + fps); 492 "@" + fps);
494 frameObserver.OnOutputFormatRequest(width, height, fps); 493 frameObserver.OnOutputFormatRequest(width, height, fps);
495 } 494 }
496 495
497 void returnBuffer(long timeStamp) { 496 void returnBuffer(long timeStamp) {
498 videoBuffers.returnBuffer(timeStamp); 497 videoBuffers.returnBuffer(timeStamp);
499 } 498 }
500 499
501 private int getDeviceOrientation() { 500 private int getDeviceOrientation() {
502 int orientation = 0; 501 int orientation = 0;
(...skipping 41 matching lines...)
544 } 543 }
545 rotation = (info.orientation + rotation) % 360; 544 rotation = (info.orientation + rotation) % 360;
546 // Mark the frame owning |data| as used. 545 // Mark the frame owning |data| as used.
547 // Note that since data is directBuffer, 546 // Note that since data is directBuffer,
548 // data.length >= videoBuffers.frameSize. 547 // data.length >= videoBuffers.frameSize.
549 if (videoBuffers.reserveByteBuffer(data, captureTimeNs)) { 548 if (videoBuffers.reserveByteBuffer(data, captureTimeNs)) {
550 cameraFramesCount++; 549 cameraFramesCount++;
551 frameObserver.OnFrameCaptured(data, videoBuffers.frameSize, captureFormat. width, 550 frameObserver.OnFrameCaptured(data, videoBuffers.frameSize, captureFormat. width,
552 captureFormat.height, rotation, captureTimeNs); 551 captureFormat.height, rotation, captureTimeNs);
553 } else { 552 } else {
554 Logging.w(TAG, "reserveByteBuffer failed - dropping frame."); 553 Log.w(TAG, "reserveByteBuffer failed - dropping frame.");
555 } 554 }
556 } 555 }
557 556
558 // runCameraThreadUntilIdle make sure all posted messages to the cameraThread 557 // runCameraThreadUntilIdle make sure all posted messages to the cameraThread
559 // is processed before returning. It does that by itself posting a message to 558 // is processed before returning. It does that by itself posting a message to
560 // to the message queue and waits until is has been processed. 559 // to the message queue and waits until is has been processed.
561 // It is used in tests. 560 // It is used in tests.
562 void runCameraThreadUntilIdle() { 561 void runCameraThreadUntilIdle() {
563 if (cameraThreadHandler == null) 562 if (cameraThreadHandler == null)
564 return; 563 return;
(...skipping 42 matching lines...)
607 synchronized void queueCameraBuffers(int frameSize, Camera camera) { 606 synchronized void queueCameraBuffers(int frameSize, Camera camera) {
608 this.camera = camera; 607 this.camera = camera;
609 this.frameSize = frameSize; 608 this.frameSize = frameSize;
610 609
611 queuedBuffers.clear(); 610 queuedBuffers.clear();
612 for (int i = 0; i < numCaptureBuffers; ++i) { 611 for (int i = 0; i < numCaptureBuffers; ++i) {
613 final ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize); 612 final ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize);
614 camera.addCallbackBuffer(buffer.array()); 613 camera.addCallbackBuffer(buffer.array());
615 queuedBuffers.put(buffer.array(), buffer); 614 queuedBuffers.put(buffer.array(), buffer);
616 } 615 }
617 Logging.d(TAG, "queueCameraBuffers enqueued " + numCaptureBuffers 616 Log.d(TAG, "queueCameraBuffers enqueued " + numCaptureBuffers
618 + " buffers of size " + frameSize + "."); 617 + " buffers of size " + frameSize + ".");
619 } 618 }
620 619
621 synchronized String pendingFramesTimeStamps() { 620 synchronized String pendingFramesTimeStamps() {
622 List<Long> timeStampsMs = new ArrayList<Long>(); 621 List<Long> timeStampsMs = new ArrayList<Long>();
623 for (Long timeStampNs : pendingBuffers.keySet()) { 622 for (Long timeStampNs : pendingBuffers.keySet()) {
624 timeStampsMs.add(TimeUnit.NANOSECONDS.toMillis(timeStampNs)); 623 timeStampsMs.add(TimeUnit.NANOSECONDS.toMillis(timeStampNs));
625 } 624 }
626 return timeStampsMs.toString(); 625 return timeStampsMs.toString();
627 } 626 }
628 627
629 synchronized void stopReturnBuffersToCamera() { 628 synchronized void stopReturnBuffersToCamera() {
630 this.camera = null; 629 this.camera = null;
631 queuedBuffers.clear(); 630 queuedBuffers.clear();
632 // Frames in |pendingBuffers| need to be kept alive until they are returned. 631 // Frames in |pendingBuffers| need to be kept alive until they are returned.
633 Logging.d(TAG, "stopReturnBuffersToCamera called." 632 Log.d(TAG, "stopReturnBuffersToCamera called."
634 + (pendingBuffers.isEmpty() ? 633 + (pendingBuffers.isEmpty() ?
635 " All buffers have been returned." 634 " All buffers have been returned."
636 : " Pending buffers: " + pendingFramesTimeStamps() + ".")); 635 : " Pending buffers: " + pendingFramesTimeStamps() + "."));
637 } 636 }
638 637
639 synchronized boolean reserveByteBuffer(byte[] data, long timeStamp) { 638 synchronized boolean reserveByteBuffer(byte[] data, long timeStamp) {
640 final ByteBuffer buffer = queuedBuffers.remove(data); 639 final ByteBuffer buffer = queuedBuffers.remove(data);
641 if (buffer == null) { 640 if (buffer == null) {
642 // Frames might be posted to |onPreviewFrame| with the previous format while changing 641 // Frames might be posted to |onPreviewFrame| with the previous format while changing
643 // capture format in |startPreviewOnCameraThread|. Drop these old frames. 642 // capture format in |startPreviewOnCameraThread|. Drop these old frames.
644 Logging.w(TAG, "Received callback buffer from previous configuration with length: " 643 Log.w(TAG, "Received callback buffer from previous configuration with length: "
645 + (data == null ? "null" : data.length)); 644 + (data == null ? "null" : data.length));
646 return false; 645 return false;
647 } 646 }
648 if (buffer.capacity() != frameSize) { 647 if (buffer.capacity() != frameSize) {
649 throw new IllegalStateException("Callback buffer has unexpected frame size"); 648 throw new IllegalStateException("Callback buffer has unexpected frame size");
650 } 649 }
651 if (pendingBuffers.containsKey(timeStamp)) { 650 if (pendingBuffers.containsKey(timeStamp)) {
652 Logging.e(TAG, "Timestamp already present in pending buffers - they need to be unique"); 651 Log.e(TAG, "Timestamp already present in pending buffers - they need to be unique");
653 return false; 652 return false;
654 } 653 }
655 pendingBuffers.put(timeStamp, buffer); 654 pendingBuffers.put(timeStamp, buffer);
656 if (queuedBuffers.isEmpty()) { 655 if (queuedBuffers.isEmpty()) {
657 Logging.v(TAG, "Camera is running out of capture buffers." 656 Log.v(TAG, "Camera is running out of capture buffers."
658 + " Pending buffers: " + pendingFramesTimeStamps()); 657 + " Pending buffers: " + pendingFramesTimeStamps());
659 } 658 }
660 return true; 659 return true;
661 } 660 }
662 661
663 synchronized void returnBuffer(long timeStamp) { 662 synchronized void returnBuffer(long timeStamp) {
664 final ByteBuffer returnedFrame = pendingBuffers.remove(timeStamp); 663 final ByteBuffer returnedFrame = pendingBuffers.remove(timeStamp);
665 if (returnedFrame == null) { 664 if (returnedFrame == null) {
666 throw new RuntimeException("unknown data buffer with time stamp " 665 throw new RuntimeException("unknown data buffer with time stamp "
667 + timeStamp + "returned?!?"); 666 + timeStamp + "returned?!?");
668 } 667 }
669 668
670 if (camera != null && returnedFrame.capacity() == frameSize) { 669 if (camera != null && returnedFrame.capacity() == frameSize) {
671 camera.addCallbackBuffer(returnedFrame.array()); 670 camera.addCallbackBuffer(returnedFrame.array());
672 if (queuedBuffers.isEmpty()) { 671 if (queuedBuffers.isEmpty()) {
673 Logging.v(TAG, "Frame returned when camera is running out of capture" 672 Log.v(TAG, "Frame returned when camera is running out of capture"
674 + " buffers for TS " + TimeUnit.NANOSECONDS.toMillis(timeStamp)); 673 + " buffers for TS " + TimeUnit.NANOSECONDS.toMillis(timeStamp));
675 } 674 }
676 queuedBuffers.put(returnedFrame.array(), returnedFrame); 675 queuedBuffers.put(returnedFrame.array(), returnedFrame);
677 return; 676 return;
678 } 677 }
679 678
680 if (returnedFrame.capacity() != frameSize) { 679 if (returnedFrame.capacity() != frameSize) {
681 Logging.d(TAG, "returnBuffer with time stamp " 680 Log.d(TAG, "returnBuffer with time stamp "
682 + TimeUnit.NANOSECONDS.toMillis(timeStamp) 681 + TimeUnit.NANOSECONDS.toMillis(timeStamp)
683 + " called with old frame size, " + returnedFrame.capacity() + "."); 682 + " called with old frame size, " + returnedFrame.capacity() + ".");
684 // Since this frame has the wrong size, don't requeue it. Frames with the correct size are 683 // Since this frame has the wrong size, don't requeue it. Frames with the correct size are
685 // created in queueCameraBuffers so this must be an old buffer. 684 // created in queueCameraBuffers so this must be an old buffer.
686 return; 685 return;
687 } 686 }
688 687
689 Logging.d(TAG, "returnBuffer with time stamp " 688 Log.d(TAG, "returnBuffer with time stamp "
690 + TimeUnit.NANOSECONDS.toMillis(timeStamp) 689 + TimeUnit.NANOSECONDS.toMillis(timeStamp)
691 + " called after camera has been stopped."); 690 + " called after camera has been stopped.");
692 } 691 }
693 } 692 }
694 693
695 // Interface used for providing callbacks to an observer. 694 // Interface used for providing callbacks to an observer.
696 interface CapturerObserver { 695 interface CapturerObserver {
697 // Notify if the camera have been started successfully or not. 696 // Notify if the camera have been started successfully or not.
698 // Called on a Java thread owned by VideoCapturerAndroid. 697 // Called on a Java thread owned by VideoCapturerAndroid.
699 abstract void OnCapturerStarted(boolean success); 698 abstract void OnCapturerStarted(boolean success);
(...skipping 35 matching lines...)
735 } 734 }
736 735
737 private native void nativeCapturerStarted(long nativeCapturer, 736 private native void nativeCapturerStarted(long nativeCapturer,
738 boolean success); 737 boolean success);
739 private native void nativeOnFrameCaptured(long nativeCapturer, 738 private native void nativeOnFrameCaptured(long nativeCapturer,
740 byte[] data, int length, int width, int height, int rotation, long timeStamp); 739 byte[] data, int length, int width, int height, int rotation, long timeStamp);
741 private native void nativeOnOutputFormatRequest(long nativeCapturer, 740 private native void nativeOnOutputFormatRequest(long nativeCapturer,
742 int width, int height, int fps); 741 int width, int height, int fps);
743 } 742 }
744 } 743 }