Chromium Code Reviews

Unified Diff: talk/app/webrtc/java/android/org/webrtc/VideoCapturerAndroid.java

Issue 1338033003: Log to webrtc logging stream from java code. (Closed) Base URL: https://chromium.googlesource.com/external/webrtc.git@master
Patch Set: Created 5 years, 3 months ago
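
This CL routes Java-side log messages into the WebRTC logging stream by replacing android.util.Log calls with org.webrtc.Logging. As a minimal sketch of the call pattern being adopted: the Logging.d/Logging.e signatures below mirror the calls visible in this diff, while the surrounding class, method names, and TAG value are hypothetical illustration only.

import org.webrtc.Logging;

class LoggingExample {
  // Hypothetical tag; the real file uses its own TAG constant.
  private static final String TAG = "LoggingExample";

  void onCameraOpened(int id) {
    // Same (tag, message) shape as android.util.Log.d, but the message is
    // forwarded to the WebRTC logging stream.
    Logging.d(TAG, "Opening camera " + id);
  }

  void onCameraFailure(RuntimeException e) {
    // Overload taking a Throwable, mirroring Log.e(tag, msg, throwable).
    Logging.e(TAG, "startCapture failed", e);
  }
}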
/*
 * libjingle
 * Copyright 2015 Google Inc.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 *    this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright notice,
(...skipping 18 matching lines...)

import android.content.Context;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.os.Handler;
import android.os.Looper;
import android.os.SystemClock;
-import android.util.Log;
import android.view.Surface;
import android.view.WindowManager;

import org.json.JSONException;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+import org.webrtc.Logging;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Exchanger;
import java.util.concurrent.TimeUnit;
(...skipping 43 matching lines...)
  private final Camera.ErrorCallback cameraErrorCallback =
      new Camera.ErrorCallback() {
    @Override
    public void onError(int error, Camera camera) {
      String errorMessage;
      if (error == android.hardware.Camera.CAMERA_ERROR_SERVER_DIED) {
        errorMessage = "Camera server died!";
      } else {
        errorMessage = "Camera error: " + error;
      }
-      Log.e(TAG, errorMessage);
+      Logging.e(TAG, errorMessage);
      if (errorHandler != null) {
        errorHandler.onCameraError(errorMessage);
      }
    }
  };

  // Camera observer - monitors camera framerate and amount of available
  // camera buffers. Observer is excecuted on camera thread.
  private final Runnable cameraObserver = new Runnable() {
    @Override
    public void run() {
      int cameraFps = (cameraFramesCount * 1000 + CAMERA_OBSERVER_PERIOD_MS / 2)
          / CAMERA_OBSERVER_PERIOD_MS;
      double averageCaptureBuffersCount = 0;
      if (cameraFramesCount > 0) {
        averageCaptureBuffersCount =
            (double)captureBuffersCount / cameraFramesCount;
      }
-      Log.d(TAG, "Camera fps: " + cameraFps + ". CaptureBuffers: " +
+      Logging.d(TAG, "Camera fps: " + cameraFps + ". CaptureBuffers: " +
          String.format("%.1f", averageCaptureBuffersCount) +
          ". Pending buffers: " + videoBuffers.pendingFramesTimeStamps());
      if (cameraFramesCount == 0) {
-        Log.e(TAG, "Camera freezed.");
+        Logging.e(TAG, "Camera freezed.");
        if (errorHandler != null) {
          errorHandler.onCameraError("Camera failure.");
        }
      } else {
        cameraFramesCount = 0;
        captureBuffersCount = 0;
        if (cameraThreadHandler != null) {
          cameraThreadHandler.postDelayed(this, CAMERA_OBSERVER_PERIOD_MS);
        }
      }
(...skipping 19 matching lines...)

  // Switch camera to the next valid camera id. This can only be called while
  // the camera is running.
  // Returns true on success. False if the next camera does not support the
  // current resolution.
  public synchronized boolean switchCamera(final Runnable switchDoneEvent) {
    if (Camera.getNumberOfCameras() < 2 )
      return false;

    if (cameraThreadHandler == null) {
-      Log.e(TAG, "Calling switchCamera() for stopped camera.");
+      Logging.e(TAG, "Calling switchCamera() for stopped camera.");
      return false;
    }
    if (pendingCameraSwitch) {
      // Do not handle multiple camera switch request to avoid blocking
      // camera thread by handling too many switch request from a queue.
-      Log.w(TAG, "Ignoring camera switch request.");
+      Logging.w(TAG, "Ignoring camera switch request.");
      return false;
    }

    pendingCameraSwitch = true;
    id = (id + 1) % Camera.getNumberOfCameras();
    cameraThreadHandler.post(new Runnable() {
      @Override public void run() {
        switchCameraOnCameraThread(switchDoneEvent);
      }
    });
    return true;
  }

  // Requests a new output format from the video capturer. Captured frames
  // by the camera will be scaled/or dropped by the video capturer.
  public synchronized void onOutputFormatRequest(
      final int width, final int height, final int fps) {
    if (cameraThreadHandler == null) {
-      Log.e(TAG, "Calling onOutputFormatRequest() for already stopped camera.");
+      Logging.e(TAG, "Calling onOutputFormatRequest() for already stopped camera.");
      return;
    }
    cameraThreadHandler.post(new Runnable() {
      @Override public void run() {
        onOutputFormatRequestOnCameraThread(width, height, fps);
      }
    });
  }

  // Reconfigure the camera to capture in a new format. This should only be called while the camera
  // is running.
  public synchronized void changeCaptureFormat(
      final int width, final int height, final int framerate) {
    if (cameraThreadHandler == null) {
-      Log.e(TAG, "Calling changeCaptureFormat() for already stopped camera.");
+      Logging.e(TAG, "Calling changeCaptureFormat() for already stopped camera.");
      return;
    }
    cameraThreadHandler.post(new Runnable() {
      @Override public void run() {
        startPreviewOnCameraThread(width, height, framerate);
      }
    });
  }

  public synchronized List<CaptureFormat> getSupportedFormats() {
    return CameraEnumerationAndroid.getSupportedFormats(id);
  }

  // Return a list of timestamps for the frames that have been sent out, but not returned yet.
  // Useful for logging and testing.
  public String pendingFramesTimeStamps() {
    return videoBuffers.pendingFramesTimeStamps();
  }

  private VideoCapturerAndroid() {
-    Log.d(TAG, "VideoCapturerAndroid");
+    Logging.d(TAG, "VideoCapturerAndroid");
  }

  // Called by native code.
  // Initializes local variables for the camera named |deviceName|. If |deviceName| is empty, the
  // first available device is used in order to be compatible with the generic VideoCapturer class.
  synchronized boolean init(String deviceName) {
-    Log.d(TAG, "init: " + deviceName);
+    Logging.d(TAG, "init: " + deviceName);
    if (deviceName == null)
      return false;

    boolean foundDevice = false;
    if (deviceName.isEmpty()) {
      this.id = 0;
      foundDevice = true;
    } else {
      for (int i = 0; i < Camera.getNumberOfCameras(); ++i) {
        String existing_device = CameraEnumerationAndroid.getDeviceName(i);
(...skipping 24 matching lines...)
  }

  // Called by native code. Returns true if capturer is started.
  //
  // Note that this actually opens the camera, and Camera callbacks run on the
  // thread that calls open(), so this is done on the CameraThread. Since the
  // API needs a synchronous success return value we wait for the result.
  synchronized void startCapture(
      final int width, final int height, final int framerate,
      final Context applicationContext, final CapturerObserver frameObserver) {
-    Log.d(TAG, "startCapture requested: " + width + "x" + height
+    Logging.d(TAG, "startCapture requested: " + width + "x" + height
        + "@" + framerate);
    if (applicationContext == null) {
      throw new RuntimeException("applicationContext not set.");
    }
    if (frameObserver == null) {
      throw new RuntimeException("frameObserver not set.");
    }
    if (cameraThreadHandler != null) {
      throw new RuntimeException("Camera has already been started.");
    }
(...skipping 10 matching lines...)
    });
  }

  private void startCaptureOnCameraThread(
      int width, int height, int framerate, CapturerObserver frameObserver,
      Context applicationContext) {
    Throwable error = null;
    this.applicationContext = applicationContext;
    this.frameObserver = frameObserver;
    try {
-      Log.d(TAG, "Opening camera " + id);
+      Logging.d(TAG, "Opening camera " + id);
      camera = Camera.open(id);
      info = new Camera.CameraInfo();
      Camera.getCameraInfo(id, info);
      // No local renderer (we only care about onPreviewFrame() buffers, not a
      // directly-displayed UI element). Camera won't capture without
      // setPreview{Texture,Display}, so we create a SurfaceTexture and hand
      // it over to Camera, but never listen for frame-ready callbacks,
      // and never call updateTexImage on it.
      try {
        cameraGlTexture = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
        cameraSurfaceTexture = new SurfaceTexture(cameraGlTexture);
        cameraSurfaceTexture.setOnFrameAvailableListener(null);

        camera.setPreviewTexture(cameraSurfaceTexture);
      } catch (IOException e) {
-        Log.e(TAG, "setPreviewTexture failed", error);
+        Logging.e(TAG, "setPreviewTexture failed", error);
        throw new RuntimeException(e);
      }

-      Log.d(TAG, "Camera orientation: " + info.orientation +
+      Logging.d(TAG, "Camera orientation: " + info.orientation +
          " .Device orientation: " + getDeviceOrientation());
      camera.setErrorCallback(cameraErrorCallback);
      startPreviewOnCameraThread(width, height, framerate);
      frameObserver.OnCapturerStarted(true);

      // Start camera observer.
      cameraFramesCount = 0;
      captureBuffersCount = 0;
      cameraThreadHandler.postDelayed(cameraObserver, CAMERA_OBSERVER_PERIOD_MS);
      return;
    } catch (RuntimeException e) {
      error = e;
    }
-    Log.e(TAG, "startCapture failed", error);
+    Logging.e(TAG, "startCapture failed", error);
    stopCaptureOnCameraThread();
    cameraThreadHandler = null;
    frameObserver.OnCapturerStarted(false);
    if (errorHandler != null) {
      errorHandler.onCameraError("Camera can not be started.");
    }
    return;
  }

  // (Re)start preview with the closest supported format to |width| x |height| @ |framerate|.
  private void startPreviewOnCameraThread(int width, int height, int framerate) {
-    Log.d(
+    Logging.d(
        TAG, "startPreviewOnCameraThread requested: " + width + "x" + height + "@" + framerate);
    if (camera == null) {
-      Log.e(TAG, "Calling startPreviewOnCameraThread on stopped camera.");
+      Logging.e(TAG, "Calling startPreviewOnCameraThread on stopped camera.");
      return;
    }

    requestedWidth = width;
    requestedHeight = height;
    requestedFramerate = framerate;

    // Find closest supported format for |width| x |height| @ |framerate|.
    final Camera.Parameters parameters = camera.getParameters();
    final int[] range = CameraEnumerationAndroid.getFramerateRange(parameters, framerate * 1000);
    final Camera.Size previewSize = CameraEnumerationAndroid.getClosestSupportedSize(
        parameters.getSupportedPreviewSizes(), width, height);
    final CaptureFormat captureFormat = new CaptureFormat(
        previewSize.width, previewSize.height,
        range[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
        range[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);

    // Check if we are already using this capture format, then we don't need to do anything.
    if (captureFormat.equals(this.captureFormat)) {
      return;
    }

    // Update camera parameters.
-    Log.d(TAG, "isVideoStabilizationSupported: " +
+    Logging.d(TAG, "isVideoStabilizationSupported: " +
        parameters.isVideoStabilizationSupported());
    if (parameters.isVideoStabilizationSupported()) {
      parameters.setVideoStabilization(true);
    }
    // Note: setRecordingHint(true) actually decrease frame rate on N5.
    // parameters.setRecordingHint(true);
    if (captureFormat.maxFramerate > 0) {
      parameters.setPreviewFpsRange(captureFormat.minFramerate, captureFormat.maxFramerate);
    }
    parameters.setPreviewSize(captureFormat.width, captureFormat.height);
    parameters.setPreviewFormat(captureFormat.imageFormat);
    // Picture size is for taking pictures and not for preview/video, but we need to set it anyway
    // as a workaround for an aspect ratio problem on Nexus 7.
    final Camera.Size pictureSize = CameraEnumerationAndroid.getClosestSupportedSize(
        parameters.getSupportedPictureSizes(), width, height);
    parameters.setPictureSize(pictureSize.width, pictureSize.height);

    // Temporarily stop preview if it's already running.
    if (this.captureFormat != null) {
      camera.stopPreview();
      // Calling |setPreviewCallbackWithBuffer| with null should clear the internal camera buffer
      // queue, but sometimes we receive a frame with the old resolution after this call anyway.
      camera.setPreviewCallbackWithBuffer(null);
    }

    // (Re)start preview.
-    Log.d(TAG, "Start capturing: " + captureFormat);
+    Logging.d(TAG, "Start capturing: " + captureFormat);
    this.captureFormat = captureFormat;
    camera.setParameters(parameters);
    videoBuffers.queueCameraBuffers(captureFormat.frameSize(), camera);
    camera.setPreviewCallbackWithBuffer(this);
    camera.startPreview();
  }

  // Called by native code. Returns true when camera is known to be stopped.
  synchronized void stopCapture() throws InterruptedException {
    if (cameraThreadHandler == null) {
-      Log.e(TAG, "Calling stopCapture() for already stopped camera.");
+      Logging.e(TAG, "Calling stopCapture() for already stopped camera.");
      return;
    }
-    Log.d(TAG, "stopCapture");
+    Logging.d(TAG, "stopCapture");
    cameraThreadHandler.post(new Runnable() {
      @Override public void run() {
        stopCaptureOnCameraThread();
      }
    });
    cameraThread.join();
    cameraThreadHandler = null;
-    Log.d(TAG, "stopCapture done");
+    Logging.d(TAG, "stopCapture done");
  }

  private void stopCaptureOnCameraThread() {
    doStopCaptureOnCameraThread();
    Looper.myLooper().quit();
    return;
  }

  private void doStopCaptureOnCameraThread() {
-    Log.d(TAG, "stopCaptureOnCameraThread");
+    Logging.d(TAG, "stopCaptureOnCameraThread");
    if (camera == null) {
      return;
    }
    try {
      cameraThreadHandler.removeCallbacks(cameraObserver);
-      Log.d(TAG, "Stop preview.");
+      Logging.d(TAG, "Stop preview.");
      camera.stopPreview();
      camera.setPreviewCallbackWithBuffer(null);
      videoBuffers.stopReturnBuffersToCamera();
      captureFormat = null;

      camera.setPreviewTexture(null);
      cameraSurfaceTexture = null;
      if (cameraGlTexture != 0) {
        GLES20.glDeleteTextures(1, new int[] {cameraGlTexture}, 0);
        cameraGlTexture = 0;
      }
-      Log.d(TAG, "Release camera.");
+      Logging.d(TAG, "Release camera.");
      camera.release();
      camera = null;
    } catch (IOException e) {
-      Log.e(TAG, "Failed to stop camera", e);
+      Logging.e(TAG, "Failed to stop camera", e);
    }
  }

  private void switchCameraOnCameraThread(Runnable switchDoneEvent) {
-    Log.d(TAG, "switchCameraOnCameraThread");
+    Logging.d(TAG, "switchCameraOnCameraThread");

    doStopCaptureOnCameraThread();
    startCaptureOnCameraThread(requestedWidth, requestedHeight, requestedFramerate, frameObserver,
        applicationContext);
    pendingCameraSwitch = false;
-    Log.d(TAG, "switchCameraOnCameraThread done");
+    Logging.d(TAG, "switchCameraOnCameraThread done");
    if (switchDoneEvent != null) {
      switchDoneEvent.run();
    }
  }

  private void onOutputFormatRequestOnCameraThread(
      int width, int height, int fps) {
    if (camera == null) {
      return;
    }
-    Log.d(TAG, "onOutputFormatRequestOnCameraThread: " + width + "x" + height +
+    Logging.d(TAG, "onOutputFormatRequestOnCameraThread: " + width + "x" + height +
        "@" + fps);
    frameObserver.OnOutputFormatRequest(width, height, fps);
  }

  void returnBuffer(long timeStamp) {
    videoBuffers.returnBuffer(timeStamp);
  }

  private int getDeviceOrientation() {
    int orientation = 0;
(...skipping 41 matching lines...)
    }
    rotation = (info.orientation + rotation) % 360;
    // Mark the frame owning |data| as used.
    // Note that since data is directBuffer,
    // data.length >= videoBuffers.frameSize.
    if (videoBuffers.reserveByteBuffer(data, captureTimeNs)) {
      cameraFramesCount++;
      frameObserver.OnFrameCaptured(data, videoBuffers.frameSize, captureFormat.width,
          captureFormat.height, rotation, captureTimeNs);
    } else {
-      Log.w(TAG, "reserveByteBuffer failed - dropping frame.");
+      Logging.w(TAG, "reserveByteBuffer failed - dropping frame.");
    }
  }

  // runCameraThreadUntilIdle make sure all posted messages to the cameraThread
  // is processed before returning. It does that by itself posting a message to
  // to the message queue and waits until is has been processed.
  // It is used in tests.
  void runCameraThreadUntilIdle() {
    if (cameraThreadHandler == null)
      return;
(...skipping 42 matching lines...)
    synchronized void queueCameraBuffers(int frameSize, Camera camera) {
      this.camera = camera;
      this.frameSize = frameSize;

      queuedBuffers.clear();
      for (int i = 0; i < numCaptureBuffers; ++i) {
        final ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize);
        camera.addCallbackBuffer(buffer.array());
        queuedBuffers.put(buffer.array(), buffer);
      }
-      Log.d(TAG, "queueCameraBuffers enqueued " + numCaptureBuffers
+      Logging.d(TAG, "queueCameraBuffers enqueued " + numCaptureBuffers
          + " buffers of size " + frameSize + ".");
    }

    synchronized String pendingFramesTimeStamps() {
      List<Long> timeStampsMs = new ArrayList<Long>();
      for (Long timeStampNs : pendingBuffers.keySet()) {
        timeStampsMs.add(TimeUnit.NANOSECONDS.toMillis(timeStampNs));
      }
      return timeStampsMs.toString();
    }

    synchronized void stopReturnBuffersToCamera() {
      this.camera = null;
      queuedBuffers.clear();
      // Frames in |pendingBuffers| need to be kept alive until they are returned.
-      Log.d(TAG, "stopReturnBuffersToCamera called."
+      Logging.d(TAG, "stopReturnBuffersToCamera called."
          + (pendingBuffers.isEmpty() ?
              " All buffers have been returned."
              : " Pending buffers: " + pendingFramesTimeStamps() + "."));
    }

    synchronized boolean reserveByteBuffer(byte[] data, long timeStamp) {
      final ByteBuffer buffer = queuedBuffers.remove(data);
      if (buffer == null) {
        // Frames might be posted to |onPreviewFrame| with the previous format while changing
        // capture format in |startPreviewOnCameraThread|. Drop these old frames.
-        Log.w(TAG, "Received callback buffer from previous configuration with length: "
+        Logging.w(TAG, "Received callback buffer from previous configuration with length: "
            + (data == null ? "null" : data.length));
        return false;
      }
      if (buffer.capacity() != frameSize) {
        throw new IllegalStateException("Callback buffer has unexpected frame size");
      }
      if (pendingBuffers.containsKey(timeStamp)) {
-        Log.e(TAG, "Timestamp already present in pending buffers - they need to be unique");
+        Logging.e(TAG, "Timestamp already present in pending buffers - they need to be unique");
        return false;
      }
      pendingBuffers.put(timeStamp, buffer);
      if (queuedBuffers.isEmpty()) {
-        Log.v(TAG, "Camera is running out of capture buffers."
+        Logging.v(TAG, "Camera is running out of capture buffers."
            + " Pending buffers: " + pendingFramesTimeStamps());
      }
      return true;
    }

    synchronized void returnBuffer(long timeStamp) {
      final ByteBuffer returnedFrame = pendingBuffers.remove(timeStamp);
      if (returnedFrame == null) {
        throw new RuntimeException("unknown data buffer with time stamp "
            + timeStamp + "returned?!?");
      }

      if (camera != null && returnedFrame.capacity() == frameSize) {
        camera.addCallbackBuffer(returnedFrame.array());
        if (queuedBuffers.isEmpty()) {
-          Log.v(TAG, "Frame returned when camera is running out of capture"
+          Logging.v(TAG, "Frame returned when camera is running out of capture"
              + " buffers for TS " + TimeUnit.NANOSECONDS.toMillis(timeStamp));
        }
        queuedBuffers.put(returnedFrame.array(), returnedFrame);
        return;
      }

      if (returnedFrame.capacity() != frameSize) {
-        Log.d(TAG, "returnBuffer with time stamp "
+        Logging.d(TAG, "returnBuffer with time stamp "
            + TimeUnit.NANOSECONDS.toMillis(timeStamp)
            + " called with old frame size, " + returnedFrame.capacity() + ".");
        // Since this frame has the wrong size, don't requeue it. Frames with the correct size are
        // created in queueCameraBuffers so this must be an old buffer.
        return;
      }

-      Log.d(TAG, "returnBuffer with time stamp "
+      Logging.d(TAG, "returnBuffer with time stamp "
          + TimeUnit.NANOSECONDS.toMillis(timeStamp)
          + " called after camera has been stopped.");
    }
  }

  // Interface used for providing callbacks to an observer.
  interface CapturerObserver {
    // Notify if the camera have been started successfully or not.
    // Called on a Java thread owned by VideoCapturerAndroid.
    abstract void OnCapturerStarted(boolean success);
(...skipping 35 matching lines...)
    }

    private native void nativeCapturerStarted(long nativeCapturer,
        boolean success);
    private native void nativeOnFrameCaptured(long nativeCapturer,
        byte[] data, int length, int width, int height, int rotation, long timeStamp);
    private native void nativeOnOutputFormatRequest(long nativeCapturer,
        int width, int height, int fps);
  }
}