Index: talk/app/webrtc/java/android/org/webrtc/VideoCapturerAndroid.java |
diff --git a/talk/app/webrtc/java/android/org/webrtc/VideoCapturerAndroid.java b/talk/app/webrtc/java/android/org/webrtc/VideoCapturerAndroid.java |
index d85f2c5588c879b32aed083a046700f13b29968c..a02e8b6886545f313cb2a09607cd5a3d7115c1b0 100644 |
--- a/talk/app/webrtc/java/android/org/webrtc/VideoCapturerAndroid.java |
+++ b/talk/app/webrtc/java/android/org/webrtc/VideoCapturerAndroid.java |
@@ -77,7 +77,6 @@ public class VideoCapturerAndroid extends VideoCapturer implements |
private final Object cameraIdLock = new Object(); |
private int id; |
private android.hardware.Camera.CameraInfo info; |
- private final FramePool videoBuffers; |
private final CameraStatistics cameraStatistics; |
// Remember the requested format in case we want to switch cameras. |
private int requestedWidth; |
@@ -90,8 +89,12 @@ public class VideoCapturerAndroid extends VideoCapturer implements |
private CapturerObserver frameObserver = null; |
private final CameraEventsHandler eventsHandler; |
private boolean firstFrameReported; |
+ // Arbitrary queue depth. Higher number means more memory allocated & held, |
+ // lower number means more sensitivity to processing time in the client (and |
+ // potentially stalling the capturer if it runs out of buffers to write to). |
+ private static final int NUMBER_OF_CAPTURE_BUFFERS = 3; |
private final boolean isCapturingToTexture; |
- private final SurfaceTextureHelper surfaceHelper; |
+ final SurfaceTextureHelper surfaceHelper; // Package visible for testing purposes. |
// The camera API can output one old frame after the camera has been switched or the resolution |
// has been changed. This flag is used for dropping the first frame after camera restart. |
private boolean dropNextFrame = false; |
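For a sense of what the chosen queue depth costs, a back-of-the-envelope example (the 1280x720 NV21 numbers are illustrative, not taken from this change):

// Illustrative only: approximate memory held by the capture queue for a
// 1280x720 NV21 stream (NV21 is 12 bits per pixel, i.e. 3/2 bytes per pixel).
final int frameSize = 1280 * 720 * 3 / 2;                     // 1,382,400 bytes per buffer
final int queueBytes = NUMBER_OF_CAPTURE_BUFFERS * frameSize; // 3 * 1,382,400 ~= 4.1 MB held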
@@ -129,18 +132,13 @@ public class VideoCapturerAndroid extends VideoCapturer implements |
int cameraFps = (cameraFramesCount * 1000 + CAMERA_OBSERVER_PERIOD_MS / 2) |
/ CAMERA_OBSERVER_PERIOD_MS; |
- Logging.d(TAG, "Camera fps: " + cameraFps + |
- ". Pending buffers: " + cameraStatistics.pendingFramesTimeStamps()); |
+ Logging.d(TAG, "Camera fps: " + cameraFps +"."); |
if (cameraFramesCount == 0) { |
++freezePeriodCount; |
if (CAMERA_OBSERVER_PERIOD_MS * freezePeriodCount > CAMERA_FREEZE_REPORT_TIMOUT_MS |
&& eventsHandler != null) { |
Logging.e(TAG, "Camera freezed."); |
- if (cameraStatistics.pendingFramesCount() == cameraStatistics.maxPendingFrames) { |
magjed_webrtc, 2015/12/17 08:02:42:
Why not keep this distinction? The client can stil
perkj_webrtc, 2015/12/17 12:42:44:
Done.
- eventsHandler.onCameraFreezed("Camera failure. Client must return video buffers."); |
- } else { |
- eventsHandler.onCameraFreezed("Camera failure."); |
- } |
+ eventsHandler.onCameraFreezed("Camera failure."); |
return; |
} |
} else { |
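The fps figure logged above is a rounded integer division. A worked example of the same arithmetic, assuming a 2000 ms observer period purely for illustration:

// Illustration: 31 frames counted during an assumed 2000 ms observation window.
final int CAMERA_OBSERVER_PERIOD_MS = 2000;  // assumed value for this sketch
final int cameraFramesCount = 31;
final int cameraFps = (cameraFramesCount * 1000 + CAMERA_OBSERVER_PERIOD_MS / 2)
    / CAMERA_OBSERVER_PERIOD_MS;             // (31000 + 1000) / 2000 = 16, i.e. 15.5 rounded up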
@@ -153,27 +151,14 @@ public class VideoCapturerAndroid extends VideoCapturer implements |
private static class CameraStatistics { |
private int frameCount = 0; |
private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker(); |
- private final Set<Long> timeStampsNs = new HashSet<Long>(); |
- public final int maxPendingFrames; |
- CameraStatistics(int maxPendingFrames) { |
- this.maxPendingFrames = maxPendingFrames; |
+ CameraStatistics() { |
threadChecker.detachThread(); |
} |
- public void addPendingFrame(long timestamp) { |
+ public void addFrame() { |
threadChecker.checkIsOnValidThread(); |
++frameCount; |
- timeStampsNs.add(timestamp); |
- } |
- |
- public void frameReturned(long timestamp) { |
- threadChecker.checkIsOnValidThread(); |
- if (!timeStampsNs.contains(timestamp)) { |
- throw new IllegalStateException( |
- "CameraStatistics.frameReturned called with unknown timestamp " + timestamp); |
- } |
- timeStampsNs.remove(timestamp); |
} |
public int getAndResetFrameCount() { |
@@ -182,21 +167,6 @@ public class VideoCapturerAndroid extends VideoCapturer implements |
frameCount = 0; |
return count; |
} |
- |
- // Return number of pending frames that have not been returned. |
- public int pendingFramesCount() { |
- threadChecker.checkIsOnValidThread(); |
- return timeStampsNs.size(); |
- } |
- |
- public String pendingFramesTimeStamps() { |
- threadChecker.checkIsOnValidThread(); |
- List<Long> timeStampsMs = new ArrayList<Long>(); |
- for (long ts : timeStampsNs) { |
- timeStampsMs.add(TimeUnit.NANOSECONDS.toMillis(ts)); |
- } |
- return timeStampsMs.toString(); |
- } |
} |
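The detachThread() call in the simplified constructor above is what lets the counter be owned by the camera thread even though the object is created elsewhere. A minimal sketch of that contract; the usage is illustrative:

final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker();
threadChecker.detachThread();          // forget the constructing thread
// The first checked call after a detach binds the checker to the calling thread
// (here: the camera thread); later calls from any other thread will throw.
threadChecker.checkIsOnValidThread();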
public static interface CameraEventsHandler { |
@@ -350,20 +320,18 @@ public class VideoCapturerAndroid extends VideoCapturer implements |
private VideoCapturerAndroid(int cameraId, CameraEventsHandler eventsHandler, |
EglBase.Context sharedContext) { |
- Logging.d(TAG, "VideoCapturerAndroid"); |
this.id = cameraId; |
this.eventsHandler = eventsHandler; |
cameraThread = new HandlerThread(TAG); |
cameraThread.start(); |
cameraThreadHandler = new Handler(cameraThread.getLooper()); |
- videoBuffers = new FramePool(cameraThread); |
isCapturingToTexture = (sharedContext != null); |
- cameraStatistics = |
- new CameraStatistics(isCapturingToTexture ? 1 : videoBuffers.numCaptureBuffers); |
+ cameraStatistics = new CameraStatistics(); |
surfaceHelper = SurfaceTextureHelper.create(sharedContext, cameraThreadHandler); |
if (isCapturingToTexture) { |
surfaceHelper.setListener(this); |
} |
+ Logging.d(TAG, "VideoCapturerAndroid isCapturingToTexture : " + isCapturingToTexture); |
} |
private void checkIsOnCameraThread() { |
@@ -403,9 +371,6 @@ public class VideoCapturerAndroid extends VideoCapturer implements |
if (camera != null) { |
throw new IllegalStateException("Release called while camera is running"); |
} |
- if (cameraStatistics.pendingFramesCount() != 0) { |
- throw new IllegalStateException("Release called with pending frames left"); |
- } |
} |
}); |
surfaceHelper.disconnect(cameraThreadHandler); |
@@ -582,7 +547,11 @@ public class VideoCapturerAndroid extends VideoCapturer implements |
camera.setParameters(parameters); |
if (!isCapturingToTexture) { |
- videoBuffers.queueCameraBuffers(captureFormat.frameSize(), camera); |
+ final int frameSize = captureFormat.frameSize(); |
+ for (int i = 0; i < NUMBER_OF_CAPTURE_BUFFERS; ++i) { |
+ final ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize); |
+ camera.addCallbackBuffer(buffer.array()); |
+ } |
camera.setPreviewCallbackWithBuffer(this); |
} |
camera.startPreview(); |
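The loop above replaces FramePool.queueCameraBuffers(). A sketch of the same android.hardware.Camera callback-buffer pattern with the NV21 frame-size arithmetic spelled out (the width/height locals are illustrative):

// Sketch: pre-allocate direct NV21 buffers and hand their backing arrays to the camera.
final int frameSize = width * height * ImageFormat.getBitsPerPixel(ImageFormat.NV21) / 8;
for (int i = 0; i < NUMBER_OF_CAPTURE_BUFFERS; ++i) {
  // Direct allocation lets native code read the frame without an extra copy;
  // buffer.array() is the byte[] the camera fills and passes to onPreviewFrame().
  final ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize);
  camera.addCallbackBuffer(buffer.array());
}
camera.setPreviewCallbackWithBuffer(this);  // 'this' implements Camera.PreviewCallback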
@@ -619,13 +588,6 @@ public class VideoCapturerAndroid extends VideoCapturer implements |
Logging.d(TAG, "Stop preview."); |
camera.stopPreview(); |
camera.setPreviewCallbackWithBuffer(null); |
- if (!isCapturingToTexture()) { |
- videoBuffers.stopReturnBuffersToCamera(); |
- Logging.d(TAG, "stopReturnBuffersToCamera called." |
- + (cameraStatistics.pendingFramesCount() == 0? |
- " All buffers have been returned." |
- : " Pending buffers: " + cameraStatistics.pendingFramesTimeStamps() + ".")); |
- } |
captureFormat = null; |
Logging.d(TAG, "Release camera."); |
@@ -665,19 +627,6 @@ public class VideoCapturerAndroid extends VideoCapturer implements |
return cameraThreadHandler; |
} |
- public void returnBuffer(final long timeStamp) { |
- cameraThreadHandler.post(new Runnable() { |
- @Override public void run() { |
- cameraStatistics.frameReturned(timeStamp); |
- if (isCapturingToTexture) { |
- surfaceHelper.returnTextureFrame(); |
- } else { |
- videoBuffers.returnBuffer(timeStamp); |
- } |
- } |
- }); |
- } |
- |
private int getDeviceOrientation() { |
int orientation = 0; |
@@ -728,16 +677,11 @@ public class VideoCapturerAndroid extends VideoCapturer implements |
firstFrameReported = true; |
} |
- // Mark the frame owning |data| as used. |
- // Note that since data is directBuffer, |
- // data.length >= videoBuffers.frameSize. |
- if (videoBuffers.reserveByteBuffer(data, captureTimeNs)) { |
- cameraStatistics.addPendingFrame(captureTimeNs); |
- frameObserver.onByteBufferFrameCaptured(data, videoBuffers.frameSize, captureFormat.width, |
- captureFormat.height, getFrameOrientation(), captureTimeNs); |
- } else { |
- Logging.w(TAG, "reserveByteBuffer failed - dropping frame."); |
- } |
+ cameraStatistics.addFrame(); |
+ // Note that since data is directBuffer, data.length >= captureFormat.frameSize. |
magjed_webrtc, 2015/12/17 08:02:42:
I think there is a high risk of getting a frame wi
+ frameObserver.onByteBufferFrameCaptured(data, captureFormat.frameSize(), captureFormat.width, |
+ captureFormat.height, getFrameOrientation(), captureTimeNs); |
+ camera.addCallbackBuffer(data); |
} |
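Since the buffer is handed straight back with camera.addCallbackBuffer(data), the byte[] passed to onByteBufferFrameCaptured() is only safe to read until that call returns. A hedged sketch of what a consumer now has to do (the method shown and the deliverFrame() helper are hypothetical):

// Hypothetical consumer-side handling: copy (or convert) synchronously, because the
// capturer recycles |data| into the camera's buffer queue as soon as this returns.
void onByteBufferFrameCaptured(byte[] data, int length, int width, int height,
    int rotation, long timeStampNs) {
  final byte[] copy = Arrays.copyOf(data, length);
  deliverFrame(copy, width, height, rotation, timeStampNs);  // illustrative downstream call
}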
@Override |
@@ -762,121 +706,11 @@ public class VideoCapturerAndroid extends VideoCapturer implements |
transformMatrix = |
RendererCommon.multiplyMatrices(transformMatrix, RendererCommon.horizontalFlipMatrix()); |
} |
- cameraStatistics.addPendingFrame(timestampNs); |
- |
+ cameraStatistics.addFrame(); |
frameObserver.onTextureFrameCaptured(captureFormat.width, captureFormat.height, oesTextureId, |
transformMatrix, rotation, timestampNs); |
} |
- // Class used for allocating and bookkeeping video frames. All buffers are |
- // direct allocated so that they can be directly used from native code. This class is |
- // not thread-safe, and enforces single thread use. |
- private static class FramePool { |
- // Thread that all calls should be made on. |
- private final Thread thread; |
- // Arbitrary queue depth. Higher number means more memory allocated & held, |
- // lower number means more sensitivity to processing time in the client (and |
- // potentially stalling the capturer if it runs out of buffers to write to). |
- public static final int numCaptureBuffers = 3; |
- // This container tracks the buffers added as camera callback buffers. It is needed for finding |
- // the corresponding ByteBuffer given a byte[]. |
- private final Map<byte[], ByteBuffer> queuedBuffers = new IdentityHashMap<byte[], ByteBuffer>(); |
- // This container tracks the frames that have been sent but not returned. It is needed for |
- // keeping the buffers alive and for finding the corresponding ByteBuffer given a timestamp. |
- private final Map<Long, ByteBuffer> pendingBuffers = new HashMap<Long, ByteBuffer>(); |
- private int frameSize = 0; |
- private android.hardware.Camera camera; |
- |
- public FramePool(Thread thread) { |
- this.thread = thread; |
- } |
- |
- private void checkIsOnValidThread() { |
- if (Thread.currentThread() != thread) { |
- throw new IllegalStateException("Wrong thread"); |
- } |
- } |
- |
- // Discards previous queued buffers and adds new callback buffers to camera. |
- public void queueCameraBuffers(int frameSize, android.hardware.Camera camera) { |
- checkIsOnValidThread(); |
- this.camera = camera; |
- this.frameSize = frameSize; |
- |
- queuedBuffers.clear(); |
- for (int i = 0; i < numCaptureBuffers; ++i) { |
- final ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize); |
- camera.addCallbackBuffer(buffer.array()); |
- queuedBuffers.put(buffer.array(), buffer); |
- } |
- Logging.d(TAG, "queueCameraBuffers enqueued " + numCaptureBuffers |
- + " buffers of size " + frameSize + "."); |
- } |
- |
- public void stopReturnBuffersToCamera() { |
- checkIsOnValidThread(); |
- this.camera = null; |
- queuedBuffers.clear(); |
- // Frames in |pendingBuffers| need to be kept alive until they are returned. |
- } |
- |
- public boolean reserveByteBuffer(byte[] data, long timeStamp) { |
- checkIsOnValidThread(); |
- final ByteBuffer buffer = queuedBuffers.remove(data); |
- if (buffer == null) { |
- // Frames might be posted to |onPreviewFrame| with the previous format while changing |
- // capture format in |startPreviewOnCameraThread|. Drop these old frames. |
- Logging.w(TAG, "Received callback buffer from previous configuration with length: " |
- + (data == null ? "null" : data.length)); |
- return false; |
- } |
- if (buffer.capacity() != frameSize) { |
- throw new IllegalStateException("Callback buffer has unexpected frame size"); |
- } |
- if (pendingBuffers.containsKey(timeStamp)) { |
- Logging.e(TAG, "Timestamp already present in pending buffers - they need to be unique"); |
- return false; |
- } |
- pendingBuffers.put(timeStamp, buffer); |
- if (queuedBuffers.isEmpty()) { |
- Logging.d(TAG, "Camera is running out of capture buffers."); |
- } |
- return true; |
- } |
- |
- public void returnBuffer(long timeStamp) { |
- checkIsOnValidThread(); |
- final ByteBuffer returnedFrame = pendingBuffers.remove(timeStamp); |
- if (returnedFrame == null) { |
- throw new RuntimeException("unknown data buffer with time stamp " |
- + timeStamp + "returned?!?"); |
- } |
- |
- if (camera != null && returnedFrame.capacity() == frameSize) { |
- camera.addCallbackBuffer(returnedFrame.array()); |
- if (queuedBuffers.isEmpty()) { |
- Logging.d(TAG, "Frame returned when camera is running out of capture" |
- + " buffers for TS " + TimeUnit.NANOSECONDS.toMillis(timeStamp)); |
- } |
- queuedBuffers.put(returnedFrame.array(), returnedFrame); |
- return; |
- } |
- |
- if (returnedFrame.capacity() != frameSize) { |
- Logging.d(TAG, "returnBuffer with time stamp " |
- + TimeUnit.NANOSECONDS.toMillis(timeStamp) |
- + " called with old frame size, " + returnedFrame.capacity() + "."); |
- // Since this frame has the wrong size, don't requeue it. Frames with the correct size are |
- // created in queueCameraBuffers so this must be an old buffer. |
- return; |
- } |
- |
- Logging.d(TAG, "returnBuffer with time stamp " |
- + TimeUnit.NANOSECONDS.toMillis(timeStamp) |
- + " called after camera has been stopped."); |
- } |
- } |
- |
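One detail worth noting from the removed pool: it located the owning ByteBuffer by the identity of the byte[] the camera hands back, since arrays compare by reference rather than by content. A minimal illustrative sketch:

// byte[] has no value-based equals()/hashCode(), so the removed pool kept an
// identity-keyed map from the camera's callback array to its owning ByteBuffer.
final Map<byte[], ByteBuffer> queuedBuffers = new IdentityHashMap<byte[], ByteBuffer>();
final ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize);
queuedBuffers.put(buffer.array(), buffer);
// onPreviewFrame() later receives the exact same byte[] instance, so an identity
// lookup recovers the ByteBuffer that keeps the memory alive:
final ByteBuffer owner = queuedBuffers.get(data);  // non-null only for that same instance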
// Interface used for providing callbacks to an observer. |
interface CapturerObserver { |
// Notify if the camera have been started successfully or not. |