Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(743)

Unified Diff: webrtc/api/androidtests/src/org/webrtc/CameraVideoCapturerTestFixtures.java

Issue 2024843002: Refactor VideoCapturerAndroid tests in WebRTC. (Closed) Base URL: https://chromium.googlesource.com/external/webrtc.git@master
Patch Set: Reorder imports to match Java style guide Created 4 years, 6 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View side-by-side diff with in-line comments
Download patch
Index: webrtc/api/androidtests/src/org/webrtc/CameraVideoCapturerTestFixtures.java
diff --git a/webrtc/api/androidtests/src/org/webrtc/VideoCapturerAndroidTestFixtures.java b/webrtc/api/androidtests/src/org/webrtc/CameraVideoCapturerTestFixtures.java
similarity index 31%
rename from webrtc/api/androidtests/src/org/webrtc/VideoCapturerAndroidTestFixtures.java
rename to webrtc/api/androidtests/src/org/webrtc/CameraVideoCapturerTestFixtures.java
index 4c96b8cff071fb54bdd666a9b769ba2463e97db1..30119e5a1f87d60142fc759395e6263c778beaa2 100644
--- a/webrtc/api/androidtests/src/org/webrtc/VideoCapturerAndroidTestFixtures.java
+++ b/webrtc/api/androidtests/src/org/webrtc/CameraVideoCapturerTestFixtures.java
@@ -9,21 +9,21 @@
*/
package org.webrtc;
-import android.content.Context;
+import static junit.framework.Assert.*;
-import org.webrtc.VideoCapturerAndroidTestFixtures;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
import org.webrtc.VideoRenderer.I420Frame;
+import android.content.Context;
+
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
-import static junit.framework.Assert.*;
+class CameraVideoCapturerTestFixtures {
+ static final String TAG = "CameraVideoCapturerTestFixtures";
-@SuppressWarnings("deprecation")
-public class VideoCapturerAndroidTestFixtures {
- static class RendererCallbacks implements VideoRenderer.Callbacks {
+ static private class RendererCallbacks implements VideoRenderer.Callbacks {
private int framesRendered = 0;
private Object frameLock = 0;
private int width = 0;
@@ -52,7 +52,8 @@ public class VideoCapturerAndroidTestFixtures {
}
}
- public int WaitForNextFrameToRender() throws InterruptedException {
+ public int waitForNextFrameToRender() throws InterruptedException {
+ Logging.d(TAG, "Waiting for the next frame to render");
synchronized (frameLock) {
frameLock.wait();
return framesRendered;
@@ -60,7 +61,7 @@ public class VideoCapturerAndroidTestFixtures {
}
}
- static class FakeAsyncRenderer implements VideoRenderer.Callbacks {
+ static private class FakeAsyncRenderer implements VideoRenderer.Callbacks {
private final List<I420Frame> pendingFrames = new ArrayList<I420Frame>();
@Override
@@ -73,6 +74,7 @@ public class VideoCapturerAndroidTestFixtures {
// Wait until at least one frame has been received, before returning them.
public List<I420Frame> waitForPendingFrames() throws InterruptedException {
+ Logging.d(TAG, "Waiting for pending frames");
synchronized (pendingFrames) {
while (pendingFrames.isEmpty()) {
pendingFrames.wait();
@@ -82,21 +84,23 @@ public class VideoCapturerAndroidTestFixtures {
}
}
- static class FakeCapturerObserver implements VideoCapturer.CapturerObserver {
+ static private class FakeCapturerObserver implements CameraVideoCapturer.CapturerObserver {
private int framesCaptured = 0;
private int frameSize = 0;
private int frameWidth = 0;
private int frameHeight = 0;
- private Object frameLock = 0;
- private Object capturerStartLock = 0;
- private boolean captureStartResult = false;
- private List<Long> timestamps = new ArrayList<Long>();
+ final private Object frameLock = new Object();
+ final private Object capturerStartLock = new Object();
+ private boolean capturerStartResult = false;
+ final private List<Long> timestamps = new ArrayList<Long>();
@Override
public void onCapturerStarted(boolean success) {
+ Logging.d(TAG, "onCapturerStarted: " + success);
+
synchronized (capturerStartLock) {
- captureStartResult = success;
- capturerStartLock.notify();
+ capturerStartResult = success;
+ capturerStartLock.notifyAll();
}
}
@@ -129,14 +133,16 @@ public class VideoCapturerAndroidTestFixtures {
@Override
public void onOutputFormatRequest(int width, int height, int fps) {}
- public boolean WaitForCapturerToStart() throws InterruptedException {
+ public boolean waitForCapturerToStart() throws InterruptedException {
+ Logging.d(TAG, "Waiting for the capturer to start");
synchronized (capturerStartLock) {
capturerStartLock.wait();
- return captureStartResult;
+ return capturerStartResult;
}
}
- public int WaitForNextCapturedFrame() throws InterruptedException {
+ public int waitForNextCapturedFrame() throws InterruptedException {
+ Logging.d(TAG, "Waiting for the next captured frame");
synchronized (frameLock) {
frameLock.wait();
return framesCaptured;
@@ -171,7 +177,7 @@ public class VideoCapturerAndroidTestFixtures {
}
static class CameraEvents implements
- VideoCapturerAndroid.CameraEventsHandler {
+ CameraVideoCapturer.CameraEventsHandler {
public boolean onCameraOpeningCalled;
public boolean onFirstFrameAvailableCalled;
public final Object onCameraFreezedLock = new Object();
@@ -202,7 +208,8 @@ public class VideoCapturerAndroidTestFixtures {
@Override
public void onCameraClosed() { }
- public String WaitForCameraFreezed() throws InterruptedException {
+ public String waitForCameraFreezed() throws InterruptedException {
+ Logging.d(TAG, "Waiting for the camera to freeze");
synchronized (onCameraFreezedLock) {
onCameraFreezedLock.wait();
return onCameraFreezedDescription;
@@ -210,45 +217,193 @@ public class VideoCapturerAndroidTestFixtures {
}
}
- static public CameraEvents createCameraEvents() {
- return new CameraEvents();
+ /**
+ * Class to collect all classes related to single capturer instance.
+ */
+ static private class CapturerInstance {
+ public CameraVideoCapturer capturer;
+ public CameraEvents cameraEvents;
+ public SurfaceTextureHelper surfaceTextureHelper;
+ public FakeCapturerObserver observer;
+ public List<CaptureFormat> supportedFormats;
+ public CaptureFormat format;
}
- // Return true if the device under test have at least two cameras.
- @SuppressWarnings("deprecation")
- static public boolean HaveTwoCameras() {
- return (android.hardware.Camera.getNumberOfCameras() >= 2);
+ /**
+ * Class used for collecting a VideoSource, a VideoTrack and a renderer. The class
+ * is used for testing local rendering from a capturer.
+ */
+ static private class VideoTrackWithRenderer {
+ public VideoSource source;
+ public VideoTrack track;
+ public RendererCallbacks rendererCallbacks;
+ public FakeAsyncRenderer fakeAsyncRenderer;
}
- static public void release(VideoCapturerAndroid capturer) {
- assertNotNull(capturer);
- capturer.dispose();
+ public interface TestObjectFactory {
+ CameraVideoCapturer createCapturer(
+ String name, CameraVideoCapturer.CameraEventsHandler eventsHandler);
+ String getNameOfFrontFacingDevice();
+ String getNameOfBackFacingDevice();
+ boolean haveTwoCameras();
+ boolean isCapturingToTexture();
+ Context getAppContext();
+
+ // CameraVideoCapturer API is too slow for some of our tests where we need to open a competing
+ // camera. These methods are used instead.
+ Object rawOpenCamera(String cameraName);
+ void rawCloseCamera(Object camera);
}
- static public void startCapturerAndRender(VideoCapturerAndroid capturer)
- throws InterruptedException {
- PeerConnectionFactory factory = new PeerConnectionFactory(null /* options */);
- VideoSource source =
- factory.createVideoSource(capturer, new MediaConstraints());
- VideoTrack track = factory.createVideoTrack("dummy", source);
- RendererCallbacks callbacks = new RendererCallbacks();
- track.addRenderer(new VideoRenderer(callbacks));
- assertTrue(callbacks.WaitForNextFrameToRender() > 0);
- track.dispose();
- source.dispose();
- factory.dispose();
- }
-
- static public void switchCamera(VideoCapturerAndroid capturer) throws InterruptedException {
- PeerConnectionFactory factory = new PeerConnectionFactory(null /* options */);
- VideoSource source =
- factory.createVideoSource(capturer, new MediaConstraints());
- VideoTrack track = factory.createVideoTrack("dummy", source);
+ private PeerConnectionFactory peerConnectionFactory;
+ private TestObjectFactory testObjectFactory;
+
+ CameraVideoCapturerTestFixtures(TestObjectFactory testObjectFactory) {
+ PeerConnectionFactory.initializeAndroidGlobals(
+ testObjectFactory.getAppContext(), true, true, true);
+
+ this.peerConnectionFactory = new PeerConnectionFactory(null /* options */);
+ this.testObjectFactory = testObjectFactory;
+ }
+
+ public void dispose() {
+ this.peerConnectionFactory.dispose();
+ }
+
+ // Internal helper methods
+ private CapturerInstance createCapturer(String name) {
+ CapturerInstance instance = new CapturerInstance();
+ instance.cameraEvents = new CameraEvents();
+ instance.capturer = testObjectFactory.createCapturer(name, instance.cameraEvents);
+ instance.surfaceTextureHelper = SurfaceTextureHelper.create(
+ "SurfaceTextureHelper test" /* threadName */, null /* sharedContext */);
+ instance.observer = new FakeCapturerObserver();
+ instance.supportedFormats = instance.capturer.getSupportedFormats();
+ return instance;
+ }
+
+ private CapturerInstance createCapturer() {
+ return createCapturer("");
+ }
+
+ private void startCapture(CapturerInstance instance) {
+ startCapture(instance, 0);
+ }
+
+ private void startCapture(CapturerInstance instance, int formatIndex) {
+ final CameraEnumerationAndroid.CaptureFormat format =
+ instance.supportedFormats.get(formatIndex);
+
+ instance.capturer.startCapture(format.width, format.height, format.framerate.max,
+ instance.surfaceTextureHelper, testObjectFactory.getAppContext(), instance.observer);
+ instance.format = format;
+ }
+
+ private void disposeCapturer(CapturerInstance instance) {
+ instance.capturer.dispose();
+ instance.surfaceTextureHelper.returnTextureFrame();
+ instance.surfaceTextureHelper.dispose();
+ }
+
+ private VideoTrackWithRenderer createVideoTrackWithRenderer(CameraVideoCapturer capturer,
+ VideoRenderer.Callbacks rendererCallbacks) {
+ VideoTrackWithRenderer videoTrackWithRenderer = new VideoTrackWithRenderer();
+ videoTrackWithRenderer.source =
+ peerConnectionFactory.createVideoSource(capturer, new MediaConstraints());
+ videoTrackWithRenderer.track =
+ peerConnectionFactory.createVideoTrack("dummy", videoTrackWithRenderer.source);
+ videoTrackWithRenderer.track.addRenderer(new VideoRenderer(rendererCallbacks));
+ return videoTrackWithRenderer;
+ }
+
+ private VideoTrackWithRenderer createVideoTrackWithRenderer(CameraVideoCapturer capturer) {
+ RendererCallbacks rendererCallbacks = new RendererCallbacks();
+ VideoTrackWithRenderer videoTrackWithRenderer =
+ createVideoTrackWithRenderer(capturer, rendererCallbacks);
+ videoTrackWithRenderer.rendererCallbacks = rendererCallbacks;
+ return videoTrackWithRenderer;
+ }
+
+ private VideoTrackWithRenderer createVideoTrackWithFakeAsyncRenderer(
+ CameraVideoCapturer capturer) {
+ FakeAsyncRenderer fakeAsyncRenderer = new FakeAsyncRenderer();
+ VideoTrackWithRenderer videoTrackWithRenderer =
+ createVideoTrackWithRenderer(capturer, fakeAsyncRenderer);
+ videoTrackWithRenderer.fakeAsyncRenderer = fakeAsyncRenderer;
+ return videoTrackWithRenderer;
+ }
+
+ private void disposeVideoTrackWithRenderer(VideoTrackWithRenderer videoTrackWithRenderer) {
+ videoTrackWithRenderer.track.dispose();
+ videoTrackWithRenderer.source.dispose();
+ }
+
+ private void waitUntilIdle(CapturerInstance capturerInstance) throws InterruptedException {
+ final CountDownLatch barrier = new CountDownLatch(1);
+ capturerInstance.surfaceTextureHelper.getHandler().post(new Runnable() {
+ @Override public void run() {
+ barrier.countDown();
+ }
+ });
+ barrier.await();
+ }
+
+ private void createCapturerAndRender(String name) throws InterruptedException {
+ if (name == null) {
+ Logging.w(TAG, "Skipping video capturer test because device name is null.");
+ return;
+ }
+
+ final CapturerInstance capturerInstance = createCapturer(name);
+ final VideoTrackWithRenderer videoTrackWithRenderer =
+ createVideoTrackWithRenderer(capturerInstance.capturer);
+ assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
+ disposeVideoTrackWithRenderer(videoTrackWithRenderer);
+ disposeCapturer(capturerInstance);
+ }
+
+ // Test methods
+ public void createCapturerAndDispose() {
+ disposeCapturer(createCapturer());
+ }
+
+ public void createNonExistingCamera() {
+ try {
+ disposeCapturer(createCapturer("non-existing camera"));
+ } catch (IllegalArgumentException e) {
+ return;
+ }
+
+ fail("Expected illegal argument exception when creating non-existing camera.");
+ }
+
+ public void createCapturerAndRender() throws InterruptedException {
+ createCapturerAndRender("");
+ }
+
+ public void createFrontFacingCapturerAndRender() throws InterruptedException {
+ createCapturerAndRender(testObjectFactory.getNameOfFrontFacingDevice());
+ }
+
+ public void createBackFacingCapturerAndRender() throws InterruptedException {
+ createCapturerAndRender(testObjectFactory.getNameOfBackFacingDevice());
+ }
+
+ public void switchCamera() throws InterruptedException {
+ if (!testObjectFactory.haveTwoCameras()) {
+ Logging.w(TAG,
+ "Skipping test switch video capturer because the device doesn't have two cameras.");
+ return;
+ }
+
+ final CapturerInstance capturerInstance = createCapturer();
+ final VideoTrackWithRenderer videoTrackWithRenderer =
+ createVideoTrackWithRenderer(capturerInstance.capturer);
// Array with one element to avoid final problem in nested classes.
final boolean[] cameraSwitchSuccessful = new boolean[1];
final CountDownLatch barrier = new CountDownLatch(1);
- capturer.switchCamera(new VideoCapturerAndroid.CameraSwitchHandler() {
+ capturerInstance.capturer.switchCamera(new CameraVideoCapturer.CameraSwitchHandler() {
@Override
public void onCameraSwitchDone(boolean isFrontCamera) {
cameraSwitchSuccessful[0] = true;
@@ -264,265 +419,134 @@ public class VideoCapturerAndroidTestFixtures {
barrier.await();
// Check result.
- if (HaveTwoCameras()) {
- assertTrue(cameraSwitchSuccessful[0]);
- } else {
- assertFalse(cameraSwitchSuccessful[0]);
- }
+ assertTrue(cameraSwitchSuccessful[0]);
// Ensure that frames are received.
- RendererCallbacks callbacks = new RendererCallbacks();
- track.addRenderer(new VideoRenderer(callbacks));
- assertTrue(callbacks.WaitForNextFrameToRender() > 0);
- track.dispose();
- source.dispose();
- factory.dispose();
+ assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
+ disposeVideoTrackWithRenderer(videoTrackWithRenderer);
+ disposeCapturer(capturerInstance);
}
- static public void cameraEventsInvoked(VideoCapturerAndroid capturer, CameraEvents events,
- Context appContext) throws InterruptedException {
- final List<CaptureFormat> formats = capturer.getSupportedFormats();
- final CameraEnumerationAndroid.CaptureFormat format = formats.get(0);
-
- final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
- "SurfaceTextureHelper test" /* threadName */, null /* sharedContext */);
- final FakeCapturerObserver observer = new FakeCapturerObserver();
- capturer.startCapture(format.width, format.height, format.framerate.max,
- surfaceTextureHelper, appContext, observer);
+ public void cameraEventsInvoked() throws InterruptedException {
+ final CapturerInstance capturerInstance = createCapturer();
+ startCapture(capturerInstance);
// Make sure camera is started and first frame is received and then stop it.
- assertTrue(observer.WaitForCapturerToStart());
- observer.WaitForNextCapturedFrame();
- capturer.stopCapture();
- if (capturer.isCapturingToTexture()) {
- surfaceTextureHelper.returnTextureFrame();
- }
- release(capturer);
- surfaceTextureHelper.dispose();
+ assertTrue(capturerInstance.observer.waitForCapturerToStart());
+ capturerInstance.observer.waitForNextCapturedFrame();
+ capturerInstance.capturer.stopCapture();
+ disposeCapturer(capturerInstance);
- assertTrue(events.onCameraOpeningCalled);
- assertTrue(events.onFirstFrameAvailableCalled);
+ assertTrue(capturerInstance.cameraEvents.onCameraOpeningCalled);
+ assertTrue(capturerInstance.cameraEvents.onFirstFrameAvailableCalled);
}
- static public void cameraCallsAfterStop(
- VideoCapturerAndroid capturer, Context appContext) throws InterruptedException {
- final List<CaptureFormat> formats = capturer.getSupportedFormats();
- final CameraEnumerationAndroid.CaptureFormat format = formats.get(0);
-
- final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
- "SurfaceTextureHelper test" /* threadName */, null /* sharedContext */);
- final FakeCapturerObserver observer = new FakeCapturerObserver();
- capturer.startCapture(format.width, format.height, format.framerate.max,
- surfaceTextureHelper, appContext, observer);
+ public void cameraCallsAfterStop() throws InterruptedException {
+ final CapturerInstance capturerInstance = createCapturer();
+ startCapture(capturerInstance);
// Make sure camera is started and then stop it.
- assertTrue(observer.WaitForCapturerToStart());
- capturer.stopCapture();
- if (capturer.isCapturingToTexture()) {
- surfaceTextureHelper.returnTextureFrame();
- }
+ assertTrue(capturerInstance.observer.waitForCapturerToStart());
+ capturerInstance.capturer.stopCapture();
+ capturerInstance.surfaceTextureHelper.returnTextureFrame();
// We can't change |capturer| at this point, but we should not crash.
- capturer.switchCamera(null);
- capturer.onOutputFormatRequest(640, 480, 15);
- capturer.changeCaptureFormat(640, 480, 15);
+ capturerInstance.capturer.switchCamera(null /* switchEventsHandler */);
+ capturerInstance.capturer.onOutputFormatRequest(640, 480, 15);
+ capturerInstance.capturer.changeCaptureFormat(640, 480, 15);
- release(capturer);
- surfaceTextureHelper.dispose();
+ disposeCapturer(capturerInstance);
}
- static public void stopRestartVideoSource(VideoCapturerAndroid capturer)
- throws InterruptedException {
- PeerConnectionFactory factory = new PeerConnectionFactory(null /* options */);
- VideoSource source =
- factory.createVideoSource(capturer, new MediaConstraints());
- VideoTrack track = factory.createVideoTrack("dummy", source);
- RendererCallbacks callbacks = new RendererCallbacks();
- track.addRenderer(new VideoRenderer(callbacks));
- assertTrue(callbacks.WaitForNextFrameToRender() > 0);
- assertEquals(MediaSource.State.LIVE, source.state());
-
- source.stop();
- assertEquals(MediaSource.State.ENDED, source.state());
-
- source.restart();
- assertTrue(callbacks.WaitForNextFrameToRender() > 0);
- assertEquals(MediaSource.State.LIVE, source.state());
- track.dispose();
- source.dispose();
- factory.dispose();
- }
-
- static public void startStopWithDifferentResolutions(VideoCapturerAndroid capturer,
- Context appContext) throws InterruptedException {
- final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
- "SurfaceTextureHelper test" /* threadName */, null /* sharedContext */);
- FakeCapturerObserver observer = new FakeCapturerObserver();
- List<CaptureFormat> formats = capturer.getSupportedFormats();
+ public void stopRestartVideoSource() throws InterruptedException {
+ final CapturerInstance capturerInstance = createCapturer();
+ final VideoTrackWithRenderer videoTrackWithRenderer =
+ createVideoTrackWithRenderer(capturerInstance.capturer);
+
+ assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
+ assertEquals(MediaSource.State.LIVE, videoTrackWithRenderer.source.state());
+
+ videoTrackWithRenderer.source.stop();
+ assertEquals(MediaSource.State.ENDED, videoTrackWithRenderer.source.state());
+
+ videoTrackWithRenderer.source.restart();
+ assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
+ assertEquals(MediaSource.State.LIVE, videoTrackWithRenderer.source.state());
+
+ disposeVideoTrackWithRenderer(videoTrackWithRenderer);
+ disposeCapturer(capturerInstance);
+ }
+
+ public void startStopWithDifferentResolutions() throws InterruptedException {
+ final CapturerInstance capturerInstance = createCapturer();
for(int i = 0; i < 3 ; ++i) {
- CameraEnumerationAndroid.CaptureFormat format = formats.get(i);
- capturer.startCapture(format.width, format.height, format.framerate.max,
- surfaceTextureHelper, appContext, observer);
- assertTrue(observer.WaitForCapturerToStart());
- observer.WaitForNextCapturedFrame();
+ startCapture(capturerInstance, i);
+ assertTrue(capturerInstance.observer.waitForCapturerToStart());
+ capturerInstance.observer.waitForNextCapturedFrame();
// Check the frame size. The actual width and height depend on how the capturer is mounted.
- final boolean identicalResolution = (observer.frameWidth() == format.width
- && observer.frameHeight() == format.height);
- final boolean flippedResolution = (observer.frameWidth() == format.height
- && observer.frameHeight() == format.width);
+ final boolean identicalResolution = (
+ capturerInstance.observer.frameWidth() == capturerInstance.format.width
+ && capturerInstance.observer.frameHeight() == capturerInstance.format.height);
+ final boolean flippedResolution = (
+ capturerInstance.observer.frameWidth() == capturerInstance.format.height
+ && capturerInstance.observer.frameHeight() == capturerInstance.format.width);
if (!identicalResolution && !flippedResolution) {
- fail("Wrong resolution, got: " + observer.frameWidth() + "x" + observer.frameHeight()
- + " expected: " + format.width + "x" + format.height + " or " + format.height + "x"
- + format.width);
+ fail("Wrong resolution, got: "
+ + capturerInstance.observer.frameWidth() + "x" + capturerInstance.observer.frameHeight()
+ + " expected: "+ capturerInstance.format.width + "x" + capturerInstance.format.height
+ + " or " + capturerInstance.format.height + "x" + capturerInstance.format.width);
}
- if (capturer.isCapturingToTexture()) {
- assertEquals(0, observer.frameSize());
+ if (testObjectFactory.isCapturingToTexture()) {
+ assertEquals(0, capturerInstance.observer.frameSize());
} else {
- assertTrue(format.frameSize() <= observer.frameSize());
- }
- capturer.stopCapture();
- if (capturer.isCapturingToTexture()) {
- surfaceTextureHelper.returnTextureFrame();
+ assertTrue(capturerInstance.format.frameSize() <= capturerInstance.observer.frameSize());
}
+ capturerInstance.capturer.stopCapture();
+ capturerInstance.surfaceTextureHelper.returnTextureFrame();
}
- release(capturer);
- surfaceTextureHelper.dispose();
- }
-
- static void waitUntilIdle(VideoCapturerAndroid capturer) throws InterruptedException {
- final CountDownLatch barrier = new CountDownLatch(1);
- capturer.getCameraThreadHandler().post(new Runnable() {
- @Override public void run() {
- barrier.countDown();
- }
- });
- barrier.await();
- }
-
- static public void startWhileCameraIsAlreadyOpen(
- VideoCapturerAndroid capturer, Context appContext) throws InterruptedException {
- final List<CaptureFormat> formats = capturer.getSupportedFormats();
- final CameraEnumerationAndroid.CaptureFormat format = formats.get(0);
- android.hardware.Camera camera = android.hardware.Camera.open(capturer.getCurrentCameraId());
-
- final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
- "SurfaceTextureHelper test" /* threadName */, null /* sharedContext */);
- final FakeCapturerObserver observer = new FakeCapturerObserver();
- capturer.startCapture(format.width, format.height, format.framerate.max,
- surfaceTextureHelper, appContext, observer);
-
- if (android.os.Build.VERSION.SDK_INT > android.os.Build.VERSION_CODES.LOLLIPOP_MR1) {
- // The first opened camera client will be evicted.
- assertTrue(observer.WaitForCapturerToStart());
- capturer.stopCapture();
- } else {
- assertFalse(observer.WaitForCapturerToStart());
- }
-
- release(capturer);
- camera.release();
- surfaceTextureHelper.dispose();
- }
-
- static public void startWhileCameraIsAlreadyOpenAndCloseCamera(
- VideoCapturerAndroid capturer, Context appContext) throws InterruptedException {
- final PeerConnectionFactory factory = new PeerConnectionFactory(null /* options */);
- final List<CaptureFormat> formats = capturer.getSupportedFormats();
- final CameraEnumerationAndroid.CaptureFormat format = formats.get(0);
- android.hardware.Camera camera = android.hardware.Camera.open(capturer.getCurrentCameraId());
-
- final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
- "SurfaceTextureHelper test" /* threadName */, null /* sharedContext */);
- final VideoSource source = factory.createVideoSource(capturer, new MediaConstraints());
- final VideoTrack track = factory.createVideoTrack("dummy", source);
- final RendererCallbacks callbacks = new RendererCallbacks();
- track.addRenderer(new VideoRenderer(callbacks));
- waitUntilIdle(capturer);
-
- camera.release();
-
- // Make sure camera is started and first frame is received and then stop it.
- callbacks.WaitForNextFrameToRender();
- capturer.stopCapture();
- release(capturer);
- surfaceTextureHelper.dispose();
+ disposeCapturer(capturerInstance);
}
- static public void startWhileCameraIsAlreadyOpenAndStop(
- VideoCapturerAndroid capturer, Context appContext) throws InterruptedException {
- final List<CaptureFormat> formats = capturer.getSupportedFormats();
- final CameraEnumerationAndroid.CaptureFormat format = formats.get(0);
- android.hardware.Camera camera = android.hardware.Camera.open(capturer.getCurrentCameraId());
+ public void returnBufferLate() throws InterruptedException {
+ final CapturerInstance capturerInstance = createCapturer();
+ startCapture(capturerInstance);
+ assertTrue(capturerInstance.observer.waitForCapturerToStart());
- final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
- "SurfaceTextureHelper test" /* threadName */, null /* sharedContext */);
- final FakeCapturerObserver observer = new FakeCapturerObserver();
- capturer.startCapture(format.width, format.height, format.framerate.max,
- surfaceTextureHelper, appContext, observer);
- capturer.stopCapture();
- release(capturer);
- camera.release();
- surfaceTextureHelper.dispose();
- }
-
- static public void returnBufferLate(VideoCapturerAndroid capturer,
- Context appContext) throws InterruptedException {
- final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
- "SurfaceTextureHelper test" /* threadName */, null /* sharedContext */);
- FakeCapturerObserver observer = new FakeCapturerObserver();
-
- List<CaptureFormat> formats = capturer.getSupportedFormats();
- CameraEnumerationAndroid.CaptureFormat format = formats.get(0);
- capturer.startCapture(format.width, format.height, format.framerate.max,
- surfaceTextureHelper, appContext, observer);
- assertTrue(observer.WaitForCapturerToStart());
-
- observer.WaitForNextCapturedFrame();
- capturer.stopCapture();
- List<Long> listOftimestamps = observer.getCopyAndResetListOftimeStamps();
+ capturerInstance.observer.waitForNextCapturedFrame();
+ capturerInstance.capturer.stopCapture();
+ List<Long> listOftimestamps = capturerInstance.observer.getCopyAndResetListOftimeStamps();
assertTrue(listOftimestamps.size() >= 1);
- format = formats.get(1);
- capturer.startCapture(format.width, format.height, format.framerate.max,
- surfaceTextureHelper, appContext, observer);
- observer.WaitForCapturerToStart();
- if (capturer.isCapturingToTexture()) {
- surfaceTextureHelper.returnTextureFrame();
- }
+ startCapture(capturerInstance, 1);
+ capturerInstance.observer.waitForCapturerToStart();
+ capturerInstance.surfaceTextureHelper.returnTextureFrame();
- observer.WaitForNextCapturedFrame();
- capturer.stopCapture();
+ capturerInstance.observer.waitForNextCapturedFrame();
+ capturerInstance.capturer.stopCapture();
- listOftimestamps = observer.getCopyAndResetListOftimeStamps();
+ listOftimestamps = capturerInstance.observer.getCopyAndResetListOftimeStamps();
assertTrue(listOftimestamps.size() >= 1);
- if (capturer.isCapturingToTexture()) {
- surfaceTextureHelper.returnTextureFrame();
- }
- release(capturer);
- surfaceTextureHelper.dispose();
+ disposeCapturer(capturerInstance);
}
- static public void returnBufferLateEndToEnd(VideoCapturerAndroid capturer)
+ public void returnBufferLateEndToEnd()
throws InterruptedException {
- final PeerConnectionFactory factory = new PeerConnectionFactory(null /* options */);
- final VideoSource source = factory.createVideoSource(capturer, new MediaConstraints());
- final VideoTrack track = factory.createVideoTrack("dummy", source);
- final FakeAsyncRenderer renderer = new FakeAsyncRenderer();
-
- track.addRenderer(new VideoRenderer(renderer));
+ final CapturerInstance capturerInstance = createCapturer();
+ final VideoTrackWithRenderer videoTrackWithRenderer =
+ createVideoTrackWithFakeAsyncRenderer(capturerInstance.capturer);
// Wait for at least one frame that has not been returned.
- assertFalse(renderer.waitForPendingFrames().isEmpty());
+ assertFalse(videoTrackWithRenderer.fakeAsyncRenderer.waitForPendingFrames().isEmpty());
- capturer.stopCapture();
+ capturerInstance.capturer.stopCapture();
// Dispose everything.
- track.dispose();
- source.dispose();
- factory.dispose();
+ disposeVideoTrackWithRenderer(videoTrackWithRenderer);
+ disposeCapturer(capturerInstance);
// Return the frame(s), on a different thread out of spite.
- final List<I420Frame> pendingFrames = renderer.waitForPendingFrames();
+ final List<I420Frame> pendingFrames =
+ videoTrackWithRenderer.fakeAsyncRenderer.waitForPendingFrames();
final Thread returnThread = new Thread(new Runnable() {
@Override
public void run() {
@@ -535,70 +559,109 @@ public class VideoCapturerAndroidTestFixtures {
returnThread.join();
}
- static public void cameraFreezedEventOnBufferStarvationUsingTextures(
- VideoCapturerAndroid capturer,
- CameraEvents events, Context appContext) throws InterruptedException {
- assertTrue("Not capturing to textures.", capturer.isCapturingToTexture());
-
- final List<CaptureFormat> formats = capturer.getSupportedFormats();
- final CameraEnumerationAndroid.CaptureFormat format = formats.get(0);
-
- final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
- "SurfaceTextureHelper test" /* threadName */, null /* sharedContext */);
- final FakeCapturerObserver observer = new FakeCapturerObserver();
- capturer.startCapture(format.width, format.height, format.framerate.max,
- surfaceTextureHelper, appContext, observer);
+ public void cameraFreezedEventOnBufferStarvation() throws InterruptedException {
+ final CapturerInstance capturerInstance = createCapturer();
+ startCapture(capturerInstance);
// Make sure camera is started.
- assertTrue(observer.WaitForCapturerToStart());
+ assertTrue(capturerInstance.observer.waitForCapturerToStart());
// Since we don't return the buffer, we should get a starvation message if we are
// capturing to a texture.
assertEquals("Camera failure. Client must return video buffers.",
- events.WaitForCameraFreezed());
-
- capturer.stopCapture();
- if (capturer.isCapturingToTexture()) {
- surfaceTextureHelper.returnTextureFrame();
- }
+ capturerInstance.cameraEvents.waitForCameraFreezed());
- release(capturer);
- surfaceTextureHelper.dispose();
+ capturerInstance.capturer.stopCapture();
+ disposeCapturer(capturerInstance);
}
- static public void scaleCameraOutput(VideoCapturerAndroid capturer) throws InterruptedException {
- PeerConnectionFactory factory = new PeerConnectionFactory(null /* options */);
- VideoSource source =
- factory.createVideoSource(capturer, new MediaConstraints());
- VideoTrack track = factory.createVideoTrack("dummy", source);
- RendererCallbacks renderer = new RendererCallbacks();
- track.addRenderer(new VideoRenderer(renderer));
- assertTrue(renderer.WaitForNextFrameToRender() > 0);
+ // Renders frames at the camera's initial resolution, then requests half-resolution
+ // output via onOutputFormatRequest() and asserts the scaled resolution is observed
+ // within at most 30 subsequently rendered frames.
+ public void scaleCameraOutput() throws InterruptedException {
+ final CapturerInstance capturerInstance = createCapturer();
+ final VideoTrackWithRenderer videoTrackWithRenderer =
+ createVideoTrackWithRenderer(capturerInstance.capturer);
+ assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
- final int startWidth = renderer.frameWidth();
- final int startHeight = renderer.frameHeight();
+ final int startWidth = videoTrackWithRenderer.rendererCallbacks.frameWidth();
+ final int startHeight = videoTrackWithRenderer.rendererCallbacks.frameHeight();
final int frameRate = 30;
final int scaledWidth = startWidth / 2;
final int scaledHeight = startHeight / 2;
// Request the captured frames to be scaled.
- capturer.onOutputFormatRequest(scaledWidth, scaledHeight, frameRate);
+ capturerInstance.capturer.onOutputFormatRequest(scaledWidth, scaledHeight, frameRate);
boolean gotExpectedResolution = false;
int numberOfInspectedFrames = 0;
+ // The format change is asynchronous; poll rendered frames until the scaled size
+ // shows up, giving up after 30 frames.
do {
- renderer.WaitForNextFrameToRender();
+ videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender();
++numberOfInspectedFrames;
- gotExpectedResolution = (renderer.frameWidth() == scaledWidth
- && renderer.frameHeight() == scaledHeight);
+ gotExpectedResolution = (videoTrackWithRenderer.rendererCallbacks.frameWidth() == scaledWidth
+ && videoTrackWithRenderer.rendererCallbacks.frameHeight() == scaledHeight);
} while (!gotExpectedResolution && numberOfInspectedFrames < 30);
- source.stop();
- track.dispose();
- source.dispose();
- factory.dispose();
+ // Dispose before asserting so resources are released even when the check fails
+ // only the assert below reports the failure.
+ disposeVideoTrackWithRenderer(videoTrackWithRenderer);
+ disposeCapturer(capturerInstance);
assertTrue(gotExpectedResolution);
}
+ // Verifies the behavior of starting capture while another client already holds the
+ // same (back-facing) camera: on SDK > LOLLIPOP_MR1 the competing client is evicted
+ // and our capturer starts; on older SDKs the start is expected to fail.
+ public void startWhileCameraIsAlreadyOpen() throws InterruptedException {
+ final String cameraName = testObjectFactory.getNameOfBackFacingDevice();
+ // At this point camera is not actually opened.
+ final CapturerInstance capturerInstance = createCapturer(cameraName);
+
+ // Grab the camera directly (outside the capturer) to simulate a competing client.
+ final Object competingCamera = testObjectFactory.rawOpenCamera(cameraName);
+
+ startCapture(capturerInstance);
+
+ if (android.os.Build.VERSION.SDK_INT > android.os.Build.VERSION_CODES.LOLLIPOP_MR1) {
+ // The first opened camera client will be evicted.
+ assertTrue(capturerInstance.observer.waitForCapturerToStart());
+ capturerInstance.capturer.stopCapture();
+ } else {
+ assertFalse(capturerInstance.observer.waitForCapturerToStart());
+ }
+
+ testObjectFactory.rawCloseCamera(competingCamera);
+ disposeCapturer(capturerInstance);
+ }
+
+ // Verifies that the capturer recovers when a competing client holds the camera at
+ // start time and then releases it: after the competing camera is closed, a frame
+ // must eventually be rendered.
+ public void startWhileCameraIsAlreadyOpenAndCloseCamera() throws InterruptedException {
+ final String cameraName = testObjectFactory.getNameOfBackFacingDevice();
+ // At this point camera is not actually opened.
+ final CapturerInstance capturerInstance = createCapturer(cameraName);
+
+ Logging.d(TAG, "startWhileCameraIsAlreadyOpenAndCloseCamera: Opening competing camera.");
+ final Object competingCamera = testObjectFactory.rawOpenCamera(cameraName);
+
+ Logging.d(TAG, "startWhileCameraIsAlreadyOpenAndCloseCamera: Opening camera.");
+ final VideoTrackWithRenderer videoTrackWithRenderer =
+ createVideoTrackWithRenderer(capturerInstance.capturer);
+ // Let any pending open/retry work settle before releasing the competing camera.
+ waitUntilIdle(capturerInstance);
+
+ Logging.d(TAG, "startWhileCameraIsAlreadyOpenAndCloseCamera: Closing competing camera.");
+ testObjectFactory.rawCloseCamera(competingCamera);
+
+ // Make sure camera is started and first frame is received and then stop it.
+ Logging.d(TAG, "startWhileCameraIsAlreadyOpenAndCloseCamera: Waiting for capture to start.");
+ videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender();
+ Logging.d(TAG, "startWhileCameraIsAlreadyOpenAndCloseCamera: Stopping capture.");
+ capturerInstance.capturer.stopCapture();
+ // NOTE(review): videoTrackWithRenderer is not disposed here, unlike
+ // scaleCameraOutput() — confirm whether disposeVideoTrackWithRenderer() is needed.
+ disposeCapturer(capturerInstance);
+ }
+
+ // Verifies that stopCapture() is safe to call while a competing client still holds
+ // the camera, i.e. while our capturer's open may still be pending.
+ public void startWhileCameraIsAlreadyOpenAndStop() throws InterruptedException {
+ final String cameraName = testObjectFactory.getNameOfBackFacingDevice();
+ // At this point camera is not actually opened.
+ final CapturerInstance capturerInstance = createCapturer(cameraName);
+
+ final Object competingCamera = testObjectFactory.rawOpenCamera(cameraName);
+
+ startCapture(capturerInstance);
+
+ // Deliberately no waitForCapturerToStart(): stop is exercised even though the
+ // capturer may not have managed to open the camera yet — TODO confirm intent.
+ capturerInstance.capturer.stopCapture();
+ disposeCapturer(capturerInstance);
+
+ testObjectFactory.rawCloseCamera(competingCamera);
+ }
}

Powered by Google App Engine
This is Rietveld 408576698