Chromium Code Reviews
| Index: webrtc/api/androidtests/src/org/webrtc/VideoCapturerAndroidTestFixtures.java |
| diff --git a/webrtc/api/androidtests/src/org/webrtc/VideoCapturerAndroidTestFixtures.java b/webrtc/api/androidtests/src/org/webrtc/VideoCapturerAndroidTestFixtures.java |
| index ae82a9fef6f881d96f79e76b972adc0d375e3dd7..fb2d5843be580b9733be8ea9aecb6a53024834bd 100644 |
| --- a/webrtc/api/androidtests/src/org/webrtc/VideoCapturerAndroidTestFixtures.java |
| +++ b/webrtc/api/androidtests/src/org/webrtc/VideoCapturerAndroidTestFixtures.java |
| @@ -229,6 +229,7 @@ public class VideoCapturerAndroidTestFixtures { |
| static public void startCapturerAndRender(VideoCapturerAndroid capturer) |
| throws InterruptedException { |
| PeerConnectionFactory factory = new PeerConnectionFactory(); |
| + factory.setVideoHwAccelerationOptions(null /* localEglContext */, null /* remoteEglContext */); |
perkj_webrtc
2016/03/11 15:25:02
why is this needed?
magjed_webrtc
2016/03/13 11:14:34
I wanted to make sure texture capture is enabled,
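For context, here is a minimal sketch of the factory setup this hunk adds, showing both the null-context form used by the tests and the shared-context form an application would typically use. It assumes the 2016-era org.webrtc Java API (the setVideoHwAccelerationOptions overload taking EglBase.Context arguments, EglBase.create(), and a prior PeerConnectionFactory.initializeAndroidGlobals(...) call); the helper name createFactoryForTextureCapture is illustrative and not part of this CL.

import org.webrtc.EglBase;
import org.webrtc.PeerConnectionFactory;

// Illustrative sketch only, not part of the CL.
static PeerConnectionFactory createFactoryForTextureCapture(EglBase eglBase) {
  PeerConnectionFactory factory = new PeerConnectionFactory();
  if (eglBase != null) {
    // Typical application setup: share a real EGL context so the capturer,
    // hardware codecs and renderer can exchange texture frames.
    factory.setVideoHwAccelerationOptions(
        eglBase.getEglBaseContext(), eglBase.getEglBaseContext());
  } else {
    // What the test fixtures do: pass null contexts. Per the reply above,
    // the call is made to ensure texture capture is enabled in the tests.
    factory.setVideoHwAccelerationOptions(
        null /* localEglContext */, null /* remoteEglContext */);
  }
  return factory;
}

The fixtures in this patch only use the null-context variant, matching the lines added in this hunk and the ones below.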
| VideoSource source = |
| factory.createVideoSource(capturer, new MediaConstraints()); |
| VideoTrack track = factory.createVideoTrack("dummy", source); |
| @@ -243,6 +244,7 @@ public class VideoCapturerAndroidTestFixtures { |
| static public void switchCamera(VideoCapturerAndroid capturer) throws InterruptedException { |
| PeerConnectionFactory factory = new PeerConnectionFactory(); |
| + factory.setVideoHwAccelerationOptions(null /* localEglContext */, null /* remoteEglContext */); |
perkj_webrtc
2016/03/11 15:25:02
ditto?
| VideoSource source = |
| factory.createVideoSource(capturer, new MediaConstraints()); |
| VideoTrack track = factory.createVideoTrack("dummy", source); |
| @@ -286,17 +288,20 @@ public class VideoCapturerAndroidTestFixtures { |
| final List<CaptureFormat> formats = capturer.getSupportedFormats(); |
| final CameraEnumerationAndroid.CaptureFormat format = formats.get(0); |
| + final SurfaceTextureHelper surfaceTextureHelper = |
| + SurfaceTextureHelper.create(null /* sharedContext */); |
| final FakeCapturerObserver observer = new FakeCapturerObserver(); |
| capturer.startCapture(format.width, format.height, format.maxFramerate, |
| - appContext, observer); |
| + surfaceTextureHelper, appContext, observer); |
| // Make sure camera is started and first frame is received and then stop it. |
| assertTrue(observer.WaitForCapturerToStart()); |
| observer.WaitForNextCapturedFrame(); |
| capturer.stopCapture(); |
| if (capturer.isCapturingToTexture()) { |
| - capturer.surfaceHelper.returnTextureFrame(); |
| + surfaceTextureHelper.returnTextureFrame(); |
| } |
| release(capturer); |
| + surfaceTextureHelper.dispose(); |
| assertTrue(events.onCameraOpeningCalled); |
| assertTrue(events.onFirstFrameAvailableCalled); |
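The hunk above establishes the SurfaceTextureHelper ownership pattern that the rest of the patch repeats: the test creates the helper, passes it to startCapture(), returns any outstanding texture frame after stopCapture(), releases the capturer, and disposes the helper last. As a rough condensation of that lifecycle, here is a hypothetical helper (the method name is not part of the CL), assumed to live inside VideoCapturerAndroidTestFixtures so that FakeCapturerObserver, release() and the JUnit asserts already used in this file are in scope:

// Hypothetical condensation of the lifecycle used by the fixtures above.
static void captureOneFrameAndTearDown(VideoCapturerAndroid capturer,
    Context appContext) throws InterruptedException {
  final CameraEnumerationAndroid.CaptureFormat format =
      capturer.getSupportedFormats().get(0);
  // The test owns the helper and creates it up front.
  final SurfaceTextureHelper surfaceTextureHelper =
      SurfaceTextureHelper.create(null /* sharedContext */);
  final FakeCapturerObserver observer = new FakeCapturerObserver();
  // The helper is handed to startCapture() instead of the capturer creating
  // its own internal surfaceHelper.
  capturer.startCapture(format.width, format.height, format.maxFramerate,
      surfaceTextureHelper, appContext, observer);
  assertTrue(observer.WaitForCapturerToStart());
  observer.WaitForNextCapturedFrame();
  capturer.stopCapture();
  // Texture frames must be returned to the helper that delivered them.
  if (capturer.isCapturingToTexture()) {
    surfaceTextureHelper.returnTextureFrame();
  }
  release(capturer);
  // Dispose the helper only after the capturer has been released.
  surfaceTextureHelper.dispose();
}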
| @@ -307,14 +312,16 @@ public class VideoCapturerAndroidTestFixtures { |
| final List<CaptureFormat> formats = capturer.getSupportedFormats(); |
| final CameraEnumerationAndroid.CaptureFormat format = formats.get(0); |
| + final SurfaceTextureHelper surfaceTextureHelper = |
| + SurfaceTextureHelper.create(null /* sharedContext */); |
| final FakeCapturerObserver observer = new FakeCapturerObserver(); |
| capturer.startCapture(format.width, format.height, format.maxFramerate, |
| - appContext, observer); |
| + surfaceTextureHelper, appContext, observer); |
| // Make sure camera is started and then stop it. |
| assertTrue(observer.WaitForCapturerToStart()); |
| capturer.stopCapture(); |
| if (capturer.isCapturingToTexture()) { |
| - capturer.surfaceHelper.returnTextureFrame(); |
| + surfaceTextureHelper.returnTextureFrame(); |
| } |
| // We can't change |capturer| at this point, but we should not crash. |
| @@ -323,11 +330,13 @@ public class VideoCapturerAndroidTestFixtures { |
| capturer.changeCaptureFormat(640, 480, 15); |
| release(capturer); |
| + surfaceTextureHelper.dispose(); |
| } |
| static public void stopRestartVideoSource(VideoCapturerAndroid capturer) |
| throws InterruptedException { |
| PeerConnectionFactory factory = new PeerConnectionFactory(); |
| + factory.setVideoHwAccelerationOptions(null /* localEglContext */, null /* remoteEglContext */); |
| VideoSource source = |
| factory.createVideoSource(capturer, new MediaConstraints()); |
| VideoTrack track = factory.createVideoTrack("dummy", source); |
| @@ -350,13 +359,15 @@ public class VideoCapturerAndroidTestFixtures { |
| static public void startStopWithDifferentResolutions(VideoCapturerAndroid capturer, |
| Context appContext) throws InterruptedException { |
| + final SurfaceTextureHelper surfaceTextureHelper = |
| + SurfaceTextureHelper.create(null /* sharedContext */); |
| FakeCapturerObserver observer = new FakeCapturerObserver(); |
| List<CaptureFormat> formats = capturer.getSupportedFormats(); |
| for(int i = 0; i < 3 ; ++i) { |
| CameraEnumerationAndroid.CaptureFormat format = formats.get(i); |
| capturer.startCapture(format.width, format.height, format.maxFramerate, |
| - appContext, observer); |
| + surfaceTextureHelper, appContext, observer); |
| assertTrue(observer.WaitForCapturerToStart()); |
| observer.WaitForNextCapturedFrame(); |
| @@ -378,10 +389,11 @@ public class VideoCapturerAndroidTestFixtures { |
| } |
| capturer.stopCapture(); |
| if (capturer.isCapturingToTexture()) { |
| - capturer.surfaceHelper.returnTextureFrame(); |
| + surfaceTextureHelper.returnTextureFrame(); |
| } |
| } |
| release(capturer); |
| + surfaceTextureHelper.dispose(); |
| } |
| static void waitUntilIdle(VideoCapturerAndroid capturer) throws InterruptedException { |
| @@ -400,9 +412,11 @@ public class VideoCapturerAndroidTestFixtures { |
| final CameraEnumerationAndroid.CaptureFormat format = formats.get(0); |
| Camera camera = Camera.open(capturer.getCurrentCameraId()); |
| + final SurfaceTextureHelper surfaceTextureHelper = |
| + SurfaceTextureHelper.create(null /* sharedContext */); |
| final FakeCapturerObserver observer = new FakeCapturerObserver(); |
| capturer.startCapture(format.width, format.height, format.maxFramerate, |
| - appContext, observer); |
| + surfaceTextureHelper, appContext, observer); |
| if (android.os.Build.VERSION.SDK_INT > android.os.Build.VERSION_CODES.LOLLIPOP_MR1) { |
| // The first opened camera client will be evicted. |
| @@ -414,6 +428,7 @@ public class VideoCapturerAndroidTestFixtures { |
| release(capturer); |
| camera.release(); |
| + surfaceTextureHelper.dispose(); |
| } |
| static public void startWhileCameraIsAlreadyOpenAndCloseCamera( |
| @@ -422,9 +437,11 @@ public class VideoCapturerAndroidTestFixtures { |
| final CameraEnumerationAndroid.CaptureFormat format = formats.get(0); |
| Camera camera = Camera.open(capturer.getCurrentCameraId()); |
| + final SurfaceTextureHelper surfaceTextureHelper = |
| + SurfaceTextureHelper.create(null /* sharedContext */); |
| final FakeCapturerObserver observer = new FakeCapturerObserver(); |
| capturer.startCapture(format.width, format.height, format.maxFramerate, |
| - appContext, observer); |
| + surfaceTextureHelper, appContext, observer); |
| waitUntilIdle(capturer); |
| camera.release(); |
| @@ -434,9 +451,10 @@ public class VideoCapturerAndroidTestFixtures { |
| observer.WaitForNextCapturedFrame(); |
| capturer.stopCapture(); |
| if (capturer.isCapturingToTexture()) { |
| - capturer.surfaceHelper.returnTextureFrame(); |
| + surfaceTextureHelper.returnTextureFrame(); |
| } |
| release(capturer); |
| + surfaceTextureHelper.dispose(); |
| } |
| static public void startWhileCameraIsAlreadyOpenAndStop( |
| @@ -445,22 +463,27 @@ public class VideoCapturerAndroidTestFixtures { |
| final CameraEnumerationAndroid.CaptureFormat format = formats.get(0); |
| Camera camera = Camera.open(capturer.getCurrentCameraId()); |
| + final SurfaceTextureHelper surfaceTextureHelper = |
| + SurfaceTextureHelper.create(null /* sharedContext */); |
| final FakeCapturerObserver observer = new FakeCapturerObserver(); |
| capturer.startCapture(format.width, format.height, format.maxFramerate, |
| - appContext, observer); |
| + surfaceTextureHelper, appContext, observer); |
| capturer.stopCapture(); |
| release(capturer); |
| camera.release(); |
| + surfaceTextureHelper.dispose(); |
| } |
| static public void returnBufferLate(VideoCapturerAndroid capturer, |
| Context appContext) throws InterruptedException { |
| + final SurfaceTextureHelper surfaceTextureHelper = |
| + SurfaceTextureHelper.create(null /* sharedContext */); |
| FakeCapturerObserver observer = new FakeCapturerObserver(); |
| List<CaptureFormat> formats = capturer.getSupportedFormats(); |
| CameraEnumerationAndroid.CaptureFormat format = formats.get(0); |
| capturer.startCapture(format.width, format.height, format.maxFramerate, |
| - appContext, observer); |
| + surfaceTextureHelper, appContext, observer); |
| assertTrue(observer.WaitForCapturerToStart()); |
| observer.WaitForNextCapturedFrame(); |
| @@ -470,10 +493,10 @@ public class VideoCapturerAndroidTestFixtures { |
| format = formats.get(1); |
| capturer.startCapture(format.width, format.height, format.maxFramerate, |
| - appContext, observer); |
| + surfaceTextureHelper, appContext, observer); |
| observer.WaitForCapturerToStart(); |
| if (capturer.isCapturingToTexture()) { |
| - capturer.surfaceHelper.returnTextureFrame(); |
| + surfaceTextureHelper.returnTextureFrame(); |
| } |
| observer.WaitForNextCapturedFrame(); |
| @@ -482,15 +505,17 @@ public class VideoCapturerAndroidTestFixtures { |
| listOftimestamps = observer.getCopyAndResetListOftimeStamps(); |
| assertTrue(listOftimestamps.size() >= 1); |
| if (capturer.isCapturingToTexture()) { |
| - capturer.surfaceHelper.returnTextureFrame(); |
| + surfaceTextureHelper.returnTextureFrame(); |
| } |
| release(capturer); |
| + surfaceTextureHelper.dispose(); |
| } |
| static public void returnBufferLateEndToEnd(VideoCapturerAndroid capturer) |
| throws InterruptedException { |
| final PeerConnectionFactory factory = new PeerConnectionFactory(); |
| + factory.setVideoHwAccelerationOptions(null /* localEglContext */, null /* remoteEglContext */); |
| final VideoSource source = factory.createVideoSource(capturer, new MediaConstraints()); |
| final VideoTrack track = factory.createVideoTrack("dummy", source); |
| final FakeAsyncRenderer renderer = new FakeAsyncRenderer(); |
| @@ -529,9 +554,11 @@ public class VideoCapturerAndroidTestFixtures { |
| final List<CaptureFormat> formats = capturer.getSupportedFormats(); |
| final CameraEnumerationAndroid.CaptureFormat format = formats.get(0); |
| + final SurfaceTextureHelper surfaceTextureHelper = |
| + SurfaceTextureHelper.create(null /* sharedContext */); |
| final FakeCapturerObserver observer = new FakeCapturerObserver(); |
| capturer.startCapture(format.width, format.height, format.maxFramerate, |
| - appContext, observer); |
| + surfaceTextureHelper, appContext, observer); |
| // Make sure camera is started. |
| assertTrue(observer.WaitForCapturerToStart()); |
| // Since we don't return the buffer, we should get a starvation message if we are |
| @@ -541,14 +568,16 @@ public class VideoCapturerAndroidTestFixtures { |
| capturer.stopCapture(); |
| if (capturer.isCapturingToTexture()) { |
| - capturer.surfaceHelper.returnTextureFrame(); |
| + surfaceTextureHelper.returnTextureFrame(); |
| } |
| release(capturer); |
| + surfaceTextureHelper.dispose(); |
| } |
| static public void scaleCameraOutput(VideoCapturerAndroid capturer) throws InterruptedException { |
| PeerConnectionFactory factory = new PeerConnectionFactory(); |
| + factory.setVideoHwAccelerationOptions(null /* localEglContext */, null /* remoteEglContext */); |
| VideoSource source = |
| factory.createVideoSource(capturer, new MediaConstraints()); |
| VideoTrack track = factory.createVideoTrack("dummy", source); |