| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright 2014 The WebRTC Project Authors. All rights reserved. | 2 * Copyright 2014 The WebRTC Project Authors. All rights reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| 11 package org.appspot.apprtc.test; | 11 package org.appspot.apprtc.test; |
| 12 | 12 |
| 13 import org.appspot.apprtc.AppRTCClient.SignalingParameters; | 13 import org.appspot.apprtc.AppRTCClient.SignalingParameters; |
| 14 import org.appspot.apprtc.PeerConnectionClient; | 14 import org.appspot.apprtc.PeerConnectionClient; |
| 15 import org.appspot.apprtc.PeerConnectionClient.PeerConnectionEvents; | 15 import org.appspot.apprtc.PeerConnectionClient.PeerConnectionEvents; |
| 16 import org.appspot.apprtc.PeerConnectionClient.PeerConnectionParameters; | 16 import org.appspot.apprtc.PeerConnectionClient.PeerConnectionParameters; |
| 17 | 17 |
| 18 import android.os.Build; | 18 import android.os.Build; |
| 19 import android.test.FlakyTest; | 19 import android.test.FlakyTest; |
| 20 import android.test.InstrumentationTestCase; | 20 import android.test.InstrumentationTestCase; |
| 21 import android.test.suitebuilder.annotation.SmallTest; | 21 import android.test.suitebuilder.annotation.SmallTest; |
| 22 import android.util.Log; | 22 import android.util.Log; |
| 23 | 23 |
| 24 import org.webrtc.Camera1Enumerator; |
| 24 import org.webrtc.Camera2Enumerator; | 25 import org.webrtc.Camera2Enumerator; |
| 26 import org.webrtc.CameraEnumerator; |
| 25 import org.webrtc.EglBase; | 27 import org.webrtc.EglBase; |
| 26 import org.webrtc.IceCandidate; | 28 import org.webrtc.IceCandidate; |
| 27 import org.webrtc.MediaCodecVideoEncoder; | 29 import org.webrtc.MediaCodecVideoEncoder; |
| 28 import org.webrtc.PeerConnection; | 30 import org.webrtc.PeerConnection; |
| 29 import org.webrtc.PeerConnectionFactory; | 31 import org.webrtc.PeerConnectionFactory; |
| 30 import org.webrtc.SessionDescription; | 32 import org.webrtc.SessionDescription; |
| 31 import org.webrtc.StatsReport; | 33 import org.webrtc.StatsReport; |
| 34 import org.webrtc.VideoCapturer; |
| 32 import org.webrtc.VideoRenderer; | 35 import org.webrtc.VideoRenderer; |
| 33 | 36 |
| 34 import java.util.LinkedList; | 37 import java.util.LinkedList; |
| 35 import java.util.List; | 38 import java.util.List; |
| 36 import java.util.concurrent.CountDownLatch; | 39 import java.util.concurrent.CountDownLatch; |
| 37 import java.util.concurrent.ExecutorService; | 40 import java.util.concurrent.ExecutorService; |
| 38 import java.util.concurrent.Executors; | 41 import java.util.concurrent.Executors; |
| 39 import java.util.concurrent.TimeUnit; | 42 import java.util.concurrent.TimeUnit; |
| 40 | 43 |
| 41 public class PeerConnectionClientTest | 44 public class PeerConnectionClientTest |
| (...skipping 186 matching lines...) |
| 228 synchronized (closeEvent) { | 231 synchronized (closeEvent) { |
| 229 if (!isClosed) { | 232 if (!isClosed) { |
| 230 closeEvent.wait(timeoutMs); | 233 closeEvent.wait(timeoutMs); |
| 231 } | 234 } |
| 232 return isClosed; | 235 return isClosed; |
| 233 } | 236 } |
| 234 } | 237 } |
| 235 | 238 |
| 236 PeerConnectionClient createPeerConnectionClient(MockRenderer localRenderer, | 239 PeerConnectionClient createPeerConnectionClient(MockRenderer localRenderer, |
| 237 MockRenderer remoteRenderer, PeerConnectionParameters peerConnectionParameters, | 240 MockRenderer remoteRenderer, PeerConnectionParameters peerConnectionParameters, |
| 238 EglBase.Context eglContext) { | 241 VideoCapturer videoCapturer, EglBase.Context eglContext) { |
| 239 List<PeerConnection.IceServer> iceServers = new LinkedList<PeerConnection.IceServer>(); | 242 List<PeerConnection.IceServer> iceServers = new LinkedList<PeerConnection.IceServer>(); |
| 240 SignalingParameters signalingParameters = | 243 SignalingParameters signalingParameters = |
| 241 new SignalingParameters(iceServers, true, // iceServers, initiator. | 244 new SignalingParameters(iceServers, true, // iceServers, initiator. |
| 242 null, null, null, // clientId, wssUrl, wssPostUrl. | 245 null, null, null, // clientId, wssUrl, wssPostUrl. |
| 243 null, null); // offerSdp, iceCandidates. | 246 null, null); // offerSdp, iceCandidates. |
| 244 | 247 |
| 245 PeerConnectionClient client = PeerConnectionClient.getInstance(); | 248 PeerConnectionClient client = PeerConnectionClient.getInstance(); |
| 246 PeerConnectionFactory.Options options = new PeerConnectionFactory.Options(); | 249 PeerConnectionFactory.Options options = new PeerConnectionFactory.Options(); |
| 247 options.networkIgnoreMask = 0; | 250 options.networkIgnoreMask = 0; |
| 248 options.disableNetworkMonitor = true; | 251 options.disableNetworkMonitor = true; |
| 249 client.setPeerConnectionFactoryOptions(options); | 252 client.setPeerConnectionFactoryOptions(options); |
| 250 client.createPeerConnectionFactory( | 253 client.createPeerConnectionFactory( |
| 251 getInstrumentation().getTargetContext(), peerConnectionParameters, this); | 254 getInstrumentation().getTargetContext(), peerConnectionParameters, this); |
| 252 client.createPeerConnection(eglContext, localRenderer, remoteRenderer, signalingParameters); | 255 client.createPeerConnection( |
| 256 eglContext, localRenderer, remoteRenderer, videoCapturer, signalingParameters); |
| 253 client.createOffer(); | 257 client.createOffer(); |
| 254 return client; | 258 return client; |
| 255 } | 259 } |
| 256 | 260 |
| 257 private PeerConnectionParameters createParametersForAudioCall() { | 261 private PeerConnectionParameters createParametersForAudioCall() { |
| 258 PeerConnectionParameters peerConnectionParameters = | 262 PeerConnectionParameters peerConnectionParameters = |
| 259 new PeerConnectionParameters(false, /* videoCallEnabled */ | 263 new PeerConnectionParameters(false, /* videoCallEnabled */ |
| 260 true, /* loopback */ | 264 true, /* loopback */ |
| 261 false, /* tracing */ | 265 false, /* tracing */ |
| 262 // Video codec parameters. | 266 // Video codec parameters. |
| 263 true, /* useCamera2 */ | |
| 264 0, /* videoWidth */ | 267 0, /* videoWidth */ |
| 265 0, /* videoHeight */ | 268 0, /* videoHeight */ |
| 266 0, /* videoFps */ | 269 0, /* videoFps */ |
| 267 0, /* videoStartBitrate */ | 270 0, /* videoStartBitrate */ |
| 268 "", /* videoCodec */ | 271 "", /* videoCodec */ |
| 269 true, /* videoCodecHwAcceleration */ | 272 true, /* videoCodecHwAcceleration */ |
| 270 false, /* captureToToTexture */ | |
| 271 // Audio codec parameters. | 273 // Audio codec parameters. |
| 272 0, /* audioStartBitrate */ | 274 0, /* audioStartBitrate */ |
| 273 "OPUS", /* audioCodec */ | 275 "OPUS", /* audioCodec */ |
| 274 false, /* noAudioProcessing */ | 276 false, /* noAudioProcessing */ |
| 275 false, /* aecDump */ | 277 false, /* aecDump */ |
| 276 false /* useOpenSLES */, false /* disableBuiltInAEC */, false /* disableBuiltInAGC */, | 278 false /* useOpenSLES */, false /* disableBuiltInAEC */, false /* disableBuiltInAGC */, |
| 277 false /* disableBuiltInNS */, false /* enableLevelControl */); | 279 false /* disableBuiltInNS */, false /* enableLevelControl */); |
| 280 |
| 278 return peerConnectionParameters; | 281 return peerConnectionParameters; |
| 279 } | 282 } |
| 280 | 283 |
| 281 private PeerConnectionParameters createParametersForVideoCall( | 284 private VideoCapturer createCameraCapturer(boolean captureToTexture) { |
| 282 String videoCodec, boolean captureToTexture) { | |
| 283 final boolean useCamera2 = | 285 final boolean useCamera2 = |
| 284 captureToTexture && Camera2Enumerator.isSupported(getInstrumentation().getTargetContext()); | 286 captureToTexture && Camera2Enumerator.isSupported(getInstrumentation().getTargetContext()); |
| 285 | 287 |
| 288 CameraEnumerator enumerator; |
| 289 if (useCamera2) { |
| 290 enumerator = new Camera2Enumerator(getInstrumentation().getTargetContext()); |
| 291 } else { |
| 292 enumerator = new Camera1Enumerator(captureToTexture); |
| 293 } |
| 294 String deviceName = enumerator.getDeviceNames()[0]; |
| 295 return enumerator.createCapturer(deviceName, null); |
| 296 } |
| 297 |
| 298 private PeerConnectionParameters createParametersForVideoCall(String videoCodec) { |
| 286 PeerConnectionParameters peerConnectionParameters = | 299 PeerConnectionParameters peerConnectionParameters = |
| 287 new PeerConnectionParameters(true, /* videoCallEnabled */ | 300 new PeerConnectionParameters(true, /* videoCallEnabled */ |
| 288 true, /* loopback */ | 301 true, /* loopback */ |
| 289 false, /* tracing */ | 302 false, /* tracing */ |
| 290 // Video codec parameters. | 303 // Video codec parameters. |
| 291 useCamera2, /* useCamera2 */ | |
| 292 0, /* videoWidth */ | 304 0, /* videoWidth */ |
| 293 0, /* videoHeight */ | 305 0, /* videoHeight */ |
| 294 0, /* videoFps */ | 306 0, /* videoFps */ |
| 295 0, /* videoStartBitrate */ | 307 0, /* videoStartBitrate */ |
| 296 videoCodec, /* videoCodec */ | 308 videoCodec, /* videoCodec */ |
| 297 true, /* videoCodecHwAcceleration */ | 309 true, /* videoCodecHwAcceleration */ |
| 298 captureToTexture, /* captureToToTexture */ | |
| 299 // Audio codec parameters. | 310 // Audio codec parameters. |
| 300 0, /* audioStartBitrate */ | 311 0, /* audioStartBitrate */ |
| 301 "OPUS", /* audioCodec */ | 312 "OPUS", /* audioCodec */ |
| 302 false, /* noAudioProcessing */ | 313 false, /* noAudioProcessing */ |
| 303 false, /* aecDump */ | 314 false, /* aecDump */ |
| 304 false /* useOpenSLES */, false /* disableBuiltInAEC */, false /* disableBuiltInAGC */, | 315 false /* useOpenSLES */, false /* disableBuiltInAEC */, false /* disableBuiltInAGC */, |
| 305 false /* disableBuiltInNS */, false /* enableLevelControl */); | 316 false /* disableBuiltInNS */, false /* enableLevelControl */); |
| 317 |
| 306 return peerConnectionParameters; | 318 return peerConnectionParameters; |
| 307 } | 319 } |
| 308 | 320 |
| 309 @Override | 321 @Override |
| 310 public void setUp() { | 322 public void setUp() { |
| 311 signalingExecutor = Executors.newSingleThreadExecutor(); | 323 signalingExecutor = Executors.newSingleThreadExecutor(); |
| 312 if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) { | 324 if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) { |
| 313 eglBase = EglBase.create(); | 325 eglBase = EglBase.create(); |
| 314 } | 326 } |
| 315 } | 327 } |
| 316 | 328 |
| 317 @Override | 329 @Override |
| 318 public void tearDown() { | 330 public void tearDown() { |
| 319 signalingExecutor.shutdown(); | 331 signalingExecutor.shutdown(); |
| 320 if (eglBase != null) { | 332 if (eglBase != null) { |
| 321 eglBase.release(); | 333 eglBase.release(); |
| 322 } | 334 } |
| 323 } | 335 } |
| 324 | 336 |
| 325 @SmallTest | 337 @SmallTest |
| 326 public void testSetLocalOfferMakesVideoFlowLocally() throws InterruptedException { | 338 public void testSetLocalOfferMakesVideoFlowLocally() throws InterruptedException { |
| 327 Log.d(TAG, "testSetLocalOfferMakesVideoFlowLocally"); | 339 Log.d(TAG, "testSetLocalOfferMakesVideoFlowLocally"); |
| 328 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); | 340 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); |
| 329 pcClient = createPeerConnectionClient(localRenderer, new MockRenderer(0, null), | 341 pcClient = createPeerConnectionClient(localRenderer, new MockRenderer(0, null), |
| 330 createParametersForVideoCall(VIDEO_CODEC_VP8, false), null); | 342 createParametersForVideoCall(VIDEO_CODEC_VP8), |
| 343 createCameraCapturer(false /* captureToTexture */), null); |
| 331 | 344 |
| 332 // Wait for local SDP and ice candidates set events. | 345 // Wait for local SDP and ice candidates set events. |
| 333 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); | 346 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); |
| 334 assertTrue("ICE candidates were not generated.", waitForIceCandidates(WAIT_TIMEOUT)); | 347 assertTrue("ICE candidates were not generated.", waitForIceCandidates(WAIT_TIMEOUT)); |
| 335 | 348 |
| 336 // Check that local video frames were rendered. | 349 // Check that local video frames were rendered. |
| 337 assertTrue( | 350 assertTrue( |
| 338 "Local video frames were not rendered.", localRenderer.waitForFramesRendered(WAIT_TIMEOUT)); | 351 "Local video frames were not rendered.", localRenderer.waitForFramesRendered(WAIT_TIMEOUT)); |
| 339 | 352 |
| 340 pcClient.close(); | 353 pcClient.close(); |
| 341 assertTrue( | 354 assertTrue( |
| 342 "PeerConnection close event was not received.", waitForPeerConnectionClosed(WAIT_TIMEOUT)); | 355 "PeerConnection close event was not received.", waitForPeerConnectionClosed(WAIT_TIMEOUT)); |
| 343 Log.d(TAG, "testSetLocalOfferMakesVideoFlowLocally Done."); | 356 Log.d(TAG, "testSetLocalOfferMakesVideoFlowLocally Done."); |
| 344 } | 357 } |
| 345 | 358 |
| 346 private void doLoopbackTest(PeerConnectionParameters parameters, boolean decodeToTexure) | 359 private void doLoopbackTest(PeerConnectionParameters parameters, VideoCapturer videoCapturer, |
| 347 throws InterruptedException { | 360 boolean decodeToTexture) throws InterruptedException { |
| 348 loopback = true; | 361 loopback = true; |
| 349 MockRenderer localRenderer = null; | 362 MockRenderer localRenderer = null; |
| 350 MockRenderer remoteRenderer = null; | 363 MockRenderer remoteRenderer = null; |
| 351 if (parameters.videoCallEnabled) { | 364 if (parameters.videoCallEnabled) { |
| 352 Log.d(TAG, "testLoopback for video " + parameters.videoCodec); | 365 Log.d(TAG, "testLoopback for video " + parameters.videoCodec); |
| 353 localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); | 366 localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); |
| 354 remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); | 367 remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); |
| 355 } else { | 368 } else { |
| 356 Log.d(TAG, "testLoopback for audio."); | 369 Log.d(TAG, "testLoopback for audio."); |
| 357 } | 370 } |
| 358 pcClient = createPeerConnectionClient(localRenderer, remoteRenderer, parameters, | 371 pcClient = createPeerConnectionClient(localRenderer, remoteRenderer, parameters, videoCapturer, |
| 359 decodeToTexure ? eglBase.getEglBaseContext() : null); | 372 decodeToTexture ? eglBase.getEglBaseContext() : null); |
| 360 | 373 |
| 361 // Wait for local SDP, rename it to answer and set as remote SDP. | 374 // Wait for local SDP, rename it to answer and set as remote SDP. |
| 362 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); | 375 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); |
| 363 SessionDescription remoteSdp = new SessionDescription( | 376 SessionDescription remoteSdp = new SessionDescription( |
| 364 SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description); | 377 SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description); |
| 365 pcClient.setRemoteDescription(remoteSdp); | 378 pcClient.setRemoteDescription(remoteSdp); |
| 366 | 379 |
| 367 // Wait for ICE connection. | 380 // Wait for ICE connection. |
| 368 assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT)); | 381 assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT)); |
| 369 | 382 |
| 370 if (parameters.videoCallEnabled) { | 383 if (parameters.videoCallEnabled) { |
| 371 // Check that local and remote video frames were rendered. | 384 // Check that local and remote video frames were rendered. |
| 372 assertTrue("Local video frames were not rendered.", | 385 assertTrue("Local video frames were not rendered.", |
| 373 localRenderer.waitForFramesRendered(WAIT_TIMEOUT)); | 386 localRenderer.waitForFramesRendered(WAIT_TIMEOUT)); |
| 374 assertTrue("Remote video frames were not rendered.", | 387 assertTrue("Remote video frames were not rendered.", |
| 375 remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT)); | 388 remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT)); |
| 376 } else { | 389 } else { |
| 377 // For audio just sleep for 1 sec. | 390 // For audio just sleep for 1 sec. |
| 378 // TODO(glaznev): check how we can detect that remote audio was rendered. | 391 // TODO(glaznev): check how we can detect that remote audio was rendered. |
| 379 Thread.sleep(AUDIO_RUN_TIMEOUT); | 392 Thread.sleep(AUDIO_RUN_TIMEOUT); |
| 380 } | 393 } |
| 381 | 394 |
| 382 pcClient.close(); | 395 pcClient.close(); |
| 383 assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT)); | 396 assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT)); |
| 384 Log.d(TAG, "testLoopback done."); | 397 Log.d(TAG, "testLoopback done."); |
| 385 } | 398 } |
| 386 | 399 |
| 387 @SmallTest | 400 @SmallTest |
| 388 public void testLoopbackAudio() throws InterruptedException { | 401 public void testLoopbackAudio() throws InterruptedException { |
| 389 doLoopbackTest(createParametersForAudioCall(), false); | 402 doLoopbackTest(createParametersForAudioCall(), null, false /* decodeToTexture */); |
| 390 } | 403 } |
| 391 | 404 |
| 392 @SmallTest | 405 @SmallTest |
| 393 public void testLoopbackVp8() throws InterruptedException { | 406 public void testLoopbackVp8() throws InterruptedException { |
| 394 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8, false), false); | 407 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8), |
| 408 createCameraCapturer(false /* captureToTexture */), false /* decodeToTexture */); |
| 395 } | 409 } |
| 396 | 410 |
| 397 @SmallTest | 411 @SmallTest |
| 398 public void testLoopbackVp9() throws InterruptedException { | 412 public void testLoopbackVp9() throws InterruptedException { |
| 399 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9, false), false); | 413 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9), |
| 414 createCameraCapturer(false /* captureToTexture */), false /* decodeToTexture */); |
| 400 } | 415 } |
| 401 | 416 |
| 402 @SmallTest | 417 @SmallTest |
| 403 public void testLoopbackH264() throws InterruptedException { | 418 public void testLoopbackH264() throws InterruptedException { |
| 404 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264, false), false); | 419 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264), |
| 420 createCameraCapturer(false /* captureToTexture */), false /* decodeToTexture */); |
| 405 } | 421 } |
| 406 | 422 |
| 407 @SmallTest | 423 @SmallTest |
| 408 public void testLoopbackVp8DecodeToTexture() throws InterruptedException { | 424 public void testLoopbackVp8DecodeToTexture() throws InterruptedException { |
| 409 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { | 425 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { |
| 410 Log.i(TAG, "Decode to textures is not supported, requires SDK version 19."); | 426 Log.i(TAG, "Decode to textures is not supported, requires SDK version 19."); |
| 411 return; | 427 return; |
| 412 } | 428 } |
| 413 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8, false), true); | 429 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8), |
| 430 createCameraCapturer(false /* captureToTexture */), true /* decodeToTexture */); |
| 414 } | 431 } |
| 415 | 432 |
| 416 @SmallTest | 433 @SmallTest |
| 417 public void testLoopbackVp9DecodeToTexture() throws InterruptedException { | 434 public void testLoopbackVp9DecodeToTexture() throws InterruptedException { |
| 418 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { | 435 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { |
| 419 Log.i(TAG, "Decode to textures is not supported, requires SDK version 19."); | 436 Log.i(TAG, "Decode to textures is not supported, requires SDK version 19."); |
| 420 return; | 437 return; |
| 421 } | 438 } |
| 422 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9, false), true); | 439 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9), |
| 440 createCameraCapturer(false /* captureToTexture */), true /* decodeToTexture */); |
| 423 } | 441 } |
| 424 | 442 |
| 425 @SmallTest | 443 @SmallTest |
| 426 public void testLoopbackH264DecodeToTexture() throws InterruptedException { | 444 public void testLoopbackH264DecodeToTexture() throws InterruptedException { |
| 427 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { | 445 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { |
| 428 Log.i(TAG, "Decode to textures is not supported, requires SDK version 19."); | 446 Log.i(TAG, "Decode to textures is not supported, requires SDK version 19."); |
| 429 return; | 447 return; |
| 430 } | 448 } |
| 431 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264, false), true); | 449 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264), |
| 450 createCameraCapturer(false /* captureToTexture */), true /* decodeToTexture */); |
| 432 } | 451 } |
| 433 | 452 |
| 434 @SmallTest | 453 @SmallTest |
| 435 public void testLoopbackVp8CaptureToTexture() throws InterruptedException { | 454 public void testLoopbackVp8CaptureToTexture() throws InterruptedException { |
| 436 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { | 455 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { |
| 437 Log.i(TAG, "Encode to textures is not supported. Requires SDK version 19"); | 456 Log.i(TAG, "Encode to textures is not supported. Requires SDK version 19"); |
| 438 return; | 457 return; |
| 439 } | 458 } |
| 440 // TODO(perkj): If we can always capture to textures, there is no need to check if the | 459 // TODO(perkj): If we can always capture to textures, there is no need to check if the |
| 441 // hardware encoder supports to encode from a texture. | 460 // hardware encoder supports to encode from a texture. |
| 442 if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) { | 461 if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) { |
| 443 Log.i(TAG, "VP8 encode to textures is not supported."); | 462 Log.i(TAG, "VP8 encode to textures is not supported."); |
| 444 return; | 463 return; |
| 445 } | 464 } |
| 446 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8, true), true); | 465 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8), |
| 466 createCameraCapturer(true /* captureToTexture */), true /* decodeToTexture */); |
| 447 } | 467 } |
| 448 | 468 |
| 449 // Test that a call can be setup even if the EGL context used during initialization is | 469 // Test that a call can be setup even if the EGL context used during initialization is |
| 450 // released before the Video codecs are created. The HW encoder and decoder is setup to use | 470 // released before the Video codecs are created. The HW encoder and decoder is setup to use |
| 451 // textures. | 471 // textures. |
| 452 @SmallTest | 472 @SmallTest |
| 453 public void testLoopbackEglContextReleasedAfterCreatingPc() throws InterruptedException { | 473 public void testLoopbackEglContextReleasedAfterCreatingPc() throws InterruptedException { |
| 454 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { | 474 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { |
| 455 Log.i(TAG, "Decode to textures is not supported. Requires SDK version 19"); | 475 Log.i(TAG, "Decode to textures is not supported. Requires SDK version 19"); |
| 456 return; | 476 return; |
| 457 } | 477 } |
| 458 | 478 |
| 459 loopback = true; | 479 loopback = true; |
| 460 PeerConnectionParameters parameters = createParametersForVideoCall(VIDEO_CODEC_VP8, true); | 480 PeerConnectionParameters parameters = createParametersForVideoCall(VIDEO_CODEC_VP8); |
| 461 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); | 481 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); |
| 462 MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); | 482 MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); |
| 463 pcClient = createPeerConnectionClient( | 483 pcClient = createPeerConnectionClient(localRenderer, remoteRenderer, parameters, |
| 464 localRenderer, remoteRenderer, parameters, eglBase.getEglBaseContext()); | 484 createCameraCapturer(true /* captureToTexture */), eglBase.getEglBaseContext()); |
| 465 | 485 |
| 466 // Wait for local SDP, rename it to answer and set as remote SDP. | 486 // Wait for local SDP, rename it to answer and set as remote SDP. |
| 467 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); | 487 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); |
| 468 | 488 |
| 469 // Release the EGL context used for creating the PeerConnectionClient. | 489 // Release the EGL context used for creating the PeerConnectionClient. |
| 470 // Since createPeerConnectionClient is asynchronous, we must wait for the local | 490 // Since createPeerConnectionClient is asynchronous, we must wait for the local |
| 471 // SessionDescription. | 491 // SessionDescription. |
| 472 eglBase.release(); | 492 eglBase.release(); |
| 473 eglBase = null; | 493 eglBase = null; |
| 474 | 494 |
| (...skipping 19 matching lines...) |
| 494 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { | 514 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { |
| 495 Log.i(TAG, "Encode to textures is not supported. Requires KITKAT"); | 515 Log.i(TAG, "Encode to textures is not supported. Requires KITKAT"); |
| 496 return; | 516 return; |
| 497 } | 517 } |
| 498 // TODO(perkj): If we can always capture to textures, there is no need to check if the | 518 // TODO(perkj): If we can always capture to textures, there is no need to check if the |
| 499 // hardware encoder supports to encode from a texture. | 519 // hardware encoder supports to encode from a texture. |
| 500 if (!MediaCodecVideoEncoder.isH264HwSupportedUsingTextures()) { | 520 if (!MediaCodecVideoEncoder.isH264HwSupportedUsingTextures()) { |
| 501 Log.i(TAG, "H264 encode to textures is not supported."); | 521 Log.i(TAG, "H264 encode to textures is not supported."); |
| 502 return; | 522 return; |
| 503 } | 523 } |
| 504 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264, true), true); | 524 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264), |
| 525 createCameraCapturer(true /* captureToTexture */), true /* decodeToTexture */); |
| 505 } | 526 } |
| 506 | 527 |
| 507 // Checks if default front camera can be switched to back camera and then | 528 // Checks if default front camera can be switched to back camera and then |
| 508 // again to front camera. | 529 // again to front camera. |
| 509 @SmallTest | 530 @SmallTest |
| 510 public void testCameraSwitch() throws InterruptedException { | 531 public void testCameraSwitch() throws InterruptedException { |
| 511 Log.d(TAG, "testCameraSwitch"); | 532 Log.d(TAG, "testCameraSwitch"); |
| 512 loopback = true; | 533 loopback = true; |
| 513 | 534 |
| 514 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); | 535 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); |
| 515 MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); | 536 MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); |
| 516 | 537 |
| 517 pcClient = createPeerConnectionClient( | 538 pcClient = createPeerConnectionClient(localRenderer, remoteRenderer, |
| 518 localRenderer, remoteRenderer, createParametersForVideoCall(VIDEO_CODEC_VP8, false), null); | 539 createParametersForVideoCall(VIDEO_CODEC_VP8), |
| 540 createCameraCapturer(false /* captureToTexture */), null); |
| 519 | 541 |
| 520 // Wait for local SDP, rename it to answer and set as remote SDP. | 542 // Wait for local SDP, rename it to answer and set as remote SDP. |
| 521 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); | 543 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); |
| 522 SessionDescription remoteSdp = new SessionDescription( | 544 SessionDescription remoteSdp = new SessionDescription( |
| 523 SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description); | 545 SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description); |
| 524 pcClient.setRemoteDescription(remoteSdp); | 546 pcClient.setRemoteDescription(remoteSdp); |
| 525 | 547 |
| 526 // Wait for ICE connection. | 548 // Wait for ICE connection. |
| 527 assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT)); | 549 assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT)); |
| 528 | 550 |
| (...skipping 26 matching lines...) |
| 555 // Disabled because of https://bugs.chromium.org/p/webrtc/issues/detail?id=6478 | 577 // Disabled because of https://bugs.chromium.org/p/webrtc/issues/detail?id=6478 |
| 556 @FlakyTest | 578 @FlakyTest |
| 557 //@SmallTest | 579 //@SmallTest |
| 558 public void testVideoSourceRestart() throws InterruptedException { | 580 public void testVideoSourceRestart() throws InterruptedException { |
| 559 Log.d(TAG, "testVideoSourceRestart"); | 581 Log.d(TAG, "testVideoSourceRestart"); |
| 560 loopback = true; | 582 loopback = true; |
| 561 | 583 |
| 562 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); | 584 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); |
| 563 MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); | 585 MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); |
| 564 | 586 |
| 565 pcClient = createPeerConnectionClient( | 587 pcClient = createPeerConnectionClient(localRenderer, remoteRenderer, |
| 566 localRenderer, remoteRenderer, createParametersForVideoCall(VIDEO_CODEC_VP8, false), null); | 588 createParametersForVideoCall(VIDEO_CODEC_VP8), |
| 589 createCameraCapturer(false /* captureToTexture */), null); |
| 567 | 590 |
| 568 // Wait for local SDP, rename it to answer and set as remote SDP. | 591 // Wait for local SDP, rename it to answer and set as remote SDP. |
| 569 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); | 592 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); |
| 570 SessionDescription remoteSdp = new SessionDescription( | 593 SessionDescription remoteSdp = new SessionDescription( |
| 571 SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description); | 594 SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description); |
| 572 pcClient.setRemoteDescription(remoteSdp); | 595 pcClient.setRemoteDescription(remoteSdp); |
| 573 | 596 |
| 574 // Wait for ICE connection. | 597 // Wait for ICE connection. |
| 575 assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT)); | 598 assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT)); |
| 576 | 599 |
| (...skipping 27 matching lines...) |
| 604 // Disabled because of https://bugs.chromium.org/p/webrtc/issues/detail?id=6478 | 627 // Disabled because of https://bugs.chromium.org/p/webrtc/issues/detail?id=6478 |
| 605 @FlakyTest | 628 @FlakyTest |
| 606 //@SmallTest | 629 //@SmallTest |
| 607 public void testCaptureFormatChange() throws InterruptedException { | 630 public void testCaptureFormatChange() throws InterruptedException { |
| 608 Log.d(TAG, "testCaptureFormatChange"); | 631 Log.d(TAG, "testCaptureFormatChange"); |
| 609 loopback = true; | 632 loopback = true; |
| 610 | 633 |
| 611 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); | 634 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); |
| 612 MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); | 635 MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); |
| 613 | 636 |
| 614 pcClient = createPeerConnectionClient( | 637 pcClient = createPeerConnectionClient(localRenderer, remoteRenderer, |
| 615 localRenderer, remoteRenderer, createParametersForVideoCall(VIDEO_CODEC_VP8, false), null); | 638 createParametersForVideoCall(VIDEO_CODEC_VP8), |
| 639 createCameraCapturer(false /* captureToTexture */), null); |
| 616 | 640 |
| 617 // Wait for local SDP, rename it to answer and set as remote SDP. | 641 // Wait for local SDP, rename it to answer and set as remote SDP. |
| 618 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); | 642 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); |
| 619 SessionDescription remoteSdp = new SessionDescription( | 643 SessionDescription remoteSdp = new SessionDescription( |
| 620 SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description); | 644 SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description); |
| 621 pcClient.setRemoteDescription(remoteSdp); | 645 pcClient.setRemoteDescription(remoteSdp); |
| 622 | 646 |
| 623 // Wait for ICE connection. | 647 // Wait for ICE connection. |
| 624 assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAI
T_TIMEOUT)); | 648 assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAI
T_TIMEOUT)); |
| 625 | 649 |
| (...skipping 19 matching lines...) |
| 645 localRenderer.waitForFramesRendered(WAIT_TIMEOUT)); | 669 localRenderer.waitForFramesRendered(WAIT_TIMEOUT)); |
| 646 assertTrue("Remote video frames were not rendered after capture format change.", | 670 assertTrue("Remote video frames were not rendered after capture format change.", |
| 647 remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT)); | 671 remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT)); |
| 648 } | 672 } |
| 649 | 673 |
| 650 pcClient.close(); | 674 pcClient.close(); |
| 651 assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT)); | 675 assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT)); |
| 652 Log.d(TAG, "testCaptureFormatChange done."); | 676 Log.d(TAG, "testCaptureFormatChange done."); |
| 653 } | 677 } |
| 654 } | 678 } |
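
Note on the pattern this diff exercises (a minimal sketch, not part of the change itself): the `useCamera2` and `captureToToTexture` fields disappear from `PeerConnectionParameters`, and the caller now constructs a `VideoCapturer` via a `CameraEnumerator` and hands it directly to `PeerConnectionClient.createPeerConnection()`. The helper below restates that flow outside the test harness; the class name `CapturerUsageSketch`, the method `startLoopbackCall`, and the `VideoRenderer.Callbacks` renderer type are illustrative assumptions based on what the updated test code above does.

```java
// Illustrative sketch only (not part of this CL): the new capturer-based call
// pattern in isolation. Names here are placeholders, and the renderer type is
// assumed from the test's MockRenderer.
import org.appspot.apprtc.AppRTCClient.SignalingParameters;
import org.appspot.apprtc.PeerConnectionClient;
import org.appspot.apprtc.PeerConnectionClient.PeerConnectionEvents;
import org.appspot.apprtc.PeerConnectionClient.PeerConnectionParameters;

import android.content.Context;

import org.webrtc.Camera1Enumerator;
import org.webrtc.Camera2Enumerator;
import org.webrtc.CameraEnumerator;
import org.webrtc.EglBase;
import org.webrtc.PeerConnectionFactory;
import org.webrtc.VideoCapturer;
import org.webrtc.VideoRenderer;

class CapturerUsageSketch {
  // Builds a camera capturer the same way the updated test does: prefer Camera2
  // when capturing to textures and the device supports it, else fall back to Camera1.
  static VideoCapturer createCameraCapturer(Context context, boolean captureToTexture) {
    CameraEnumerator enumerator =
        captureToTexture && Camera2Enumerator.isSupported(context)
            ? new Camera2Enumerator(context)
            : new Camera1Enumerator(captureToTexture);
    String deviceName = enumerator.getDeviceNames()[0];
    return enumerator.createCapturer(deviceName, null /* eventsHandler */);
  }

  // The capturer is now an explicit argument to createPeerConnection(), replacing
  // the removed useCamera2/captureToToTexture fields of PeerConnectionParameters.
  static PeerConnectionClient startLoopbackCall(Context context, EglBase.Context eglContext,
      VideoRenderer.Callbacks localRenderer, VideoRenderer.Callbacks remoteRenderer,
      PeerConnectionParameters params, SignalingParameters signalingParams,
      PeerConnectionEvents events) {
    PeerConnectionClient client = PeerConnectionClient.getInstance();
    client.setPeerConnectionFactoryOptions(new PeerConnectionFactory.Options());
    client.createPeerConnectionFactory(context, params, events);
    client.createPeerConnection(eglContext, localRenderer, remoteRenderer,
        createCameraCapturer(context, true /* captureToTexture */), signalingParams);
    client.createOffer();
    return client;
  }
}
```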