Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 /* | 1 /* |
| 2 * Copyright 2014 The WebRTC Project Authors. All rights reserved. | 2 * Copyright 2014 The WebRTC Project Authors. All rights reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| 11 package org.appspot.apprtc.test; | 11 package org.appspot.apprtc.test; |
| 12 | 12 |
| 13 import org.appspot.apprtc.AppRTCClient.SignalingParameters; | 13 import org.appspot.apprtc.AppRTCClient.SignalingParameters; |
| 14 import org.appspot.apprtc.PeerConnectionClient; | 14 import org.appspot.apprtc.PeerConnectionClient; |
| 15 import org.appspot.apprtc.PeerConnectionClient.PeerConnectionEvents; | 15 import org.appspot.apprtc.PeerConnectionClient.PeerConnectionEvents; |
| 16 import org.appspot.apprtc.PeerConnectionClient.PeerConnectionParameters; | 16 import org.appspot.apprtc.PeerConnectionClient.PeerConnectionParameters; |
| 17 | 17 |
| 18 import android.os.Build; | 18 import android.os.Build; |
| 19 import android.test.FlakyTest; | 19 import android.test.FlakyTest; |
| 20 import android.test.InstrumentationTestCase; | 20 import android.test.InstrumentationTestCase; |
| 21 import android.test.suitebuilder.annotation.SmallTest; | 21 import android.test.suitebuilder.annotation.SmallTest; |
| 22 import android.util.Log; | 22 import android.util.Log; |
| 23 | 23 |
| | 24 import org.webrtc.Camera1Enumerator; |
| 24 import org.webrtc.Camera2Enumerator; | 25 import org.webrtc.Camera2Enumerator; |
| | 26 import org.webrtc.CameraEnumerator; |
| 25 import org.webrtc.EglBase; | 27 import org.webrtc.EglBase; |
| 26 import org.webrtc.IceCandidate; | 28 import org.webrtc.IceCandidate; |
| 27 import org.webrtc.MediaCodecVideoEncoder; | 29 import org.webrtc.MediaCodecVideoEncoder; |
| 28 import org.webrtc.PeerConnection; | 30 import org.webrtc.PeerConnection; |
| 29 import org.webrtc.PeerConnectionFactory; | 31 import org.webrtc.PeerConnectionFactory; |
| 30 import org.webrtc.SessionDescription; | 32 import org.webrtc.SessionDescription; |
| 31 import org.webrtc.StatsReport; | 33 import org.webrtc.StatsReport; |
| | 34 import org.webrtc.VideoCapturer; |
| 32 import org.webrtc.VideoRenderer; | 35 import org.webrtc.VideoRenderer; |
| 33 | 36 |
| 34 import java.util.LinkedList; | 37 import java.util.LinkedList; |
| 35 import java.util.List; | 38 import java.util.List; |
| 36 import java.util.concurrent.CountDownLatch; | 39 import java.util.concurrent.CountDownLatch; |
| 37 import java.util.concurrent.ExecutorService; | 40 import java.util.concurrent.ExecutorService; |
| 38 import java.util.concurrent.Executors; | 41 import java.util.concurrent.Executors; |
| 39 import java.util.concurrent.TimeUnit; | 42 import java.util.concurrent.TimeUnit; |
| 40 | 43 |
| 41 public class PeerConnectionClientTest | 44 public class PeerConnectionClientTest |
| (...skipping 186 matching lines...) | |
| 228 synchronized (closeEvent) { | 231 synchronized (closeEvent) { |
| 229 if (!isClosed) { | 232 if (!isClosed) { |
| 230 closeEvent.wait(timeoutMs); | 233 closeEvent.wait(timeoutMs); |
| 231 } | 234 } |
| 232 return isClosed; | 235 return isClosed; |
| 233 } | 236 } |
| 234 } | 237 } |
| 235 | 238 |
| 236 PeerConnectionClient createPeerConnectionClient(MockRenderer localRenderer, | 239 PeerConnectionClient createPeerConnectionClient(MockRenderer localRenderer, |
| 237 MockRenderer remoteRenderer, PeerConnectionParameters peerConnectionParameters, | 240 MockRenderer remoteRenderer, PeerConnectionParameters peerConnectionParameters, |
| 238 EglBase.Context eglContext) { | 241 VideoCapturer videoCapturer, EglBase.Context eglContext) { |
| 239 List<PeerConnection.IceServer> iceServers = new LinkedList<PeerConnection.IceServer>(); | 242 List<PeerConnection.IceServer> iceServers = new LinkedList<PeerConnection.IceServer>(); |
| 240 SignalingParameters signalingParameters = | 243 SignalingParameters signalingParameters = |
| 241 new SignalingParameters(iceServers, true, // iceServers, initiator. | 244 new SignalingParameters(iceServers, true, // iceServers, initiator. |
| 242 null, null, null, // clientId, wssUrl, wssPostUrl. | 245 null, null, null, // clientId, wssUrl, wssPostUrl. |
| 243 null, null); // offerSdp, iceCandidates. | 246 null, null); // offerSdp, iceCandidates. |
| 244 | 247 |
| 245 PeerConnectionClient client = PeerConnectionClient.getInstance(); | 248 PeerConnectionClient client = PeerConnectionClient.getInstance(); |
| 246 PeerConnectionFactory.Options options = new PeerConnectionFactory.Options(); | 249 PeerConnectionFactory.Options options = new PeerConnectionFactory.Options(); |
| 247 options.networkIgnoreMask = 0; | 250 options.networkIgnoreMask = 0; |
| 248 options.disableNetworkMonitor = true; | 251 options.disableNetworkMonitor = true; |
| 249 client.setPeerConnectionFactoryOptions(options); | 252 client.setPeerConnectionFactoryOptions(options); |
| 250 client.createPeerConnectionFactory( | 253 client.createPeerConnectionFactory( |
| 251 getInstrumentation().getTargetContext(), peerConnectionParameters, this); | 254 getInstrumentation().getTargetContext(), peerConnectionParameters, this); |
| 252 client.createPeerConnection(eglContext, localRenderer, remoteRenderer, signalingParameters); | 255 client.createPeerConnection( |
| | 256 eglContext, localRenderer, remoteRenderer, |
| | 257 videoCapturer, signalingParameters); |
| 253 client.createOffer(); | 258 client.createOffer(); |
| 254 return client; | 259 return client; |
| 255 } | 260 } |
| 256 | 261 |
| 257 private PeerConnectionParameters createParametersForAudioCall() { | 262 private PeerConnectionParameters createParametersForAudioCall() { |
| 258 PeerConnectionParameters peerConnectionParameters = | 263 PeerConnectionParameters peerConnectionParameters = |
| 259 new PeerConnectionParameters(false, /* videoCallEnabled */ | 264 new PeerConnectionParameters(false, /* videoCallEnabled */ |
| 260 true, /* loopback */ | 265 true, /* loopback */ |
| 261 false, /* tracing */ | 266 false, /* tracing */ |
| 262 // Video codec parameters. | 267 // Video codec parameters. |
| 263 true, /* useCamera2 */ | |
| 264 0, /* videoWidth */ | 268 0, /* videoWidth */ |
| 265 0, /* videoHeight */ | 269 0, /* videoHeight */ |
| 266 0, /* videoFps */ | 270 0, /* videoFps */ |
| 267 0, /* videoStartBitrate */ | 271 0, /* videoStartBitrate */ |
| 268 "", /* videoCodec */ | 272 "", /* videoCodec */ |
| 269 true, /* videoCodecHwAcceleration */ | 273 true, /* videoCodecHwAcceleration */ |
| 270 false, /* captureToToTexture */ | |
| 271 // Audio codec parameters. | 274 // Audio codec parameters. |
| 272 0, /* audioStartBitrate */ | 275 0, /* audioStartBitrate */ |
| 273 "OPUS", /* audioCodec */ | 276 "OPUS", /* audioCodec */ |
| 274 false, /* noAudioProcessing */ | 277 false, /* noAudioProcessing */ |
| 275 false, /* aecDump */ | 278 false, /* aecDump */ |
| 276 false /* useOpenSLES */, false /* disableBuiltInAEC */, false /* disableBuiltInAGC */, | 279 false /* useOpenSLES */, false /* disableBuiltInAEC */, false /* disableBuiltInAGC */, |
| 277 false /* disableBuiltInNS */, false /* enableLevelControl */); | 280 false /* disableBuiltInNS */, false /* enableLevelControl */); |
| | 281 |
| 278 return peerConnectionParameters; | 282 return peerConnectionParameters; |
| 279 } | 283 } |
| 280 | 284 |
| 281 private PeerConnectionParameters createParametersForVideoCall( | 285 private VideoCapturer createCameraCapturer(boolean captureToTexture) { |
| 282 String videoCodec, boolean captureToTexture) { | |
| 283 final boolean useCamera2 = | 286 final boolean useCamera2 = |
| 284 captureToTexture && Camera2Enumerator.isSupported(getInstrumentation().getTargetContext()); | 287 captureToTexture && Camera2Enumerator.isSupported(getInstrumentation().getTargetContext()); |
| 285 | 288 |
| | 289 CameraEnumerator enumerator; |
| | 290 if (useCamera2) { |
| | 291 enumerator = new Camera2Enumerator(getInstrumentation().getTargetContext()); |
| | 292 } else { |
| | 293 enumerator = new Camera1Enumerator(captureToTexture); |
| | 294 } |
| | 295 String deviceName = enumerator.getDeviceNames()[0]; |
| | 296 return enumerator.createCapturer(deviceName, null); |
| | 297 } |
| | 298 |
| | 299 private PeerConnectionParameters createParametersForVideoCall( |
| | 300 String videoCodec) { |
| 286 PeerConnectionParameters peerConnectionParameters = | 301 PeerConnectionParameters peerConnectionParameters = |
| 287 new PeerConnectionParameters(true, /* videoCallEnabled */ | 302 new PeerConnectionParameters(true, /* videoCallEnabled */ |
| 288 true, /* loopback */ | 303 true, /* loopback */ |
| 289 false, /* tracing */ | 304 false, /* tracing */ |
| 290 // Video codec parameters. | 305 // Video codec parameters. |
| 291 useCamera2, /* useCamera2 */ | |
| 292 0, /* videoWidth */ | 306 0, /* videoWidth */ |
| 293 0, /* videoHeight */ | 307 0, /* videoHeight */ |
| 294 0, /* videoFps */ | 308 0, /* videoFps */ |
| 295 0, /* videoStartBitrate */ | 309 0, /* videoStartBitrate */ |
| 296 videoCodec, /* videoCodec */ | 310 videoCodec, /* videoCodec */ |
| 297 true, /* videoCodecHwAcceleration */ | 311 true, /* videoCodecHwAcceleration */ |
| 298 captureToTexture, /* captureToToTexture */ | |
| 299 // Audio codec parameters. | 312 // Audio codec parameters. |
| 300 0, /* audioStartBitrate */ | 313 0, /* audioStartBitrate */ |
| 301 "OPUS", /* audioCodec */ | 314 "OPUS", /* audioCodec */ |
| 302 false, /* noAudioProcessing */ | 315 false, /* noAudioProcessing */ |
| 303 false, /* aecDump */ | 316 false, /* aecDump */ |
| 304 false /* useOpenSLES */, false /* disableBuiltInAEC */, false /* disableBuiltInAGC */, | 317 false /* useOpenSLES */, false /* disableBuiltInAEC */, false /* disableBuiltInAGC */, |
| 305 false /* disableBuiltInNS */, false /* enableLevelControl */); | 318 false /* disableBuiltInNS */, false /* enableLevelControl */); |
| | 319 |
| 306 return peerConnectionParameters; | 320 return peerConnectionParameters; |
| 307 } | 321 } |
| 308 | 322 |
| 309 @Override | 323 @Override |
| 310 public void setUp() { | 324 public void setUp() { |
| 311 signalingExecutor = Executors.newSingleThreadExecutor(); | 325 signalingExecutor = Executors.newSingleThreadExecutor(); |
| 312 if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) { | 326 if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) { |
| 313 eglBase = EglBase.create(); | 327 eglBase = EglBase.create(); |
| 314 } | 328 } |
| 315 } | 329 } |
| 316 | 330 |
| 317 @Override | 331 @Override |
| 318 public void tearDown() { | 332 public void tearDown() { |
| 319 signalingExecutor.shutdown(); | 333 signalingExecutor.shutdown(); |
| 320 if (eglBase != null) { | 334 if (eglBase != null) { |
| 321 eglBase.release(); | 335 eglBase.release(); |
| 322 } | 336 } |
| 323 } | 337 } |
| 324 | 338 |
| 325 @SmallTest | 339 @SmallTest |
| 326 public void testSetLocalOfferMakesVideoFlowLocally() throws InterruptedException { | 340 public void testSetLocalOfferMakesVideoFlowLocally() throws InterruptedException { |
| 327 Log.d(TAG, "testSetLocalOfferMakesVideoFlowLocally"); | 341 Log.d(TAG, "testSetLocalOfferMakesVideoFlowLocally"); |
| 328 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); | 342 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); |
| 329 pcClient = createPeerConnectionClient(localRenderer, new MockRenderer(0, null), | 343 pcClient = createPeerConnectionClient(localRenderer, new MockRenderer(0, null), |
| 330 createParametersForVideoCall(VIDEO_CODEC_VP8, false), null); | 344 createParametersForVideoCall(VIDEO_CODEC_VP8), |
| | 345 createCameraCapturer(false), null); |
| 331 | 346 |
| 332 // Wait for local SDP and ice candidates set events. | 347 // Wait for local SDP and ice candidates set events. |
| 333 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); | 348 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); |
| 334 assertTrue("ICE candidates were not generated.", waitForIceCandidates(WAIT_TIMEOUT)); | 349 assertTrue("ICE candidates were not generated.", waitForIceCandidates(WAIT_TIMEOUT)); |
| 335 | 350 |
| 336 // Check that local video frames were rendered. | 351 // Check that local video frames were rendered. |
| 337 assertTrue( | 352 assertTrue( |
| 338 "Local video frames were not rendered.", localRenderer.waitForFramesRendered(WAIT_TIMEOUT)); | 353 "Local video frames were not rendered.", localRenderer.waitForFramesRendered(WAIT_TIMEOUT)); |
| 339 | 354 |
| 340 pcClient.close(); | 355 pcClient.close(); |
| 341 assertTrue( | 356 assertTrue( |
| 342 "PeerConnection close event was not received.", waitForPeerConnectionClosed(WAIT_TIMEOUT)); | 357 "PeerConnection close event was not received.", waitForPeerConnectionClosed(WAIT_TIMEOUT)); |
| 343 Log.d(TAG, "testSetLocalOfferMakesVideoFlowLocally Done."); | 358 Log.d(TAG, "testSetLocalOfferMakesVideoFlowLocally Done."); |
| 344 } | 359 } |
| 345 | 360 |
| 346 private void doLoopbackTest(PeerConnectionParameters parameters, boolean decodeToTexure) | 361 private void doLoopbackTest( |
| | 362 PeerConnectionParameters parameters, |
| | 363 VideoCapturer videoCapturer, |
| | 364 boolean decodeToTexture) |
| 347 throws InterruptedException { | 365 throws InterruptedException { |
| 348 loopback = true; | 366 loopback = true; |
| 349 MockRenderer localRenderer = null; | 367 MockRenderer localRenderer = null; |
| 350 MockRenderer remoteRenderer = null; | 368 MockRenderer remoteRenderer = null; |
| 351 if (parameters.videoCallEnabled) { | 369 if (parameters.videoCallEnabled) { |
| 352 Log.d(TAG, "testLoopback for video " + parameters.videoCodec); | 370 Log.d(TAG, "testLoopback for video " + parameters.videoCodec); |
| 353 localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); | 371 localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); |
| 354 remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); | 372 remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); |
| 355 } else { | 373 } else { |
| 356 Log.d(TAG, "testLoopback for audio."); | 374 Log.d(TAG, "testLoopback for audio."); |
| 357 } | 375 } |
| 358 pcClient = createPeerConnectionClient(localRenderer, remoteRenderer, parameters, | 376 pcClient = createPeerConnectionClient(localRenderer, remoteRenderer, |
| 359 decodeToTexure ? eglBase.getEglBaseContext() : null); | 377 parameters, videoCapturer, |
| | 378 decodeToTexture ? eglBase.getEglBaseContext() : null); |
| 360 | 379 |
| 361 // Wait for local SDP, rename it to answer and set as remote SDP. | 380 // Wait for local SDP, rename it to answer and set as remote SDP. |
| 362 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); | 381 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); |
| 363 SessionDescription remoteSdp = new SessionDescription( | 382 SessionDescription remoteSdp = new SessionDescription( |
| 364 SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description); | 383 SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description); |
| 365 pcClient.setRemoteDescription(remoteSdp); | 384 pcClient.setRemoteDescription(remoteSdp); |
| 366 | 385 |
| 367 // Wait for ICE connection. | 386 // Wait for ICE connection. |
| 368 assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT)); | 387 assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT)); |
| 369 | 388 |
| 370 if (parameters.videoCallEnabled) { | 389 if (parameters.videoCallEnabled) { |
| 371 // Check that local and remote video frames were rendered. | 390 // Check that local and remote video frames were rendered. |
| 372 assertTrue("Local video frames were not rendered.", | 391 assertTrue("Local video frames were not rendered.", |
| 373 localRenderer.waitForFramesRendered(WAIT_TIMEOUT)); | 392 localRenderer.waitForFramesRendered(WAIT_TIMEOUT)); |
| 374 assertTrue("Remote video frames were not rendered.", | 393 assertTrue("Remote video frames were not rendered.", |
| 375 remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT)); | 394 remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT)); |
| 376 } else { | 395 } else { |
| 377 // For audio just sleep for 1 sec. | 396 // For audio just sleep for 1 sec. |
| 378 // TODO(glaznev): check how we can detect that remote audio was rendered. | 397 // TODO(glaznev): check how we can detect that remote audio was rendered. |
| 379 Thread.sleep(AUDIO_RUN_TIMEOUT); | 398 Thread.sleep(AUDIO_RUN_TIMEOUT); |
| 380 } | 399 } |
| 381 | 400 |
| 382 pcClient.close(); | 401 pcClient.close(); |
| 383 assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT)); | 402 assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT)); |
| 384 Log.d(TAG, "testLoopback done."); | 403 Log.d(TAG, "testLoopback done."); |
| 385 } | 404 } |
| 386 | 405 |
| 387 @SmallTest | 406 @SmallTest |
| 388 public void testLoopbackAudio() throws InterruptedException { | 407 public void testLoopbackAudio() throws InterruptedException { |
| 389 doLoopbackTest(createParametersForAudioCall(), false); | 408 doLoopbackTest(createParametersForAudioCall(), null, false); |
| 390 } | 409 } |
| 391 | 410 |
| 392 @SmallTest | 411 @SmallTest |
| 393 public void testLoopbackVp8() throws InterruptedException { | 412 public void testLoopbackVp8() throws InterruptedException { |
| 394 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8, false), false); | 413 doLoopbackTest( |
| | 414 createParametersForVideoCall(VIDEO_CODEC_VP8), |
| | 415 createCameraCapturer(false), false); |

sakal (2016/10/07 11:57:21): nit: add comments to literals in this file, like c
mandermo (2016/10/10 12:22:14): Done.

| 395 } | 416 } |
| 396 | 417 |
| 397 @SmallTest | 418 @SmallTest |
| 398 public void testLoopbackVp9() throws InterruptedException { | 419 public void testLoopbackVp9() throws InterruptedException { |
| 399 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9, false), false); | 420 doLoopbackTest( |
| | 421 createParametersForVideoCall(VIDEO_CODEC_VP9), |
| | 422 createCameraCapturer(false), false); |
| 400 } | 423 } |
| 401 | 424 |
| 402 @SmallTest | 425 @SmallTest |
| 403 public void testLoopbackH264() throws InterruptedException { | 426 public void testLoopbackH264() throws InterruptedException { |
| 404 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264, false), false); | 427 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264), |
| | 428 createCameraCapturer(false), false); |
| 405 } | 429 } |
| 406 | 430 |
| 407 @SmallTest | 431 @SmallTest |
| 408 public void testLoopbackVp8DecodeToTexture() throws InterruptedException { | 432 public void testLoopbackVp8DecodeToTexture() throws InterruptedException { |
| 409 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { | 433 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { |
| 410 Log.i(TAG, "Decode to textures is not supported, requires SDK version 19."); | 434 Log.i(TAG, "Decode to textures is not supported, requires SDK version 19."); |
| 411 return; | 435 return; |
| 412 } | 436 } |
| 413 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8, false), true); | 437 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8), |
| | 438 createCameraCapturer(false), true); |
| 414 } | 439 } |
| 415 | 440 |
| 416 @SmallTest | 441 @SmallTest |
| 417 public void testLoopbackVp9DecodeToTexture() throws InterruptedException { | 442 public void testLoopbackVp9DecodeToTexture() throws InterruptedException { |
| 418 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { | 443 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { |
| 419 Log.i(TAG, "Decode to textures is not supported, requires SDK version 19."); | 444 Log.i(TAG, "Decode to textures is not supported, requires SDK version 19."); |
| 420 return; | 445 return; |
| 421 } | 446 } |
| 422 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9, false), true); | 447 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9), |
| | 448 createCameraCapturer(false), true); |
| 423 } | 449 } |
| 424 | 450 |
| 425 @SmallTest | 451 @SmallTest |
| 426 public void testLoopbackH264DecodeToTexture() throws InterruptedException { | 452 public void testLoopbackH264DecodeToTexture() throws InterruptedException { |
| 427 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { | 453 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { |
| 428 Log.i(TAG, "Decode to textures is not supported, requires SDK version 19."); | 454 Log.i(TAG, "Decode to textures is not supported, requires SDK version 19."); |
| 429 return; | 455 return; |
| 430 } | 456 } |
| 431 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264, false), true); | 457 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264), |
| | 458 createCameraCapturer(false), true); |
| 432 } | 459 } |
| 433 | 460 |
| 434 @SmallTest | 461 @SmallTest |
| 435 public void testLoopbackVp8CaptureToTexture() throws InterruptedException { | 462 public void testLoopbackVp8CaptureToTexture() throws InterruptedException { |
| 436 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { | 463 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { |
| 437 Log.i(TAG, "Encode to textures is not supported. Requires SDK version 19"); | 464 Log.i(TAG, "Encode to textures is not supported. Requires SDK version 19"); |
| 438 return; | 465 return; |
| 439 } | 466 } |
| 440 // TODO(perkj): If we can always capture to textures, there is no need to check if the | 467 // TODO(perkj): If we can always capture to textures, there is no need to check if the |
| 441 // hardware encoder supports to encode from a texture. | 468 // hardware encoder supports to encode from a texture. |
| 442 if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) { | 469 if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) { |
| 443 Log.i(TAG, "VP8 encode to textures is not supported."); | 470 Log.i(TAG, "VP8 encode to textures is not supported."); |
| 444 return; | 471 return; |
| 445 } | 472 } |
| 446 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8, true), true); | 473 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8), |
| | 474 createCameraCapturer(true), true); |
| 447 } | 475 } |
| 448 | 476 |
| 449 // Test that a call can be setup even if the EGL context used during initialization is | 477 // Test that a call can be setup even if the EGL context used during initialization is |
| 450 // released before the Video codecs are created. The HW encoder and decoder is setup to use | 478 // released before the Video codecs are created. The HW encoder and decoder is setup to use |
| 451 // textures. | 479 // textures. |
| 452 @SmallTest | 480 @SmallTest |
| 453 public void testLoopbackEglContextReleasedAfterCreatingPc() throws InterruptedException { | 481 public void testLoopbackEglContextReleasedAfterCreatingPc() throws InterruptedException { |
| 454 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { | 482 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { |
| 455 Log.i(TAG, "Decode to textures is not supported. Requires SDK version 19"); | 483 Log.i(TAG, "Decode to textures is not supported. Requires SDK version 19"); |
| 456 return; | 484 return; |
| 457 } | 485 } |
| 458 | 486 |
| 459 loopback = true; | 487 loopback = true; |
| 460 PeerConnectionParameters parameters = createParametersForVideoCall(VIDEO_CODEC_VP8, true); | 488 PeerConnectionParameters parameters = createParametersForVideoCall(VIDEO_CODEC_VP8); |
| 461 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); | 489 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); |
| 462 MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); | 490 MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); |
| 463 pcClient = createPeerConnectionClient( | 491 pcClient = createPeerConnectionClient( |
| 464 localRenderer, remoteRenderer, parameters, eglBase.getEglBaseContext()); | 492 localRenderer, remoteRenderer, parameters, createCameraCapturer(true), |
| | 493 eglBase.getEglBaseContext()); |
| 465 | 494 |
| 466 // Wait for local SDP, rename it to answer and set as remote SDP. | 495 // Wait for local SDP, rename it to answer and set as remote SDP. |
| 467 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); | 496 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); |
| 468 | 497 |
| 469 // Release the EGL context used for creating the PeerConnectionClient. | 498 // Release the EGL context used for creating the PeerConnectionClient. |
| 470 // Since createPeerConnectionClient is asynchronous, we must wait for the local | 499 // Since createPeerConnectionClient is asynchronous, we must wait for the local |
| 471 // SessionDescription. | 500 // SessionDescription. |
| 472 eglBase.release(); | 501 eglBase.release(); |
| 473 eglBase = null; | 502 eglBase = null; |
| 474 | 503 |
| (...skipping 19 matching lines...) | |
| 494 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { | 523 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { |
| 495 Log.i(TAG, "Encode to textures is not supported. Requires KITKAT"); | 524 Log.i(TAG, "Encode to textures is not supported. Requires KITKAT"); |
| 496 return; | 525 return; |
| 497 } | 526 } |
| 498 // TODO(perkj): If we can always capture to textures, there is no need to check if the | 527 // TODO(perkj): If we can always capture to textures, there is no need to check if the |
| 499 // hardware encoder supports to encode from a texture. | 528 // hardware encoder supports to encode from a texture. |
| 500 if (!MediaCodecVideoEncoder.isH264HwSupportedUsingTextures()) { | 529 if (!MediaCodecVideoEncoder.isH264HwSupportedUsingTextures()) { |
| 501 Log.i(TAG, "H264 encode to textures is not supported."); | 530 Log.i(TAG, "H264 encode to textures is not supported."); |
| 502 return; | 531 return; |
| 503 } | 532 } |
| 504 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264, true), true); | 533 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264), |
| | 534 createCameraCapturer(true), true); |
| 505 } | 535 } |
| 506 | 536 |
| 507 // Checks if default front camera can be switched to back camera and then | 537 // Checks if default front camera can be switched to back camera and then |
| 508 // again to front camera. | 538 // again to front camera. |
| 509 @SmallTest | 539 @SmallTest |
| 510 public void testCameraSwitch() throws InterruptedException { | 540 public void testCameraSwitch() throws InterruptedException { |
| 511 Log.d(TAG, "testCameraSwitch"); | 541 Log.d(TAG, "testCameraSwitch"); |
| 512 loopback = true; | 542 loopback = true; |
| 513 | 543 |
| 514 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); | 544 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); |
| 515 MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); | 545 MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); |
| 516 | 546 |
| 517 pcClient = createPeerConnectionClient( | 547 pcClient = createPeerConnectionClient( |
| 518 localRenderer, remoteRenderer, createParametersForVideoCall(VIDEO_CODEC_VP8, false), null); | 548 localRenderer, remoteRenderer, |
| | 549 createParametersForVideoCall(VIDEO_CODEC_VP8), |
| | 550 createCameraCapturer(false), null); |
| 519 | 551 |
| 520 // Wait for local SDP, rename it to answer and set as remote SDP. | 552 // Wait for local SDP, rename it to answer and set as remote SDP. |
| 521 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); | 553 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); |
| 522 SessionDescription remoteSdp = new SessionDescription( | 554 SessionDescription remoteSdp = new SessionDescription( |
| 523 SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description); | 555 SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description); |
| 524 pcClient.setRemoteDescription(remoteSdp); | 556 pcClient.setRemoteDescription(remoteSdp); |
| 525 | 557 |
| 526 // Wait for ICE connection. | 558 // Wait for ICE connection. |
| 527 assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT)); | 559 assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT)); |
| 528 | 560 |
| (...skipping 27 matching lines...) | |
| 556 @FlakyTest | 588 @FlakyTest |
| 557 //@SmallTest | 589 //@SmallTest |
| 558 public void testVideoSourceRestart() throws InterruptedException { | 590 public void testVideoSourceRestart() throws InterruptedException { |
| 559 Log.d(TAG, "testVideoSourceRestart"); | 591 Log.d(TAG, "testVideoSourceRestart"); |
| 560 loopback = true; | 592 loopback = true; |
| 561 | 593 |
| 562 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); | 594 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); |
| 563 MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); | 595 MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); |
| 564 | 596 |
| 565 pcClient = createPeerConnectionClient( | 597 pcClient = createPeerConnectionClient( |
| 566 localRenderer, remoteRenderer, createParametersForVideoCall(VIDEO_CODEC_VP8, false), null); | 598 localRenderer, remoteRenderer, |
| | 599 createParametersForVideoCall(VIDEO_CODEC_VP8), |
| | 600 createCameraCapturer(false), null); |
| 567 | 601 |
| 568 // Wait for local SDP, rename it to answer and set as remote SDP. | 602 // Wait for local SDP, rename it to answer and set as remote SDP. |
| 569 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); | 603 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); |
| 570 SessionDescription remoteSdp = new SessionDescription( | 604 SessionDescription remoteSdp = new SessionDescription( |
| 571 SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description); | 605 SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description); |
| 572 pcClient.setRemoteDescription(remoteSdp); | 606 pcClient.setRemoteDescription(remoteSdp); |
| 573 | 607 |
| 574 // Wait for ICE connection. | 608 // Wait for ICE connection. |
| 575 assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT)); | 609 assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT)); |
| 576 | 610 |
| (...skipping 28 matching lines...) | |
| 605 @FlakyTest | 639 @FlakyTest |
| 606 //@SmallTest | 640 //@SmallTest |
| 607 public void testCaptureFormatChange() throws InterruptedException { | 641 public void testCaptureFormatChange() throws InterruptedException { |
| 608 Log.d(TAG, "testCaptureFormatChange"); | 642 Log.d(TAG, "testCaptureFormatChange"); |
| 609 loopback = true; | 643 loopback = true; |
| 610 | 644 |
| 611 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); | 645 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); |
| 612 MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); | 646 MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); |
| 613 | 647 |
| 614 pcClient = createPeerConnectionClient( | 648 pcClient = createPeerConnectionClient( |
| 615 localRenderer, remoteRenderer, createParametersForVideoCall(VIDEO_CODEC_VP8, false), null); | 649 localRenderer, remoteRenderer, |
| | 650 createParametersForVideoCall(VIDEO_CODEC_VP8), |
| | 651 createCameraCapturer(false), null); |
| 616 | 652 |
| 617 // Wait for local SDP, rename it to answer and set as remote SDP. | 653 // Wait for local SDP, rename it to answer and set as remote SDP. |
| 618 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); | 654 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); |
| 619 SessionDescription remoteSdp = new SessionDescription( | 655 SessionDescription remoteSdp = new SessionDescription( |
| 620 SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description); | 656 SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description); |
| 621 pcClient.setRemoteDescription(remoteSdp); | 657 pcClient.setRemoteDescription(remoteSdp); |
| 622 | 658 |
| 623 // Wait for ICE connection. | 659 // Wait for ICE connection. |
| 624 assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT)); | 660 assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT)); |
| 625 | 661 |
| (...skipping 19 matching lines...) | |
| 645 localRenderer.waitForFramesRendered(WAIT_TIMEOUT)); | 681 localRenderer.waitForFramesRendered(WAIT_TIMEOUT)); |
| 646 assertTrue("Remote video frames were not rendered after capture format change.", | 682 assertTrue("Remote video frames were not rendered after capture format change.", |
| 647 remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT)); | 683 remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT)); |
| 648 } | 684 } |
| 649 | 685 |
| 650 pcClient.close(); | 686 pcClient.close(); |
| 651 assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT)); | 687 assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT)); |
| 652 Log.d(TAG, "testCaptureFormatChange done."); | 688 Log.d(TAG, "testCaptureFormatChange done."); |
| 653 } | 689 } |
| 654 } | 690 } |
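
The review thread above asks for the boolean literals in this file to carry inline comments. Below is a minimal sketch of how a loopback test reads under this patch with that nit applied; it reuses only helpers and constants already defined in PeerConnectionClientTest (createParametersForVideoCall, createCameraCapturer, doLoopbackTest, VIDEO_CODEC_VP8), while the test method name and the exact comment wording are illustrative, not part of the landed change.

```java
// Illustrative only: a loopback test written against the new capturer-injection API
// from this CL, with the boolean literals commented as requested in the review.
// The helpers and constants are the ones defined elsewhere in PeerConnectionClientTest.
@SmallTest
public void testLoopbackVp8WithCommentedLiterals() throws InterruptedException {
  // Call parameters no longer describe the camera; the capturer is created separately.
  PeerConnectionParameters parameters = createParametersForVideoCall(VIDEO_CODEC_VP8);

  // false: do not capture to a texture (matches the existing testLoopbackVp8 case).
  VideoCapturer capturer = createCameraCapturer(false /* captureToTexture */);

  // Run the loopback; the last literal selects whether decoding renders to a texture.
  doLoopbackTest(parameters, capturer, false /* decodeToTexture */);
}
```

With the capturer passed in explicitly, tests that need texture capture call createCameraCapturer(true /* captureToTexture */) instead of threading useCamera2 and captureToTexture through PeerConnectionParameters, which is the core of this change.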