Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 /* | 1 /* |
| 2 * Copyright 2014 The WebRTC Project Authors. All rights reserved. | 2 * Copyright 2014 The WebRTC Project Authors. All rights reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| 11 package org.appspot.apprtc.test; | 11 package org.appspot.apprtc.test; |
| 12 | 12 |
| 13 import org.appspot.apprtc.AppRTCClient.SignalingParameters; | 13 import org.appspot.apprtc.AppRTCClient.SignalingParameters; |
| 14 import org.appspot.apprtc.PeerConnectionClient; | 14 import org.appspot.apprtc.PeerConnectionClient; |
| 15 import org.appspot.apprtc.PeerConnectionClient.PeerConnectionEvents; | 15 import org.appspot.apprtc.PeerConnectionClient.PeerConnectionEvents; |
| 16 import org.appspot.apprtc.PeerConnectionClient.PeerConnectionParameters; | 16 import org.appspot.apprtc.PeerConnectionClient.PeerConnectionParameters; |
| 17 | 17 |
| 18 import android.os.Build; | 18 import android.os.Build; |
| 19 import android.test.InstrumentationTestCase; | 19 import android.test.InstrumentationTestCase; |
| 20 import android.test.suitebuilder.annotation.SmallTest; | 20 import android.test.suitebuilder.annotation.SmallTest; |
| 21 import android.util.Log; | 21 import android.util.Log; |
| 22 | 22 |
| 23 import org.webrtc.Camera1Enumerator; | |
| 23 import org.webrtc.Camera2Enumerator; | 24 import org.webrtc.Camera2Enumerator; |
| 25 import org.webrtc.CameraEnumerator; | |
| 24 import org.webrtc.EglBase; | 26 import org.webrtc.EglBase; |
| 25 import org.webrtc.IceCandidate; | 27 import org.webrtc.IceCandidate; |
| 26 import org.webrtc.MediaCodecVideoEncoder; | 28 import org.webrtc.MediaCodecVideoEncoder; |
| 27 import org.webrtc.PeerConnection; | 29 import org.webrtc.PeerConnection; |
| 28 import org.webrtc.PeerConnectionFactory; | 30 import org.webrtc.PeerConnectionFactory; |
| 29 import org.webrtc.SessionDescription; | 31 import org.webrtc.SessionDescription; |
| 30 import org.webrtc.StatsReport; | 32 import org.webrtc.StatsReport; |
| 33 import org.webrtc.VideoCapturer; | |
| 31 import org.webrtc.VideoRenderer; | 34 import org.webrtc.VideoRenderer; |
| 32 | 35 |
| 33 import java.util.LinkedList; | 36 import java.util.LinkedList; |
| 34 import java.util.List; | 37 import java.util.List; |
| 35 import java.util.concurrent.CountDownLatch; | 38 import java.util.concurrent.CountDownLatch; |
| 36 import java.util.concurrent.ExecutorService; | 39 import java.util.concurrent.ExecutorService; |
| 37 import java.util.concurrent.Executors; | 40 import java.util.concurrent.Executors; |
| 38 import java.util.concurrent.TimeUnit; | 41 import java.util.concurrent.TimeUnit; |
| 39 | 42 |
| 40 public class PeerConnectionClientTest extends InstrumentationTestCase | 43 public class PeerConnectionClientTest extends InstrumentationTestCase |
| (...skipping 193 matching lines...) | |
| 234 synchronized(closeEvent) { | 237 synchronized(closeEvent) { |
| 235 if (!isClosed) { | 238 if (!isClosed) { |
| 236 closeEvent.wait(timeoutMs); | 239 closeEvent.wait(timeoutMs); |
| 237 } | 240 } |
| 238 return isClosed; | 241 return isClosed; |
| 239 } | 242 } |
| 240 } | 243 } |
| 241 | 244 |
| 242 PeerConnectionClient createPeerConnectionClient( | 245 PeerConnectionClient createPeerConnectionClient( |
| 243 MockRenderer localRenderer, MockRenderer remoteRenderer, | 246 MockRenderer localRenderer, MockRenderer remoteRenderer, |
| 244 PeerConnectionParameters peerConnectionParameters, EglBase.Context eglContext) { | 247 PeerConnectionParameters peerConnectionParameters, |
| 248 VideoCapturer videoCapturer, | |
| 249 EglBase.Context eglContext) { | |
| 245 List<PeerConnection.IceServer> iceServers = | 250 List<PeerConnection.IceServer> iceServers = |
| 246 new LinkedList<PeerConnection.IceServer>(); | 251 new LinkedList<PeerConnection.IceServer>(); |
| 247 SignalingParameters signalingParameters = new SignalingParameters( | 252 SignalingParameters signalingParameters = new SignalingParameters( |
| 248 iceServers, true, // iceServers, initiator. | 253 iceServers, true, // iceServers, initiator. |
| 249 null, null, null, // clientId, wssUrl, wssPostUrl. | 254 null, null, null, // clientId, wssUrl, wssPostUrl. |
| 250 null, null); // offerSdp, iceCandidates. | 255 null, null); // offerSdp, iceCandidates. |
| 251 | 256 |
| 252 PeerConnectionClient client = PeerConnectionClient.getInstance(); | 257 PeerConnectionClient client = PeerConnectionClient.getInstance(); |
| 253 PeerConnectionFactory.Options options = new PeerConnectionFactory.Options(); | 258 PeerConnectionFactory.Options options = new PeerConnectionFactory.Options(); |
| 254 options.networkIgnoreMask = 0; | 259 options.networkIgnoreMask = 0; |
| 255 options.disableNetworkMonitor = true; | 260 options.disableNetworkMonitor = true; |
| 256 client.setPeerConnectionFactoryOptions(options); | 261 client.setPeerConnectionFactoryOptions(options); |
| 257 client.createPeerConnectionFactory( | 262 client.createPeerConnectionFactory( |
| 258 getInstrumentation().getTargetContext(), peerConnectionParameters, this); | 263 getInstrumentation().getTargetContext(), peerConnectionParameters, this); |
| 259 client.createPeerConnection(eglContext, localRenderer, remoteRenderer, signalingParameters); | 264 client.createPeerConnection( |
| 265 eglContext, localRenderer, remoteRenderer, | |
| 266 videoCapturer, signalingParameters); | |
| 260 client.createOffer(); | 267 client.createOffer(); |
| 261 return client; | 268 return client; |
| 262 } | 269 } |
| 263 | 270 |
| 264 private PeerConnectionParameters createParametersForAudioCall() { | 271 private PeerConnectionParameters createParametersForAudioCall() { |
| 265 PeerConnectionParameters peerConnectionParameters = | 272 PeerConnectionParameters peerConnectionParameters = |
| 266 new PeerConnectionParameters( | 273 new PeerConnectionParameters( |
| 267 false, /* videoCallEnabled */ | 274 false, /* videoCallEnabled */ |
| 268 true, /* loopback */ | 275 true, /* loopback */ |
| 269 false, /* tracing */ | 276 false, /* tracing */ |
| 270 // Video codec parameters. | 277 // Video codec parameters. |
| 271 true, /* useCamera2 */ | |
| 272 0, /* videoWidth */ | 278 0, /* videoWidth */ |
| 273 0, /* videoHeight */ | 279 0, /* videoHeight */ |
| 274 0, /* videoFps */ | 280 0, /* videoFps */ |
| 275 0, /* videoStartBitrate */ | 281 0, /* videoStartBitrate */ |
| 276 "", /* videoCodec */ | 282 "", /* videoCodec */ |
| 277 true, /* videoCodecHwAcceleration */ | 283 true, /* videoCodecHwAcceleration */ |
| 278 false, /* captureToToTexture */ | |
| 279 // Audio codec parameters. | 284 // Audio codec parameters. |
| 280 0, /* audioStartBitrate */ | 285 0, /* audioStartBitrate */ |
| 281 "OPUS", /* audioCodec */ | 286 "OPUS", /* audioCodec */ |
| 282 false, /* noAudioProcessing */ | 287 false, /* noAudioProcessing */ |
| 283 false, /* aecDump */ | 288 false, /* aecDump */ |
| 284 false /* useOpenSLES */, | 289 false, /* useOpenSLES */ |
| 285 false /* disableBuiltInAEC */, | 290 false, /* disableBuiltInAEC */ |
| 286 false /* disableBuiltInAGC */, | 291 false, /* disableBuiltInAGC */ |
| 287 false /* disableBuiltInNS */, | 292 false, /* disableBuiltInNS */ |
| 288 false /* enableLevelControl */); | 293 false /* enableLevelControl */); |
| 294 | |
| 289 return peerConnectionParameters; | 295 return peerConnectionParameters; |
| 290 } | 296 } |
| 291 | 297 |
| 292 private PeerConnectionParameters createParametersForVideoCall( | 298 private VideoCapturer createVideoCapturer(boolean captureToTexture) { |
| 293 String videoCodec, boolean captureToTexture) { | |
| 294 final boolean useCamera2 = captureToTexture && Camera2Enumerator.isSupported(); | 299 final boolean useCamera2 = captureToTexture && Camera2Enumerator.isSupported(); |

**sakal** (2016/10/05 13:28:42): isSupported now takes context as a parameter, plea…

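A minimal sketch of the update the reviewer is asking for, assuming the `Camera2Enumerator.isSupported(Context)` overload the comment refers to; the surrounding names mirror the test code in this diff:

```java
// Sketch only (assumes the Context-taking overload the reviewer mentions):
// pass the instrumentation target context into the Camera2 support check.
final boolean useCamera2 = captureToTexture
    && Camera2Enumerator.isSupported(getInstrumentation().getTargetContext());
```
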
| 295 | 300 |
| 301 CameraEnumerator enumerator; | |
| 302 if (useCamera2) { | |
| 303 enumerator = new Camera2Enumerator(getInstrumentation().getTargetContext()); | |
| 304 } else { | |
| 305 enumerator = new Camera1Enumerator(captureToTexture); | |
| 306 } | |
| 307 String deviceName = enumerator.getDeviceNames()[0]; | |
| 308 return enumerator.createCapturer(deviceName, null); | |
| 309 } | |
| 310 | |
| 311 private PeerConnectionParameters createParametersForVideoCall( | |
| 312 String videoCodec) { | |
| 296 PeerConnectionParameters peerConnectionParameters = | 313 PeerConnectionParameters peerConnectionParameters = |
| 297 new PeerConnectionParameters( | 314 new PeerConnectionParameters( |
| 298 true, /* videoCallEnabled */ | 315 true, /* videoCallEnabled */ |
| 299 true, /* loopback */ | 316 true, /* loopback */ |
| 300 false, /* tracing */ | 317 false, /* tracing */ |
| 301 // Video codec parameters. | 318 // Video codec parameters. |
| 302 useCamera2, /* useCamera2 */ | |
| 303 0, /* videoWidth */ | 319 0, /* videoWidth */ |
| 304 0, /* videoHeight */ | 320 0, /* videoHeight */ |
| 305 0, /* videoFps */ | 321 0, /* videoFps */ |
| 306 0, /* videoStartBitrate */ | 322 0, /* videoStartBitrate */ |
| 307 videoCodec, /* videoCodec */ | 323 videoCodec, /* videoCodec */ |
| 308 true, /* videoCodecHwAcceleration */ | 324 true, /* videoCodecHwAcceleration */ |
| 309 captureToTexture, /* captureToToTexture */ | |
| 310 // Audio codec parameters. | 325 // Audio codec parameters. |
| 311 0, /* audioStartBitrate */ | 326 0, /* audioStartBitrate */ |
| 312 "OPUS", /* audioCodec */ | 327 "OPUS", /* audioCodec */ |
| 313 false, /* noAudioProcessing */ | 328 false, /* noAudioProcessing */ |
| 314 false, /* aecDump */ | 329 false, /* aecDump */ |
| 315 false /* useOpenSLES */, | 330 false, /* useOpenSLES */ |
| 316 false /* disableBuiltInAEC */, | 331 false, /* disableBuiltInAEC */ |
| 317 false /* disableBuiltInAGC */, | 332 false, /* disableBuiltInAGC */ |
| 318 false /* disableBuiltInNS */, | 333 false, /* disableBuiltInNS */ |
| 319 false /* enableLevelControl */); | 334 false /* enableLevelControl */); |
| 335 | |
| 320 return peerConnectionParameters; | 336 return peerConnectionParameters; |
| 321 } | 337 } |
| 322 | 338 |
| 323 @Override | 339 @Override |
| 324 public void setUp() { | 340 public void setUp() { |
| 325 signalingExecutor = Executors.newSingleThreadExecutor(); | 341 signalingExecutor = Executors.newSingleThreadExecutor(); |
| 326 if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) { | 342 if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) { |
| 327 eglBase = EglBase.create(); | 343 eglBase = EglBase.create(); |
| 328 } | 344 } |
| 329 } | 345 } |
| 330 | 346 |
| 331 @Override | 347 @Override |
| 332 public void tearDown() { | 348 public void tearDown() { |
| 333 signalingExecutor.shutdown(); | 349 signalingExecutor.shutdown(); |
| 334 if (eglBase != null) { | 350 if (eglBase != null) { |
| 335 eglBase.release(); | 351 eglBase.release(); |
| 336 } | 352 } |
| 337 } | 353 } |
| 338 | 354 |
| 339 @SmallTest | 355 @SmallTest |
| 340 public void testSetLocalOfferMakesVideoFlowLocally() | 356 public void testSetLocalOfferMakesVideoFlowLocally() |
| 341 throws InterruptedException { | 357 throws InterruptedException { |
| 342 Log.d(TAG, "testSetLocalOfferMakesVideoFlowLocally"); | 358 Log.d(TAG, "testSetLocalOfferMakesVideoFlowLocally"); |
| 343 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); | 359 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); |
| 344 pcClient = createPeerConnectionClient( | 360 pcClient = createPeerConnectionClient( |
| 345 localRenderer, new MockRenderer(0, null), | 361 localRenderer, new MockRenderer(0, null), |
| 346 createParametersForVideoCall(VIDEO_CODEC_VP8, false), null); | 362 createParametersForVideoCall(VIDEO_CODEC_VP8), |
| 363 createVideoCapturer(false), null); | |
| 347 | 364 |
| 348 // Wait for local SDP and ice candidates set events. | 365 // Wait for local SDP and ice candidates set events. |
| 349 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); | 366 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); |
| 350 assertTrue("ICE candidates were not generated.", | 367 assertTrue("ICE candidates were not generated.", |
| 351 waitForIceCandidates(WAIT_TIMEOUT)); | 368 waitForIceCandidates(WAIT_TIMEOUT)); |
| 352 | 369 |
| 353 // Check that local video frames were rendered. | 370 // Check that local video frames were rendered. |
| 354 assertTrue("Local video frames were not rendered.", | 371 assertTrue("Local video frames were not rendered.", |
| 355 localRenderer.waitForFramesRendered(WAIT_TIMEOUT)); | 372 localRenderer.waitForFramesRendered(WAIT_TIMEOUT)); |
| 356 | 373 |
| 357 pcClient.close(); | 374 pcClient.close(); |
| 358 assertTrue("PeerConnection close event was not received.", | 375 assertTrue("PeerConnection close event was not received.", |
| 359 waitForPeerConnectionClosed(WAIT_TIMEOUT)); | 376 waitForPeerConnectionClosed(WAIT_TIMEOUT)); |
| 360 Log.d(TAG, "testSetLocalOfferMakesVideoFlowLocally Done."); | 377 Log.d(TAG, "testSetLocalOfferMakesVideoFlowLocally Done."); |
| 361 } | 378 } |
| 362 | 379 |
| 363 private void doLoopbackTest(PeerConnectionParameters parameters, boolean decodeToTexure) | 380 private void doLoopbackTest( |
| 381 PeerConnectionParameters parameters, | |
| 382 VideoCapturer videoCapturer, | |
| 383 boolean decodeToTexure) | |

**sakal** (2016/10/05 13:28:42): nit: can you fix the typo in the parameter name

**mandermo** (2016/10/07 11:33:41): Done.

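The typo being flagged is the `decodeToTexure` parameter of `doLoopbackTest`; a hypothetical corrected signature (the rename presumably landed in a later patch set) would look like:

```java
// Hypothetical rename only: decodeToTexure -> decodeToTexture.
// The parameter list otherwise matches the new version shown below.
private void doLoopbackTest(
    PeerConnectionParameters parameters,
    VideoCapturer videoCapturer,
    boolean decodeToTexture)
    throws InterruptedException {
  // body unchanged
}
```
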
| 364 throws InterruptedException { | 384 throws InterruptedException { |
| 365 loopback = true; | 385 loopback = true; |
| 366 MockRenderer localRenderer = null; | 386 MockRenderer localRenderer = null; |
| 367 MockRenderer remoteRenderer = null; | 387 MockRenderer remoteRenderer = null; |
| 368 if (parameters.videoCallEnabled) { | 388 if (parameters.videoCallEnabled) { |
| 369 Log.d(TAG, "testLoopback for video " + parameters.videoCodec); | 389 Log.d(TAG, "testLoopback for video " + parameters.videoCodec); |
| 370 localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); | 390 localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); |
| 371 remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); | 391 remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); |
| 372 } else { | 392 } else { |
| 373 Log.d(TAG, "testLoopback for audio."); | 393 Log.d(TAG, "testLoopback for audio."); |
| 374 } | 394 } |
| 375 pcClient = createPeerConnectionClient(localRenderer, remoteRenderer, parameters, | 395 pcClient = createPeerConnectionClient(localRenderer, remoteRenderer, |
| 396 parameters, videoCapturer, | |
| 376 decodeToTexure ? eglBase.getEglBaseContext() : null); | 397 decodeToTexure ? eglBase.getEglBaseContext() : null); |
| 377 | 398 |
| 378 // Wait for local SDP, rename it to answer and set as remote SDP. | 399 // Wait for local SDP, rename it to answer and set as remote SDP. |
| 379 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); | 400 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); |
| 380 SessionDescription remoteSdp = new SessionDescription( | 401 SessionDescription remoteSdp = new SessionDescription( |
| 381 SessionDescription.Type.fromCanonicalForm("answer"), | 402 SessionDescription.Type.fromCanonicalForm("answer"), |
| 382 localSdp.description); | 403 localSdp.description); |
| 383 pcClient.setRemoteDescription(remoteSdp); | 404 pcClient.setRemoteDescription(remoteSdp); |
| 384 | 405 |
| 385 // Wait for ICE connection. | 406 // Wait for ICE connection. |
| (...skipping 11 matching lines...) | |
| 397 Thread.sleep(AUDIO_RUN_TIMEOUT); | 418 Thread.sleep(AUDIO_RUN_TIMEOUT); |
| 398 } | 419 } |
| 399 | 420 |
| 400 pcClient.close(); | 421 pcClient.close(); |
| 401 assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT)); | 422 assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT)); |
| 402 Log.d(TAG, "testLoopback done."); | 423 Log.d(TAG, "testLoopback done."); |
| 403 } | 424 } |
| 404 | 425 |
| 405 @SmallTest | 426 @SmallTest |
| 406 public void testLoopbackAudio() throws InterruptedException { | 427 public void testLoopbackAudio() throws InterruptedException { |
| 407 doLoopbackTest(createParametersForAudioCall(), false); | 428 doLoopbackTest(createParametersForAudioCall(), null, false); |
| 408 } | 429 } |
| 409 | 430 |
| 410 @SmallTest | 431 @SmallTest |
| 411 public void testLoopbackVp8() throws InterruptedException { | 432 public void testLoopbackVp8() throws InterruptedException { |
| 412 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8, false), false); | 433 doLoopbackTest( |
| 434 createParametersForVideoCall(VIDEO_CODEC_VP8), | |
| 435 createVideoCapturer(false), false); | |
| 413 } | 436 } |
| 414 | 437 |
| 415 @SmallTest | 438 @SmallTest |
| 416 public void testLoopbackVp9() throws InterruptedException { | 439 public void testLoopbackVp9() throws InterruptedException { |
| 417 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9, false), false); | 440 doLoopbackTest( |
| 441 createParametersForVideoCall(VIDEO_CODEC_VP9), | |
| 442 createVideoCapturer(false), false); | |
| 418 } | 443 } |
| 419 | 444 |
| 420 @SmallTest | 445 @SmallTest |
| 421 public void testLoopbackH264() throws InterruptedException { | 446 public void testLoopbackH264() throws InterruptedException { |
| 422 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264, false), false); | 447 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264), |
| 448 createVideoCapturer(false), false); | |
| 423 } | 449 } |
| 424 | 450 |
| 425 @SmallTest | 451 @SmallTest |
| 426 public void testLoopbackVp8DecodeToTexture() throws InterruptedException { | 452 public void testLoopbackVp8DecodeToTexture() throws InterruptedException { |
| 427 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { | 453 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { |
| 428 Log.i(TAG, "Decode to textures is not supported, requires SDK version 19."); | 454 Log.i(TAG, "Decode to textures is not supported, requires SDK version 19."); |
| 429 return; | 455 return; |
| 430 } | 456 } |
| 431 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8, false), true); | 457 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8), |
| 458 createVideoCapturer(false), true); | |
| 432 } | 459 } |
| 433 | 460 |
| 434 @SmallTest | 461 @SmallTest |
| 435 public void testLoopbackVp9DecodeToTexture() throws InterruptedException { | 462 public void testLoopbackVp9DecodeToTexture() throws InterruptedException { |
| 436 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { | 463 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { |
| 437 Log.i(TAG, "Decode to textures is not supported, requires SDK version 19."); | 464 Log.i(TAG, "Decode to textures is not supported, requires SDK version 19."); |
| 438 return; | 465 return; |
| 439 } | 466 } |
| 440 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9, false), true); | 467 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9), |
| 468 createVideoCapturer(false), true); | |
| 441 } | 469 } |
| 442 | 470 |
| 443 @SmallTest | 471 @SmallTest |
| 444 public void testLoopbackH264DecodeToTexture() throws InterruptedException { | 472 public void testLoopbackH264DecodeToTexture() throws InterruptedException { |
| 445 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { | 473 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { |
| 446 Log.i(TAG, "Decode to textures is not supported, requires SDK version 19."); | 474 Log.i(TAG, "Decode to textures is not supported, requires SDK version 19."); |
| 447 return; | 475 return; |
| 448 } | 476 } |
| 449 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264, false), true); | 477 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264), |
| 478 createVideoCapturer(false), true); | |
| 450 } | 479 } |
| 451 | 480 |
| 452 @SmallTest | 481 @SmallTest |
| 453 public void testLoopbackVp8CaptureToTexture() throws InterruptedException { | 482 public void testLoopbackVp8CaptureToTexture() throws InterruptedException { |
| 454 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { | 483 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { |
| 455 Log.i(TAG, "Encode to textures is not supported. Requires SDK version 19"); | 484 Log.i(TAG, "Encode to textures is not supported. Requires SDK version 19"); |
| 456 return; | 485 return; |
| 457 } | 486 } |
| 458 // TODO(perkj): If we can always capture to textures, there is no need to check if the | 487 // TODO(perkj): If we can always capture to textures, there is no need to check if the |
| 459 // hardware encoder supports to encode from a texture. | 488 // hardware encoder supports to encode from a texture. |
| 460 if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) { | 489 if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) { |
| 461 Log.i(TAG, "VP8 encode to textures is not supported."); | 490 Log.i(TAG, "VP8 encode to textures is not supported."); |
| 462 return; | 491 return; |
| 463 } | 492 } |
| 464 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8, true), true); | 493 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8), |
| 494 createVideoCapturer(true), true); | |
| 465 } | 495 } |
| 466 | 496 |
| 467 // Test that a call can be setup even if the EGL context used during initialization is | 497 // Test that a call can be setup even if the EGL context used during initialization is |
| 468 // released before the Video codecs are created. The HW encoder and decoder is setup to use | 498 // released before the Video codecs are created. The HW encoder and decoder is setup to use |
| 469 // textures. | 499 // textures. |
| 470 @SmallTest | 500 @SmallTest |
| 471 public void testLoopbackEglContextReleasedAfterCreatingPc() throws InterruptedException { | 501 public void testLoopbackEglContextReleasedAfterCreatingPc() throws InterruptedException { |
| 472 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { | 502 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { |
| 473 Log.i(TAG, "Decode to textures is not supported. Requires SDK version 19"); | 503 Log.i(TAG, "Decode to textures is not supported. Requires SDK version 19"); |
| 474 return; | 504 return; |
| 475 } | 505 } |
| 476 | 506 |
| 477 loopback = true; | 507 loopback = true; |
| 478 PeerConnectionParameters parameters = createParametersForVideoCall(VIDEO_CODEC_VP8, true); | 508 PeerConnectionParameters parameters = createParametersForVideoCall(VIDEO_CODEC_VP8); |
| 479 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); | 509 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); |
| 480 MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); | 510 MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); |
| 481 pcClient = createPeerConnectionClient( | 511 pcClient = createPeerConnectionClient( |
| 482 localRenderer, remoteRenderer, parameters, eglBase.getEglBaseContext()); | 512 localRenderer, remoteRenderer, parameters, createVideoCapturer(true), |
| 513 eglBase.getEglBaseContext()); | |
| 483 | 514 |
| 484 // Wait for local SDP, rename it to answer and set as remote SDP. | 515 // Wait for local SDP, rename it to answer and set as remote SDP. |
| 485 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); | 516 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); |
| 486 | 517 |
| 487 // Release the EGL context used for creating the PeerConnectionClient. | 518 // Release the EGL context used for creating the PeerConnectionClient. |
| 488 // Since createPeerConnectionClient is asynchronous, we must wait for the local | 519 // Since createPeerConnectionClient is asynchronous, we must wait for the local |
| 489 // SessionDescription. | 520 // SessionDescription. |
| 490 eglBase.release(); | 521 eglBase.release(); |
| 491 eglBase = null; | 522 eglBase = null; |
| 492 | 523 |
| (...skipping 20 matching lines...) | |
| 513 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { | 544 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { |
| 514 Log.i(TAG, "Encode to textures is not supported. Requires KITKAT"); | 545 Log.i(TAG, "Encode to textures is not supported. Requires KITKAT"); |
| 515 return; | 546 return; |
| 516 } | 547 } |
| 517 // TODO(perkj): If we can always capture to textures, there is no need to check if the | 548 // TODO(perkj): If we can always capture to textures, there is no need to check if the |
| 518 // hardware encoder supports to encode from a texture. | 549 // hardware encoder supports to encode from a texture. |
| 519 if (!MediaCodecVideoEncoder.isH264HwSupportedUsingTextures()) { | 550 if (!MediaCodecVideoEncoder.isH264HwSupportedUsingTextures()) { |
| 520 Log.i(TAG, "H264 encode to textures is not supported."); | 551 Log.i(TAG, "H264 encode to textures is not supported."); |
| 521 return; | 552 return; |
| 522 } | 553 } |
| 523 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264, true), true); | 554 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264), |
| 555 createVideoCapturer(true), true); | |
| 524 } | 556 } |
| 525 | 557 |
| 526 | 558 |
| 527 // Checks if default front camera can be switched to back camera and then | 559 // Checks if default front camera can be switched to back camera and then |
| 528 // again to front camera. | 560 // again to front camera. |
| 529 @SmallTest | 561 @SmallTest |
| 530 public void testCameraSwitch() throws InterruptedException { | 562 public void testCameraSwitch() throws InterruptedException { |
| 531 Log.d(TAG, "testCameraSwitch"); | 563 Log.d(TAG, "testCameraSwitch"); |
| 532 loopback = true; | 564 loopback = true; |
| 533 | 565 |
| 534 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); | 566 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); |
| 535 MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); | 567 MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); |
| 536 | 568 |
| 537 pcClient = createPeerConnectionClient( | 569 pcClient = createPeerConnectionClient( |
| 538 localRenderer, remoteRenderer, createParametersForVideoCall(VIDEO_CODEC_VP8, false), null); | 570 localRenderer, remoteRenderer, |
| 571 createParametersForVideoCall(VIDEO_CODEC_VP8), | |
| 572 createVideoCapturer(false), null); | |
| 539 | 573 |
| 540 // Wait for local SDP, rename it to answer and set as remote SDP. | 574 // Wait for local SDP, rename it to answer and set as remote SDP. |
| 541 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); | 575 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); |
| 542 SessionDescription remoteSdp = new SessionDescription( | 576 SessionDescription remoteSdp = new SessionDescription( |
| 543 SessionDescription.Type.fromCanonicalForm("answer"), | 577 SessionDescription.Type.fromCanonicalForm("answer"), |
| 544 localSdp.description); | 578 localSdp.description); |
| 545 pcClient.setRemoteDescription(remoteSdp); | 579 pcClient.setRemoteDescription(remoteSdp); |
| 546 | 580 |
| 547 // Wait for ICE connection. | 581 // Wait for ICE connection. |
| 548 assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT)); | 582 assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT)); |
| (...skipping 26 matching lines...) | |
| 575 // background and back to foreground. | 609 // background and back to foreground. |
| 576 @SmallTest | 610 @SmallTest |
| 577 public void testVideoSourceRestart() throws InterruptedException { | 611 public void testVideoSourceRestart() throws InterruptedException { |
| 578 Log.d(TAG, "testVideoSourceRestart"); | 612 Log.d(TAG, "testVideoSourceRestart"); |
| 579 loopback = true; | 613 loopback = true; |
| 580 | 614 |
| 581 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); | 615 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); |
| 582 MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); | 616 MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); |
| 583 | 617 |
| 584 pcClient = createPeerConnectionClient( | 618 pcClient = createPeerConnectionClient( |
| 585 localRenderer, remoteRenderer, createParametersForVideoCall(VIDEO_CODEC_VP8, false), null); | 619 localRenderer, remoteRenderer, |
| 620 createParametersForVideoCall(VIDEO_CODEC_VP8), | |
| 621 createVideoCapturer(false), null); | |
| 586 | 622 |
| 587 // Wait for local SDP, rename it to answer and set as remote SDP. | 623 // Wait for local SDP, rename it to answer and set as remote SDP. |
| 588 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); | 624 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); |
| 589 SessionDescription remoteSdp = new SessionDescription( | 625 SessionDescription remoteSdp = new SessionDescription( |
| 590 SessionDescription.Type.fromCanonicalForm("answer"), | 626 SessionDescription.Type.fromCanonicalForm("answer"), |
| 591 localSdp.description); | 627 localSdp.description); |
| 592 pcClient.setRemoteDescription(remoteSdp); | 628 pcClient.setRemoteDescription(remoteSdp); |
| 593 | 629 |
| 594 // Wait for ICE connection. | 630 // Wait for ICE connection. |
| 595 assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT)); | 631 assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT)); |
| (...skipping 27 matching lines...) | |
| 623 // Checks if capture format can be changed on fly and decoder can be reset properly. | 659 // Checks if capture format can be changed on fly and decoder can be reset properly. |
| 624 @SmallTest | 660 @SmallTest |
| 625 public void testCaptureFormatChange() throws InterruptedException { | 661 public void testCaptureFormatChange() throws InterruptedException { |
| 626 Log.d(TAG, "testCaptureFormatChange"); | 662 Log.d(TAG, "testCaptureFormatChange"); |
| 627 loopback = true; | 663 loopback = true; |
| 628 | 664 |
| 629 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); | 665 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); |
| 630 MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); | 666 MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); |
| 631 | 667 |
| 632 pcClient = createPeerConnectionClient( | 668 pcClient = createPeerConnectionClient( |
| 633 localRenderer, remoteRenderer, createParametersForVideoCall(VIDEO_CODEC_VP8, false), null); | 669 localRenderer, remoteRenderer, |
| 670 createParametersForVideoCall(VIDEO_CODEC_VP8), | |
| 671 createVideoCapturer(false), null); | |
| 634 | 672 |
| 635 // Wait for local SDP, rename it to answer and set as remote SDP. | 673 // Wait for local SDP, rename it to answer and set as remote SDP. |
| 636 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); | 674 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); |
| 637 SessionDescription remoteSdp = new SessionDescription( | 675 SessionDescription remoteSdp = new SessionDescription( |
| 638 SessionDescription.Type.fromCanonicalForm("answer"), | 676 SessionDescription.Type.fromCanonicalForm("answer"), |
| 639 localSdp.description); | 677 localSdp.description); |
| 640 pcClient.setRemoteDescription(remoteSdp); | 678 pcClient.setRemoteDescription(remoteSdp); |
| 641 | 679 |
| 642 // Wait for ICE connection. | 680 // Wait for ICE connection. |
| 643 assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT)); | 681 assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT)); |
| (...skipping 21 matching lines...) | |
| 665 assertTrue("Remote video frames were not rendered after capture format change.", | 703 assertTrue("Remote video frames were not rendered after capture format change.", |
| 666 remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT)); | 704 remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT)); |
| 667 } | 705 } |
| 668 | 706 |
| 669 pcClient.close(); | 707 pcClient.close(); |
| 670 assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT)); | 708 assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT)); |
| 671 Log.d(TAG, "testCaptureFormatChange done."); | 709 Log.d(TAG, "testCaptureFormatChange done."); |
| 672 } | 710 } |
| 673 | 711 |
| 674 } | 712 } |