OLD | NEW |
1 /* | 1 /* |
2 * Copyright 2014 The WebRTC Project Authors. All rights reserved. | 2 * Copyright 2014 The WebRTC Project Authors. All rights reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
11 package org.appspot.apprtc.test; | 11 package org.appspot.apprtc.test; |
12 | 12 |
| 13 import static org.junit.Assert.assertTrue; |
| 14 import static org.junit.Assert.fail; |
| 15 |
| 16 import android.os.Build; |
| 17 import android.support.test.InstrumentationRegistry; |
| 18 import android.support.test.filters.FlakyTest; |
| 19 import android.support.test.filters.SmallTest; |
| 20 import android.util.Log; |
| 21 import java.util.LinkedList; |
| 22 import java.util.List; |
| 23 import java.util.concurrent.CountDownLatch; |
| 24 import java.util.concurrent.ExecutorService; |
| 25 import java.util.concurrent.Executors; |
| 26 import java.util.concurrent.TimeUnit; |
13 import org.appspot.apprtc.AppRTCClient.SignalingParameters; | 27 import org.appspot.apprtc.AppRTCClient.SignalingParameters; |
14 import org.appspot.apprtc.PeerConnectionClient; | 28 import org.appspot.apprtc.PeerConnectionClient; |
15 import org.appspot.apprtc.PeerConnectionClient.PeerConnectionEvents; | 29 import org.appspot.apprtc.PeerConnectionClient.PeerConnectionEvents; |
16 import org.appspot.apprtc.PeerConnectionClient.PeerConnectionParameters; | 30 import org.appspot.apprtc.PeerConnectionClient.PeerConnectionParameters; |
17 | 31 import org.chromium.base.test.BaseJUnit4ClassRunner; |
18 import android.os.Build; | 32 import org.junit.After; |
19 import android.test.FlakyTest; | 33 import org.junit.Before; |
20 import android.test.InstrumentationTestCase; | 34 import org.junit.Test; |
21 import android.test.suitebuilder.annotation.SmallTest; | 35 import org.junit.runner.RunWith; |
22 import android.util.Log; | |
23 | |
24 import org.webrtc.Camera1Enumerator; | 36 import org.webrtc.Camera1Enumerator; |
25 import org.webrtc.Camera2Enumerator; | 37 import org.webrtc.Camera2Enumerator; |
26 import org.webrtc.CameraEnumerator; | 38 import org.webrtc.CameraEnumerator; |
27 import org.webrtc.EglBase; | 39 import org.webrtc.EglBase; |
28 import org.webrtc.IceCandidate; | 40 import org.webrtc.IceCandidate; |
29 import org.webrtc.MediaCodecVideoEncoder; | 41 import org.webrtc.MediaCodecVideoEncoder; |
30 import org.webrtc.PeerConnection; | 42 import org.webrtc.PeerConnection; |
31 import org.webrtc.PeerConnectionFactory; | 43 import org.webrtc.PeerConnectionFactory; |
32 import org.webrtc.SessionDescription; | 44 import org.webrtc.SessionDescription; |
33 import org.webrtc.StatsReport; | 45 import org.webrtc.StatsReport; |
34 import org.webrtc.VideoCapturer; | 46 import org.webrtc.VideoCapturer; |
35 import org.webrtc.VideoRenderer; | 47 import org.webrtc.VideoRenderer; |
36 | 48 |
37 import java.util.LinkedList; | 49 @RunWith(BaseJUnit4ClassRunner.class) |
38 import java.util.List; | 50 public class PeerConnectionClientTest implements PeerConnectionEvents { |
39 import java.util.concurrent.CountDownLatch; | |
40 import java.util.concurrent.ExecutorService; | |
41 import java.util.concurrent.Executors; | |
42 import java.util.concurrent.TimeUnit; | |
43 | |
44 public class PeerConnectionClientTest | |
45 extends InstrumentationTestCase implements PeerConnectionEvents { | |
46 private static final String TAG = "RTCClientTest"; | 51 private static final String TAG = "RTCClientTest"; |
47 private static final int ICE_CONNECTION_WAIT_TIMEOUT = 10000; | 52 private static final int ICE_CONNECTION_WAIT_TIMEOUT = 10000; |
48 private static final int WAIT_TIMEOUT = 7000; | 53 private static final int WAIT_TIMEOUT = 7000; |
49 private static final int CAMERA_SWITCH_ATTEMPTS = 3; | 54 private static final int CAMERA_SWITCH_ATTEMPTS = 3; |
50 private static final int VIDEO_RESTART_ATTEMPTS = 3; | 55 private static final int VIDEO_RESTART_ATTEMPTS = 3; |
51 private static final int CAPTURE_FORMAT_CHANGE_ATTEMPTS = 3; | 56 private static final int CAPTURE_FORMAT_CHANGE_ATTEMPTS = 3; |
52 private static final int VIDEO_RESTART_TIMEOUT = 500; | 57 private static final int VIDEO_RESTART_TIMEOUT = 500; |
53 private static final int EXPECTED_VIDEO_FRAMES = 10; | 58 private static final int EXPECTED_VIDEO_FRAMES = 10; |
54 private static final String VIDEO_CODEC_VP8 = "VP8"; | 59 private static final String VIDEO_CODEC_VP8 = "VP8"; |
55 private static final String VIDEO_CODEC_VP9 = "VP9"; | 60 private static final String VIDEO_CODEC_VP9 = "VP9"; |
(...skipping 188 matching lines...)
244 new SignalingParameters(iceServers, true, // iceServers, initiator. | 249 new SignalingParameters(iceServers, true, // iceServers, initiator. |
245 null, null, null, // clientId, wssUrl, wssPostUrl. | 250 null, null, null, // clientId, wssUrl, wssPostUrl. |
246 null, null); // offerSdp, iceCandidates. | 251 null, null); // offerSdp, iceCandidates. |
247 | 252 |
248 PeerConnectionClient client = PeerConnectionClient.getInstance(); | 253 PeerConnectionClient client = PeerConnectionClient.getInstance(); |
249 PeerConnectionFactory.Options options = new PeerConnectionFactory.Options(); | 254 PeerConnectionFactory.Options options = new PeerConnectionFactory.Options(); |
250 options.networkIgnoreMask = 0; | 255 options.networkIgnoreMask = 0; |
251 options.disableNetworkMonitor = true; | 256 options.disableNetworkMonitor = true; |
252 client.setPeerConnectionFactoryOptions(options); | 257 client.setPeerConnectionFactoryOptions(options); |
253 client.createPeerConnectionFactory( | 258 client.createPeerConnectionFactory( |
254 getInstrumentation().getTargetContext(), peerConnectionParameters, this); | 259 InstrumentationRegistry.getTargetContext(), peerConnectionParameters, this); |
255 client.createPeerConnection( | 260 client.createPeerConnection( |
256 eglContext, localRenderer, remoteRenderer, videoCapturer, signalingParameters); | 261 eglContext, localRenderer, remoteRenderer, videoCapturer, signalingParameters); |
257 client.createOffer(); | 262 client.createOffer(); |
258 return client; | 263 return client; |
259 } | 264 } |
260 | 265 |
261 private PeerConnectionParameters createParametersForAudioCall() { | 266 private PeerConnectionParameters createParametersForAudioCall() { |
262 PeerConnectionParameters peerConnectionParameters = | 267 PeerConnectionParameters peerConnectionParameters = |
263 new PeerConnectionParameters(false, /* videoCallEnabled */ | 268 new PeerConnectionParameters(false, /* videoCallEnabled */ |
264 true, /* loopback */ | 269 true, /* loopback */ |
(...skipping 11 matching lines...)
276 "OPUS", /* audioCodec */ | 281 "OPUS", /* audioCodec */ |
277 false, /* noAudioProcessing */ | 282 false, /* noAudioProcessing */ |
278 false, /* aecDump */ | 283 false, /* aecDump */ |
279 false /* useOpenSLES */, false /* disableBuiltInAEC */, false /* disableBuiltInAGC */, | 284 false /* useOpenSLES */, false /* disableBuiltInAEC */, false /* disableBuiltInAGC */, |
280 false /* disableBuiltInNS */, false /* enableLevelControl */); | 285 false /* disableBuiltInNS */, false /* enableLevelControl */); |
281 | 286 |
282 return peerConnectionParameters; | 287 return peerConnectionParameters; |
283 } | 288 } |
284 | 289 |
285 private VideoCapturer createCameraCapturer(boolean captureToTexture) { | 290 private VideoCapturer createCameraCapturer(boolean captureToTexture) { |
286 final boolean useCamera2 = | 291 final boolean useCamera2 = captureToTexture |
287 captureToTexture && Camera2Enumerator.isSupported(getInstrumentation().getTargetContext()); | 292 && Camera2Enumerator.isSupported(InstrumentationRegistry.getTargetContext()); |
288 | 293 |
289 CameraEnumerator enumerator; | 294 CameraEnumerator enumerator; |
290 if (useCamera2) { | 295 if (useCamera2) { |
291 enumerator = new Camera2Enumerator(getInstrumentation().getTargetContext()); | 296 enumerator = new Camera2Enumerator(InstrumentationRegistry.getTargetContext()); |
292 } else { | 297 } else { |
293 enumerator = new Camera1Enumerator(captureToTexture); | 298 enumerator = new Camera1Enumerator(captureToTexture); |
294 } | 299 } |
295 String deviceName = enumerator.getDeviceNames()[0]; | 300 String deviceName = enumerator.getDeviceNames()[0]; |
296 return enumerator.createCapturer(deviceName, null); | 301 return enumerator.createCapturer(deviceName, null); |
297 } | 302 } |
298 | 303 |
299 private PeerConnectionParameters createParametersForVideoCall(String videoCodec) { | 304 private PeerConnectionParameters createParametersForVideoCall(String videoCodec) { |
300 PeerConnectionParameters peerConnectionParameters = | 305 PeerConnectionParameters peerConnectionParameters = |
301 new PeerConnectionParameters(true, /* videoCallEnabled */ | 306 new PeerConnectionParameters(true, /* videoCallEnabled */ |
(...skipping 11 matching lines...)
313 0, /* audioStartBitrate */ | 318 0, /* audioStartBitrate */ |
314 "OPUS", /* audioCodec */ | 319 "OPUS", /* audioCodec */ |
315 false, /* noAudioProcessing */ | 320 false, /* noAudioProcessing */ |
316 false, /* aecDump */ | 321 false, /* aecDump */ |
317 false /* useOpenSLES */, false /* disableBuiltInAEC */, false /* disableBuiltInAGC */, | 322 false /* useOpenSLES */, false /* disableBuiltInAEC */, false /* disableBuiltInAGC */, |
318 false /* disableBuiltInNS */, false /* enableLevelControl */); | 323 false /* disableBuiltInNS */, false /* enableLevelControl */); |
319 | 324 |
320 return peerConnectionParameters; | 325 return peerConnectionParameters; |
321 } | 326 } |
322 | 327 |
323 @Override | 328 @Before |
324 public void setUp() { | 329 public void setUp() { |
325 signalingExecutor = Executors.newSingleThreadExecutor(); | 330 signalingExecutor = Executors.newSingleThreadExecutor(); |
326 if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) { | 331 if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) { |
327 eglBase = EglBase.create(); | 332 eglBase = EglBase.create(); |
328 } | 333 } |
329 } | 334 } |
330 | 335 |
331 @Override | 336 @After |
332 public void tearDown() { | 337 public void tearDown() { |
333 signalingExecutor.shutdown(); | 338 signalingExecutor.shutdown(); |
334 if (eglBase != null) { | 339 if (eglBase != null) { |
335 eglBase.release(); | 340 eglBase.release(); |
336 } | 341 } |
337 } | 342 } |
338 | 343 |
| 344 @Test |
339 @SmallTest | 345 @SmallTest |
340 public void testSetLocalOfferMakesVideoFlowLocally() throws InterruptedException { | 346 public void testSetLocalOfferMakesVideoFlowLocally() throws InterruptedException { |
341 Log.d(TAG, "testSetLocalOfferMakesVideoFlowLocally"); | 347 Log.d(TAG, "testSetLocalOfferMakesVideoFlowLocally"); |
342 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); | 348 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); |
343 pcClient = createPeerConnectionClient(localRenderer, new MockRenderer(0, null), | 349 pcClient = createPeerConnectionClient(localRenderer, new MockRenderer(0, null), |
344 createParametersForVideoCall(VIDEO_CODEC_VP8), | 350 createParametersForVideoCall(VIDEO_CODEC_VP8), |
345 createCameraCapturer(false /* captureToTexture */), null); | 351 createCameraCapturer(false /* captureToTexture */), null); |
346 | 352 |
347 // Wait for local SDP and ice candidates set events. | 353 // Wait for local SDP and ice candidates set events. |
348 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); | 354 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); |
(...skipping 43 matching lines...)
392 // For audio just sleep for 1 sec. | 398 // For audio just sleep for 1 sec. |
393 // TODO(glaznev): check how we can detect that remote audio was rendered. | 399 // TODO(glaznev): check how we can detect that remote audio was rendered. |
394 Thread.sleep(AUDIO_RUN_TIMEOUT); | 400 Thread.sleep(AUDIO_RUN_TIMEOUT); |
395 } | 401 } |
396 | 402 |
397 pcClient.close(); | 403 pcClient.close(); |
398 assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT)); | 404 assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT)); |
399 Log.d(TAG, "testLoopback done."); | 405 Log.d(TAG, "testLoopback done."); |
400 } | 406 } |
401 | 407 |
| 408 @Test |
402 @SmallTest | 409 @SmallTest |
403 public void testLoopbackAudio() throws InterruptedException { | 410 public void testLoopbackAudio() throws InterruptedException { |
404 doLoopbackTest(createParametersForAudioCall(), null, false /* decodeToTexture */); | 411 doLoopbackTest(createParametersForAudioCall(), null, false /* decodeToTexture */); |
405 } | 412 } |
406 | 413 |
| 414 @Test |
407 @SmallTest | 415 @SmallTest |
408 public void testLoopbackVp8() throws InterruptedException { | 416 public void testLoopbackVp8() throws InterruptedException { |
409 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8), | 417 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8), |
410 createCameraCapturer(false /* captureToTexture */), false /* decodeToTexture */); | 418 createCameraCapturer(false /* captureToTexture */), false /* decodeToTexture */); |
411 } | 419 } |
412 | 420 |
| 421 @Test |
413 @SmallTest | 422 @SmallTest |
414 public void testLoopbackVp9() throws InterruptedException { | 423 public void testLoopbackVp9() throws InterruptedException { |
415 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9), | 424 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9), |
416 createCameraCapturer(false /* captureToTexture */), false /* decodeToTexture */); | 425 createCameraCapturer(false /* captureToTexture */), false /* decodeToTexture */); |
417 } | 426 } |
418 | 427 |
| 428 @Test |
419 @SmallTest | 429 @SmallTest |
420 public void testLoopbackH264() throws InterruptedException { | 430 public void testLoopbackH264() throws InterruptedException { |
421 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264), | 431 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264), |
422 createCameraCapturer(false /* captureToTexture */), false /* decodeToTexture */); | 432 createCameraCapturer(false /* captureToTexture */), false /* decodeToTexture */); |
423 } | 433 } |
424 | 434 |
| 435 @Test |
425 @SmallTest | 436 @SmallTest |
426 public void testLoopbackVp8DecodeToTexture() throws InterruptedException { | 437 public void testLoopbackVp8DecodeToTexture() throws InterruptedException { |
427 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { | 438 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { |
428 Log.i(TAG, "Decode to textures is not supported, requires SDK version 19."
); | 439 Log.i(TAG, "Decode to textures is not supported, requires SDK version 19."
); |
429 return; | 440 return; |
430 } | 441 } |
431 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8), | 442 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8), |
432 createCameraCapturer(false /* captureToTexture */), true /* decodeToTexture */); | 443 createCameraCapturer(false /* captureToTexture */), true /* decodeToTexture */); |
433 } | 444 } |
434 | 445 |
| 446 @Test |
435 @SmallTest | 447 @SmallTest |
436 public void testLoopbackVp9DecodeToTexture() throws InterruptedException { | 448 public void testLoopbackVp9DecodeToTexture() throws InterruptedException { |
437 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { | 449 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { |
438 Log.i(TAG, "Decode to textures is not supported, requires SDK version 19."
); | 450 Log.i(TAG, "Decode to textures is not supported, requires SDK version 19."
); |
439 return; | 451 return; |
440 } | 452 } |
441 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9), | 453 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9), |
442 createCameraCapturer(false /* captureToTexture */), true /* decodeToTexture */); | 454 createCameraCapturer(false /* captureToTexture */), true /* decodeToTexture */); |
443 } | 455 } |
444 | 456 |
| 457 @Test |
445 @SmallTest | 458 @SmallTest |
446 public void testLoopbackH264DecodeToTexture() throws InterruptedException { | 459 public void testLoopbackH264DecodeToTexture() throws InterruptedException { |
447 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { | 460 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { |
448 Log.i(TAG, "Decode to textures is not supported, requires SDK version 19."
); | 461 Log.i(TAG, "Decode to textures is not supported, requires SDK version 19."
); |
449 return; | 462 return; |
450 } | 463 } |
451 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264), | 464 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264), |
452 createCameraCapturer(false /* captureToTexture */), true /* decodeToTexture */); | 465 createCameraCapturer(false /* captureToTexture */), true /* decodeToTexture */); |
453 } | 466 } |
454 | 467 |
| 468 @Test |
455 @SmallTest | 469 @SmallTest |
456 public void testLoopbackVp8CaptureToTexture() throws InterruptedException { | 470 public void testLoopbackVp8CaptureToTexture() throws InterruptedException { |
457 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { | 471 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { |
458 Log.i(TAG, "Encode to textures is not supported. Requires SDK version 19")
; | 472 Log.i(TAG, "Encode to textures is not supported. Requires SDK version 19")
; |
459 return; | 473 return; |
460 } | 474 } |
461 // TODO(perkj): If we can always capture to textures, there is no need to check if the | 475 // TODO(perkj): If we can always capture to textures, there is no need to check if the |
462 // hardware encoder supports to encode from a texture. | 476 // hardware encoder supports to encode from a texture. |
463 if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) { | 477 if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) { |
464 Log.i(TAG, "VP8 encode to textures is not supported."); | 478 Log.i(TAG, "VP8 encode to textures is not supported."); |
465 return; | 479 return; |
466 } | 480 } |
467 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8), | 481 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8), |
468 createCameraCapturer(true /* captureToTexture */), true /* decodeToTextu
re */); | 482 createCameraCapturer(true /* captureToTexture */), true /* decodeToTextu
re */); |
469 } | 483 } |
470 | 484 |
471 // Test that a call can be setup even if the EGL context used during initialization is | 485 // Test that a call can be setup even if the EGL context used during initialization is |
472 // released before the Video codecs are created. The HW encoder and decoder is setup to use | 486 // released before the Video codecs are created. The HW encoder and decoder is setup to use |
473 // textures. | 487 // textures. |
| 488 @Test |
474 @SmallTest | 489 @SmallTest |
475 public void testLoopbackEglContextReleasedAfterCreatingPc() throws InterruptedException { | 490 public void testLoopbackEglContextReleasedAfterCreatingPc() throws InterruptedException { |
476 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { | 491 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { |
477 Log.i(TAG, "Decode to textures is not supported. Requires SDK version 19")
; | 492 Log.i(TAG, "Decode to textures is not supported. Requires SDK version 19")
; |
478 return; | 493 return; |
479 } | 494 } |
480 | 495 |
481 loopback = true; | 496 loopback = true; |
482 PeerConnectionParameters parameters = createParametersForVideoCall(VIDEO_CODEC_VP8); | 497 PeerConnectionParameters parameters = createParametersForVideoCall(VIDEO_CODEC_VP8); |
483 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); | 498 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); |
(...skipping 20 matching lines...)
504 assertTrue( | 519 assertTrue( |
505 "Local video frames were not rendered.", localRenderer.waitForFramesRend
ered(WAIT_TIMEOUT)); | 520 "Local video frames were not rendered.", localRenderer.waitForFramesRend
ered(WAIT_TIMEOUT)); |
506 assertTrue("Remote video frames were not rendered.", | 521 assertTrue("Remote video frames were not rendered.", |
507 remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT)); | 522 remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT)); |
508 | 523 |
509 pcClient.close(); | 524 pcClient.close(); |
510 assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT)); | 525 assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT)); |
511 Log.d(TAG, "testLoopback done."); | 526 Log.d(TAG, "testLoopback done."); |
512 } | 527 } |
513 | 528 |
| 529 @Test |
514 @SmallTest | 530 @SmallTest |
515 public void testLoopbackH264CaptureToTexture() throws InterruptedException { | 531 public void testLoopbackH264CaptureToTexture() throws InterruptedException { |
516 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { | 532 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { |
517 Log.i(TAG, "Encode to textures is not supported. Requires KITKAT"); | 533 Log.i(TAG, "Encode to textures is not supported. Requires KITKAT"); |
518 return; | 534 return; |
519 } | 535 } |
520 // TODO(perkj): If we can always capture to textures, there is no need to check if the | 536 // TODO(perkj): If we can always capture to textures, there is no need to check if the |
521 // hardware encoder supports to encode from a texture. | 537 // hardware encoder supports to encode from a texture. |
522 if (!MediaCodecVideoEncoder.isH264HwSupportedUsingTextures()) { | 538 if (!MediaCodecVideoEncoder.isH264HwSupportedUsingTextures()) { |
523 Log.i(TAG, "H264 encode to textures is not supported."); | 539 Log.i(TAG, "H264 encode to textures is not supported."); |
524 return; | 540 return; |
525 } | 541 } |
526 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264), | 542 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264), |
527 createCameraCapturer(true /* captureToTexture */), true /* decodeToTexture */); | 543 createCameraCapturer(true /* captureToTexture */), true /* decodeToTexture */); |
528 } | 544 } |
529 | 545 |
530 // Checks if default front camera can be switched to back camera and then | 546 // Checks if default front camera can be switched to back camera and then |
531 // again to front camera. | 547 // again to front camera. |
| 548 @Test |
532 @SmallTest | 549 @SmallTest |
533 public void testCameraSwitch() throws InterruptedException { | 550 public void testCameraSwitch() throws InterruptedException { |
534 Log.d(TAG, "testCameraSwitch"); | 551 Log.d(TAG, "testCameraSwitch"); |
535 loopback = true; | 552 loopback = true; |
536 | 553 |
537 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); | 554 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); |
538 MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); | 555 MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); |
539 | 556 |
540 pcClient = createPeerConnectionClient(localRenderer, remoteRenderer, | 557 pcClient = createPeerConnectionClient(localRenderer, remoteRenderer, |
541 createParametersForVideoCall(VIDEO_CODEC_VP8), | 558 createParametersForVideoCall(VIDEO_CODEC_VP8), |
(...skipping 28 matching lines...)
570 remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT)); | 587 remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT)); |
571 } | 588 } |
572 pcClient.close(); | 589 pcClient.close(); |
573 assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT)); | 590 assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT)); |
574 Log.d(TAG, "testCameraSwitch done."); | 591 Log.d(TAG, "testCameraSwitch done."); |
575 } | 592 } |
576 | 593 |
577 // Checks if video source can be restarted - simulate app goes to | 594 // Checks if video source can be restarted - simulate app goes to |
578 // background and back to foreground. | 595 // background and back to foreground. |
579 // Disabled because of https://bugs.chromium.org/p/webrtc/issues/detail?id=6478 | 596 // Disabled because of https://bugs.chromium.org/p/webrtc/issues/detail?id=6478 |
| 597 @Test |
580 @FlakyTest | 598 @FlakyTest |
581 //@SmallTest | 599 //@SmallTest |
582 public void testVideoSourceRestart() throws InterruptedException { | 600 public void testVideoSourceRestart() throws InterruptedException { |
583 Log.d(TAG, "testVideoSourceRestart"); | 601 Log.d(TAG, "testVideoSourceRestart"); |
584 loopback = true; | 602 loopback = true; |
585 | 603 |
586 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); | 604 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); |
587 MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); | 605 MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); |
588 | 606 |
589 pcClient = createPeerConnectionClient(localRenderer, remoteRenderer, | 607 pcClient = createPeerConnectionClient(localRenderer, remoteRenderer, |
(...skipping 30 matching lines...)
620 assertTrue("Remote video frames were not rendered after video restart.", | 638 assertTrue("Remote video frames were not rendered after video restart.", |
621 remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT)); | 639 remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT)); |
622 } | 640 } |
623 pcClient.close(); | 641 pcClient.close(); |
624 assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT)); | 642 assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT)); |
625 Log.d(TAG, "testVideoSourceRestart done."); | 643 Log.d(TAG, "testVideoSourceRestart done."); |
626 } | 644 } |
627 | 645 |
628 // Checks if capture format can be changed on fly and decoder can be reset properly. | 646 // Checks if capture format can be changed on fly and decoder can be reset properly. |
629 // Disabled because of https://bugs.chromium.org/p/webrtc/issues/detail?id=6478 | 647 // Disabled because of https://bugs.chromium.org/p/webrtc/issues/detail?id=6478 |
| 648 @Test |
630 @FlakyTest | 649 @FlakyTest |
631 //@SmallTest | 650 //@SmallTest |
632 public void testCaptureFormatChange() throws InterruptedException { | 651 public void testCaptureFormatChange() throws InterruptedException { |
633 Log.d(TAG, "testCaptureFormatChange"); | 652 Log.d(TAG, "testCaptureFormatChange"); |
634 loopback = true; | 653 loopback = true; |
635 | 654 |
636 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); | 655 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); |
637 MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); | 656 MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); |
638 | 657 |
639 pcClient = createPeerConnectionClient(localRenderer, remoteRenderer, | 658 pcClient = createPeerConnectionClient(localRenderer, remoteRenderer, |
(...skipping 31 matching lines...)
671 localRenderer.waitForFramesRendered(WAIT_TIMEOUT)); | 690 localRenderer.waitForFramesRendered(WAIT_TIMEOUT)); |
672 assertTrue("Remote video frames were not rendered after capture format cha
nge.", | 691 assertTrue("Remote video frames were not rendered after capture format cha
nge.", |
673 remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT)); | 692 remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT)); |
674 } | 693 } |
675 | 694 |
676 pcClient.close(); | 695 pcClient.close(); |
677 assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT)); | 696 assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT)); |
678 Log.d(TAG, "testCaptureFormatChange done."); | 697 Log.d(TAG, "testCaptureFormatChange done."); |
679 } | 698 } |
680 } | 699 } |
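For readers new to this migration, below is a minimal sketch of the JUnit4 instrumentation-test skeleton the new side of this diff follows. It uses only classes already referenced in the diff (BaseJUnit4ClassRunner, InstrumentationRegistry, the @RunWith/@Before/@After/@Test/@SmallTest annotations); the class name ExampleInstrumentationTest and its test body are hypothetical placeholders, not part of the reviewed change.

import static org.junit.Assert.assertTrue;

import android.content.Context;
import android.support.test.InstrumentationRegistry;
import android.support.test.filters.SmallTest;

import org.chromium.base.test.BaseJUnit4ClassRunner;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;

// The test no longer extends InstrumentationTestCase; the runner is named explicitly.
@RunWith(BaseJUnit4ClassRunner.class)
public class ExampleInstrumentationTest {
  private Context targetContext;

  @Before
  public void setUp() {
    // Replaces InstrumentationTestCase.getInstrumentation().getTargetContext().
    targetContext = InstrumentationRegistry.getTargetContext();
  }

  @After
  public void tearDown() {
    targetContext = null;
  }

  @Test
  @SmallTest
  public void testTargetContextIsAvailable() {
    // JUnit's static Assert methods replace the asserts inherited from TestCase.
    assertTrue(targetContext != null);
  }
}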