OLD | NEW |
1 /* | 1 /* |
2 * Copyright 2014 The WebRTC Project Authors. All rights reserved. | 2 * Copyright 2014 The WebRTC Project Authors. All rights reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
11 package org.appspot.apprtc.test; | 11 package org.appspot.apprtc.test; |
12 | 12 |
13 import org.appspot.apprtc.AppRTCClient.SignalingParameters; | 13 import org.appspot.apprtc.AppRTCClient.SignalingParameters; |
14 import org.appspot.apprtc.PeerConnectionClient; | 14 import org.appspot.apprtc.PeerConnectionClient; |
15 import org.appspot.apprtc.PeerConnectionClient.PeerConnectionEvents; | 15 import org.appspot.apprtc.PeerConnectionClient.PeerConnectionEvents; |
16 import org.appspot.apprtc.PeerConnectionClient.PeerConnectionParameters; | 16 import org.appspot.apprtc.PeerConnectionClient.PeerConnectionParameters; |
17 | 17 |
18 import android.os.Build; | 18 import android.os.Build; |
19 import android.test.FlakyTest; | 19 import android.test.FlakyTest; |
20 import android.test.InstrumentationTestCase; | 20 import android.test.InstrumentationTestCase; |
21 import android.test.suitebuilder.annotation.SmallTest; | 21 import android.test.suitebuilder.annotation.SmallTest; |
22 import android.util.Log; | 22 import android.util.Log; |
23 | 23 |
24 import org.webrtc.Camera1Enumerator; | |
25 import org.webrtc.Camera2Enumerator; | 24 import org.webrtc.Camera2Enumerator; |
26 import org.webrtc.CameraEnumerator; | |
27 import org.webrtc.EglBase; | 25 import org.webrtc.EglBase; |
28 import org.webrtc.IceCandidate; | 26 import org.webrtc.IceCandidate; |
29 import org.webrtc.MediaCodecVideoEncoder; | 27 import org.webrtc.MediaCodecVideoEncoder; |
30 import org.webrtc.PeerConnection; | 28 import org.webrtc.PeerConnection; |
31 import org.webrtc.PeerConnectionFactory; | 29 import org.webrtc.PeerConnectionFactory; |
32 import org.webrtc.SessionDescription; | 30 import org.webrtc.SessionDescription; |
33 import org.webrtc.StatsReport; | 31 import org.webrtc.StatsReport; |
34 import org.webrtc.VideoCapturer; | |
35 import org.webrtc.VideoRenderer; | 32 import org.webrtc.VideoRenderer; |
36 | 33 |
37 import java.util.LinkedList; | 34 import java.util.LinkedList; |
38 import java.util.List; | 35 import java.util.List; |
39 import java.util.concurrent.CountDownLatch; | 36 import java.util.concurrent.CountDownLatch; |
40 import java.util.concurrent.ExecutorService; | 37 import java.util.concurrent.ExecutorService; |
41 import java.util.concurrent.Executors; | 38 import java.util.concurrent.Executors; |
42 import java.util.concurrent.TimeUnit; | 39 import java.util.concurrent.TimeUnit; |
43 | 40 |
44 public class PeerConnectionClientTest | 41 public class PeerConnectionClientTest |
(...skipping 186 matching lines...)
231 synchronized (closeEvent) { | 228 synchronized (closeEvent) { |
232 if (!isClosed) { | 229 if (!isClosed) { |
233 closeEvent.wait(timeoutMs); | 230 closeEvent.wait(timeoutMs); |
234 } | 231 } |
235 return isClosed; | 232 return isClosed; |
236 } | 233 } |
237 } | 234 } |
238 | 235 |
239 PeerConnectionClient createPeerConnectionClient(MockRenderer localRenderer, | 236 PeerConnectionClient createPeerConnectionClient(MockRenderer localRenderer, |
240 MockRenderer remoteRenderer, PeerConnectionParameters peerConnectionParameters, | 237 MockRenderer remoteRenderer, PeerConnectionParameters peerConnectionParameters, |
241 VideoCapturer videoCapturer, EglBase.Context eglContext) { | 238 EglBase.Context eglContext) { |
242 List<PeerConnection.IceServer> iceServers = new LinkedList<PeerConnection.IceServer>(); | 239 List<PeerConnection.IceServer> iceServers = new LinkedList<PeerConnection.IceServer>(); |
243 SignalingParameters signalingParameters = | 240 SignalingParameters signalingParameters = |
244 new SignalingParameters(iceServers, true, // iceServers, initiator. | 241 new SignalingParameters(iceServers, true, // iceServers, initiator. |
245 null, null, null, // clientId, wssUrl, wssPostUrl. | 242 null, null, null, // clientId, wssUrl, wssPostUrl. |
246 null, null); // offerSdp, iceCandidates. | 243 null, null); // offerSdp, iceCandidates. |
247 | 244 |
248 PeerConnectionClient client = PeerConnectionClient.getInstance(); | 245 PeerConnectionClient client = PeerConnectionClient.getInstance(); |
249 PeerConnectionFactory.Options options = new PeerConnectionFactory.Options(); | 246 PeerConnectionFactory.Options options = new PeerConnectionFactory.Options(); |
250 options.networkIgnoreMask = 0; | 247 options.networkIgnoreMask = 0; |
251 options.disableNetworkMonitor = true; | 248 options.disableNetworkMonitor = true; |
252 client.setPeerConnectionFactoryOptions(options); | 249 client.setPeerConnectionFactoryOptions(options); |
253 client.createPeerConnectionFactory( | 250 client.createPeerConnectionFactory( |
254 getInstrumentation().getTargetContext(), peerConnectionParameters, this); | 251 getInstrumentation().getTargetContext(), peerConnectionParameters, this); |
255 client.createPeerConnection( | 252 client.createPeerConnection(eglContext, localRenderer, remoteRenderer, signalingParameters); |
256 eglContext, localRenderer, remoteRenderer, videoCapturer, signalingParameters); | |
257 client.createOffer(); | 253 client.createOffer(); |
258 return client; | 254 return client; |
259 } | 255 } |
260 | 256 |
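Note on the hunk above: the new side drops the VideoCapturer argument from createPeerConnection(), so the test no longer builds a capturer itself; capturer creation presumably moves inside PeerConnectionClient, driven by the new useCamera2 and captureToTexture flags in PeerConnectionParameters. A minimal sketch of the updated call sequence follows, assuming only the signatures shown in this diff; the CallSetupSketch wrapper and its parameter names are illustrative, not part of the CL.

import android.content.Context;

import org.appspot.apprtc.AppRTCClient.SignalingParameters;
import org.appspot.apprtc.PeerConnectionClient;
import org.appspot.apprtc.PeerConnectionClient.PeerConnectionEvents;
import org.appspot.apprtc.PeerConnectionClient.PeerConnectionParameters;
import org.webrtc.EglBase;
import org.webrtc.VideoRenderer;

final class CallSetupSketch {
  // All dependencies are supplied by the caller; the test passes MockRenderer instances
  // as the VideoRenderer.Callbacks and itself as the PeerConnectionEvents listener.
  static PeerConnectionClient startLoopbackCall(Context appContext,
      PeerConnectionParameters params, SignalingParameters signalingParams,
      VideoRenderer.Callbacks localRender, VideoRenderer.Callbacks remoteRender,
      EglBase.Context eglContext, PeerConnectionEvents events) {
    PeerConnectionClient client = PeerConnectionClient.getInstance();
    client.createPeerConnectionFactory(appContext, params, events);
    // No VideoCapturer argument any more: the client picks Camera1 or Camera2 internally
    // from params.useCamera2 and params.captureToTexture.
    client.createPeerConnection(eglContext, localRender, remoteRender, signalingParams);
    client.createOffer();
    return client;
  }
}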
261 private PeerConnectionParameters createParametersForAudioCall() { | 257 private PeerConnectionParameters createParametersForAudioCall() { |
262 PeerConnectionParameters peerConnectionParameters = | 258 PeerConnectionParameters peerConnectionParameters = |
263 new PeerConnectionParameters(false, /* videoCallEnabled */ | 259 new PeerConnectionParameters(false, /* videoCallEnabled */ |
264 true, /* loopback */ | 260 true, /* loopback */ |
265 false, /* tracing */ | 261 false, /* tracing */ |
266 // Video codec parameters. | 262 // Video codec parameters. |
| 263 true, /* useCamera2 */ |
267 0, /* videoWidth */ | 264 0, /* videoWidth */ |
268 0, /* videoHeight */ | 265 0, /* videoHeight */ |
269 0, /* videoFps */ | 266 0, /* videoFps */ |
270 0, /* videoStartBitrate */ | 267 0, /* videoStartBitrate */ |
271 "", /* videoCodec */ | 268 "", /* videoCodec */ |
272 true, /* videoCodecHwAcceleration */ | 269 true, /* videoCodecHwAcceleration */ |
| 270 false, /* captureToToTexture */ |
273 // Audio codec parameters. | 271 // Audio codec parameters. |
274 0, /* audioStartBitrate */ | 272 0, /* audioStartBitrate */ |
275 "OPUS", /* audioCodec */ | 273 "OPUS", /* audioCodec */ |
276 false, /* noAudioProcessing */ | 274 false, /* noAudioProcessing */ |
277 false, /* aecDump */ | 275 false, /* aecDump */ |
278 false /* useOpenSLES */, false /* disableBuiltInAEC */, false /* disableBuiltInAGC */, | 276 false /* useOpenSLES */, false /* disableBuiltInAEC */, false /* disableBuiltInAGC */, |
279 false /* disableBuiltInNS */, false /* enableLevelControl */); | 277 false /* disableBuiltInNS */, false /* enableLevelControl */); |
280 | |
281 return peerConnectionParameters; | 278 return peerConnectionParameters; |
282 } | 279 } |
283 | 280 |
284 private VideoCapturer createCameraCapturer(boolean captureToTexture) { | 281 private PeerConnectionParameters createParametersForVideoCall( |
| 282 String videoCodec, boolean captureToTexture) { |
285 final boolean useCamera2 = | 283 final boolean useCamera2 = |
286 captureToTexture && Camera2Enumerator.isSupported(getInstrumentation().getTargetContext()); | 284 captureToTexture && Camera2Enumerator.isSupported(getInstrumentation().getTargetContext()); |
287 | 285 |
288 CameraEnumerator enumerator; | |
289 if (useCamera2) { | |
290 enumerator = new Camera2Enumerator(getInstrumentation().getTargetContext()); | |
291 } else { | |
292 enumerator = new Camera1Enumerator(captureToTexture); | |
293 } | |
294 String deviceName = enumerator.getDeviceNames()[0]; | |
295 return enumerator.createCapturer(deviceName, null); | |
296 } | |
297 | |
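For reference, the createCameraCapturer() helper removed above selected between the Camera1 and Camera2 APIs before handing the capturer to the client. The standalone sketch below restates that selection logic outside the test, assuming an Android Context supplied by the caller; after this CL the equivalent choice is presumably made inside PeerConnectionClient from useCamera2 and captureToTexture.

import android.content.Context;

import org.webrtc.Camera1Enumerator;
import org.webrtc.Camera2Enumerator;
import org.webrtc.CameraEnumerator;
import org.webrtc.VideoCapturer;

final class CapturerSelectionSketch {
  static VideoCapturer createCameraCapturer(Context context, boolean captureToTexture) {
    // Camera2 is only used when capturing to textures and the device supports it.
    final boolean useCamera2 = captureToTexture && Camera2Enumerator.isSupported(context);
    CameraEnumerator enumerator = useCamera2
        ? new Camera2Enumerator(context)
        : new Camera1Enumerator(captureToTexture);
    // Use the first reported camera; no CameraEventsHandler is needed for this sketch.
    String deviceName = enumerator.getDeviceNames()[0];
    return enumerator.createCapturer(deviceName, null);
  }
}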
298 private PeerConnectionParameters createParametersForVideoCall(String videoCodec) { | |
299 PeerConnectionParameters peerConnectionParameters = | 286 PeerConnectionParameters peerConnectionParameters = |
300 new PeerConnectionParameters(true, /* videoCallEnabled */ | 287 new PeerConnectionParameters(true, /* videoCallEnabled */ |
301 true, /* loopback */ | 288 true, /* loopback */ |
302 false, /* tracing */ | 289 false, /* tracing */ |
303 // Video codec parameters. | 290 // Video codec parameters. |
| 291 useCamera2, /* useCamera2 */ |
304 0, /* videoWidth */ | 292 0, /* videoWidth */ |
305 0, /* videoHeight */ | 293 0, /* videoHeight */ |
306 0, /* videoFps */ | 294 0, /* videoFps */ |
307 0, /* videoStartBitrate */ | 295 0, /* videoStartBitrate */ |
308 videoCodec, /* videoCodec */ | 296 videoCodec, /* videoCodec */ |
309 true, /* videoCodecHwAcceleration */ | 297 true, /* videoCodecHwAcceleration */ |
| 298 captureToTexture, /* captureToToTexture */ |
310 // Audio codec parameters. | 299 // Audio codec parameters. |
311 0, /* audioStartBitrate */ | 300 0, /* audioStartBitrate */ |
312 "OPUS", /* audioCodec */ | 301 "OPUS", /* audioCodec */ |
313 false, /* noAudioProcessing */ | 302 false, /* noAudioProcessing */ |
314 false, /* aecDump */ | 303 false, /* aecDump */ |
315 false /* useOpenSLES */, false /* disableBuiltInAEC */, false /* disableBuiltInAGC */, | 304 false /* useOpenSLES */, false /* disableBuiltInAEC */, false /* disableBuiltInAGC */, |
316 false /* disableBuiltInNS */, false /* enableLevelControl */); | 305 false /* disableBuiltInNS */, false /* enableLevelControl */); |
317 | |
318 return peerConnectionParameters; | 306 return peerConnectionParameters; |
319 } | 307 } |
320 | 308 |
321 @Override | 309 @Override |
322 public void setUp() { | 310 public void setUp() { |
323 signalingExecutor = Executors.newSingleThreadExecutor(); | 311 signalingExecutor = Executors.newSingleThreadExecutor(); |
324 if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) { | 312 if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) { |
325 eglBase = EglBase.create(); | 313 eglBase = EglBase.create(); |
326 } | 314 } |
327 } | 315 } |
328 | 316 |
329 @Override | 317 @Override |
330 public void tearDown() { | 318 public void tearDown() { |
331 signalingExecutor.shutdown(); | 319 signalingExecutor.shutdown(); |
332 if (eglBase != null) { | 320 if (eglBase != null) { |
333 eglBase.release(); | 321 eglBase.release(); |
334 } | 322 } |
335 } | 323 } |
336 | 324 |
337 @SmallTest | 325 @SmallTest |
338 public void testSetLocalOfferMakesVideoFlowLocally() throws InterruptedException { | 326 public void testSetLocalOfferMakesVideoFlowLocally() throws InterruptedException { |
339 Log.d(TAG, "testSetLocalOfferMakesVideoFlowLocally"); | 327 Log.d(TAG, "testSetLocalOfferMakesVideoFlowLocally"); |
340 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); | 328 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); |
341 pcClient = createPeerConnectionClient(localRenderer, new MockRenderer(0, null), | 329 pcClient = createPeerConnectionClient(localRenderer, new MockRenderer(0, null), |
342 createParametersForVideoCall(VIDEO_CODEC_VP8), | 330 createParametersForVideoCall(VIDEO_CODEC_VP8, false), null); |
343 createCameraCapturer(false /* captureToTexture */), null); | |
344 | 331 |
345 // Wait for local SDP and ice candidates set events. | 332 // Wait for local SDP and ice candidates set events. |
346 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); | 333 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); |
347 assertTrue("ICE candidates were not generated.", waitForIceCandidates(WAIT_T
IMEOUT)); | 334 assertTrue("ICE candidates were not generated.", waitForIceCandidates(WAIT_T
IMEOUT)); |
348 | 335 |
349 // Check that local video frames were rendered. | 336 // Check that local video frames were rendered. |
350 assertTrue( | 337 assertTrue( |
351 "Local video frames were not rendered.", localRenderer.waitForFramesRend
ered(WAIT_TIMEOUT)); | 338 "Local video frames were not rendered.", localRenderer.waitForFramesRend
ered(WAIT_TIMEOUT)); |
352 | 339 |
353 pcClient.close(); | 340 pcClient.close(); |
354 assertTrue( | 341 assertTrue( |
355 "PeerConnection close event was not received.", waitForPeerConnectionClo
sed(WAIT_TIMEOUT)); | 342 "PeerConnection close event was not received.", waitForPeerConnectionClo
sed(WAIT_TIMEOUT)); |
356 Log.d(TAG, "testSetLocalOfferMakesVideoFlowLocally Done."); | 343 Log.d(TAG, "testSetLocalOfferMakesVideoFlowLocally Done."); |
357 } | 344 } |
358 | 345 |
359 private void doLoopbackTest(PeerConnectionParameters parameters, VideoCapturer videoCapturer, | 346 private void doLoopbackTest(PeerConnectionParameters parameters, boolean decodeToTexure) |
360 boolean decodeToTexture) throws InterruptedException { | 347 throws InterruptedException { |
361 loopback = true; | 348 loopback = true; |
362 MockRenderer localRenderer = null; | 349 MockRenderer localRenderer = null; |
363 MockRenderer remoteRenderer = null; | 350 MockRenderer remoteRenderer = null; |
364 if (parameters.videoCallEnabled) { | 351 if (parameters.videoCallEnabled) { |
365 Log.d(TAG, "testLoopback for video " + parameters.videoCodec); | 352 Log.d(TAG, "testLoopback for video " + parameters.videoCodec); |
366 localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); | 353 localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); |
367 remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); | 354 remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); |
368 } else { | 355 } else { |
369 Log.d(TAG, "testLoopback for audio."); | 356 Log.d(TAG, "testLoopback for audio."); |
370 } | 357 } |
371 pcClient = createPeerConnectionClient(localRenderer, remoteRenderer, parameters, videoCapturer, | 358 pcClient = createPeerConnectionClient(localRenderer, remoteRenderer, parameters, |
372 decodeToTexture ? eglBase.getEglBaseContext() : null); | 359 decodeToTexure ? eglBase.getEglBaseContext() : null); |
373 | 360 |
374 // Wait for local SDP, rename it to answer and set as remote SDP. | 361 // Wait for local SDP, rename it to answer and set as remote SDP. |
375 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); | 362 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); |
376 SessionDescription remoteSdp = new SessionDescription( | 363 SessionDescription remoteSdp = new SessionDescription( |
377 SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description); | 364 SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description); |
378 pcClient.setRemoteDescription(remoteSdp); | 365 pcClient.setRemoteDescription(remoteSdp); |
379 | 366 |
380 // Wait for ICE connection. | 367 // Wait for ICE connection. |
381 assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAI
T_TIMEOUT)); | 368 assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAI
T_TIMEOUT)); |
382 | 369 |
383 if (parameters.videoCallEnabled) { | 370 if (parameters.videoCallEnabled) { |
384 // Check that local and remote video frames were rendered. | 371 // Check that local and remote video frames were rendered. |
385 assertTrue("Local video frames were not rendered.", | 372 assertTrue("Local video frames were not rendered.", |
386 localRenderer.waitForFramesRendered(WAIT_TIMEOUT)); | 373 localRenderer.waitForFramesRendered(WAIT_TIMEOUT)); |
387 assertTrue("Remote video frames were not rendered.", | 374 assertTrue("Remote video frames were not rendered.", |
388 remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT)); | 375 remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT)); |
389 } else { | 376 } else { |
390 // For audio just sleep for 1 sec. | 377 // For audio just sleep for 1 sec. |
391 // TODO(glaznev): check how we can detect that remote audio was rendered. | 378 // TODO(glaznev): check how we can detect that remote audio was rendered. |
392 Thread.sleep(AUDIO_RUN_TIMEOUT); | 379 Thread.sleep(AUDIO_RUN_TIMEOUT); |
393 } | 380 } |
394 | 381 |
395 pcClient.close(); | 382 pcClient.close(); |
396 assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT)); | 383 assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT)); |
397 Log.d(TAG, "testLoopback done."); | 384 Log.d(TAG, "testLoopback done."); |
398 } | 385 } |
399 | 386 |
400 @SmallTest | 387 @SmallTest |
401 public void testLoopbackAudio() throws InterruptedException { | 388 public void testLoopbackAudio() throws InterruptedException { |
402 doLoopbackTest(createParametersForAudioCall(), null, false /* decodeToTexture */); | 389 doLoopbackTest(createParametersForAudioCall(), false); |
403 } | 390 } |
404 | 391 |
405 @SmallTest | 392 @SmallTest |
406 public void testLoopbackVp8() throws InterruptedException { | 393 public void testLoopbackVp8() throws InterruptedException { |
407 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8), | 394 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8, false), false); |
408 createCameraCapturer(false /* captureToTexture */), false /* decodeToTexture */); | |
409 } | 395 } |
410 | 396 |
411 @SmallTest | 397 @SmallTest |
412 public void testLoopbackVp9() throws InterruptedException { | 398 public void testLoopbackVp9() throws InterruptedException { |
413 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9), | 399 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9, false), false); |
414 createCameraCapturer(false /* captureToTexture */), false /* decodeToTexture */); | |
415 } | 400 } |
416 | 401 |
417 @SmallTest | 402 @SmallTest |
418 public void testLoopbackH264() throws InterruptedException { | 403 public void testLoopbackH264() throws InterruptedException { |
419 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264), | 404 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264, false), false); |
420 createCameraCapturer(false /* captureToTexture */), false /* decodeToTexture */); | |
421 } | 405 } |
422 | 406 |
423 @SmallTest | 407 @SmallTest |
424 public void testLoopbackVp8DecodeToTexture() throws InterruptedException { | 408 public void testLoopbackVp8DecodeToTexture() throws InterruptedException { |
425 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { | 409 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { |
426 Log.i(TAG, "Decode to textures is not supported, requires SDK version 19."
); | 410 Log.i(TAG, "Decode to textures is not supported, requires SDK version 19."
); |
427 return; | 411 return; |
428 } | 412 } |
429 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8), | 413 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8, false), true); |
430 createCameraCapturer(false /* captureToTexture */), true /* decodeToTexture */); | |
431 } | 414 } |
432 | 415 |
433 @SmallTest | 416 @SmallTest |
434 public void testLoopbackVp9DecodeToTexture() throws InterruptedException { | 417 public void testLoopbackVp9DecodeToTexture() throws InterruptedException { |
435 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { | 418 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { |
436 Log.i(TAG, "Decode to textures is not supported, requires SDK version 19."
); | 419 Log.i(TAG, "Decode to textures is not supported, requires SDK version 19."
); |
437 return; | 420 return; |
438 } | 421 } |
439 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9), | 422 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9, false), true); |
440 createCameraCapturer(false /* captureToTexture */), true /* decodeToTexture */); | |
441 } | 423 } |
442 | 424 |
443 @SmallTest | 425 @SmallTest |
444 public void testLoopbackH264DecodeToTexture() throws InterruptedException { | 426 public void testLoopbackH264DecodeToTexture() throws InterruptedException { |
445 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { | 427 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { |
446 Log.i(TAG, "Decode to textures is not supported, requires SDK version 19."
); | 428 Log.i(TAG, "Decode to textures is not supported, requires SDK version 19."
); |
447 return; | 429 return; |
448 } | 430 } |
449 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264), | 431 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264, false), true); |
450 createCameraCapturer(false /* captureToTexture */), true /* decodeToTexture */); | |
451 } | 432 } |
452 | 433 |
453 @SmallTest | 434 @SmallTest |
454 public void testLoopbackVp8CaptureToTexture() throws InterruptedException { | 435 public void testLoopbackVp8CaptureToTexture() throws InterruptedException { |
455 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { | 436 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { |
456 Log.i(TAG, "Encode to textures is not supported. Requires SDK version 19")
; | 437 Log.i(TAG, "Encode to textures is not supported. Requires SDK version 19")
; |
457 return; | 438 return; |
458 } | 439 } |
459 // TODO(perkj): If we can always capture to textures, there is no need to check if the | 440 // TODO(perkj): If we can always capture to textures, there is no need to check if the |
460 // hardware encoder supports to encode from a texture. | 441 // hardware encoder supports to encode from a texture. |
461 if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) { | 442 if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) { |
462 Log.i(TAG, "VP8 encode to textures is not supported."); | 443 Log.i(TAG, "VP8 encode to textures is not supported."); |
463 return; | 444 return; |
464 } | 445 } |
465 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8), | 446 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8, true), true); |
466 createCameraCapturer(true /* captureToTexture */), true /* decodeToTexture */); | |
467 } | 447 } |
468 | 448 |
469 // Test that a call can be setup even if the EGL context used during initialization is | 449 // Test that a call can be setup even if the EGL context used during initialization is |
470 // released before the Video codecs are created. The HW encoder and decoder is setup to use | 450 // released before the Video codecs are created. The HW encoder and decoder is setup to use |
471 // textures. | 451 // textures. |
472 @SmallTest | 452 @SmallTest |
473 public void testLoopbackEglContextReleasedAfterCreatingPc() throws InterruptedException { | 453 public void testLoopbackEglContextReleasedAfterCreatingPc() throws InterruptedException { |
474 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { | 454 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { |
475 Log.i(TAG, "Decode to textures is not supported. Requires SDK version 19")
; | 455 Log.i(TAG, "Decode to textures is not supported. Requires SDK version 19")
; |
476 return; | 456 return; |
477 } | 457 } |
478 | 458 |
479 loopback = true; | 459 loopback = true; |
480 PeerConnectionParameters parameters = createParametersForVideoCall(VIDEO_CODEC_VP8); | 460 PeerConnectionParameters parameters = createParametersForVideoCall(VIDEO_CODEC_VP8, true); |
481 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); | 461 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); |
482 MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); | 462 MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); |
483 pcClient = createPeerConnectionClient(localRenderer, remoteRenderer, parameters, | 463 pcClient = createPeerConnectionClient( |
484 createCameraCapturer(true /* captureToTexture */), eglBase.getEglBaseContext()); | 464 localRenderer, remoteRenderer, parameters, eglBase.getEglBaseContext()); |
485 | 465 |
486 // Wait for local SDP, rename it to answer and set as remote SDP. | 466 // Wait for local SDP, rename it to answer and set as remote SDP. |
487 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); | 467 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); |
488 | 468 |
489 // Release the EGL context used for creating the PeerConnectionClient. | 469 // Release the EGL context used for creating the PeerConnectionClient. |
490 // Since createPeerConnectionClient is asynchronous, we must wait for the local | 470 // Since createPeerConnectionClient is asynchronous, we must wait for the local |
491 // SessionDescription. | 471 // SessionDescription. |
492 eglBase.release(); | 472 eglBase.release(); |
493 eglBase = null; | 473 eglBase = null; |
494 | 474 |
(...skipping 19 matching lines...)
514 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { | 494 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { |
515 Log.i(TAG, "Encode to textures is not supported. Requires KITKAT"); | 495 Log.i(TAG, "Encode to textures is not supported. Requires KITKAT"); |
516 return; | 496 return; |
517 } | 497 } |
518 // TODO(perkj): If we can always capture to textures, there is no need to check if the | 498 // TODO(perkj): If we can always capture to textures, there is no need to check if the |
519 // hardware encoder supports to encode from a texture. | 499 // hardware encoder supports to encode from a texture. |
520 if (!MediaCodecVideoEncoder.isH264HwSupportedUsingTextures()) { | 500 if (!MediaCodecVideoEncoder.isH264HwSupportedUsingTextures()) { |
521 Log.i(TAG, "H264 encode to textures is not supported."); | 501 Log.i(TAG, "H264 encode to textures is not supported."); |
522 return; | 502 return; |
523 } | 503 } |
524 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264), | 504 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264, true), true); |
525 createCameraCapturer(true /* captureToTexture */), true /* decodeToTexture */); | |
526 } | 505 } |
527 | 506 |
528 // Checks if default front camera can be switched to back camera and then | 507 // Checks if default front camera can be switched to back camera and then |
529 // again to front camera. | 508 // again to front camera. |
530 @SmallTest | 509 @SmallTest |
531 public void testCameraSwitch() throws InterruptedException { | 510 public void testCameraSwitch() throws InterruptedException { |
532 Log.d(TAG, "testCameraSwitch"); | 511 Log.d(TAG, "testCameraSwitch"); |
533 loopback = true; | 512 loopback = true; |
534 | 513 |
535 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); | 514 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); |
536 MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); | 515 MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); |
537 | 516 |
538 pcClient = createPeerConnectionClient(localRenderer, remoteRenderer, | 517 pcClient = createPeerConnectionClient( |
539 createParametersForVideoCall(VIDEO_CODEC_VP8), | 518 localRenderer, remoteRenderer, createParametersForVideoCall(VIDEO_CODEC_VP8, false), null); |
540 createCameraCapturer(false /* captureToTexture */), null); | |
541 | 519 |
542 // Wait for local SDP, rename it to answer and set as remote SDP. | 520 // Wait for local SDP, rename it to answer and set as remote SDP. |
543 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); | 521 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); |
544 SessionDescription remoteSdp = new SessionDescription( | 522 SessionDescription remoteSdp = new SessionDescription( |
545 SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description); | 523 SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description); |
546 pcClient.setRemoteDescription(remoteSdp); | 524 pcClient.setRemoteDescription(remoteSdp); |
547 | 525 |
548 // Wait for ICE connection. | 526 // Wait for ICE connection. |
549 assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAI
T_TIMEOUT)); | 527 assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAI
T_TIMEOUT)); |
550 | 528 |
(...skipping 26 matching lines...)
577 // Disabled because of https://bugs.chromium.org/p/webrtc/issues/detail?id=6478 | 555 // Disabled because of https://bugs.chromium.org/p/webrtc/issues/detail?id=6478 |
578 @FlakyTest | 556 @FlakyTest |
579 //@SmallTest | 557 //@SmallTest |
580 public void testVideoSourceRestart() throws InterruptedException { | 558 public void testVideoSourceRestart() throws InterruptedException { |
581 Log.d(TAG, "testVideoSourceRestart"); | 559 Log.d(TAG, "testVideoSourceRestart"); |
582 loopback = true; | 560 loopback = true; |
583 | 561 |
584 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); | 562 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); |
585 MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); | 563 MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); |
586 | 564 |
587 pcClient = createPeerConnectionClient(localRenderer, remoteRenderer, | 565 pcClient = createPeerConnectionClient( |
588 createParametersForVideoCall(VIDEO_CODEC_VP8), | 566 localRenderer, remoteRenderer, createParametersForVideoCall(VIDEO_CODEC_VP8, false), null); |
589 createCameraCapturer(false /* captureToTexture */), null); | |
590 | 567 |
591 // Wait for local SDP, rename it to answer and set as remote SDP. | 568 // Wait for local SDP, rename it to answer and set as remote SDP. |
592 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); | 569 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); |
593 SessionDescription remoteSdp = new SessionDescription( | 570 SessionDescription remoteSdp = new SessionDescription( |
594 SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description); | 571 SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description); |
595 pcClient.setRemoteDescription(remoteSdp); | 572 pcClient.setRemoteDescription(remoteSdp); |
596 | 573 |
597 // Wait for ICE connection. | 574 // Wait for ICE connection. |
598 assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAI
T_TIMEOUT)); | 575 assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAI
T_TIMEOUT)); |
599 | 576 |
(...skipping 27 matching lines...)
627 // Disabled because of https://bugs.chromium.org/p/webrtc/issues/detail?id=6478 | 604 // Disabled because of https://bugs.chromium.org/p/webrtc/issues/detail?id=6478 |
628 @FlakyTest | 605 @FlakyTest |
629 //@SmallTest | 606 //@SmallTest |
630 public void testCaptureFormatChange() throws InterruptedException { | 607 public void testCaptureFormatChange() throws InterruptedException { |
631 Log.d(TAG, "testCaptureFormatChange"); | 608 Log.d(TAG, "testCaptureFormatChange"); |
632 loopback = true; | 609 loopback = true; |
633 | 610 |
634 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); | 611 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); |
635 MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); | 612 MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); |
636 | 613 |
637 pcClient = createPeerConnectionClient(localRenderer, remoteRenderer, | 614 pcClient = createPeerConnectionClient( |
638 createParametersForVideoCall(VIDEO_CODEC_VP8), | 615 localRenderer, remoteRenderer, createParametersForVideoCall(VIDEO_CODEC_VP8, false), null); |
639 createCameraCapturer(false /* captureToTexture */), null); | |
640 | 616 |
641 // Wait for local SDP, rename it to answer and set as remote SDP. | 617 // Wait for local SDP, rename it to answer and set as remote SDP. |
642 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); | 618 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); |
643 SessionDescription remoteSdp = new SessionDescription( | 619 SessionDescription remoteSdp = new SessionDescription( |
644 SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description); | 620 SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description); |
645 pcClient.setRemoteDescription(remoteSdp); | 621 pcClient.setRemoteDescription(remoteSdp); |
646 | 622 |
647 // Wait for ICE connection. | 623 // Wait for ICE connection. |
648 assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAI
T_TIMEOUT)); | 624 assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAI
T_TIMEOUT)); |
649 | 625 |
(...skipping 19 matching lines...)
669 localRenderer.waitForFramesRendered(WAIT_TIMEOUT)); | 645 localRenderer.waitForFramesRendered(WAIT_TIMEOUT)); |
670 assertTrue("Remote video frames were not rendered after capture format cha
nge.", | 646 assertTrue("Remote video frames were not rendered after capture format cha
nge.", |
671 remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT)); | 647 remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT)); |
672 } | 648 } |
673 | 649 |
674 pcClient.close(); | 650 pcClient.close(); |
675 assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT)); | 651 assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT)); |
676 Log.d(TAG, "testCaptureFormatChange done."); | 652 Log.d(TAG, "testCaptureFormatChange done."); |
677 } | 653 } |
678 } | 654 } |