OLD | NEW |
1 /* | 1 /* |
2 * Copyright 2014 The WebRTC Project Authors. All rights reserved. | 2 * Copyright 2014 The WebRTC Project Authors. All rights reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
11 package org.appspot.apprtc.test; | 11 package org.appspot.apprtc.test; |
12 | 12 |
13 import java.util.LinkedList; | 13 import java.util.LinkedList; |
14 import java.util.List; | 14 import java.util.List; |
15 import java.util.concurrent.CountDownLatch; | 15 import java.util.concurrent.CountDownLatch; |
16 import java.util.concurrent.TimeUnit; | 16 import java.util.concurrent.TimeUnit; |
17 | 17 |
18 import org.appspot.apprtc.AppRTCClient.SignalingParameters; | 18 import org.appspot.apprtc.AppRTCClient.SignalingParameters; |
19 import org.appspot.apprtc.PeerConnectionClient; | 19 import org.appspot.apprtc.PeerConnectionClient; |
20 import org.appspot.apprtc.PeerConnectionClient.PeerConnectionEvents; | 20 import org.appspot.apprtc.PeerConnectionClient.PeerConnectionEvents; |
21 import org.appspot.apprtc.PeerConnectionClient.PeerConnectionParameters; | 21 import org.appspot.apprtc.PeerConnectionClient.PeerConnectionParameters; |
22 import org.appspot.apprtc.util.LooperExecutor; | 22 import org.appspot.apprtc.util.LooperExecutor; |
23 import org.webrtc.EglBase; | 23 import org.webrtc.EglBase; |
24 import org.webrtc.IceCandidate; | 24 import org.webrtc.IceCandidate; |
| 25 import org.webrtc.MediaCodecVideoEncoder; |
25 import org.webrtc.PeerConnection; | 26 import org.webrtc.PeerConnection; |
26 import org.webrtc.PeerConnectionFactory; | 27 import org.webrtc.PeerConnectionFactory; |
27 import org.webrtc.SessionDescription; | 28 import org.webrtc.SessionDescription; |
28 import org.webrtc.StatsReport; | 29 import org.webrtc.StatsReport; |
29 import org.webrtc.VideoRenderer; | 30 import org.webrtc.VideoRenderer; |
30 | 31 |
31 import android.os.Build; | 32 import android.os.Build; |
32 import android.test.InstrumentationTestCase; | 33 import android.test.InstrumentationTestCase; |
33 import android.util.Log; | 34 import android.util.Log; |
34 | 35 |
(...skipping 182 matching lines...)
217 synchronized(closeEvent) { | 218 synchronized(closeEvent) { |
218 if (!isClosed) { | 219 if (!isClosed) { |
219 closeEvent.wait(timeoutMs); | 220 closeEvent.wait(timeoutMs); |
220 } | 221 } |
221 return isClosed; | 222 return isClosed; |
222 } | 223 } |
223 } | 224 } |
224 | 225 |
225 PeerConnectionClient createPeerConnectionClient( | 226 PeerConnectionClient createPeerConnectionClient( |
226 MockRenderer localRenderer, MockRenderer remoteRenderer, | 227 MockRenderer localRenderer, MockRenderer remoteRenderer, |
227 PeerConnectionParameters peerConnectionParameters, boolean decodeToTexture) { | 228 PeerConnectionParameters peerConnectionParameters, boolean useTextures) { |
228 List<PeerConnection.IceServer> iceServers = | 229 List<PeerConnection.IceServer> iceServers = |
229 new LinkedList<PeerConnection.IceServer>(); | 230 new LinkedList<PeerConnection.IceServer>(); |
230 SignalingParameters signalingParameters = new SignalingParameters( | 231 SignalingParameters signalingParameters = new SignalingParameters( |
231 iceServers, true, // iceServers, initiator. | 232 iceServers, true, // iceServers, initiator. |
232 null, null, null, // clientId, wssUrl, wssPostUrl. | 233 null, null, null, // clientId, wssUrl, wssPostUrl. |
233 null, null); // offerSdp, iceCandidates. | 234 null, null); // offerSdp, iceCandidates. |
234 | 235 |
235 PeerConnectionClient client = PeerConnectionClient.getInstance(); | 236 PeerConnectionClient client = PeerConnectionClient.getInstance(); |
236 PeerConnectionFactory.Options options = new PeerConnectionFactory.Options(); | 237 PeerConnectionFactory.Options options = new PeerConnectionFactory.Options(); |
237 options.networkIgnoreMask = 0; | 238 options.networkIgnoreMask = 0; |
238 options.disableNetworkMonitor = true; | 239 options.disableNetworkMonitor = true; |
239 client.setPeerConnectionFactoryOptions(options); | 240 client.setPeerConnectionFactoryOptions(options); |
240 client.createPeerConnectionFactory( | 241 client.createPeerConnectionFactory( |
241 getInstrumentation().getContext(), peerConnectionParameters, this); | 242 getInstrumentation().getContext(), peerConnectionParameters, this); |
242 client.createPeerConnection(decodeToTexture ? eglBase.getContext() : null, | 243 client.createPeerConnection(useTextures ? eglBase.getContext() : null,
243 localRenderer, remoteRenderer, signalingParameters); | 244 localRenderer, remoteRenderer, signalingParameters); |
244 client.createOffer(); | 245 client.createOffer(); |
245 return client; | 246 return client; |
246 } | 247 } |
247 | 248 |
248 private PeerConnectionParameters createParameters(boolean enableVideo, | 249 private PeerConnectionParameters createParametersForAudioCall() { |
249 String videoCodec) { | |
250 PeerConnectionParameters peerConnectionParameters = | 250 PeerConnectionParameters peerConnectionParameters = |
251 new PeerConnectionParameters( | 251 new PeerConnectionParameters( |
252 enableVideo, true, // videoCallEnabled, loopback. | 252 false, true, // videoCallEnabled, loopback. |
253 0, 0, 0, 0, videoCodec, true, false, // video codec parameters. | 253 0, 0, 0, 0, "", true, false, // video codec parameters. |
254 0, "OPUS", false, false); // audio codec parameters. | 254 0, "OPUS", false, false); // audio codec parameters. |
255 return peerConnectionParameters; | 255 return peerConnectionParameters; |
256 } | 256 } |
| 257 |
| 258 private PeerConnectionParameters createParametersForVideoCall( |
| 259 String videoCodec, boolean captureToTexture) { |
| 260 PeerConnectionParameters peerConnectionParameters = |
| 261 new PeerConnectionParameters( |
| 262 true, true, // videoCallEnabled, loopback. |
| 263 true, true, 0, 0, 0, 0, videoCodec, true, captureToTexture, // video codec parameters. |
| 264 0, "OPUS", false, false); // audio codec parameters. |
| 265 return peerConnectionParameters; |
| 266 } |
257 | 267 |
258 @Override | 268 @Override |
259 public void setUp() { | 269 public void setUp() { |
260 signalingExecutor = new LooperExecutor(); | 270 signalingExecutor = new LooperExecutor(); |
261 signalingExecutor.requestStart(); | 271 signalingExecutor.requestStart(); |
262 if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) { | 272 if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) { |
263 eglBase = new EglBase(); | 273 eglBase = new EglBase(); |
264 } | 274 } |
265 } | 275 } |
266 | 276 |
267 @Override | 277 @Override |
268 public void tearDown() { | 278 public void tearDown() { |
269 signalingExecutor.requestStop(); | 279 signalingExecutor.requestStop(); |
270 if (eglBase != null) { | 280 if (eglBase != null) { |
271 eglBase.release(); | 281 eglBase.release(); |
272 } | 282 } |
273 } | 283 } |
274 | 284 |
275 public void testSetLocalOfferMakesVideoFlowLocally() | 285 public void testSetLocalOfferMakesVideoFlowLocally() |
276 throws InterruptedException { | 286 throws InterruptedException { |
277 Log.d(TAG, "testSetLocalOfferMakesVideoFlowLocally"); | 287 Log.d(TAG, "testSetLocalOfferMakesVideoFlowLocally"); |
278 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); | 288 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); |
279 pcClient = createPeerConnectionClient( | 289 pcClient = createPeerConnectionClient( |
280 localRenderer, new MockRenderer(0, null), createParameters(true, VIDEO_CODEC_VP8), false); | 290 localRenderer, new MockRenderer(0, null), |
| 291 createParametersForVideoCall(VIDEO_CODEC_VP8, false), false); |
281 | 292 |
282 // Wait for local SDP and ice candidates set events. | 293 // Wait for local SDP and ice candidates set events. |
283 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); | 294 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); |
284 assertTrue("ICE candidates were not generated.", | 295 assertTrue("ICE candidates were not generated.", |
285 waitForIceCandidates(WAIT_TIMEOUT)); | 296 waitForIceCandidates(WAIT_TIMEOUT)); |
286 | 297 |
287 // Check that local video frames were rendered. | 298 // Check that local video frames were rendered. |
288 assertTrue("Local video frames were not rendered.", | 299 assertTrue("Local video frames were not rendered.", |
289 localRenderer.waitForFramesRendered(WAIT_TIMEOUT)); | 300 localRenderer.waitForFramesRendered(WAIT_TIMEOUT)); |
290 | 301 |
(...skipping 39 matching lines...)
330 // TODO(glaznev): check how we can detect that remote audio was rendered. | 341 // TODO(glaznev): check how we can detect that remote audio was rendered. |
331 Thread.sleep(AUDIO_RUN_TIMEOUT); | 342 Thread.sleep(AUDIO_RUN_TIMEOUT); |
332 } | 343 } |
333 | 344 |
334 pcClient.close(); | 345 pcClient.close(); |
335 assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT)); | 346 assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT)); |
336 Log.d(TAG, "testLoopback done."); | 347 Log.d(TAG, "testLoopback done."); |
337 } | 348 } |
338 | 349 |
339 public void testLoopbackAudio() throws InterruptedException { | 350 public void testLoopbackAudio() throws InterruptedException { |
340 doLoopbackTest(createParameters(false, VIDEO_CODEC_VP8), false); | 351 doLoopbackTest(createParametersForAudioCall(), false); |
341 } | 352 } |
342 | 353 |
343 public void testLoopbackVp8() throws InterruptedException { | 354 public void testLoopbackVp8() throws InterruptedException { |
344 doLoopbackTest(createParameters(true, VIDEO_CODEC_VP8), false); | 355 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8, false), false); |
345 } | 356 } |
346 | 357 |
347 public void DISABLED_testLoopbackVp9() throws InterruptedException { | 358 public void DISABLED_testLoopbackVp9() throws InterruptedException { |
348 doLoopbackTest(createParameters(true, VIDEO_CODEC_VP9), false); | 359 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9, false), false); |
349 } | 360 } |
350 | 361 |
351 public void testLoopbackH264() throws InterruptedException { | 362 public void testLoopbackH264() throws InterruptedException { |
352 doLoopbackTest(createParameters(true, VIDEO_CODEC_H264), false); | 363 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264, false), false); |
353 } | 364 } |
354 | 365 |
355 public void testLoopbackVp8DecodeToTexture() throws InterruptedException { | 366 public void testLoopbackVp8DecodeToTexture() throws InterruptedException { |
356 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR1) { | 367 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { |
357 Log.i(TAG, "Decode to textures is not supported, requires EGL14."); | 368 Log.i(TAG, "Decode to textures is not supported, requires SDK version 19."); |
358 return; | 369 return; |
359 } | 370 } |
360 | 371 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8, false), true); |
361 doLoopbackTest(createParameters(true, VIDEO_CODEC_VP8), true); | |
362 } | 372 } |
363 | 373 |
364 public void DISABLED_testLoopbackVp9DecodeToTexture() throws InterruptedException { | 374 public void DISABLED_testLoopbackVp9DecodeToTexture() throws InterruptedException { |
365 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR1) { | 375 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { |
366 Log.i(TAG, "Decode to textures is not supported, requires EGL14."); | 376 Log.i(TAG, "Decode to textures is not supported, requires SDK version 19."); |
367 return; | 377 return; |
368 } | 378 } |
369 doLoopbackTest(createParameters(true, VIDEO_CODEC_VP9), true); | 379 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9, false), true); |
370 } | 380 } |
371 | 381 |
372 public void testLoopbackH264DecodeToTexture() throws InterruptedException { | 382 public void testLoopbackH264DecodeToTexture() throws InterruptedException { |
373 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR1) { | 383 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { |
374 Log.i(TAG, "Decode to textures is not supported, requires EGL14."); | 384 Log.i(TAG, "Decode to textures is not supported, requires SDK version 19."); |
375 return; | 385 return; |
376 } | 386 } |
377 doLoopbackTest(createParameters(true, VIDEO_CODEC_H264), true); | 387 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264, false), true); |
378 } | 388 } |
379 | 389 |
| 390 public void testLoopbackVp8CaptureToTexture() throws InterruptedException { |
| 391 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { |
| 392 Log.i(TAG, "Encode to textures is not supported. Requires SDK version 19"); |
| 393 return; |
| 394 } |
| 395 // TODO(perkj): If we can always capture to textures, there is no need to check if the |
| 396 // hardware encoder supports encoding from a texture. |
| 397 if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) { |
| 398 Log.i(TAG, "VP8 encode to textures is not supported."); |
| 399 return; |
| 400 } |
| 401 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8, true), true); |
| 402 } |
| 403 |
| 404 public void testLoopbackH264CaptureToTexture() throws InterruptedException { |
| 405 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { |
| 406 Log.i(TAG, "Encode to textures is not supported. Requires KITKAT"); |
| 407 return; |
| 408 } |
| 409 // TODO(perkj): If we can always capture to textures, there is no need to check if the |
| 410 // hardware encoder supports encoding from a texture. |
| 411 if (!MediaCodecVideoEncoder.isH264HwSupportedUsingTextures()) { |
| 412 Log.i(TAG, "H264 encode to textures is not supported."); |
| 413 return; |
| 414 } |
| 415 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264, true), true); |
| 416 } |
| 417 |
| 418 |
380 // Checks if default front camera can be switched to back camera and then | 419 // Checks if default front camera can be switched to back camera and then |
381 // again to front camera. | 420 // again to front camera. |
382 public void testCameraSwitch() throws InterruptedException { | 421 public void testCameraSwitch() throws InterruptedException { |
383 Log.d(TAG, "testCameraSwitch"); | 422 Log.d(TAG, "testCameraSwitch"); |
384 loopback = true; | 423 loopback = true; |
385 | 424 |
386 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); | 425 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); |
387 MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); | 426 MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); |
388 | 427 |
389 pcClient = createPeerConnectionClient( | 428 pcClient = createPeerConnectionClient( |
390 localRenderer, remoteRenderer, createParameters(true, VIDEO_CODEC_VP8), false); | 429 localRenderer, remoteRenderer, createParametersForVideoCall(VIDEO_CODEC_VP8, false), false); |
391 | 430 |
392 // Wait for local SDP, rename it to answer and set as remote SDP. | 431 // Wait for local SDP, rename it to answer and set as remote SDP. |
393 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); | 432 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); |
394 SessionDescription remoteSdp = new SessionDescription( | 433 SessionDescription remoteSdp = new SessionDescription( |
395 SessionDescription.Type.fromCanonicalForm("answer"), | 434 SessionDescription.Type.fromCanonicalForm("answer"), |
396 localSdp.description); | 435 localSdp.description); |
397 pcClient.setRemoteDescription(remoteSdp); | 436 pcClient.setRemoteDescription(remoteSdp); |
398 | 437 |
399 // Wait for ICE connection. | 438 // Wait for ICE connection. |
400 assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT)); | 439 assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT)); |
(...skipping 25 matching lines...)
426 // Checks if video source can be restarted - simulate app goes to | 465 // Checks if video source can be restarted - simulate app goes to |
427 // background and back to foreground. | 466 // background and back to foreground. |
428 public void testVideoSourceRestart() throws InterruptedException { | 467 public void testVideoSourceRestart() throws InterruptedException { |
429 Log.d(TAG, "testVideoSourceRestart"); | 468 Log.d(TAG, "testVideoSourceRestart"); |
430 loopback = true; | 469 loopback = true; |
431 | 470 |
432 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); | 471 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); |
433 MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); | 472 MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); |
434 | 473 |
435 pcClient = createPeerConnectionClient( | 474 pcClient = createPeerConnectionClient( |
436 localRenderer, remoteRenderer, createParameters(true, VIDEO_CODEC_VP8), false); | 475 localRenderer, remoteRenderer, createParametersForVideoCall(VIDEO_CODEC_VP8, false), false); |
437 | 476 |
438 // Wait for local SDP, rename it to answer and set as remote SDP. | 477 // Wait for local SDP, rename it to answer and set as remote SDP. |
439 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); | 478 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); |
440 SessionDescription remoteSdp = new SessionDescription( | 479 SessionDescription remoteSdp = new SessionDescription( |
441 SessionDescription.Type.fromCanonicalForm("answer"), | 480 SessionDescription.Type.fromCanonicalForm("answer"), |
442 localSdp.description); | 481 localSdp.description); |
443 pcClient.setRemoteDescription(remoteSdp); | 482 pcClient.setRemoteDescription(remoteSdp); |
444 | 483 |
445 // Wait for ICE connection. | 484 // Wait for ICE connection. |
446 assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT)); | 485 assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT)); |
(...skipping 18 matching lines...)
465 localRenderer.waitForFramesRendered(WAIT_TIMEOUT)); | 504 localRenderer.waitForFramesRendered(WAIT_TIMEOUT)); |
466 assertTrue("Remote video frames were not rendered after video restart.", | 505 assertTrue("Remote video frames were not rendered after video restart.", |
467 remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT)); | 506 remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT)); |
468 } | 507 } |
469 pcClient.close(); | 508 pcClient.close(); |
470 assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT)); | 509 assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT)); |
471 Log.d(TAG, "testVideoSourceRestart done."); | 510 Log.d(TAG, "testVideoSourceRestart done."); |
472 } | 511 } |
473 | 512 |
474 } | 513 } |