| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright 2014 The WebRTC Project Authors. All rights reserved. | 2 * Copyright 2014 The WebRTC Project Authors. All rights reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| (...skipping 382 matching lines...) |
| 393 } | 393 } |
| 394 // TODO(perkj): If we can always capture to textures, there is no need to check if the | 394 // TODO(perkj): If we can always capture to textures, there is no need to check if the |
| 395 // hardware encoder supports to encode from a texture. | 395 // hardware encoder supports to encode from a texture. |
| 396 if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) { | 396 if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) { |
| 397 Log.i(TAG, "VP8 encode to textures is not supported."); | 397 Log.i(TAG, "VP8 encode to textures is not supported."); |
| 398 return; | 398 return; |
| 399 } | 399 } |
| 400 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8, true), true); | 400 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8, true), true); |
| 401 } | 401 } |
| 402 | 402 |
| 403 | |
| 404 // Test that a call can be setup even if a released EGL context is used during setup. | |
| 405 // The HW encoder and decoder will fallback to encode and decode from byte buffers. | |
| 406 public void testLoopbackEglContextReleasedBeforeSetup() throws InterruptedException { | |
| 407 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { | |
| 408 Log.i(TAG, "Decode to textures is not supported, requires SDK version 19."); | |
| 409 return; | |
| 410 } | |
| 411 eglBase.release(); | |
| 412 doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8, false), true); | |
| 413 eglBase = null; | |
| 414 } | |
| 415 | |
| 416 // Test that a call can be setup even if the EGL context used during initialization is | 403 // Test that a call can be setup even if the EGL context used during initialization is |
| 417 // released before the Video codecs are created. The HW encoder and decoder is setup to use | 404 // released before the Video codecs are created. The HW encoder and decoder is setup to use |
| 418 // textures. | 405 // textures. |
| 419 public void testLoopbackEglContextReleasedAfterCreatingPc() throws InterruptedException { | 406 public void testLoopbackEglContextReleasedAfterCreatingPc() throws InterruptedException { |
| 420 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { | 407 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { |
| 421 Log.i(TAG, "Decode to textures is not supported. Requires SDK version 19"); | 408 Log.i(TAG, "Decode to textures is not supported. Requires SDK version 19"); |
| 422 return; | 409 return; |
| 423 } | 410 } |
| 424 | 411 |
| 425 loopback = true; | 412 loopback = true; |
| 426 PeerConnectionParameters parameters = createParametersForVideoCall(VIDEO_CODEC_VP8, false); | 413 PeerConnectionParameters parameters = createParametersForVideoCall(VIDEO_CODEC_VP8, true); |
| 427 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); | 414 MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); |
| 428 MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); | 415 MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); |
| 429 pcClient = createPeerConnectionClient( | 416 pcClient = createPeerConnectionClient( |
| 430 localRenderer, remoteRenderer, parameters, eglBase.getEglBaseContext()); | 417 localRenderer, remoteRenderer, parameters, eglBase.getEglBaseContext()); |
| 431 | 418 |
| 432 // Wait for local SDP, rename it to answer and set as remote SDP. | 419 // Wait for local SDP, rename it to answer and set as remote SDP. |
| 433 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); | 420 assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); |
| 434 | 421 |
| 435 // Release the EGL context used for creating the PeerConnectionClient. | 422 // Release the EGL context used for creating the PeerConnectionClient. |
| 436 // Since createPeerConnectionClient is asynchronous, we must wait for the local | 423 // Since createPeerConnectionClient is asynchronous, we must wait for the local |
| (...skipping 121 matching lines...) |
| 558 assertTrue("Local video frames were not rendered after video restart.", | 545 assertTrue("Local video frames were not rendered after video restart.", |
| 559 localRenderer.waitForFramesRendered(WAIT_TIMEOUT)); | 546 localRenderer.waitForFramesRendered(WAIT_TIMEOUT)); |
| 560 assertTrue("Remote video frames were not rendered after video restart.", | 547 assertTrue("Remote video frames were not rendered after video restart.", |
| 561 remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT)); | 548 remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT)); |
| 562 } | 549 } |
| 563 pcClient.close(); | 550 pcClient.close(); |
| 564 assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT)); | 551 assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT)); |
| 565 Log.d(TAG, "testVideoSourceRestart done."); | 552 Log.d(TAG, "testVideoSourceRestart done."); |
| 566 } | 553 } |
| 567 } | 554 } |
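Taken together, the texture-related tests above hinge on one guard: the texture path needs API 19 (KITKAT) or newer and an encoder that reports support for encoding VP8 from a texture. A minimal sketch of that guard pulled out into a standalone helper, using only the Build and MediaCodecVideoEncoder calls already present in the tests (the helper name useTexturesForVp8 is hypothetical, not part of this change):

    // Hypothetical helper sketching the guard the tests apply before taking the
    // texture path: API 19+ and explicit HW support for VP8 encode from a texture.
    private static boolean useTexturesForVp8() {
      if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
        return false;  // decode/encode to textures requires SDK version 19
      }
      return MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures();
    }

Per the test comments, an EGL context released before setup makes the HW encoder and decoder fall back to byte buffers, while a context released only after the PeerConnectionClient is created still allows the codecs to be set up to use textures.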