Index: talk/app/webrtc/androidtests/src/org/webrtc/MediaCodecVideoEncoderTest.java
diff --git a/talk/app/webrtc/androidtests/src/org/webrtc/MediaCodecVideoEncoderTest.java b/talk/app/webrtc/androidtests/src/org/webrtc/MediaCodecVideoEncoderTest.java
index 29f3022ffa0bc3a8e8da576048ef8ae102b31a6a..8a1bac9f5ed86fa6b6d3bf4f807e35b965456d15 100644
--- a/talk/app/webrtc/androidtests/src/org/webrtc/MediaCodecVideoEncoderTest.java
+++ b/talk/app/webrtc/androidtests/src/org/webrtc/MediaCodecVideoEncoderTest.java
@@ -28,6 +28,9 @@ package org.webrtc;
 
 import java.nio.ByteBuffer;
 
+import android.opengl.EGL14;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
 import android.test.ActivityTestCase;
 import android.test.suitebuilder.annotation.SmallTest;
 import android.util.Log;
@@ -46,7 +49,20 @@ public final class MediaCodecVideoEncoderTest extends ActivityTestCase {
     }
     MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
     assertTrue(encoder.initEncode(
-        MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30));
+        MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30, null));
+    encoder.release();
+  }
+
+  @SmallTest
+  public static void testInitReleaseUsingTextures() {
+    if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) {
| + Log.i(TAG, "hardware does not support VP8 encoding, skipping testEncoderUsingTextures");
|
+      return;
+    }
+    MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
+    assertTrue(encoder.initEncode(
+        MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30,
+        EGL14.EGL_NO_CONTEXT));
     encoder.release();
   }
 
@@ -65,7 +81,7 @@ public final class MediaCodecVideoEncoderTest extends ActivityTestCase {
     MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
 
     assertTrue(encoder.initEncode(
-        MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, width, height, 300, 30));
+        MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, width, height, 300, 30, null));
     ByteBuffer[] inputBuffers = encoder.getInputBuffers();
     assertNotNull(inputBuffers);
     assertTrue(min_size <= inputBuffers[0].capacity());
@@ -92,4 +108,43 @@ public final class MediaCodecVideoEncoderTest extends ActivityTestCase {
 
     encoder.release();
   }
+
+  @SmallTest
+  public static void testEncoderUsingTextures() throws InterruptedException {
+    if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) {
+      Log.i(TAG, "Hardware does not support VP8 encoding, skipping testEncoderUsingTextures");
+      return;
+    }
+
+    final int width = 640;
+    final int height = 480;
+    final long presentationTs = 2;
+
+    final EglBase eglOesBase = new EglBase(EGL14.EGL_NO_CONTEXT, EglBase.ConfigType.PLAIN);
+    int oesTextureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
+
+    MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
+
+    assertTrue(encoder.initEncode(
+        MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, width, height, 300, 30,
+        eglOesBase.getContext()));
+    assertTrue(encoder.encodeTexture(true, oesTextureId, RendererCommon.identityMatrix(),
+        presentationTs));
+
+    // It should be Ok to delete the texture after calling encodeTexture.
+    GLES20.glDeleteTextures(1, new int[] {oesTextureId}, 0);
+
+    OutputBufferInfo info = encoder.dequeueOutputBuffer();
+    while (info == null) {
+      info = encoder.dequeueOutputBuffer();
+      Thread.sleep(20);
+    }
+    assertTrue(info.index != -1);
+    assertEquals(presentationTs, info.presentationTimestampUs);
+    assertTrue(info.buffer.capacity() > 0);
+    encoder.releaseOutputBuffer(info.index);
+
+    encoder.release();
+    eglOesBase.release();
+  }
 }