Index: talk/app/webrtc/androidtests/src/org/webrtc/MediaCodecVideoEncoderTest.java |
diff --git a/talk/app/webrtc/androidtests/src/org/webrtc/MediaCodecVideoEncoderTest.java b/talk/app/webrtc/androidtests/src/org/webrtc/MediaCodecVideoEncoderTest.java |
index 29f3022ffa0bc3a8e8da576048ef8ae102b31a6a..045e063f22d45585852933f0862431a587f406c8 100644 |
--- a/talk/app/webrtc/androidtests/src/org/webrtc/MediaCodecVideoEncoderTest.java |
+++ b/talk/app/webrtc/androidtests/src/org/webrtc/MediaCodecVideoEncoderTest.java |
@@ -27,18 +27,22 @@ |
package org.webrtc; |
import java.nio.ByteBuffer; |
- |
+import android.annotation.TargetApi; |
+import android.opengl.GLES11Ext; |
+import android.opengl.GLES20; |
+import android.os.Build; |
import android.test.ActivityTestCase; |
import android.test.suitebuilder.annotation.SmallTest; |
import android.util.Log; |
magjed_webrtc
2015/11/18 13:12:52
The import order is wrong.
"Import statements are
perkj_webrtc
2015/11/18 14:51:20
We strictly don't do
|
- |
import org.webrtc.MediaCodecVideoEncoder.OutputBufferInfo; |
+import javax.microedition.khronos.egl.EGL10; |
+@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH_MR1) |
public final class MediaCodecVideoEncoderTest extends ActivityTestCase { |
final static String TAG = "MediaCodecVideoEncoderTest"; |
@SmallTest |
- public static void testInitReleaseUsingByteBuffer() { |
+ public static void testReInitializeUsingByteBuffer() { |
magjed_webrtc
2015/11/18 13:12:52
Why did you modify this test in this CL? And why d
perkj_webrtc
2015/11/18 14:51:20
this is how we use it from c++ sometimes. It felt
magjed_webrtc
2015/11/18 15:38:12
Yeah maybe. I don't know much about our testing 's
|
if (!MediaCodecVideoEncoder.isVp8HwSupported()) { |
Log.i(TAG, |
"Hardware does not support VP8 encoding, skipping testInitReleaseUsingByteBuffer"); |
@@ -46,7 +50,27 @@ public final class MediaCodecVideoEncoderTest extends ActivityTestCase { |
} |
MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder(); |
assertTrue(encoder.initEncode( |
- MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30)); |
+ MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30, null)); |
+ encoder.release(); |
+ assertTrue(encoder.initEncode( |
+ MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30, null)); |
+ encoder.release(); |
+ } |
+ |
+ @SmallTest |
+  public static void testReInitializeUsingTextures() { |
+    if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) { |
+      Log.i(TAG, "Hardware does not support VP8 encoding, skipping testReInitializeUsingTextures"); |
+ return; |
+ } |
+ MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder(); |
+ assertTrue(encoder.initEncode( |
+ MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30, |
+ EGL10.EGL_NO_CONTEXT)); |
+ encoder.release(); |
+ assertTrue(encoder.initEncode( |
+ MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30, |
+ EGL10.EGL_NO_CONTEXT)); |
encoder.release(); |
} |
@@ -65,7 +89,7 @@ public final class MediaCodecVideoEncoderTest extends ActivityTestCase { |
MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder(); |
assertTrue(encoder.initEncode( |
- MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, width, height, 300, 30)); |
+ MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, width, height, 300, 30, null)); |
ByteBuffer[] inputBuffers = encoder.getInputBuffers(); |
assertNotNull(inputBuffers); |
assertTrue(min_size <= inputBuffers[0].capacity()); |
@@ -92,4 +116,42 @@ public final class MediaCodecVideoEncoderTest extends ActivityTestCase { |
encoder.release(); |
} |
+ |
+ @SmallTest |
+ public static void testEncoderUsingTextures() throws InterruptedException { |
+ if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) { |
+ Log.i(TAG, "Hardware does not support VP8 encoding, skipping testEncoderUsingTextures"); |
+ return; |
+ } |
+ |
+ final int width = 640; |
+ final int height = 480; |
+ final long presentationTs = 2; |
+ |
+ final EglBase eglOesBase = new EglBase(EGL10.EGL_NO_CONTEXT, EglBase.ConfigType.PLAIN); |
magjed_webrtc
2015/11/18 13:12:52
You must make |eglOesBase| current, otherwise it h
perkj_webrtc
2015/11/18 14:51:20
not sure I understand.
magjed_webrtc
2015/11/18 15:38:12
First, you need a current EGLContext for all GLES
|
+ int oesTextureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES); |
+ |
+ MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder(); |
+ |
+ assertTrue(encoder.initEncode( |
+ MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, width, height, 300, 30, |
+ eglOesBase.getContext())); |
+ assertTrue(encoder.encodeTexture(true, oesTextureId, RendererCommon.identityMatrix(), |
+ presentationTs)); |
+ |
+ // It should be Ok to delete the texture after calling encodeTexture. |
+ GLES20.glDeleteTextures(1, new int[] {oesTextureId}, 0); |
+ |
+ OutputBufferInfo info = encoder.dequeueOutputBuffer(); |
+ while (info == null) { |
+ info = encoder.dequeueOutputBuffer(); |
+ Thread.sleep(20); |
+ } |
+ assertTrue(info.index != -1); |
+ assertTrue(info.buffer.capacity() > 0); |
+ encoder.releaseOutputBuffer(info.index); |
+ |
+ encoder.release(); |
+ eglOesBase.release(); |
+ } |
} |