Index: talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java
diff --git a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java
index ef2055645d9ad218231fcbbb630ec7716af62e28..0adc562c25823266b2d9a0a4d169ba5d86f37307 100644
--- a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java
+++ b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java
@@ -27,7 +27,6 @@
 
 package org.webrtc;
 
-import android.graphics.SurfaceTexture;
 import android.media.MediaCodec;
 import android.media.MediaCodecInfo;
 import android.media.MediaCodecInfo.CodecCapabilities;
@@ -93,10 +92,59 @@ public class MediaCodecVideoDecoder {
   private int stride;
   private int sliceHeight;
   private boolean useSurface;
-  private int textureID = 0;
-  private SurfaceTexture surfaceTexture = null;
+  // |isWaitingForTexture| is true when waiting for the transition:
+  // MediaCodec.releaseOutputBuffer() -> onTextureFrameAvailable().
+  private boolean isWaitingForTexture = false;
+  private TextureListener textureListener;
   private Surface surface = null;
-  private EglBase eglBase;
+
+  // Poll-based texture listener.
+  private static class TextureListener
+      implements SurfaceTextureHelper.OnTextureFrameAvailableListener {
+    private final SurfaceTextureHelper surfaceTextureHelper;
+    private DecodedTextureBuffer textureBuffer;
+
+    public TextureListener(SurfaceTextureHelper surfaceTextureHelper) {
+      this.surfaceTextureHelper = surfaceTextureHelper;
+      surfaceTextureHelper.setListener(this);
+    }
+
+    // Callback from |surfaceTextureHelper|. May be called on an arbitrary thread.
+    @Override
+    public synchronized void onTextureFrameAvailable(
+        int oesTextureId, float[] transformMatrix, long timestampNs) {
+      if (textureBuffer != null) {
+        throw new IllegalStateException("Already holding a texture.");
+      }
+      textureBuffer = new DecodedTextureBuffer(oesTextureId, transformMatrix, timestampNs);
+      notifyAll();
+    }
+
+    // Dequeues and returns a texture buffer if available, or null otherwise.
+    public synchronized DecodedTextureBuffer dequeueTextureFrame(int timeoutUs) {
+      final int timeoutMs = timeoutUs / 1000;
+      if (textureBuffer == null && timeoutMs > 0) {
+        try {
+          wait(timeoutMs);
+        } catch (InterruptedException e) {
+          Thread.currentThread().interrupt();
+        }
+      }
+      final DecodedTextureBuffer textureBuffer = this.textureBuffer;
+      this.textureBuffer = null;
+      return textureBuffer;
+    }
+
+    public void release() {
+      surfaceTextureHelper.disconnect();
+      synchronized (this) {
+        if (textureBuffer != null) {
+          surfaceTextureHelper.returnTextureFrame();
+          textureBuffer = null;
+        }
+      }
+    }
+  }
 
   private MediaCodecVideoDecoder() { }
 
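
A note on the TextureListener added above: it is a single-slot hand-off between the
SurfaceTextureHelper callback thread and the decoder thread, built on plain monitor
wait()/notifyAll(). The sketch below shows the same pattern in isolation; SingleSlot and
its type parameter are invented for illustration and are not part of this change.

    // Illustrative only: the hand-off pattern used by TextureListener, reduced to its core.
    final class SingleSlot<T> {
      private T pending;  // At most one item is buffered at a time.

      // Producer side, e.g. the onTextureFrameAvailable() callback thread.
      public synchronized void put(T item) {
        if (pending != null) {
          throw new IllegalStateException("Already holding an item.");
        }
        pending = item;
        notifyAll();  // Wake up a consumer blocked in take().
      }

      // Consumer side, e.g. the decoder thread. Returns null if nothing arrives in time.
      public synchronized T take(int timeoutMs) throws InterruptedException {
        if (pending == null && timeoutMs > 0) {
          wait(timeoutMs);  // Releases the monitor while waiting; may time out.
        }
        final T item = pending;
        pending = null;
        return item;
      }
    }

Because at most one output buffer is released for rendering before the decoder waits for it
(see the dequeueOutputBuffer() hunks further down), a single slot is enough and no queue is
needed.
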
@@ -180,12 +228,13 @@
     }
   }
 
-  // Pass null in |sharedContext| to configure the codec for ByteBuffer output.
-  private boolean initDecode(VideoCodecType type, int width, int height, EGLContext sharedContext) {
+  // Pass null in |surfaceTextureHelper| to configure the codec for ByteBuffer output.
+  private boolean initDecode(
+      VideoCodecType type, int width, int height, SurfaceTextureHelper surfaceTextureHelper) {
     if (mediaCodecThread != null) {
       throw new RuntimeException("Forgot to release()?");
     }
-    useSurface = (sharedContext != null);
+    useSurface = (surfaceTextureHelper != null);
     String mime = null;
     String[] supportedCodecPrefixes = null;
     if (type == VideoCodecType.VIDEO_CODEC_VP8) {
@@ -204,9 +253,6 @@
     Logging.d(TAG, "Java initDecode: " + type + " : "+ width + " x " + height +
         ". Color: 0x" + Integer.toHexString(properties.colorFormat) +
         ". Use Surface: " + useSurface);
-    if (sharedContext != null) {
-      Logging.d(TAG, "Decoder shared EGL Context: " + sharedContext);
-    }
     mediaCodecThread = Thread.currentThread();
     try {
       this.width = width;
@@ -215,16 +261,8 @@
       sliceHeight = height;
 
       if (useSurface) {
-        // Create shared EGL context.
-        eglBase = new EglBase(sharedContext, EglBase.ConfigType.PIXEL_BUFFER);
-        eglBase.createDummyPbufferSurface();
-        eglBase.makeCurrent();
-
-        // Create output surface
-        textureID = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
-        Logging.d(TAG, "Video decoder TextureID = " + textureID);
-        surfaceTexture = new SurfaceTexture(textureID);
-        surface = new Surface(surfaceTexture);
+        textureListener = new TextureListener(surfaceTextureHelper);
+        surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
       }
 
       MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
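
The two initDecode() hunks above reduce to one decision: a non-null SurfaceTextureHelper
switches the codec to Surface (texture) output, while null keeps ByteBuffer output. A
condensed sketch using the field names from the diff; configureOutput() itself is a
hypothetical helper, not a method in this file:

    // Sketch of the Surface/ByteBuffer decision in initDecode(), assuming only what the
    // diff shows. All EGL and SurfaceTexture ownership handled by the removed lines now
    // lives inside SurfaceTextureHelper.
    private void configureOutput(SurfaceTextureHelper surfaceTextureHelper) {
      useSurface = (surfaceTextureHelper != null);
      if (useSurface) {
        // Frames rendered by MediaCodec end up on the helper's SurfaceTexture and are
        // reported back through TextureListener.onTextureFrameAvailable().
        textureListener = new TextureListener(surfaceTextureHelper);
        surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
      }
      // With a null helper, useSurface stays false and decoded frames are returned as
      // DecodedByteBuffer instead of textures.
    }
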
@@ -265,11 +303,7 @@
     if (useSurface) {
       surface.release();
       surface = null;
-      Logging.d(TAG, "Delete video decoder TextureID " + textureID);
-      GLES20.glDeleteTextures(1, new int[] {textureID}, 0);
-      textureID = 0;
-      eglBase.release();
-      eglBase = null;
+      textureListener.release();
     }
   }
 
@@ -317,11 +351,13 @@
 
   private static class DecodedTextureBuffer {
     private final int textureID;
-    private final long presentationTimestampUs;
+    private final float[] transformMatrix;
+    private final long timestampNs;
 
-    public DecodedTextureBuffer(int textureID, long presentationTimestampUs) {
+    public DecodedTextureBuffer(int textureID, float[] transformMatrix, long timestampNs) {
       this.textureID = textureID;
-      this.presentationTimestampUs = presentationTimestampUs;
+      this.transformMatrix = transformMatrix;
+      this.timestampNs = timestampNs;
     }
   }
 
@@ -333,6 +369,16 @@
   private Object dequeueOutputBuffer(int dequeueTimeoutUs)
       throws IllegalStateException, MediaCodec.CodecException {
     checkOnMediaCodecThread();
+    // Calling multiple MediaCodec.releaseOutputBuffer() with render=true in a row will result in
+    // dropped texture frames. Therefore, wait for any pending onTextureFrameAvailable() before
+    // proceeding.
+    if (isWaitingForTexture) {
+      final DecodedTextureBuffer textureBuffer =
+          textureListener.dequeueTextureFrame(dequeueTimeoutUs);
+      isWaitingForTexture = (textureBuffer == null);
+      return textureBuffer;
+    }
+
     // Drain the decoder until receiving a decoded buffer or hitting
     // MediaCodec.INFO_TRY_AGAIN_LATER.
     final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
@@ -371,9 +417,10 @@
         // Output buffer decoded.
         if (useSurface) {
           mediaCodec.releaseOutputBuffer(result, true /* render */);
-          // TODO(magjed): Wait for SurfaceTexture.onFrameAvailable() before returning a texture
-          // frame.
-          return new DecodedTextureBuffer(textureID, info.presentationTimeUs);
+          final DecodedTextureBuffer textureBuffer =
+              textureListener.dequeueTextureFrame(dequeueTimeoutUs);
+          isWaitingForTexture = (textureBuffer == null);
+          return textureBuffer;
         } else {
           return new DecodedByteBuffer(result, info.offset, info.size, info.presentationTimeUs);
         }
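
Taken together, the last two hunks turn the texture path of dequeueOutputBuffer() into a
small render-then-wait protocol. The sketch below condenses that flow; dequeueTexturePath()
is a hypothetical name, and the surrounding drain loop and the ByteBuffer branch are omitted:

    // Condensed illustration of the texture path, using the fields and helpers from the
    // diff. Not the literal method body.
    private Object dequeueTexturePath(int dequeueTimeoutUs, int result) {
      // 1. If a previously rendered frame has not shown up yet, do not render another one:
      //    releasing several output buffers with render=true back to back can drop frames.
      if (isWaitingForTexture) {
        final DecodedTextureBuffer textureBuffer =
            textureListener.dequeueTextureFrame(dequeueTimeoutUs);
        isWaitingForTexture = (textureBuffer == null);
        return textureBuffer;  // Still null if the frame did not arrive within the timeout.
      }
      // 2. Otherwise render the decoded buffer to the Surface...
      mediaCodec.releaseOutputBuffer(result, true /* render */);
      // 3. ...and wait, bounded by the same timeout, for onTextureFrameAvailable().
      final DecodedTextureBuffer textureBuffer =
          textureListener.dequeueTextureFrame(dequeueTimeoutUs);
      isWaitingForTexture = (textureBuffer == null);
      return textureBuffer;
    }

A null return simply means "try again later": the next call re-enters step 1 and keeps
polling instead of rendering a second buffer on top of the pending one.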