 Chromium Code Reviews
 Chromium Code Reviews Issue 1422963003:
  Android MediaCodecVideoDecoder: Manage lifetime of texture frames  (Closed) 
  Base URL: https://chromium.googlesource.com/external/webrtc.git@master
    
  
    Issue 1422963003:
  Android MediaCodecVideoDecoder: Manage lifetime of texture frames  (Closed) 
  Base URL: https://chromium.googlesource.com/external/webrtc.git@master| Index: talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java | 
| diff --git a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java | 
| index a23d69ca0d84abe1658020be077014f00ce899d2..eb2ba9ad3979ba2c20ef56f7049e00171b4d90f0 100644 | 
| --- a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java | 
| +++ b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java | 
| @@ -27,23 +27,24 @@ | 
| package org.webrtc; | 
| -import android.graphics.SurfaceTexture; | 
| import android.media.MediaCodec; | 
| import android.media.MediaCodecInfo; | 
| import android.media.MediaCodecInfo.CodecCapabilities; | 
| import android.media.MediaCodecList; | 
| import android.media.MediaFormat; | 
| -import android.opengl.EGLContext; | 
| -import android.opengl.GLES11Ext; | 
| -import android.opengl.GLES20; | 
| import android.os.Build; | 
| +import android.os.SystemClock; | 
| import android.view.Surface; | 
| import org.webrtc.Logging; | 
| import java.nio.ByteBuffer; | 
| +import java.util.ArrayList; | 
| import java.util.Arrays; | 
| +import java.util.LinkedList; | 
| import java.util.List; | 
| +import java.util.Queue; | 
| +import java.util.concurrent.TimeUnit; | 
| // Java-side of peerconnection_jni.cc:MediaCodecVideoDecoder. | 
| // This class is an implementation detail of the Java PeerConnection API. | 
| @@ -96,13 +97,12 @@ public class MediaCodecVideoDecoder { | 
| private int stride; | 
| private int sliceHeight; | 
| private boolean useSurface; | 
| - private int textureID = 0; | 
| - private SurfaceTexture surfaceTexture = null; | 
| + private TextureListener textureListener; | 
| private Surface surface = null; | 
| - private EglBase eglBase; | 
| - private MediaCodecVideoDecoder() { | 
| - } | 
| + private final Queue<Long> decodeStartTimeMs = new LinkedList<Long>(); | 
| + private final Queue<DecodedTextureBuffer> | 
| + dequeuedTextureBuffers = new LinkedList<DecodedTextureBuffer>(); | 
| // Helper struct for findVp8Decoder() below. | 
| private static class DecoderProperties { | 
| @@ -196,12 +196,13 @@ public class MediaCodecVideoDecoder { | 
| } | 
| } | 
| - // Pass null in |sharedContext| to configure the codec for ByteBuffer output. | 
| - private boolean initDecode(VideoCodecType type, int width, int height, EGLContext sharedContext) { | 
| + // Pass null in |surfaceTextureHelper| to configure the codec for ByteBuffer output. | 
| + private boolean initDecode( | 
| + VideoCodecType type, int width, int height, SurfaceTextureHelper surfaceTextureHelper) { | 
| if (mediaCodecThread != null) { | 
| throw new RuntimeException("Forgot to release()?"); | 
| } | 
| - useSurface = (sharedContext != null); | 
| + useSurface = (surfaceTextureHelper != null); | 
| String mime = null; | 
| String[] supportedCodecPrefixes = null; | 
| if (type == VideoCodecType.VIDEO_CODEC_VP8) { | 
| @@ -220,9 +221,6 @@ public class MediaCodecVideoDecoder { | 
| Logging.d(TAG, "Java initDecode: " + type + " : "+ width + " x " + height + | 
| ". Color: 0x" + Integer.toHexString(properties.colorFormat) + | 
| ". Use Surface: " + useSurface); | 
| - if (sharedContext != null) { | 
| - Logging.d(TAG, "Decoder shared EGL Context: " + sharedContext); | 
| - } | 
| runningInstance = this; // Decoder is now running and can be queried for stack traces. | 
| mediaCodecThread = Thread.currentThread(); | 
| try { | 
| @@ -232,16 +230,8 @@ public class MediaCodecVideoDecoder { | 
| sliceHeight = height; | 
| if (useSurface) { | 
| - // Create shared EGL context. | 
| - eglBase = new EglBase(sharedContext, EglBase.ConfigType.PIXEL_BUFFER); | 
| - eglBase.createDummyPbufferSurface(); | 
| - eglBase.makeCurrent(); | 
| - | 
| - // Create output surface | 
| - textureID = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES); | 
| - Logging.d(TAG, "Video decoder TextureID = " + textureID); | 
| - surfaceTexture = new SurfaceTexture(textureID); | 
| - surface = new Surface(surfaceTexture); | 
| + textureListener = new TextureListener(surfaceTextureHelper); | 
| + surface = new Surface(surfaceTextureHelper.getSurfaceTexture()); | 
| } | 
| MediaFormat format = MediaFormat.createVideoFormat(mime, width, height); | 
| @@ -284,11 +274,7 @@ public class MediaCodecVideoDecoder { | 
| if (useSurface) { | 
| surface.release(); | 
| surface = null; | 
| - Logging.d(TAG, "Delete video decoder TextureID " + textureID); | 
| - GLES20.glDeleteTextures(1, new int[] {textureID}, 0); | 
| - textureID = 0; | 
| - eglBase.release(); | 
| - eglBase = null; | 
| + textureListener.release(); | 
| } | 
| Logging.d(TAG, "Java releaseDecoder done"); | 
| } | 
| @@ -311,6 +297,8 @@ public class MediaCodecVideoDecoder { | 
| try { | 
| inputBuffers[inputBufferIndex].position(0); | 
| inputBuffers[inputBufferIndex].limit(size); | 
| + decodeStartTimeMs.add(SystemClock.elapsedRealtime()); | 
| + | 
| mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, timestampUs, 0); | 
| return true; | 
| } | 
| @@ -320,7 +308,7 @@ public class MediaCodecVideoDecoder { | 
| } | 
| } | 
| - // Helper structs for dequeueOutputBuffer() below. | 
| + // Helper struct for dequeueByteBuffer() below. | 
| private static class DecodedByteBuffer { | 
| public DecodedByteBuffer(int index, int offset, int size, long presentationTimestampUs) { | 
| this.index = index; | 
| @@ -335,32 +323,208 @@ public class MediaCodecVideoDecoder { | 
| private final long presentationTimestampUs; | 
| } | 
| + // Helper struct for dequeueTextureBuffer() below. | 
| private static class DecodedTextureBuffer { | 
| 
magjed_webrtc
2015/10/29 09:44:13
I think this class has become bloated with too much state.
 
perkj_webrtc
2015/10/29 19:26:44
Acknowledged.
 | 
| - private final int textureID; | 
| - private final long presentationTimestampUs; | 
| + private final int bufferIndex; | 
| + private final long decodeTimeMs; | 
| + private final int width; | 
| + private final int height; | 
| + private int textureID; | 
| + private float[] transformMatrix; | 
| + private long timestampNs; | 
| + private State state; | 
| + public enum State { | 
| 
magjed_webrtc
2015/10/29 09:44:13
I don't think this State makes sense, because it's implicit from the processing order.
 
perkj_webrtc
2015/10/29 19:26:45
Done.
 | 
| + DECODED, | 
| + RENDERING, | 
| + RENDERED, | 
| + } | 
| + | 
| + public DecodedTextureBuffer(int bufferIndex, int width, int height, long decodeTimeMs) { | 
| + this.state = State.DECODED; | 
| + this.bufferIndex = bufferIndex; | 
| + this.width = width; | 
| + this.height = height; | 
| + this.decodeTimeMs = decodeTimeMs; | 
| + } | 
| - public DecodedTextureBuffer(int textureID, long presentationTimestampUs) { | 
| + public void setTextureInfo(int textureID, float[] transformMatrix, long timestampNs) { | 
| this.textureID = textureID; | 
| - this.presentationTimestampUs = presentationTimestampUs; | 
| + this.transformMatrix = transformMatrix; | 
| + this.timestampNs = timestampNs; | 
| + this.state = State.RENDERED; | 
| } | 
| } | 
| - // Returns null if no decoded buffer is available, and otherwise either a DecodedByteBuffer or | 
| - // DecodedTexturebuffer depending on |useSurface| configuration. | 
| + // Poll based texture listener. | 
| + private static class TextureListener | 
| + implements SurfaceTextureHelper.OnTextureFrameAvailableListener { | 
| + public static class TextureInfo { | 
| + private final int textureID; | 
| + private final float[] transformMatrix; | 
| + private final long timestampNs; | 
| + | 
| + TextureInfo(int textureId, float[] transformMatrix, long timestampNs) { | 
| + this.textureID = textureId; | 
| + this.transformMatrix = transformMatrix; | 
| + this.timestampNs = timestampNs; | 
| + } | 
| + } | 
| + private final SurfaceTextureHelper surfaceTextureHelper; | 
| + private TextureInfo textureProperties; | 
| + // |newFrameLock| is used to synchronize arrival of new frames with wait()/notifyAll(). | 
| + private final Object newFrameLock = new Object(); | 
| + | 
| + public TextureListener(SurfaceTextureHelper surfaceTextureHelper) { | 
| + this.surfaceTextureHelper = surfaceTextureHelper; | 
| + surfaceTextureHelper.setListener(this); | 
| + } | 
| + | 
| + // Callback from |surfaceTextureHelper|. May be called on an arbitrary thread. | 
| + @Override | 
| + public void onTextureFrameAvailable( | 
| + int oesTextureId, float[] transformMatrix, long timestampNs) { | 
| + synchronized (newFrameLock) { | 
| + if (textureProperties != null) { | 
| + Logging.e(TAG, | 
| + "Unexpected onTextureFrameAvailable() called while already holding a texture."); | 
| + throw new IllegalStateException("Already holding a texture."); | 
| + } | 
| + textureProperties = new TextureInfo(oesTextureId, transformMatrix, timestampNs); | 
| + newFrameLock.notifyAll(); | 
| + } | 
| + } | 
| + | 
| + // Dequeues and returns a TextureInfo if available, or null otherwise. | 
| + public TextureInfo dequeueTextureInfo(int timeoutMs) { | 
| + synchronized (newFrameLock) { | 
| + if (textureProperties == null && timeoutMs > 0) { | 
| + try { | 
| + newFrameLock.wait(timeoutMs); | 
| + } catch(InterruptedException e) { | 
| + // Restore the interrupted status by reinterrupting the thread. | 
| + Thread.currentThread().interrupt(); | 
| + } | 
| + } | 
| + TextureInfo returnedInfo = textureProperties; | 
| + textureProperties = null; | 
| + return returnedInfo; | 
| + } | 
| + } | 
| + | 
| + public void release() { | 
| + // SurfaceTextureHelper.disconnect() will block until any onTextureFrameAvailable() in | 
| + // progress is done. Therefore, the call to disconnect() must be outside any synchronized | 
| + // statement that is also used in the onTextureFrameAvailable() above to avoid deadlocks. | 
| + surfaceTextureHelper.disconnect(); | 
| + synchronized (newFrameLock) { | 
| + if (textureProperties != null) { | 
| + surfaceTextureHelper.returnTextureFrame(); | 
| + textureProperties = null; | 
| + } | 
| + } | 
| + } | 
| + } | 
| + | 
| + // Returns null if no decoded buffer is available, and otherwise a DecodedByteBuffer. | 
| // Throws IllegalStateException if call is made on the wrong thread, if color format changes to an | 
| // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException | 
| // upon codec error. | 
| - private Object dequeueOutputBuffer(int dequeueTimeoutUs) | 
| + private DecodedByteBuffer dequeueByteBuffer(int dequeueTimeoutMs) | 
| throws IllegalStateException, MediaCodec.CodecException { | 
| checkOnMediaCodecThread(); | 
| + if (useSurface) { | 
| + throw new IllegalStateException("dequeueOutputBuffer() called for surface decoding."); | 
| + } | 
| + | 
| + final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo(); | 
| + int result = dequeueOutputBuffer(info, dequeueTimeoutMs); | 
| + if (result < 0) | 
| + return null; | 
| + return new DecodedByteBuffer(result, info.offset, info.size, info.presentationTimeUs); | 
| + } | 
| + | 
| + // Returns null if no decoded buffer is available, and otherwise a DecodedTextureBuffer. | 
| + // Throws IllegalStateException if call is made on the wrong thread, if color format changes to an | 
| + // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException | 
| + // upon codec error. | 
| + private DecodedTextureBuffer dequeueTextureBuffer(int dequeueTimeoutMs) { | 
| 
magjed_webrtc
2015/10/29 09:44:13
This function has become too bloated and complicated.
 
perkj_webrtc
2015/10/29 19:26:44
Done.
 | 
| + if (!useSurface) { | 
| + throw new IllegalStateException("dequeueTexture() called for byte buffer decoding."); | 
| + } | 
| + | 
| + if (!decodeStartTimeMs.isEmpty()) { | 
| + final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo(); | 
| + final int result = dequeueOutputBuffer(info, dequeueTimeoutMs); | 
| + if (result >= 0) { | 
| + // Output buffer decoded. | 
| + dequeuedTextureBuffers.add( | 
| + new DecodedTextureBuffer( | 
| + result, width, height, SystemClock.elapsedRealtime() - decodeStartTimeMs.poll())); | 
| + } | 
| + } | 
| + | 
| + if (dequeuedTextureBuffers.isEmpty()) | 
| + return null; | 
| + | 
| + DecodedTextureBuffer textureBuffer = dequeuedTextureBuffers.peek(); | 
| + | 
| + if (textureBuffer.state == DecodedTextureBuffer.State.DECODED) { | 
| + // releaseOutputBuffer renders to the output surface. | 
| + mediaCodec.releaseOutputBuffer(textureBuffer.bufferIndex, true /* render */); | 
| + textureBuffer.state = DecodedTextureBuffer.State.RENDERING; | 
| + } | 
| + | 
| + if (textureBuffer.state == DecodedTextureBuffer.State.RENDERING) { | 
| + // If we are waiting for a frame to be rendered to the decoder surface, | 
| + // check if it is ready now by waiting max |dequeueTimeoutMs|. There can only be one frame | 
| + // rendered at the time. | 
| + TextureListener.TextureInfo info = textureListener.dequeueTextureInfo(dequeueTimeoutMs); | 
| + if (info != null) { | 
| + textureBuffer.setTextureInfo(info.textureID, info.transformMatrix, info.timestampNs); | 
| + textureBuffer.state = DecodedTextureBuffer.State.RENDERED; | 
| + } | 
| + } | 
| + | 
| + if (textureBuffer.state == DecodedTextureBuffer.State.RENDERED) { | 
| + dequeuedTextureBuffers.remove(); | 
| + if (!dequeuedTextureBuffers.isEmpty()) { | 
| + // If we are not waiting for a frame to be rendered, we can render the next decoder output | 
| + // buffer to the decoder surface to prepare for the next run. | 
| + DecodedTextureBuffer nextFrame = dequeuedTextureBuffers.peek(); | 
| + mediaCodec.releaseOutputBuffer(nextFrame.bufferIndex, true /* render */); | 
| + nextFrame.state = DecodedTextureBuffer.State.RENDERING; | 
| + } | 
| + return textureBuffer; | 
| + } | 
| + return null; | 
| + } | 
| + | 
| + // Release a dequeued output byte buffer back to the codec for re-use. Should only be called for | 
| 
magjed_webrtc
2015/10/29 09:44:13
Move this back to the old place so it's easier to see the diff.
 | 
| + // non-surface decoding. | 
| + // Throws IllegalStateException if the call is made on the wrong thread, if codec is configured | 
| + // for surface decoding, or if |mediaCodec| is not in the Executing state. Throws | 
| + // MediaCodec.CodecException upon codec error. | 
| + private void returnDecodedByteBuffer(int index) | 
| + throws IllegalStateException, MediaCodec.CodecException { | 
| + checkOnMediaCodecThread(); | 
| + if (useSurface) { | 
| + throw new IllegalStateException("returnDecodedByteBuffer() called for surface decoding."); | 
| + } | 
| + mediaCodec.releaseOutputBuffer(index, false /* render */); | 
| + } | 
| + | 
| + // Returns the index of |outputBuffers| that has just been decoded or | 
| + // MediaCodec.INFO_TRY_AGAIN_LATER if no output buffer has been filled. | 
| + // Throws IllegalStateException if color format changes to an | 
| + // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException | 
| + // upon codec error. | 
| + private int dequeueOutputBuffer(MediaCodec.BufferInfo info, int dequeueTimeoutMs) { | 
| // Drain the decoder until receiving a decoded buffer or hitting | 
| // MediaCodec.INFO_TRY_AGAIN_LATER. | 
| - final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo(); | 
| while (true) { | 
| - final int result = mediaCodec.dequeueOutputBuffer(info, dequeueTimeoutUs); | 
| + final int result = mediaCodec.dequeueOutputBuffer( | 
| + info, TimeUnit.MILLISECONDS.toMicros(dequeueTimeoutMs)); | 
| switch (result) { | 
| - case MediaCodec.INFO_TRY_AGAIN_LATER: | 
| - return null; | 
| case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED: | 
| outputBuffers = mediaCodec.getOutputBuffers(); | 
| Logging.d(TAG, "Decoder output buffers changed: " + outputBuffers.length); | 
| @@ -387,31 +551,10 @@ public class MediaCodecVideoDecoder { | 
| stride = Math.max(width, stride); | 
| sliceHeight = Math.max(height, sliceHeight); | 
| break; | 
| + case MediaCodec.INFO_TRY_AGAIN_LATER: | 
| default: | 
| - // Output buffer decoded. | 
| - if (useSurface) { | 
| - mediaCodec.releaseOutputBuffer(result, true /* render */); | 
| - // TODO(magjed): Wait for SurfaceTexture.onFrameAvailable() before returning a texture | 
| - // frame. | 
| - return new DecodedTextureBuffer(textureID, info.presentationTimeUs); | 
| - } else { | 
| - return new DecodedByteBuffer(result, info.offset, info.size, info.presentationTimeUs); | 
| - } | 
| + return result; | 
| 
magjed_webrtc
2015/10/29 09:44:13
I think you should add a |decodedTimeMs| to DecodedByteBuffer as well.
 
perkj_webrtc
2015/10/29 19:26:45
Done.
 | 
| } | 
| } | 
| } | 
| - | 
| - // Release a dequeued output byte buffer back to the codec for re-use. Should only be called for | 
| - // non-surface decoding. | 
| - // Throws IllegalStateException if the call is made on the wrong thread, if codec is configured | 
| - // for surface decoding, or if |mediaCodec| is not in the Executing state. Throws | 
| - // MediaCodec.CodecException upon codec error. | 
| - private void returnDecodedByteBuffer(int index) | 
| - throws IllegalStateException, MediaCodec.CodecException { | 
| - checkOnMediaCodecThread(); | 
| - if (useSurface) { | 
| - throw new IllegalStateException("returnDecodedByteBuffer() called for surface decoding."); | 
| - } | 
| - mediaCodec.releaseOutputBuffer(index, false /* render */); | 
| - } | 
| } |