Index: talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java |
diff --git a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java |
index 86221c4cd9d7d4704841a844f59afa9b57352086..f60f3e56772f4a8cc887908e5841e5d910cc488b 100644 |
--- a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java |
+++ b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java |
@@ -33,19 +33,19 @@ import android.media.MediaCodecInfo; |
import android.media.MediaCodecInfo.CodecCapabilities; |
import android.media.MediaCodecList; |
import android.media.MediaFormat; |
-import android.opengl.GLES11Ext; |
-import android.opengl.GLES20; |
import android.os.Build; |
+import android.os.SystemClock; |
import android.view.Surface; |
import org.webrtc.Logging; |
import java.nio.ByteBuffer; |
import java.util.Arrays; |
+import java.util.LinkedList; |
import java.util.List; |
import java.util.concurrent.CountDownLatch; |
- |
-import javax.microedition.khronos.egl.EGLContext; |
+import java.util.Queue; |
+import java.util.concurrent.TimeUnit; |
// Java-side of peerconnection_jni.cc:MediaCodecVideoDecoder. |
// This class is an implementation detail of the Java PeerConnection API. |
@@ -104,14 +104,18 @@ public class MediaCodecVideoDecoder { |
private int height; |
private int stride; |
private int sliceHeight; |
+ private boolean hasDecodedFirstFrame; |
+ private final Queue<TimeStamps> decodeStartTimeMs = new LinkedList<TimeStamps>(); |
private boolean useSurface; |
- private int textureID = 0; |
- private SurfaceTexture surfaceTexture = null; |
- private Surface surface = null; |
- private EglBase eglBase; |
- private MediaCodecVideoDecoder() { |
- } |
+ // The below variables are only used when decoding to a Surface. |
+ private TextureListener textureListener; |
+ // Max number of output buffers queued before starting to drop decoded frames. |
+ private static final int MAX_QUEUED_OUTPUTBUFFERS = 3; |
+ private int droppedFrames; |
+ private Surface surface = null; |
+ private final Queue<DecodedOutputBuffer> |
+ dequeuedSurfaceOutputBuffers = new LinkedList<DecodedOutputBuffer>(); |
// MediaCodec error handler - invoked when critical error happens which may prevent |
// further use of media codec API. Now it means that one of media codec instances |
@@ -223,12 +227,13 @@ public class MediaCodecVideoDecoder { |
} |
} |
- // Pass null in |sharedContext| to configure the codec for ByteBuffer output. |
- private boolean initDecode(VideoCodecType type, int width, int height, EGLContext sharedContext) { |
+ // Pass null in |surfaceTextureHelper| to configure the codec for ByteBuffer output. |
+ private boolean initDecode( |
+ VideoCodecType type, int width, int height, SurfaceTextureHelper surfaceTextureHelper) { |
if (mediaCodecThread != null) { |
throw new RuntimeException("Forgot to release()?"); |
} |
- useSurface = (sharedContext != null); |
+ useSurface = (surfaceTextureHelper != null); |
String mime = null; |
String[] supportedCodecPrefixes = null; |
if (type == VideoCodecType.VIDEO_CODEC_VP8) { |
@@ -250,9 +255,6 @@ public class MediaCodecVideoDecoder { |
Logging.d(TAG, "Java initDecode: " + type + " : "+ width + " x " + height + |
". Color: 0x" + Integer.toHexString(properties.colorFormat) + |
". Use Surface: " + useSurface); |
- if (sharedContext != null) { |
- Logging.d(TAG, "Decoder shared EGL Context: " + sharedContext); |
- } |
runningInstance = this; // Decoder is now running and can be queried for stack traces. |
mediaCodecThread = Thread.currentThread(); |
try { |
@@ -262,16 +264,8 @@ public class MediaCodecVideoDecoder { |
sliceHeight = height; |
if (useSurface) { |
- // Create shared EGL context. |
- eglBase = new EglBase(sharedContext, EglBase.ConfigType.PIXEL_BUFFER); |
- eglBase.createDummyPbufferSurface(); |
- eglBase.makeCurrent(); |
- |
- // Create output surface |
- textureID = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES); |
- Logging.d(TAG, "Video decoder TextureID = " + textureID); |
- surfaceTexture = new SurfaceTexture(textureID); |
- surface = new Surface(surfaceTexture); |
+ textureListener = new TextureListener(surfaceTextureHelper); |
+ surface = new Surface(surfaceTextureHelper.getSurfaceTexture()); |
} |
MediaFormat format = MediaFormat.createVideoFormat(mime, width, height); |
@@ -290,6 +284,10 @@ public class MediaCodecVideoDecoder { |
colorFormat = properties.colorFormat; |
outputBuffers = mediaCodec.getOutputBuffers(); |
inputBuffers = mediaCodec.getInputBuffers(); |
+ decodeStartTimeMs.clear(); |
+ hasDecodedFirstFrame = false; |
+ dequeuedSurfaceOutputBuffers.clear(); |
+ droppedFrames = 0; |
Logging.d(TAG, "Input buffers: " + inputBuffers.length + |
". Output buffers: " + outputBuffers.length); |
return true; |
@@ -300,7 +298,7 @@ public class MediaCodecVideoDecoder { |
} |
private void release() { |
- Logging.d(TAG, "Java releaseDecoder"); |
+ Logging.d(TAG, "Java releaseDecoder. Total number of dropped frames: " + droppedFrames); |
checkOnMediaCodecThread(); |
// Run Mediacodec stop() and release() on separate thread since sometime |
@@ -338,11 +336,7 @@ public class MediaCodecVideoDecoder { |
if (useSurface) { |
surface.release(); |
surface = null; |
- Logging.d(TAG, "Delete video decoder TextureID " + textureID); |
- GLES20.glDeleteTextures(1, new int[] {textureID}, 0); |
- textureID = 0; |
- eglBase.release(); |
- eglBase = null; |
+ textureListener.release(); |
} |
Logging.d(TAG, "Java releaseDecoder done"); |
} |
@@ -359,13 +353,15 @@ public class MediaCodecVideoDecoder { |
} |
} |
- private boolean queueInputBuffer( |
- int inputBufferIndex, int size, long timestampUs) { |
+  private boolean queueInputBuffer(int inputBufferIndex, int size, long presentationTimeStampUs, |
+      long timeStampMs, long ntpTimeStamp) { |
    checkOnMediaCodecThread(); |
    try { |
      inputBuffers[inputBufferIndex].position(0); |
      inputBuffers[inputBufferIndex].limit(size); |
+      decodeStartTimeMs.add(new TimeStamps(SystemClock.elapsedRealtime(), timeStampMs, |
+          ntpTimeStamp)); |
+      mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, presentationTimeStampUs, 0); |
      return true; |
return true; |
} |
catch (IllegalStateException e) { |
@@ -374,57 +370,183 @@ public class MediaCodecVideoDecoder { |
} |
} |
- // Helper structs for dequeueOutputBuffer() below. |
- private static class DecodedByteBuffer { |
- public DecodedByteBuffer(int index, int offset, int size, long presentationTimestampUs) { |
+ private static class TimeStamps { |
+ public TimeStamps(long decodeStartTimeMs, long timeStampMs, long ntpTimeStampMs) { |
+ this.decodeStartTimeMs = decodeStartTimeMs; |
+ this.timeStampMs = timeStampMs; |
+ this.ntpTimeStampMs = ntpTimeStampMs; |
+ } |
+ private final long decodeStartTimeMs; // Time when this frame was queued for decoding. |
+    private final long timeStampMs; // Frame timestamp; carried through Java and consumed in C++. |
+    private final long ntpTimeStampMs; // NTP timestamp; carried through Java and consumed in C++. |
+ } |
+ |
+ // Helper struct for dequeueOutputBuffer() below. |
+ private static class DecodedOutputBuffer { |
+ public DecodedOutputBuffer(int index, int offset, int size, long timeStampMs, |
+ long ntpTimeStampMs, long decodeTime, long endDecodeTime) { |
this.index = index; |
this.offset = offset; |
this.size = size; |
- this.presentationTimestampUs = presentationTimestampUs; |
+ this.timeStampMs = timeStampMs; |
+ this.ntpTimeStampMs = ntpTimeStampMs; |
+ this.decodeTimeMs = decodeTime; |
+ this.endDecodeTimeMs = endDecodeTime; |
} |
private final int index; |
private final int offset; |
private final int size; |
- private final long presentationTimestampUs; |
+ private final long timeStampMs; |
+ private final long ntpTimeStampMs; |
+ // Number of ms it took to decode this frame. |
+ private final long decodeTimeMs; |
+ // System time when this frame finished decoding. |
+ private final long endDecodeTimeMs; |
} |
+ // Helper struct for dequeueTextureBuffer() below. |
private static class DecodedTextureBuffer { |
private final int textureID; |
- private final long presentationTimestampUs; |
- |
- public DecodedTextureBuffer(int textureID, long presentationTimestampUs) { |
+ private final float[] transformMatrix; |
+ private final long timeStampMs; |
+ private final long ntpTimeStampMs; |
+ private final long decodeTimeMs; |
+ // Interval from when the frame finished decoding until this buffer has been created. |
+    // Since there is only one texture, this interval depends on the time from when |
+    // a frame is decoded and provided to C++ until that frame is returned to the MediaCodec |
+ // so that the texture can be updated with the next decoded frame. |
+ private final long frameDelayMs; |
+ |
+ // A DecodedTextureBuffer with zero |textureID| has special meaning and represents a frame |
+ // that was dropped. |
+ public DecodedTextureBuffer(int textureID, float[] transformMatrix, long timeStampMs, |
+ long ntpTimeStampMs, long decodeTimeMs, long frameDelay) { |
this.textureID = textureID; |
- this.presentationTimestampUs = presentationTimestampUs; |
+ this.transformMatrix = transformMatrix; |
+ this.timeStampMs = timeStampMs; |
+ this.ntpTimeStampMs = ntpTimeStampMs; |
+ this.decodeTimeMs = decodeTimeMs; |
+ this.frameDelayMs = frameDelay; |
+ } |
+ } |
+ |
+ // Poll based texture listener. |
+ private static class TextureListener |
+ implements SurfaceTextureHelper.OnTextureFrameAvailableListener { |
+ private final SurfaceTextureHelper surfaceTextureHelper; |
+ // |newFrameLock| is used to synchronize arrival of new frames with wait()/notifyAll(). |
+ private final Object newFrameLock = new Object(); |
+ // |bufferToRender| is non-null when waiting for transition between addBufferToRender() to |
+ // onTextureFrameAvailable(). |
+ private DecodedOutputBuffer bufferToRender; |
+ private DecodedTextureBuffer renderedBuffer; |
+ |
+ public TextureListener(SurfaceTextureHelper surfaceTextureHelper) { |
+ this.surfaceTextureHelper = surfaceTextureHelper; |
+ surfaceTextureHelper.setListener(this); |
+ } |
+ |
+ public void addBufferToRender(DecodedOutputBuffer buffer) { |
+ if (bufferToRender != null) { |
+ Logging.e(TAG, |
+ "Unexpected addBufferToRender() called while waiting for a texture."); |
+ throw new IllegalStateException("Waiting for a texture."); |
+ } |
+ bufferToRender = buffer; |
+ } |
+ |
+ public boolean isWaitingForTexture() { |
+ synchronized (newFrameLock) { |
+ return bufferToRender != null; |
+ } |
+ } |
+ |
+ // Callback from |surfaceTextureHelper|. May be called on an arbitrary thread. |
+ @Override |
+ public void onTextureFrameAvailable( |
+ int oesTextureId, float[] transformMatrix, long timestampNs) { |
+ synchronized (newFrameLock) { |
+ if (renderedBuffer != null) { |
+ Logging.e(TAG, |
+ "Unexpected onTextureFrameAvailable() called while already holding a texture."); |
+ throw new IllegalStateException("Already holding a texture."); |
+ } |
+ // |timestampNs| is always zero on some Android versions. |
+ renderedBuffer = new DecodedTextureBuffer(oesTextureId, transformMatrix, |
+ bufferToRender.timeStampMs, bufferToRender.ntpTimeStampMs, bufferToRender.decodeTimeMs, |
+ SystemClock.elapsedRealtime() - bufferToRender.endDecodeTimeMs); |
+ bufferToRender = null; |
+ newFrameLock.notifyAll(); |
+ } |
+ } |
+ |
+ // Dequeues and returns a DecodedTextureBuffer if available, or null otherwise. |
+ public DecodedTextureBuffer dequeueTextureBuffer(int timeoutMs) { |
+ synchronized (newFrameLock) { |
+ if (renderedBuffer == null && timeoutMs > 0 && isWaitingForTexture()) { |
+ try { |
+ newFrameLock.wait(timeoutMs); |
+ } catch(InterruptedException e) { |
+ // Restore the interrupted status by reinterrupting the thread. |
+ Thread.currentThread().interrupt(); |
+ } |
+ } |
+ DecodedTextureBuffer returnedBuffer = renderedBuffer; |
+ renderedBuffer = null; |
+ return returnedBuffer; |
+ } |
+ } |
+ |
+ public void release() { |
+ // SurfaceTextureHelper.disconnect() will block until any onTextureFrameAvailable() in |
+ // progress is done. Therefore, the call to disconnect() must be outside any synchronized |
+ // statement that is also used in the onTextureFrameAvailable() above to avoid deadlocks. |
+ surfaceTextureHelper.disconnect(); |
+ synchronized (newFrameLock) { |
+ if (renderedBuffer != null) { |
+ surfaceTextureHelper.returnTextureFrame(); |
+ renderedBuffer = null; |
+ } |
+ } |
} |
} |
- // Returns null if no decoded buffer is available, and otherwise either a DecodedByteBuffer or |
- // DecodedTexturebuffer depending on |useSurface| configuration. |
+  // Returns null if no decoded buffer is available, and otherwise a DecodedOutputBuffer. |
// Throws IllegalStateException if call is made on the wrong thread, if color format changes to an |
// unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException |
// upon codec error. |
- private Object dequeueOutputBuffer(int dequeueTimeoutUs) |
- throws IllegalStateException, MediaCodec.CodecException { |
+ private DecodedOutputBuffer dequeueOutputBuffer(int dequeueTimeoutMs) { |
checkOnMediaCodecThread(); |
- |
+ if (decodeStartTimeMs.isEmpty()) { |
+ return null; |
+ } |
// Drain the decoder until receiving a decoded buffer or hitting |
// MediaCodec.INFO_TRY_AGAIN_LATER. |
final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo(); |
while (true) { |
- final int result = mediaCodec.dequeueOutputBuffer(info, dequeueTimeoutUs); |
+ final int result = mediaCodec.dequeueOutputBuffer( |
+ info, TimeUnit.MILLISECONDS.toMicros(dequeueTimeoutMs)); |
switch (result) { |
- case MediaCodec.INFO_TRY_AGAIN_LATER: |
- return null; |
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED: |
outputBuffers = mediaCodec.getOutputBuffers(); |
Logging.d(TAG, "Decoder output buffers changed: " + outputBuffers.length); |
+ if (hasDecodedFirstFrame) { |
+ throw new RuntimeException("Unexpected output buffer change event."); |
+ } |
break; |
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED: |
MediaFormat format = mediaCodec.getOutputFormat(); |
Logging.d(TAG, "Decoder format changed: " + format.toString()); |
+ int new_width = format.getInteger(MediaFormat.KEY_WIDTH); |
+ int new_height = format.getInteger(MediaFormat.KEY_HEIGHT); |
+ if (hasDecodedFirstFrame && (new_width != width || new_height != height)) { |
+ throw new RuntimeException("Unexpected size change. Configured " + width + "*" + |
+ height + ". New " + new_width + "*" + new_height); |
+ } |
width = format.getInteger(MediaFormat.KEY_WIDTH); |
height = format.getInteger(MediaFormat.KEY_HEIGHT); |
+ |
if (!useSurface && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) { |
colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT); |
Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat)); |
@@ -442,18 +564,74 @@ public class MediaCodecVideoDecoder { |
stride = Math.max(width, stride); |
sliceHeight = Math.max(height, sliceHeight); |
break; |
+ case MediaCodec.INFO_TRY_AGAIN_LATER: |
+ return null; |
default: |
- // Output buffer decoded. |
- if (useSurface) { |
- mediaCodec.releaseOutputBuffer(result, true /* render */); |
- // TODO(magjed): Wait for SurfaceTexture.onFrameAvailable() before returning a texture |
- // frame. |
- return new DecodedTextureBuffer(textureID, info.presentationTimeUs); |
- } else { |
- return new DecodedByteBuffer(result, info.offset, info.size, info.presentationTimeUs); |
- } |
+ hasDecodedFirstFrame = true; |
+ TimeStamps timeStamps = decodeStartTimeMs.remove(); |
+ return new DecodedOutputBuffer(result, info.offset, info.size, timeStamps.timeStampMs, |
+ timeStamps.ntpTimeStampMs, |
+ SystemClock.elapsedRealtime() - timeStamps.decodeStartTimeMs, |
+ SystemClock.elapsedRealtime()); |
+ } |
+ } |
+ } |
+ |
+ // Returns null if no decoded buffer is available, and otherwise a DecodedTextureBuffer. |
+ // Throws IllegalStateException if call is made on the wrong thread, if color format changes to an |
+ // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException |
+ // upon codec error. If |dequeueTimeoutMs| > 0, the oldest decoded frame will be dropped if |
+ // a frame can't be returned. |
+ private DecodedTextureBuffer dequeueTextureBuffer(int dequeueTimeoutMs) { |
+ checkOnMediaCodecThread(); |
+ if (!useSurface) { |
+ throw new IllegalStateException("dequeueTexture() called for byte buffer decoding."); |
+ } |
+ DecodedOutputBuffer outputBuffer = dequeueOutputBuffer(dequeueTimeoutMs); |
+ if (outputBuffer != null) { |
+ dequeuedSurfaceOutputBuffers.add(outputBuffer); |
+ } |
+ |
+ MaybeRenderDecodedTextureBuffer(); |
+ // Check if there is texture ready now by waiting max |dequeueTimeoutMs|. |
+ DecodedTextureBuffer renderedBuffer = textureListener.dequeueTextureBuffer(dequeueTimeoutMs); |
+ if (renderedBuffer != null) { |
+ MaybeRenderDecodedTextureBuffer(); |
+ return renderedBuffer; |
+ } |
+ |
+ if ((dequeuedSurfaceOutputBuffers.size() |
+ >= Math.min(MAX_QUEUED_OUTPUTBUFFERS, outputBuffers.length) |
+ || (dequeueTimeoutMs > 0 && !dequeuedSurfaceOutputBuffers.isEmpty()))) { |
+ ++droppedFrames; |
+ // Drop the oldest frame still in dequeuedSurfaceOutputBuffers. |
+ // The oldest frame is owned by |textureListener| and can't be dropped since |
+ // mediaCodec.releaseOutputBuffer has already been called. |
+ final DecodedOutputBuffer droppedFrame = dequeuedSurfaceOutputBuffers.remove(); |
+ if (dequeueTimeoutMs > 0) { |
+ Logging.w(TAG, "Draining decoder. Dropping frame with TS: " |
+ + droppedFrame.timeStampMs + ". Total number of dropped frames: " + droppedFrames); |
+ } else { |
+ Logging.w(TAG, "Too many output buffers. Dropping frame with TS: " |
+ + droppedFrame.timeStampMs + ". Total number of dropped frames: " + droppedFrames); |
} |
+ |
+ mediaCodec.releaseOutputBuffer(droppedFrame.index, false /* render */); |
+ return new DecodedTextureBuffer(0, null, droppedFrame.timeStampMs, |
+ droppedFrame.ntpTimeStampMs, droppedFrame.decodeTimeMs, |
+ SystemClock.elapsedRealtime() - droppedFrame.endDecodeTimeMs); |
+ } |
+ return null; |
+ } |
+ |
+ private void MaybeRenderDecodedTextureBuffer() { |
+ if (dequeuedSurfaceOutputBuffers.isEmpty() || textureListener.isWaitingForTexture()) { |
+ return; |
} |
+ // Get the first frame in the queue and render to the decoder output surface. |
+ final DecodedOutputBuffer buffer = dequeuedSurfaceOutputBuffers.remove(); |
+ textureListener.addBufferToRender(buffer); |
+ mediaCodec.releaseOutputBuffer(buffer.index, true /* render */); |
} |
// Release a dequeued output byte buffer back to the codec for re-use. Should only be called for |
@@ -461,11 +639,11 @@ public class MediaCodecVideoDecoder { |
// Throws IllegalStateException if the call is made on the wrong thread, if codec is configured |
// for surface decoding, or if |mediaCodec| is not in the Executing state. Throws |
// MediaCodec.CodecException upon codec error. |
- private void returnDecodedByteBuffer(int index) |
+ private void returnDecodedOutputBuffer(int index) |
throws IllegalStateException, MediaCodec.CodecException { |
checkOnMediaCodecThread(); |
if (useSurface) { |
- throw new IllegalStateException("returnDecodedByteBuffer() called for surface decoding."); |
+ throw new IllegalStateException("returnDecodedOutputBuffer() called for surface decoding."); |
} |
mediaCodec.releaseOutputBuffer(index, false /* render */); |
} |