Chromium Code Reviews| Index: talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java |
| diff --git a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java |
| index 86221c4cd9d7d4704841a844f59afa9b57352086..4ba4d93b5af358474c41d7c1736b27a101d7f5aa 100644 |
| --- a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java |
| +++ b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java |
| @@ -33,19 +33,19 @@ import android.media.MediaCodecInfo; |
| import android.media.MediaCodecInfo.CodecCapabilities; |
| import android.media.MediaCodecList; |
| import android.media.MediaFormat; |
| -import android.opengl.GLES11Ext; |
| -import android.opengl.GLES20; |
| import android.os.Build; |
| +import android.os.SystemClock; |
| import android.view.Surface; |
| import org.webrtc.Logging; |
| import java.nio.ByteBuffer; |
| import java.util.Arrays; |
| +import java.util.LinkedList; |
| import java.util.List; |
| import java.util.concurrent.CountDownLatch; |
| - |
| -import javax.microedition.khronos.egl.EGLContext; |
| +import java.util.Queue; |
| +import java.util.concurrent.TimeUnit; |
| // Java-side of peerconnection_jni.cc:MediaCodecVideoDecoder. |
| // This class is an implementation detail of the Java PeerConnection API. |
| @@ -104,14 +104,18 @@ public class MediaCodecVideoDecoder { |
| private int height; |
| private int stride; |
| private int sliceHeight; |
| + private boolean hasDecodedFirstFrame; |
| + private final Queue<Long> decodeStartTimeMs = new LinkedList<Long>(); |
| private boolean useSurface; |
| - private int textureID = 0; |
| - private SurfaceTexture surfaceTexture = null; |
| - private Surface surface = null; |
| - private EglBase eglBase; |
| - private MediaCodecVideoDecoder() { |
| - } |
| + // The below variables are only used when decoding to a Surface. |
| + private TextureListener textureListener; |
| + // Max number of output buffers queued before starting to drop decoded frames. |
| + private static final int MAX_QUEUED_OUTPUTBUFFERS = 3; |
| + private int droppedFrames; |
| + private Surface surface = null; |
| + private final Queue<DecodedOutputBuffer> |
| + dequeuedSurfaceOutputBuffers = new LinkedList<DecodedOutputBuffer>(); |
| // MediaCodec error handler - invoked when critical error happens which may prevent |
| // further use of media codec API. Now it means that one of media codec instances |
| @@ -223,12 +227,13 @@ public class MediaCodecVideoDecoder { |
| } |
| } |
| - // Pass null in |sharedContext| to configure the codec for ByteBuffer output. |
| - private boolean initDecode(VideoCodecType type, int width, int height, EGLContext sharedContext) { |
| + // Pass null in |surfaceTextureHelper| to configure the codec for ByteBuffer output. |
| + private boolean initDecode( |
| + VideoCodecType type, int width, int height, SurfaceTextureHelper surfaceTextureHelper) { |
| if (mediaCodecThread != null) { |
| throw new RuntimeException("Forgot to release()?"); |
| } |
| - useSurface = (sharedContext != null); |
| + useSurface = (surfaceTextureHelper != null); |
| String mime = null; |
| String[] supportedCodecPrefixes = null; |
| if (type == VideoCodecType.VIDEO_CODEC_VP8) { |
| @@ -250,9 +255,6 @@ public class MediaCodecVideoDecoder { |
| Logging.d(TAG, "Java initDecode: " + type + " : "+ width + " x " + height + |
| ". Color: 0x" + Integer.toHexString(properties.colorFormat) + |
| ". Use Surface: " + useSurface); |
| - if (sharedContext != null) { |
| - Logging.d(TAG, "Decoder shared EGL Context: " + sharedContext); |
| - } |
| runningInstance = this; // Decoder is now running and can be queried for stack traces. |
| mediaCodecThread = Thread.currentThread(); |
| try { |
| @@ -262,16 +264,8 @@ public class MediaCodecVideoDecoder { |
| sliceHeight = height; |
| if (useSurface) { |
| - // Create shared EGL context. |
| - eglBase = new EglBase(sharedContext, EglBase.ConfigType.PIXEL_BUFFER); |
| - eglBase.createDummyPbufferSurface(); |
| - eglBase.makeCurrent(); |
| - |
| - // Create output surface |
| - textureID = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES); |
| - Logging.d(TAG, "Video decoder TextureID = " + textureID); |
| - surfaceTexture = new SurfaceTexture(textureID); |
| - surface = new Surface(surfaceTexture); |
| + textureListener = new TextureListener(surfaceTextureHelper); |
| + surface = new Surface(surfaceTextureHelper.getSurfaceTexture()); |
| } |
| MediaFormat format = MediaFormat.createVideoFormat(mime, width, height); |
| @@ -290,6 +284,10 @@ public class MediaCodecVideoDecoder { |
| colorFormat = properties.colorFormat; |
| outputBuffers = mediaCodec.getOutputBuffers(); |
| inputBuffers = mediaCodec.getInputBuffers(); |
| + decodeStartTimeMs.clear(); |
| + hasDecodedFirstFrame = false; |
| + dequeuedSurfaceOutputBuffers.clear(); |
| + droppedFrames = 0; |
| Logging.d(TAG, "Input buffers: " + inputBuffers.length + |
| ". Output buffers: " + outputBuffers.length); |
| return true; |
| @@ -300,7 +298,7 @@ public class MediaCodecVideoDecoder { |
| } |
| private void release() { |
| - Logging.d(TAG, "Java releaseDecoder"); |
| + Logging.d(TAG, "Java releaseDecoder. Total number of dropped frames: " + droppedFrames); |
| checkOnMediaCodecThread(); |
| // Run Mediacodec stop() and release() on separate thread since sometime |
| @@ -338,11 +336,7 @@ public class MediaCodecVideoDecoder { |
| if (useSurface) { |
| surface.release(); |
| surface = null; |
| - Logging.d(TAG, "Delete video decoder TextureID " + textureID); |
| - GLES20.glDeleteTextures(1, new int[] {textureID}, 0); |
| - textureID = 0; |
| - eglBase.release(); |
| - eglBase = null; |
| + textureListener.release(); |
| } |
| Logging.d(TAG, "Java releaseDecoder done"); |
| } |
| @@ -365,6 +359,7 @@ public class MediaCodecVideoDecoder { |
| try { |
| inputBuffers[inputBufferIndex].position(0); |
| inputBuffers[inputBufferIndex].limit(size); |
| + decodeStartTimeMs.add(SystemClock.elapsedRealtime()); |
| mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, timestampUs, 0); |
| return true; |
| } |
| @@ -374,57 +369,169 @@ public class MediaCodecVideoDecoder { |
| } |
| } |
| - // Helper structs for dequeueOutputBuffer() below. |
| - private static class DecodedByteBuffer { |
| - public DecodedByteBuffer(int index, int offset, int size, long presentationTimestampUs) { |
| + // Helper struct for dequeueOutputBuffer() below. |
| + private static class DecodedOutputBuffer { |
| + public DecodedOutputBuffer(int index, int offset, int size, long presentationTimestampUs, |
| + long decodeTime, long endDecodeTime) { |
| this.index = index; |
| this.offset = offset; |
| this.size = size; |
| this.presentationTimestampUs = presentationTimestampUs; |
| + this.decodeTimeMs = decodeTime; |
| + this.endDecodeTimeMs = endDecodeTime; |
| } |
| private final int index; |
| private final int offset; |
| private final int size; |
| private final long presentationTimestampUs; |
| + // Number of ms it took to decode this frame. |
| + private final long decodeTimeMs; |
| + // System time when this frame finished decoding. |
| + private final long endDecodeTimeMs; |
| } |
| + // Helper struct for dequeueTextureBuffer() below. |
| private static class DecodedTextureBuffer { |
| private final int textureID; |
| + private final float[] transformMatrix; |
| private final long presentationTimestampUs; |
| - |
| - public DecodedTextureBuffer(int textureID, long presentationTimestampUs) { |
| + private final long decodeTimeMs; |
| + // Interval from when the frame finished decoding until this buffer has been created. |
| + // Since there is only one texture, this interval depends on the time from when |
| + // a frame is decoded and provided to C++ and until that frame is returned to the MediaCodec |
| + // so that the texture can be updated with the next decoded frame. |
| + private final long frameDelayMs; |
| + |
| + // A DecodedTextureBuffer with zero |textureID| has special meaning and represents a frame |
| + // that was dropped. |
| + public DecodedTextureBuffer(int textureID, float[] transformMatrix, |
| + long presentationTimestampUs, long decodeTimeMs, long frameDelay) { |
| this.textureID = textureID; |
| + this.transformMatrix = transformMatrix; |
| this.presentationTimestampUs = presentationTimestampUs; |
| + this.decodeTimeMs = decodeTimeMs; |
| + this.frameDelayMs = frameDelay; |
| } |
| } |
| - // Returns null if no decoded buffer is available, and otherwise either a DecodedByteBuffer or |
| - // DecodedTexturebuffer depending on |useSurface| configuration. |
| + // Poll based texture listener. |
| + private static class TextureListener |
| + implements SurfaceTextureHelper.OnTextureFrameAvailableListener { |
| + private final SurfaceTextureHelper surfaceTextureHelper; |
| + // |newFrameLock| is used to synchronize arrival of new frames with wait()/notifyAll(). |
| + private final Object newFrameLock = new Object(); |
| + private DecodedOutputBuffer bufferToRender; |
| + private DecodedTextureBuffer renderedBuffer; |
| + // |isWaitingForTexture| is true when waiting for the transition: |
|
magjed_webrtc
2015/11/17 11:58:19
Keep comment that bufferToRender is non-null when
perkj_webrtc
2015/11/17 12:44:19
Done.
|
| + // addBufferToRender() -> onTextureFrameAvailable(). |
| + private boolean isWaitingForTexture; |
| + |
| + public TextureListener(SurfaceTextureHelper surfaceTextureHelper) { |
| + this.surfaceTextureHelper = surfaceTextureHelper; |
| + surfaceTextureHelper.setListener(this); |
| + } |
| + |
| + public void addBufferToRender(DecodedOutputBuffer buffer) { |
| + if (isWaitingForTexture) { |
| + Logging.e(TAG, |
| + "Unexpected addBufferToRender() called while waiting for a texture."); |
| + throw new IllegalStateException("Waiting for a texture."); |
| + } |
| + bufferToRender = buffer; |
| + isWaitingForTexture = true; |
| + } |
| + |
| + public boolean isWaitingForTexture() { |
| + return isWaitingForTexture; |
| + } |
| + |
| + // Callback from |surfaceTextureHelper|. May be called on an arbitrary thread. |
| + @Override |
| + public void onTextureFrameAvailable( |
| + int oesTextureId, float[] transformMatrix, long timestampNs) { |
| + synchronized (newFrameLock) { |
| + if (renderedBuffer != null) { |
| + Logging.e(TAG, |
| + "Unexpected onTextureFrameAvailable() called while already holding a texture."); |
| + throw new IllegalStateException("Already holding a texture."); |
| + } |
| + // |timestampNs| is always zero on some Android versions. |
| + renderedBuffer = new DecodedTextureBuffer(oesTextureId, transformMatrix, |
| + bufferToRender.presentationTimestampUs, bufferToRender.decodeTimeMs, |
| + SystemClock.elapsedRealtime() - bufferToRender.endDecodeTimeMs); |
| + |
|
magjed_webrtc
2015/11/16 12:56:47
You can set |isWaitingForTexture| to false here in
|
| + newFrameLock.notifyAll(); |
| + } |
| + } |
| + |
| + // Dequeues and returns a DecodedTextureBuffer if available, or null otherwise. |
| + public DecodedTextureBuffer dequeueTextureBuffer(int timeoutMs) { |
| + synchronized (newFrameLock) { |
| + if (renderedBuffer == null && timeoutMs > 0) { |
| + try { |
| + newFrameLock.wait(timeoutMs); |
| + } catch(InterruptedException e) { |
| + // Restore the interrupted status by reinterrupting the thread. |
| + Thread.currentThread().interrupt(); |
| + } |
| + } |
| + DecodedTextureBuffer returnedBuffer = renderedBuffer; |
| + renderedBuffer = null; |
| + isWaitingForTexture = (returnedBuffer == null); |
| + return returnedBuffer; |
| + } |
| + } |
| + |
| + public void release() { |
| + // SurfaceTextureHelper.disconnect() will block until any onTextureFrameAvailable() in |
| + // progress is done. Therefore, the call to disconnect() must be outside any synchronized |
| + // statement that is also used in the onTextureFrameAvailable() above to avoid deadlocks. |
| + surfaceTextureHelper.disconnect(); |
| + synchronized (newFrameLock) { |
| + if (renderedBuffer != null) { |
| + surfaceTextureHelper.returnTextureFrame(); |
| + renderedBuffer = null; |
| + } |
| + } |
| + } |
| + } |
| + |
| + // Returns null if no decoded buffer is available, and otherwise a DecodedOutputBuffer. |
| // Throws IllegalStateException if call is made on the wrong thread, if color format changes to an |
| // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException |
| // upon codec error. |
| - private Object dequeueOutputBuffer(int dequeueTimeoutUs) |
| - throws IllegalStateException, MediaCodec.CodecException { |
| + private DecodedOutputBuffer dequeueOutputBuffer(int dequeueTimeoutMs) { |
| checkOnMediaCodecThread(); |
| - |
| + if (decodeStartTimeMs.isEmpty()) { |
| + return null; |
| + } |
| // Drain the decoder until receiving a decoded buffer or hitting |
| // MediaCodec.INFO_TRY_AGAIN_LATER. |
| final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo(); |
| while (true) { |
| - final int result = mediaCodec.dequeueOutputBuffer(info, dequeueTimeoutUs); |
| + final int result = mediaCodec.dequeueOutputBuffer( |
| + info, TimeUnit.MILLISECONDS.toMicros(dequeueTimeoutMs)); |
| switch (result) { |
| - case MediaCodec.INFO_TRY_AGAIN_LATER: |
| - return null; |
| case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED: |
| outputBuffers = mediaCodec.getOutputBuffers(); |
| Logging.d(TAG, "Decoder output buffers changed: " + outputBuffers.length); |
| + if (hasDecodedFirstFrame) { |
| + throw new RuntimeException("Unexpected output buffer change event."); |
| + } |
| break; |
| case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED: |
| MediaFormat format = mediaCodec.getOutputFormat(); |
| Logging.d(TAG, "Decoder format changed: " + format.toString()); |
| + int new_width = format.getInteger(MediaFormat.KEY_WIDTH); |
| + int new_height = format.getInteger(MediaFormat.KEY_HEIGHT); |
| + if (hasDecodedFirstFrame && (new_width != width || new_height != height)) { |
| + throw new RuntimeException("Unexpected size change. Configured " + width + "*" + |
| + height + ". New " + new_width + "*" + new_height); |
| + } |
| width = format.getInteger(MediaFormat.KEY_WIDTH); |
| height = format.getInteger(MediaFormat.KEY_HEIGHT); |
| + |
| if (!useSurface && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) { |
| colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT); |
| Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat)); |
| @@ -442,18 +549,78 @@ public class MediaCodecVideoDecoder { |
| stride = Math.max(width, stride); |
| sliceHeight = Math.max(height, sliceHeight); |
| break; |
| + case MediaCodec.INFO_TRY_AGAIN_LATER: |
| + return null; |
| default: |
| - // Output buffer decoded. |
| - if (useSurface) { |
| - mediaCodec.releaseOutputBuffer(result, true /* render */); |
| - // TODO(magjed): Wait for SurfaceTexture.onFrameAvailable() before returning a texture |
| - // frame. |
| - return new DecodedTextureBuffer(textureID, info.presentationTimeUs); |
| - } else { |
| - return new DecodedByteBuffer(result, info.offset, info.size, info.presentationTimeUs); |
| - } |
| + hasDecodedFirstFrame = true; |
| + return new DecodedOutputBuffer(result, info.offset, info.size, info.presentationTimeUs, |
| + SystemClock.elapsedRealtime() - decodeStartTimeMs.remove(), |
| + SystemClock.elapsedRealtime()); |
| + } |
| + } |
| + } |
| + |
| + // Returns null if no decoded buffer is available, and otherwise a DecodedTextureBuffer. |
| + // Throws IllegalStateException if call is made on the wrong thread, if color format changes to an |
| + // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException |
| + // upon codec error. |
| + private DecodedTextureBuffer dequeueTextureBuffer(int dequeueTimeoutMs) { |
| + checkOnMediaCodecThread(); |
| + if (!useSurface) { |
| + throw new IllegalStateException("dequeueTexture() called for byte buffer decoding."); |
| + } |
| + |
| + DecodedOutputBuffer outputBuffer = dequeueOutputBuffer(dequeueTimeoutMs); |
|
AlexG
2015/11/17 00:47:35
If dequeueTimeoutMs > 0 (in case decoder is draini
perkj_webrtc
2015/11/17 11:02:19
That is what I also discovered. And that in turns
|
| + if (outputBuffer != null) { |
| + dequeuedSurfaceOutputBuffers.add(outputBuffer); |
| + } |
| + |
| + if (dequeuedSurfaceOutputBuffers.isEmpty()) { |
| + return null; |
| + } |
| + |
| + if (!textureListener.isWaitingForTexture()) { |
|
magjed_webrtc
2015/11/16 12:56:47
I would prefer if you add !dequeuedSurfaceOutputBu
perkj_webrtc
2015/11/17 11:02:19
Done.
|
| + // Get the first frame in the queue and render to the decoder output surface. |
| + final DecodedOutputBuffer buffer = dequeuedSurfaceOutputBuffers.remove(); |
| + textureListener.addBufferToRender(buffer); |
| + mediaCodec.releaseOutputBuffer(buffer.index, true /* render */); |
| + } |
| + |
| + // We are waiting for a frame to be rendered to the decoder surface. |
| + // Check if it is ready now by waiting max |dequeueTimeoutMs|. There can only be one frame |
| + // rendered at a time. |
| + DecodedTextureBuffer renderedBuffer = textureListener.dequeueTextureBuffer(dequeueTimeoutMs); |
| + if (renderedBuffer != null) { |
| + if (!dequeuedSurfaceOutputBuffers.isEmpty()) { |
| + // Get the next frame in the queue and render to the decoder output surface. |
| + final DecodedOutputBuffer buffer = dequeuedSurfaceOutputBuffers.remove(); |
| + textureListener.addBufferToRender(buffer); |
| + mediaCodec.releaseOutputBuffer(buffer.index, true /* render */); |
| + } |
| + return renderedBuffer; |
| + } |
| + |
| + if ((dequeuedSurfaceOutputBuffers.size() >= Math.min( |
| + MAX_QUEUED_OUTPUTBUFFERS, outputBuffers.length)) || dequeueTimeoutMs > 0) { |
|
magjed_webrtc
2015/11/16 12:56:47
I think for the case 'dequeueTimeoutMs > 0', |dequ
|
| + ++droppedFrames; |
| + if (dequeueTimeoutMs > 0) { |
|
AlexG
2015/11/17 00:47:35
Have you checked that you can still receive onText
|
| + Logging.w(TAG, "Waiting for texture to be rendered, dropping next frame." |
| + + " Total number of dropped frames: " + droppedFrames); |
| + } else { |
| + Logging.w(TAG, "Too many output buffers. Dropping frame. Total number of dropped frames: " |
| + + droppedFrames); |
| } |
| + // Drop the oldest frame still in dequeuedSurfaceOutputBuffers. |
| + // The oldest frame is owned by |textureListener| and can't be dropped since |
| + // mediaCodec.releaseOutputBuffer has already been called. Dropping this frame will lead |
|
magjed_webrtc
2015/11/16 12:56:47
I think you should remove the timestamp queue in C
AlexG
2015/11/17 00:47:35
"frames_received_ - frames_decoded_" are used by C
perkj_webrtc
2015/11/17 11:02:18
Acknowledged.
perkj_webrtc
2015/11/17 11:02:19
Done.
|
| + // to a shift of timestamps by one frame in MediaCodecVideoDecoder::DeliverPendingOutputs. |
| + final DecodedOutputBuffer droppedFrame = dequeuedSurfaceOutputBuffers.remove(); |
| + mediaCodec.releaseOutputBuffer(droppedFrame.index, false /* render */); |
| + return new DecodedTextureBuffer(0, null, droppedFrame.presentationTimestampUs, |
| + droppedFrame.decodeTimeMs, |
| + SystemClock.elapsedRealtime() - droppedFrame.endDecodeTimeMs); |
| } |
| + return null; |
| } |
| // Release a dequeued output byte buffer back to the codec for re-use. Should only be called for |
| @@ -461,11 +628,11 @@ public class MediaCodecVideoDecoder { |
| // Throws IllegalStateException if the call is made on the wrong thread, if codec is configured |
| // for surface decoding, or if |mediaCodec| is not in the Executing state. Throws |
| // MediaCodec.CodecException upon codec error. |
| - private void returnDecodedByteBuffer(int index) |
| + private void returnDecodedOutputBuffer(int index) |
| throws IllegalStateException, MediaCodec.CodecException { |
| checkOnMediaCodecThread(); |
| if (useSurface) { |
| - throw new IllegalStateException("returnDecodedByteBuffer() called for surface decoding."); |
| + throw new IllegalStateException("returnDecodedOutputBuffer() called for surface decoding."); |
| } |
| mediaCodec.releaseOutputBuffer(index, false /* render */); |
| } |