Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(270)

Unified Diff: talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java

Issue 1422963003: Android MediaCodecVideoDecoder: Manage lifetime of texture frames (Closed) Base URL: https://chromium.googlesource.com/external/webrtc.git@master
Patch Set: Created 5 years, 2 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View side-by-side diff with in-line comments
Download patch
Index: talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java
diff --git a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java
index a23d69ca0d84abe1658020be077014f00ce899d2..5e856efc20befacbf58371bbe9eba8207a20f702 100644
--- a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java
+++ b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java
@@ -27,23 +27,22 @@
package org.webrtc;
-import android.graphics.SurfaceTexture;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecList;
import android.media.MediaFormat;
-import android.opengl.EGLContext;
-import android.opengl.GLES11Ext;
-import android.opengl.GLES20;
import android.os.Build;
+import android.os.SystemClock;
import android.view.Surface;
import org.webrtc.Logging;
import java.nio.ByteBuffer;
+import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
+import java.util.concurrent.TimeUnit;
// Java-side of peerconnection_jni.cc:MediaCodecVideoDecoder.
// This class is an implementation detail of the Java PeerConnection API.
@@ -96,10 +95,15 @@ public class MediaCodecVideoDecoder {
private int stride;
private int sliceHeight;
private boolean useSurface;
- private int textureID = 0;
- private SurfaceTexture surfaceTexture = null;
+ // |isWaitingForTextureToFinishRendering| is true when waiting for the transition:
+ // MediaCodec.releaseOutputBuffer() -> onTextureFrameAvailable().
+ private boolean isWaitingForTextureToFinishRendering = false;
+ private TextureListener textureListener;
private Surface surface = null;
- private EglBase eglBase;
+
+ private final List<Long> decodeStartTime = new ArrayList<Long>();
magjed_webrtc 2015/10/28 11:57:16 These Lists should be queues. Also add ms suffix to the variable names.
perkj_webrtc 2015/10/28 21:12:39 Done.
+ private final List<Long> decodeTime = new ArrayList<Long>();
+ private final List<Integer> dequeuedOutputBuffers = new ArrayList<Integer>();
private MediaCodecVideoDecoder() {
}
@@ -196,12 +200,13 @@ public class MediaCodecVideoDecoder {
}
}
- // Pass null in |sharedContext| to configure the codec for ByteBuffer output.
- private boolean initDecode(VideoCodecType type, int width, int height, EGLContext sharedContext) {
+ // Pass null in |surfaceTextureHelper| to configure the codec for ByteBuffer output.
+ private boolean initDecode(
+ VideoCodecType type, int width, int height, SurfaceTextureHelper surfaceTextureHelper) {
if (mediaCodecThread != null) {
throw new RuntimeException("Forgot to release()?");
}
- useSurface = (sharedContext != null);
+ useSurface = (surfaceTextureHelper != null);
String mime = null;
String[] supportedCodecPrefixes = null;
if (type == VideoCodecType.VIDEO_CODEC_VP8) {
@@ -220,9 +225,6 @@ public class MediaCodecVideoDecoder {
Logging.d(TAG, "Java initDecode: " + type + " : "+ width + " x " + height +
". Color: 0x" + Integer.toHexString(properties.colorFormat) +
". Use Surface: " + useSurface);
- if (sharedContext != null) {
- Logging.d(TAG, "Decoder shared EGL Context: " + sharedContext);
- }
runningInstance = this; // Decoder is now running and can be queried for stack traces.
mediaCodecThread = Thread.currentThread();
try {
@@ -232,16 +234,8 @@ public class MediaCodecVideoDecoder {
sliceHeight = height;
if (useSurface) {
- // Create shared EGL context.
- eglBase = new EglBase(sharedContext, EglBase.ConfigType.PIXEL_BUFFER);
- eglBase.createDummyPbufferSurface();
- eglBase.makeCurrent();
-
- // Create output surface
- textureID = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
- Logging.d(TAG, "Video decoder TextureID = " + textureID);
- surfaceTexture = new SurfaceTexture(textureID);
- surface = new Surface(surfaceTexture);
+ textureListener = new TextureListener(surfaceTextureHelper);
+ surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
}
MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
@@ -284,11 +278,7 @@ public class MediaCodecVideoDecoder {
if (useSurface) {
surface.release();
surface = null;
- Logging.d(TAG, "Delete video decoder TextureID " + textureID);
- GLES20.glDeleteTextures(1, new int[] {textureID}, 0);
- textureID = 0;
- eglBase.release();
- eglBase = null;
+ textureListener.release();
}
Logging.d(TAG, "Java releaseDecoder done");
}
@@ -311,6 +301,8 @@ public class MediaCodecVideoDecoder {
try {
inputBuffers[inputBufferIndex].position(0);
inputBuffers[inputBufferIndex].limit(size);
+ decodeStartTime.add(SystemClock.elapsedRealtime());
+
mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, timestampUs, 0);
return true;
}
@@ -337,11 +329,74 @@ public class MediaCodecVideoDecoder {
private static class DecodedTextureBuffer {
private final int textureID;
- private final long presentationTimestampUs;
+ private final float[] transformMatrix;
+ private final long timestampNs;
+ private long decodeTimeMs;
perkj_webrtc 2015/10/27 20:46:15 decodeTime will be needed in the byte buffer case as well.
magjed_webrtc 2015/10/28 11:57:16 Or use the decode latency calculated in jni as before.
perkj_webrtc 2015/10/28 21:12:39 Done.
- public DecodedTextureBuffer(int textureID, long presentationTimestampUs) {
+ public DecodedTextureBuffer(int textureID, float[] transformMatrix, long timestampNs) {
this.textureID = textureID;
- this.presentationTimestampUs = presentationTimestampUs;
+ this.transformMatrix = transformMatrix;
+ this.timestampNs = timestampNs;
+ }
+ }
+
+ // Poll based texture listener.
+ private static class TextureListener
+ implements SurfaceTextureHelper.OnTextureFrameAvailableListener {
+ private final SurfaceTextureHelper surfaceTextureHelper;
+ private DecodedTextureBuffer textureBuffer;
+ private long decodeTime;
perkj_webrtc 2015/10/27 20:46:15 remove, unused.
perkj_webrtc 2015/10/28 21:12:39 Done.
+ // |newFrameLock| is used to synchronize arrival of new frames with wait()/notifyAll().
+ private final Object newFrameLock = new Object();
+
+ public TextureListener(SurfaceTextureHelper surfaceTextureHelper) {
+ this.surfaceTextureHelper = surfaceTextureHelper;
+ surfaceTextureHelper.setListener(this);
+ }
+
+ // Callback from |surfaceTextureHelper|. May be called on an arbitrary thread.
+ @Override
+ public void onTextureFrameAvailable(
+ int oesTextureId, float[] transformMatrix, long timestampNs) {
+ synchronized (newFrameLock) {
+ if (textureBuffer != null) {
+ Logging.e(TAG,
+ "Unexpected onTextureFrameAvailable() called while already holding a texture.");
+ throw new IllegalStateException("Already holding a texture.");
+ }
+ textureBuffer = new DecodedTextureBuffer(oesTextureId, transformMatrix, timestampNs);
+ newFrameLock.notifyAll();
+ }
+ }
+
+ // Dequeues and returns a texture buffer if available, or null otherwise.
+ public DecodedTextureBuffer dequeueTextureFrame(int timeoutMs) {
+ synchronized (newFrameLock) {
+ if (textureBuffer == null && timeoutMs > 0) {
+ try {
+ newFrameLock.wait(timeoutMs);
+ } catch(InterruptedException e) {
+ // Restore the interrupted status by reinterrupting the thread.
+ Thread.currentThread().interrupt();
+ }
+ }
+ final DecodedTextureBuffer textureBuffer = this.textureBuffer;
+ this.textureBuffer = null;
+ return textureBuffer;
+ }
+ }
+
+ public void release() {
+ // SurfaceTextureHelper.disconnect() will block until any onTextureFrameAvailable() in
+ // progress is done. Therefore, the call to disconnect() must be outside any synchronized
+ // statement that is also used in the onTextureFrameAvailable() above to avoid deadlocks.
+ surfaceTextureHelper.disconnect();
+ synchronized (newFrameLock) {
+ if (textureBuffer != null) {
+ surfaceTextureHelper.returnTextureFrame();
+ textureBuffer = null;
+ }
+ }
}
}
@@ -350,14 +405,19 @@ public class MediaCodecVideoDecoder {
// Throws IllegalStateException if call is made on the wrong thread, if color format changes to an
// unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException
// upon codec error.
- private Object dequeueOutputBuffer(int dequeueTimeoutUs)
+ private Object dequeueOutputBuffer(int dequeueTimeoutMs)
perkj_webrtc 2015/10/27 20:46:15 I think this should be completely separate from the byte buffer dequeue path.
perkj_webrtc 2015/10/28 21:12:39 Done.
throws IllegalStateException, MediaCodec.CodecException {
checkOnMediaCodecThread();
+
+ if (useSurface)
+ return dequeueTexture(dequeueTimeoutMs);
perkj_webrtc 2015/10/27 20:46:15 remove and use dequeueTexture from c++
perkj_webrtc 2015/10/28 21:12:39 Done.
+
// Drain the decoder until receiving a decoded buffer or hitting
// MediaCodec.INFO_TRY_AGAIN_LATER.
final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
while (true) {
- final int result = mediaCodec.dequeueOutputBuffer(info, dequeueTimeoutUs);
+ final int result = mediaCodec.dequeueOutputBuffer(
+ info, TimeUnit.MILLISECONDS.toMicros(dequeueTimeoutMs));
switch (result) {
case MediaCodec.INFO_TRY_AGAIN_LATER:
return null;
@@ -388,17 +448,96 @@ public class MediaCodecVideoDecoder {
sliceHeight = Math.max(height, sliceHeight);
break;
default:
- // Output buffer decoded.
- if (useSurface) {
- mediaCodec.releaseOutputBuffer(result, true /* render */);
- // TODO(magjed): Wait for SurfaceTexture.onFrameAvailable() before returning a texture
- // frame.
- return new DecodedTextureBuffer(textureID, info.presentationTimeUs);
- } else {
- return new DecodedByteBuffer(result, info.offset, info.size, info.presentationTimeUs);
+ return new DecodedByteBuffer(result, info.offset, info.size, info.presentationTimeUs);
+ }
+ }
+ }
+
+ private Object dequeueTexture(int dequeueTimeoutMs) {
+ if (!useSurface) {
+ throw new IllegalStateException("dequeueTexture() called for byte buffer decoding.");
+ }
+
+ if (decodeStartTime.size() > 0) {
magjed_webrtc 2015/10/28 11:57:16 s/decodeStartTime.size() > 0/!decodeStartTime.isEmpty()/
perkj_webrtc 2015/10/28 21:12:39 Done.
+ // Drain the decoder until receiving a decoded buffer or hitting
+ // MediaCodec.INFO_TRY_AGAIN_LATER.
+ final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
+
+ int result = MediaCodec.INFO_TRY_AGAIN_LATER;
+ do {
perkj_webrtc 2015/10/27 20:46:15 Move all of do {} into a separate method and use it here.
perkj_webrtc 2015/10/28 21:12:39 Done.
+ result = mediaCodec.dequeueOutputBuffer(
+ info, TimeUnit.MILLISECONDS.toMicros(dequeueTimeoutMs));
+ switch (result) {
+ case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
+ outputBuffers = mediaCodec.getOutputBuffers();
magjed_webrtc 2015/10/28 11:57:16 You need to update |dequeuedOutputBuffers| and |de
perkj_webrtc 2015/10/28 21:12:39 I hope I don't have to. This variable is actually
magjed_webrtc 2015/10/29 09:44:12 If INFO_OUTPUT_BUFFERS_CHANGED should never happen
+ Logging.d(TAG, "Decoder output buffers changed: " + outputBuffers.length);
+ break;
+ case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
+ MediaFormat format = mediaCodec.getOutputFormat();
+ Logging.d(TAG, "Decoder format changed: " + format.toString());
+ width = format.getInteger(MediaFormat.KEY_WIDTH);
perkj_webrtc 2015/10/28 21:12:39 width and height need to be stored with the frames
+ height = format.getInteger(MediaFormat.KEY_HEIGHT);
+ if (format.containsKey("stride")) {
+ stride = format.getInteger("stride");
}
+ if (format.containsKey("slice-height")) {
+ sliceHeight = format.getInteger("slice-height");
+ }
+ Logging.d(TAG, "Frame stride and slice height: " + stride + " x " + sliceHeight);
+ stride = Math.max(width, stride);
+ sliceHeight = Math.max(height, sliceHeight);
+
+ break;
+ default:
+ break;
+ }
+ } while (result < 0 && result != MediaCodec.INFO_TRY_AGAIN_LATER);
+
+
+ if (result >= 0) {
+ // Output buffer decoded.
+ decodeTime.add(SystemClock.elapsedRealtime() - decodeStartTime.get(0));
+ decodeStartTime.remove(0);
+ dequeuedOutputBuffers.add(result);
+ }
+ }
+
+ DecodedTextureBuffer textureBuffer = null;
magjed_webrtc 2015/10/28 11:57:16 You can write the remaining code simpler like this
perkj_webrtc 2015/10/28 21:12:39 Yes, but the idea was that if isWaitingForTextureT
magjed_webrtc 2015/10/29 09:44:12 I think you should trust the C++ class to poll frequently.
+ if (isWaitingForTextureToFinishRendering) {
+ // If we are waiting for a frame to be rendered to the decoder surface,
+ // check if it is ready now by waiting max |dequeueTimeoutMs|. There can only be one frame
+ // rendered at the time.
+ textureBuffer = textureListener.dequeueTextureFrame(dequeueTimeoutMs);
+ isWaitingForTextureToFinishRendering = (textureBuffer == null);
+ }
+
+ if (!isWaitingForTextureToFinishRendering) {
+ // If we are not waiting for a frame to be rendered, we can render the next decoder output
+ // buffer to the decoder surface and wait for it |dequeueTimeoutMs|.
+ if (dequeuedOutputBuffers.size() > 0) {
+ int bufferIndex = dequeuedOutputBuffers.get(0);
+ dequeuedOutputBuffers.remove(0);
+ // releaseOutputBuffer renders to the output surface.
+ mediaCodec.releaseOutputBuffer(bufferIndex, true /* render */);
+ isWaitingForTextureToFinishRendering = true;
+
+ if (textureBuffer == null) {
+ // Wait max |dequeueTimeoutMs| for the rendering to finish.
+ textureBuffer = textureListener.dequeueTextureFrame(dequeueTimeoutMs);
+ isWaitingForTextureToFinishRendering = (textureBuffer == null);
+ }
}
}
+
+ if (textureBuffer != null) {
+ textureBuffer.decodeTimeMs = decodeTime.get(0);
+ decodeTime.remove(0);
+ }
+
+// Logging.d(TAG, " pending decoding: " + decodeStartTime.size() + " pending rendering: " + dequeuedOutputBuffers.size()
+// + " pending for release: " + decodeTime.size() + " isWaitingForTexture: " + (isWaitingForTextureToFinishRendering ? "True" : "False") + " textureBuffer : " + (textureBuffer != null ? "Set" : "Null"));
+
+ return textureBuffer;
}
// Release a dequeued output byte buffer back to the codec for re-use. Should only be called for

Powered by Google App Engine
This is Rietveld 408576698