Chromium Code Reviews — Index: webrtc/sdk/android/src/java/org/webrtc/HardwareVideoDecoder.java |
| diff --git a/webrtc/sdk/android/src/java/org/webrtc/HardwareVideoDecoder.java b/webrtc/sdk/android/src/java/org/webrtc/HardwareVideoDecoder.java |
| index 99a0a4b06fb2ab1355a1a5552152e085c7c14237..a2713b5a5138373389bd189d9eb619efceafb10e 100644 |
| --- a/webrtc/sdk/android/src/java/org/webrtc/HardwareVideoDecoder.java |
| +++ b/webrtc/sdk/android/src/java/org/webrtc/HardwareVideoDecoder.java |
| @@ -16,18 +16,22 @@ import android.media.MediaCodec; |
| import android.media.MediaCodecInfo.CodecCapabilities; |
| import android.media.MediaFormat; |
| import android.os.SystemClock; |
| +import android.view.Surface; |
| import java.io.IOException; |
| import java.nio.ByteBuffer; |
| import java.util.Arrays; |
| import java.util.Deque; |
| +import java.util.concurrent.BlockingDeque; |
| import java.util.concurrent.CountDownLatch; |
| import java.util.concurrent.LinkedBlockingDeque; |
| +import java.util.concurrent.atomic.AtomicReference; |
| import org.webrtc.ThreadUtils.ThreadChecker; |
| /** Android hardware video decoder. */ |
| @TargetApi(16) |
| @SuppressWarnings("deprecation") // Cannot support API 16 without using deprecated methods. |
| -class HardwareVideoDecoder implements VideoDecoder { |
| +class HardwareVideoDecoder |
| + implements VideoDecoder, SurfaceTextureHelper.OnTextureFrameAvailableListener { |
| private static final String TAG = "HardwareVideoDecoder"; |
| // TODO(magjed): Use MediaFormat.KEY_* constants when part of the public API. |
| @@ -52,6 +56,9 @@ class HardwareVideoDecoder implements VideoDecoder { |
| // MediaCodec. |
| private static final int DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US = 100000; |
| + // Max number of output buffers queued before starting to drop decoded frames. |
| + private static final int MAX_QUEUED_OUTPUTBUFFERS = 3; |
| + |
| private final String codecName; |
| private final VideoCodecType codecType; |
| @@ -100,18 +107,49 @@ class HardwareVideoDecoder implements VideoDecoder { |
| // Whether the decoder has seen a key frame. The first frame must be a key frame. |
| private boolean keyFrameRequired; |
| + private final SurfaceTextureHelper surfaceTextureHelper; |
| + private Surface surface = null; |
| + |
| + private static class DecodedTextureBuffer { |
| + final int index; |
| + final int width; |
| + final int height; |
| + final int rotation; |
| + final long presentationTimestampUs; |
| + final Integer decodeTimeMs; |
| + |
| + DecodedTextureBuffer(int index, int width, int height, int rotation, |
| + long presentationTimestampUs, Integer decodeTimeMs) { |
| + this.index = index; |
| + this.width = width; |
| + this.height = height; |
| + this.rotation = rotation; |
| + this.presentationTimestampUs = presentationTimestampUs; |
| + this.decodeTimeMs = decodeTimeMs; |
| + } |
| + } |
| + |
| + private final BlockingDeque<DecodedTextureBuffer> decodedTextureBuffers; |
| + private final AtomicReference<DecodedTextureBuffer> renderedTextureBuffer; |
| + |
| + private int droppedFrames = 0; |
| + |
| // Decoding proceeds asynchronously. This callback returns decoded frames to the caller. |
| private Callback callback; |
| private MediaCodec codec = null; |
| - HardwareVideoDecoder(String codecName, VideoCodecType codecType, int colorFormat) { |
| + HardwareVideoDecoder(String codecName, VideoCodecType codecType, int colorFormat, |
| + SurfaceTextureHelper surfaceTextureHelper) { |
|
sakal
2017/07/17 13:44:56
I don't think we have to pass in the SurfaceTextureHelper […comment truncated in extraction]
mellem
2017/07/17 21:57:15
Done.
|
| if (!isSupportedColorFormat(colorFormat)) { |
| throw new IllegalArgumentException("Unsupported color format: " + colorFormat); |
| } |
| this.codecName = codecName; |
| this.codecType = codecType; |
| this.colorFormat = colorFormat; |
| + this.surfaceTextureHelper = surfaceTextureHelper; |
| + this.decodedTextureBuffers = new LinkedBlockingDeque<>(); |
| + this.renderedTextureBuffer = new AtomicReference<>(); |
| this.frameInfos = new LinkedBlockingDeque<>(); |
| } |
| @@ -138,6 +176,7 @@ class HardwareVideoDecoder implements VideoDecoder { |
| sliceHeight = height; |
| hasDecodedFirstFrame = false; |
| keyFrameRequired = true; |
| + droppedFrames = 0; |
| try { |
| codec = MediaCodec.createByCodecName(codecName); |
| @@ -147,8 +186,13 @@ class HardwareVideoDecoder implements VideoDecoder { |
| } |
| try { |
| MediaFormat format = MediaFormat.createVideoFormat(codecType.mimeType(), width, height); |
| - format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat); |
| - codec.configure(format, null, null, 0); |
| + if (surfaceTextureHelper == null) { |
| + format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat); |
| + } else { |
| + surface = new Surface(surfaceTextureHelper.getSurfaceTexture()); |
| + surfaceTextureHelper.startListening(this); |
| + } |
| + codec.configure(format, surface, null, 0); |
| codec.start(); |
| } catch (IllegalStateException e) { |
| Logging.e(TAG, "initDecode failed", e); |
| @@ -209,7 +253,6 @@ class HardwareVideoDecoder implements VideoDecoder { |
| } |
| } |
| - // TODO(mellem): Support textures. |
| int index; |
| try { |
| index = codec.dequeueInputBuffer(DEQUEUE_INPUT_TIMEOUT_US); |
| @@ -288,6 +331,14 @@ class HardwareVideoDecoder implements VideoDecoder { |
| callback = null; |
| outputThread = null; |
| frameInfos.clear(); |
| + if (surface != null) { |
| + surface.release(); |
| + surface = null; |
| + surfaceTextureHelper.stopListening(); |
| + surfaceTextureHelper.returnTextureFrame(); |
|
sakal
2017/07/17 13:44:56
I think we should wait until all frames are released […comment truncated in extraction]
mellem
2017/07/17 21:57:15
Now that the decoder owns the SurfaceTextureHelper […comment truncated in extraction]
|
| + } |
| + decodedTextureBuffers.clear(); |
| + renderedTextureBuffer.set(null); |
| } |
| return VideoCodecStatus.OK; |
| } |
| @@ -308,6 +359,7 @@ class HardwareVideoDecoder implements VideoDecoder { |
| outputThreadChecker = new ThreadChecker(); |
| while (running) { |
| deliverDecodedFrame(); |
| + maybeRenderTextureBuffers(); |
| } |
| releaseCodecOnOutputThread(); |
| } |
| @@ -343,62 +395,145 @@ class HardwareVideoDecoder implements VideoDecoder { |
| hasDecodedFirstFrame = true; |
| - // Load dimensions from shared memory under the dimension lock. |
| - int width, height, stride, sliceHeight; |
| - synchronized (dimensionLock) { |
| - width = this.width; |
| - height = this.height; |
| - stride = this.stride; |
| - sliceHeight = this.sliceHeight; |
| + if (surfaceTextureHelper != null) { |
| + deliverTextureFrame(result, info, rotation, decodeTimeMs); |
| + } else { |
| + deliverByteFrame(result, info, rotation, decodeTimeMs); |
| } |
| - // Output must be at least width * height bytes for Y channel, plus (width / 2) * (height / 2) |
| - // bytes for each of the U and V channels. |
| - if (info.size < width * height * 3 / 2) { |
| - Logging.e(TAG, "Insufficient output buffer size: " + info.size); |
| - return; |
| - } |
| + } catch (IllegalStateException e) { |
| + Logging.e(TAG, "deliverDecodedFrame failed", e); |
| + } |
| + } |
| - if (info.size < stride * height * 3 / 2 && sliceHeight == height && stride > width) { |
| - // Some codecs (Exynos) report an incorrect stride. Correct it here. |
| - // Expected size == stride * height * 3 / 2. A bit of algebra gives the correct stride as |
| - // 2 * size / (3 * height). |
| - stride = info.size * 2 / (height * 3); |
| - } |
| + private void deliverTextureFrame( |
| + int index, MediaCodec.BufferInfo info, int rotation, Integer decodeTimeMs) { |
| + // Load dimensions from shared memory under the dimension lock. |
| + int width, height; |
| + synchronized (dimensionLock) { |
| + width = this.width; |
| + height = this.height; |
| + } |
| - ByteBuffer buffer = codec.getOutputBuffers()[result]; |
| - buffer.position(info.offset); |
| - buffer.limit(info.size); |
| - |
| - final VideoFrame.I420Buffer frameBuffer; |
| - |
| - // TODO(mellem): As an optimization, use libyuv via JNI to copy/reformatting data. |
| - if (colorFormat == CodecCapabilities.COLOR_FormatYUV420Planar) { |
| - if (sliceHeight % 2 == 0) { |
| - frameBuffer = |
| - createBufferFromI420(buffer, result, info.offset, stride, sliceHeight, width, height); |
| - } else { |
| - frameBuffer = new I420BufferImpl(width, height); |
| - // Optimal path is not possible because we have to copy the last rows of U- and V-planes. |
| - copyI420(buffer, info.offset, frameBuffer, stride, sliceHeight, width, height); |
| - codec.releaseOutputBuffer(result, false); |
| - } |
| + DecodedTextureBuffer buffer = new DecodedTextureBuffer( |
| + index, width, height, rotation, info.presentationTimeUs, decodeTimeMs); |
| + decodedTextureBuffers.offerLast(buffer); |
| + |
| + maybeRenderTextureBuffers(); |
| + maybeDropTextureBuffer(); |
| + } |
| + |
| + private void maybeRenderTextureBuffers() { |
| + // Possibly render more than one buffer, to allow the callee to catch up. |
| + while (!decodedTextureBuffers.isEmpty() && !surfaceTextureHelper.isTextureInUse()) { |
|
sakal
2017/07/17 13:44:56
I don't think we are going to get more than one lo[…comment truncated in extraction]
mellem
2017/07/17 21:57:15
I need to call updateTexImage() whenever I get a texture […comment truncated in extraction]
sakal
2017/07/18 08:53:40
Yeah, we of course would have had to implement dra[…comment truncated in extraction]
|
| + DecodedTextureBuffer buffer = decodedTextureBuffers.pollFirst(); |
| + renderedTextureBuffer.set(buffer); |
| + codec.releaseOutputBuffer(buffer.index, true); |
| + } |
| + } |
| + |
| + private void maybeDropTextureBuffer() { |
| + if (decodedTextureBuffers.size() > MAX_QUEUED_OUTPUTBUFFERS) { |
| + ++droppedFrames; |
| + DecodedTextureBuffer buffer = decodedTextureBuffers.pollFirst(); |
| + Logging.w(TAG, |
| + "Draining the decoder. Dropped frame with timestamp : " + buffer.presentationTimestampUs |
| + + "(us). Total dropped frames: " + droppedFrames); |
| + codec.releaseOutputBuffer(buffer.index, false); |
| + } |
| + } |
| + |
| + @Override |
| + public void onTextureFrameAvailable(int oesTextureId, float[] transformMatrix, long timestampNs) { |
| + DecodedTextureBuffer info = renderedTextureBuffer.getAndSet(null); |
| + |
| + OesTextureBuffer oesBuffer = new OesTextureBuffer( |
| + oesTextureId, info.width, info.height, transformMatrix, surfaceTextureHelper); |
| + |
| + // SurfaceTexture's matrix looks like this: |
|
sakal
2017/07/17 13:44:56
Please copy the helper method from my other CL here […comment truncated in extraction]
mellem
2017/07/17 21:57:15
Done.
|
| + // [x1 y1 0 w1] |
| + // [x2 y2 0 w2] |
| + // [ 0 0 1 0] |
| + // [x3 y3 0 w3] |
| + // The android.graphics.Matrix looks like this: |
| + // [x1 y1 w1] |
| + // [x2 y2 w2] |
| + // [x3 y3 w3] |
| + float[] matrix3x3 = new float[9]; |
| + matrix3x3[0 * 3 + 0] = transformMatrix[0 * 4 + 0]; |
| + matrix3x3[0 * 3 + 1] = transformMatrix[0 * 4 + 1]; |
| + matrix3x3[0 * 3 + 2] = transformMatrix[0 * 4 + 3]; |
| + matrix3x3[1 * 3 + 0] = transformMatrix[1 * 4 + 0]; |
| + matrix3x3[1 * 3 + 1] = transformMatrix[1 * 4 + 1]; |
| + matrix3x3[1 * 3 + 2] = transformMatrix[1 * 4 + 3]; |
| + matrix3x3[2 * 3 + 0] = transformMatrix[3 * 4 + 0]; |
| + matrix3x3[2 * 3 + 1] = transformMatrix[3 * 4 + 1]; |
| + matrix3x3[2 * 3 + 2] = transformMatrix[3 * 4 + 3]; |
| + |
| + Matrix matrix = new Matrix(); |
| + matrix.setValues(matrix3x3); |
| + |
| + VideoFrame frame = |
| + new VideoFrame(oesBuffer, info.rotation, info.presentationTimestampUs * 1000, matrix); |
| + callback.onDecodedFrame(frame, info.decodeTimeMs, null /* qp */); |
| + frame.release(); |
| + } |
| + |
| + private void deliverByteFrame( |
| + int result, MediaCodec.BufferInfo info, int rotation, Integer decodeTimeMs) { |
| + // Load dimensions from shared memory under the dimension lock. |
| + int width, height, stride, sliceHeight; |
| + synchronized (dimensionLock) { |
| + width = this.width; |
| + height = this.height; |
| + stride = this.stride; |
| + sliceHeight = this.sliceHeight; |
| + } |
| + |
| + // Output must be at least width * height bytes for Y channel, plus (width / 2) * (height / 2) |
| + // bytes for each of the U and V channels. |
| + if (info.size < width * height * 3 / 2) { |
| + Logging.e(TAG, "Insufficient output buffer size: " + info.size); |
| + return; |
| + } |
| + |
| + if (info.size < stride * height * 3 / 2 && sliceHeight == height && stride > width) { |
| + // Some codecs (Exynos) report an incorrect stride. Correct it here. |
| + // Expected size == stride * height * 3 / 2. A bit of algebra gives the correct stride as |
| + // 2 * size / (3 * height). |
| + stride = info.size * 2 / (height * 3); |
| + } |
| + |
| + ByteBuffer buffer = codec.getOutputBuffers()[result]; |
| + buffer.position(info.offset); |
| + buffer.limit(info.size); |
| + |
| + final VideoFrame.I420Buffer frameBuffer; |
| + |
| + // TODO(mellem): As an optimization, use libyuv via JNI to copy/reformatting data. |
| + if (colorFormat == CodecCapabilities.COLOR_FormatYUV420Planar) { |
| + if (sliceHeight % 2 == 0) { |
| + frameBuffer = |
| + createBufferFromI420(buffer, result, info.offset, stride, sliceHeight, width, height); |
| } else { |
| frameBuffer = new I420BufferImpl(width, height); |
| - // All other supported color formats are NV12. |
| - nv12ToI420(buffer, info.offset, frameBuffer, stride, sliceHeight, width, height); |
| + // Optimal path is not possible because we have to copy the last rows of U- and V-planes. |
| + copyI420(buffer, info.offset, frameBuffer, stride, sliceHeight, width, height); |
| codec.releaseOutputBuffer(result, false); |
| } |
| + } else { |
| + frameBuffer = new I420BufferImpl(width, height); |
| + // All other supported color formats are NV12. |
| + nv12ToI420(buffer, info.offset, frameBuffer, stride, sliceHeight, width, height); |
| + codec.releaseOutputBuffer(result, false); |
| + } |
| - long presentationTimeNs = info.presentationTimeUs * 1000; |
| - VideoFrame frame = new VideoFrame(frameBuffer, rotation, presentationTimeNs, new Matrix()); |
| + long presentationTimeNs = info.presentationTimeUs * 1000; |
| + VideoFrame frame = new VideoFrame(frameBuffer, rotation, presentationTimeNs, new Matrix()); |
| - // Note that qp is parsed on the C++ side. |
| - callback.onDecodedFrame(frame, decodeTimeMs, null /* qp */); |
| - frame.release(); |
| - } catch (IllegalStateException e) { |
| - Logging.e(TAG, "deliverDecodedFrame failed", e); |
| - } |
| + // Note that qp is parsed on the C++ side. |
| + callback.onDecodedFrame(frame, decodeTimeMs, null /* qp */); |
| + frame.release(); |
| } |
| private void reformat(MediaFormat format) { |
| @@ -429,7 +564,9 @@ class HardwareVideoDecoder implements VideoDecoder { |
| height = newHeight; |
| } |
| - if (format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) { |
| + // Note: texture mode ignores colorFormat. Hence, if the texture helper is non-null, skip |
| + // color format updates. |
| + if (surfaceTextureHelper == null && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) { |
| colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT); |
| Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat)); |
| if (!isSupportedColorFormat(colorFormat)) { |
| @@ -519,81 +656,20 @@ class HardwareVideoDecoder implements VideoDecoder { |
| synchronized (activeOutputBuffersLock) { |
| activeOutputBuffers++; |
| } |
| - return new VideoFrame.I420Buffer() { |
| - private int refCount = 1; |
| - |
| - @Override |
| - public ByteBuffer getDataY() { |
| - ByteBuffer data = buffer.slice(); |
| - data.position(yPos); |
| - data.limit(yPos + getStrideY() * height); |
| - return data; |
| - } |
| + I420BufferImpl.ReleaseCallback callback = new I420BufferImpl.ReleaseCallback() { |
| @Override |
| - public ByteBuffer getDataU() { |
| - ByteBuffer data = buffer.slice(); |
| - data.position(uPos); |
| - data.limit(uPos + getStrideU() * chromaHeight); |
| - return data; |
| - } |
| - |
| - @Override |
| - public ByteBuffer getDataV() { |
| - ByteBuffer data = buffer.slice(); |
| - data.position(vPos); |
| - data.limit(vPos + getStrideV() * chromaHeight); |
| - return data; |
| - } |
| - |
| - @Override |
| - public int getStrideY() { |
| - return stride; |
| - } |
| - |
| - @Override |
| - public int getStrideU() { |
| - return uvStride; |
| - } |
| - |
| - @Override |
| - public int getStrideV() { |
| - return uvStride; |
| - } |
| - |
| - @Override |
| - public int getWidth() { |
| - return width; |
| - } |
| - |
| - @Override |
| - public int getHeight() { |
| - return height; |
| - } |
| - |
| - @Override |
| - public VideoFrame.I420Buffer toI420() { |
| - return this; |
| - } |
| - |
| - @Override |
| - public void retain() { |
| - refCount++; |
| - } |
| - |
| - @Override |
| - public void release() { |
| - refCount--; |
| - |
| - if (refCount == 0) { |
| - codec.releaseOutputBuffer(outputBufferIndex, false); |
| - synchronized (activeOutputBuffersLock) { |
| - activeOutputBuffers--; |
| - activeOutputBuffersLock.notifyAll(); |
| - } |
| + public void onRelease() { |
| + codec.releaseOutputBuffer(outputBufferIndex, false); |
| + synchronized (activeOutputBuffersLock) { |
| + activeOutputBuffers--; |
| + activeOutputBuffersLock.notifyAll(); |
| } |
| } |
| }; |
| + |
| + return new I420BufferImpl( |
| + buffer, width, height, yPos, stride, uPos, uvStride, vPos, uvStride, callback); |
| } |
| private static void copyI420(ByteBuffer src, int offset, VideoFrame.I420Buffer frameBuffer, |