Index: webrtc/sdk/android/api/org/webrtc/MediaCodecVideoEncoder.java
diff --git a/webrtc/sdk/android/api/org/webrtc/MediaCodecVideoEncoder.java b/webrtc/sdk/android/api/org/webrtc/MediaCodecVideoEncoder.java
index b3decbea9f26719c77f3286800bc365e753d10c7..808b43d6dfdd261d4734350b03d5f82a7964a204 100644
--- a/webrtc/sdk/android/api/org/webrtc/MediaCodecVideoEncoder.java
+++ b/webrtc/sdk/android/api/org/webrtc/MediaCodecVideoEncoder.java
@@ -11,6 +11,7 @@
 package org.webrtc;
 
 import android.annotation.TargetApi;
+import android.graphics.Matrix;
 import android.media.MediaCodec;
 import android.media.MediaCodecInfo;
 import android.media.MediaCodecInfo.CodecCapabilities;
@@ -598,6 +599,46 @@ public class MediaCodecVideoEncoder {
     }
   }
 
+  /**
+   * Encodes a new style VideoFrame. Called by JNI. |bufferIndex| is -1 if we are not encoding in
+   * surface mode.
+   */
+  boolean encodeFrame(long nativeEncoder, boolean isKeyframe, VideoFrame frame, int bufferIndex) {
+    checkOnMediaCodecThread();
+    try {
+      long presentationTimestampUs = TimeUnit.NANOSECONDS.toMicros(frame.getTimestampNs());
+      checkKeyFrameRequired(isKeyframe, presentationTimestampUs);
+
+      VideoFrame.Buffer buffer = frame.getBuffer();
+      if (buffer instanceof VideoFrame.TextureBuffer) {
+        VideoFrame.TextureBuffer textureBuffer = (VideoFrame.TextureBuffer) buffer;
+        eglBase.makeCurrent();
+        // TODO(perkj): glClear() shouldn't be necessary since every pixel is covered anyway,
+        // but it's a workaround for bug webrtc:5147.
+        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+        drawer.drawOes(textureBuffer.getTextureId(),
+            RendererCommon.convertMatrixFromAndroidGraphicsMatrix(
+                textureBuffer.getTransformMatrix()),
+            width, height, 0, 0, width, height);
+        eglBase.swapBuffers(frame.getTimestampNs());
+      } else {
+        VideoFrame.I420Buffer i420Buffer = buffer.toI420();
+        nativeFillBuffer(nativeEncoder, bufferIndex, i420Buffer.getDataY(), i420Buffer.getStrideY(),
+            i420Buffer.getDataU(), i420Buffer.getStrideU(), i420Buffer.getDataV(),
+            i420Buffer.getStrideV());
+        i420Buffer.release();
+        // I420 consists of one full-resolution and two half-resolution planes.
+        // 1 + 1 / 4 + 1 / 4 = 3 / 2
+        int yuvSize = width * height * 3 / 2;
+        mediaCodec.queueInputBuffer(bufferIndex, 0, yuvSize, presentationTimestampUs, 0);
+      }
+      return true;
+    } catch (RuntimeException e) {
+      Logging.e(TAG, "encodeFrame failed", e);
+      return false;
+    }
+  }
+
   void release() {
     Logging.d(TAG, "Java releaseEncoder");
     checkOnMediaCodecThread();
@@ -881,4 +922,8 @@ public class MediaCodecVideoEncoder {
       return false;
     }
   }
+
+  /** Fills an inputBuffer with the given index with data from the byte buffers. */
+  private static native void nativeFillBuffer(long nativeEncoder, int inputBuffer, ByteBuffer dataY,
+      int strideY, ByteBuffer dataU, int strideU, ByteBuffer dataV, int strideV);
 }
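
The new encodeFrame() is called from native code over JNI, and the caller is not part of this diff. The sketch below is only an assumption of what such a caller might look like, to make the |bufferIndex| contract concrete: -1 in surface mode, a dequeued MediaCodec input-buffer index otherwise. The class and method names (EncodeFrameCallerSketch, deliverFrame, nativeHandle, surfaceMode) are hypothetical, and passing the MediaCodec instance in as a parameter is part of the hypothesis; the real class keeps its codec private.

package org.webrtc; // encodeFrame() is package-private, so a Java caller would need this package.

import android.media.MediaCodec;

/** Hypothetical caller sketch -- not part of the change above; the real call site is in JNI. */
class EncodeFrameCallerSketch {
  /**
   * Feeds one frame to the encoder. Must run on the MediaCodec thread, since encodeFrame()
   * calls checkOnMediaCodecThread(). The frame is assumed to match the encoder's configured size.
   */
  static boolean deliverFrame(MediaCodecVideoEncoder encoder, long nativeHandle, MediaCodec codec,
      VideoFrame frame, boolean requestKeyFrame, boolean surfaceMode) {
    // Surface mode: the texture path renders straight into the codec's input surface,
    // so no input buffer is reserved and the index stays -1.
    int bufferIndex = -1;
    if (!surfaceMode) {
      // Byte-buffer mode: reserve an input buffer; encodeFrame() fills it via nativeFillBuffer()
      // and queues it with queueInputBuffer().
      bufferIndex = codec.dequeueInputBuffer(0 /* timeoutUs */);
      if (bufferIndex < 0) {
        return false; // No input buffer available right now; drop the frame.
      }
    }
    return encoder.encodeFrame(nativeHandle, requestKeyFrame, frame, bufferIndex);
  }
}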
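
The added android.graphics.Matrix import fits the texture path: VideoFrame.TextureBuffer.getTransformMatrix() hands back an android.graphics.Matrix, and RendererCommon.convertMatrixFromAndroidGraphicsMatrix() turns it into the float[16] texture matrix that drawer.drawOes() expects. A minimal sketch of that conversion follows; the extra vertical flip and the class name TextureTransformSketch are illustrative assumptions, not something this change does.

package org.webrtc;

import android.graphics.Matrix;

/** Illustrative only -- Matrix-to-float[16] conversion of the kind used by the texture path. */
class TextureTransformSketch {
  /** Returns a GL texture matrix for |buffer|, with an example vertical flip applied. */
  static float[] flippedTexMatrix(VideoFrame.TextureBuffer buffer) {
    Matrix transform = new Matrix(buffer.getTransformMatrix());
    // Flip vertically around the centre of the [0, 1] x [0, 1] texture coordinate space.
    transform.preScale(1f, -1f, /* px= */ 0.5f, /* py= */ 0.5f);
    return RendererCommon.convertMatrixFromAndroidGraphicsMatrix(transform);
  }
}

The resulting float[16] could then be passed to drawer.drawOes() exactly as the diff does with the unmodified transform.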
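
nativeFillBuffer(), declared at the bottom of the diff, takes one ByteBuffer and one stride per I420 plane, and the diff's own comment explains the packed size queued to MediaCodec: one full-resolution Y plane plus two quarter-resolution chroma planes, i.e. 3/2 of width * height. The helpers below only restate that arithmetic; they are hypothetical and not part of the change.

/** Hypothetical helpers, not part of the change -- I420 size arithmetic used by the diff. */
class I420SizeSketch {
  /**
   * Tightly packed I420 size, as queued to MediaCodec by encodeFrame():
   * 1 + 1/4 + 1/4 = 3/2 of width * height. For 640 x 360 this is 345600 bytes.
   */
  static int packedSize(int width, int height) {
    return width * height * 3 / 2;
  }

  /**
   * Bytes spanned by one stride-aligned plane. Use the full frame height for the Y plane and
   * (height + 1) / 2 for U and V, since chroma is subsampled by two vertically in I420.
   */
  static int planeSpan(int stride, int planeHeight) {
    return stride * planeHeight;
  }
}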