Index: talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java |
diff --git a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java |
index 0eac0913bad2f9de2c77a243e9b4ee3da7167375..055c1307ceb9a8ef7fef9d9b41fed8249a5a749a 100644 |
--- a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java |
+++ b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java |
@@ -32,8 +32,10 @@ import android.media.MediaCodecInfo.CodecCapabilities; |
import android.media.MediaCodecInfo; |
import android.media.MediaCodecList; |
import android.media.MediaFormat; |
+import android.opengl.GLES20; |
import android.os.Build; |
import android.os.Bundle; |
+import android.view.Surface; |
import org.webrtc.Logging; |
@@ -42,6 +44,8 @@ import java.util.Arrays; |
import java.util.List; |
import java.util.concurrent.CountDownLatch; |
+import javax.microedition.khronos.egl.EGLContext; |
+ |
// Java-side of peerconnection_jni.cc:MediaCodecVideoEncoder. |
// This class is an implementation detail of the Java PeerConnection API. |
public class MediaCodecVideoEncoder { |
@@ -70,6 +74,9 @@ public class MediaCodecVideoEncoder { |
private Thread mediaCodecThread; |
private MediaCodec mediaCodec; |
private ByteBuffer[] outputBuffers; |
+ private EglBase eglBase; |
+ private Surface inputSurface; |
+ private GlRectDrawer drawer; |
private static final String VP8_MIME_TYPE = "video/x-vnd.on2.vp8"; |
private static final String H264_MIME_TYPE = "video/avc"; |
// List of supported HW VP8 codecs. |
@@ -131,7 +138,7 @@ public class MediaCodecVideoEncoder { |
} |
private static EncoderProperties findHwEncoder( |
- String mime, String[] supportedHwCodecPrefixes) { |
+ String mime, String[] supportedHwCodecPrefixes, boolean useSurface) { |
// MediaCodec.setParameters is missing for JB and below, so bitrate |
// can not be adjusted dynamically. |
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { |
@@ -181,6 +188,18 @@ public class MediaCodecVideoEncoder { |
Logging.v(TAG, " Color: 0x" + Integer.toHexString(colorFormat)); |
} |
+ if (useSurface) { |
+ for (int codecColorFormat : capabilities.colorFormats) { |
magjed_webrtc
2015/11/18 13:12:52
This looks like duplicated code. I suggest replacing the duplicated color-format loop with a shared helper instead.
perkj_webrtc
2015/11/18 14:51:21
nice. yes
|
+ if (codecColorFormat == CodecCapabilities.COLOR_FormatSurface) { |
+ // Found supported HW encoder. |
+ Logging.d(TAG, "Found target encoder for mime " + mime + " : " + name + |
+ ". Using Surface as input"); |
+ return new EncoderProperties(name, codecColorFormat); |
+ } |
+ } |
+ return null; |
+ } |
+ |
// Check if codec supports either yuv420 or nv12. |
for (int supportedColorFormat : supportedColorList) { |
for (int codecColorFormat : capabilities.colorFormats) { |
@@ -197,11 +216,19 @@ public class MediaCodecVideoEncoder { |
} |
public static boolean isVp8HwSupported() { |
- return findHwEncoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes) != null; |
+ return findHwEncoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes, false) != null; |
} |
public static boolean isH264HwSupported() { |
- return findHwEncoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes) != null; |
+ return findHwEncoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes, false) != null; |
+ } |
+ |
+ public static boolean isVp8HwSupportedUsingTextures() { |
+ return findHwEncoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes, true) != null; |
+ } |
+ |
+ public static boolean isH264HwSupportedUsingTextures() { |
+ return findHwEncoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes, true) != null; |
} |
private void checkOnMediaCodecThread() { |
@@ -234,10 +261,12 @@ public class MediaCodecVideoEncoder { |
} |
} |
- // Returns false if the hardware encoder currently can't be used. |
- boolean initEncode(VideoCodecType type, int width, int height, int kbps, int fps) { |
+ boolean initEncode(VideoCodecType type, int width, int height, int kbps, int fps, |
+ EGLContext sharedContext) { |
+ final boolean useSurface = sharedContext != null; |
Logging.d(TAG, "Java initEncode: " + type + " : " + width + " x " + height + |
- ". @ " + kbps + " kbps. Fps: " + fps + "."); |
+ ". @ " + kbps + " kbps. Fps: " + fps + ". Encode from texture : " + |
+ (useSurface ? "True" : "False")); |
magjed_webrtc
2015/11/18 13:12:52
Just print useSurface, you don't need to convert it to a "True"/"False" string.
perkj_webrtc
2015/11/18 14:51:21
Done.
|
if (mediaCodecThread != null) { |
throw new RuntimeException("Forgot to release()?"); |
@@ -247,11 +276,11 @@ public class MediaCodecVideoEncoder { |
int keyFrameIntervalSec = 0; |
if (type == VideoCodecType.VIDEO_CODEC_VP8) { |
mime = VP8_MIME_TYPE; |
- properties = findHwEncoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes); |
+ properties = findHwEncoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes, useSurface); |
keyFrameIntervalSec = 100; |
} else if (type == VideoCodecType.VIDEO_CODEC_H264) { |
mime = H264_MIME_TYPE; |
- properties = findHwEncoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes); |
+ properties = findHwEncoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes, useSurface); |
keyFrameIntervalSec = 20; |
} |
if (properties == null) { |
@@ -279,6 +308,13 @@ public class MediaCodecVideoEncoder { |
mediaCodec.configure( |
format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); |
+ if (useSurface) { |
+ eglBase = new EglBase(sharedContext, EglBase.ConfigType.RECORDABLE); |
+ // Create an input surface and keep a reference since we must release the surface when done. |
+ inputSurface = mediaCodec.createInputSurface(); |
+ eglBase.createSurface(inputSurface); |
+ drawer = new GlRectDrawer(); |
+ } |
mediaCodec.start(); |
outputBuffers = mediaCodec.getOutputBuffers(); |
Logging.d(TAG, "Output buffers: " + outputBuffers.length); |
@@ -321,6 +357,29 @@ public class MediaCodecVideoEncoder { |
} |
} |
+ boolean encodeTexture(boolean isKeyframe, int oesTextureId, float[] transformationMatrix, |
+ long presentationTimestampUs) { |
+ checkOnMediaCodecThread(); |
+ try { |
+ if (isKeyframe) { |
+ Logging.d(TAG, "Sync frame request"); |
+ Bundle b = new Bundle(); |
+ b.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0); |
+ mediaCodec.setParameters(b); |
+ } |
+ eglBase.makeCurrent(); |
+ drawer.drawOes(oesTextureId, transformationMatrix); |
+ // TODO(perkj): Do we have to call EGLExt.eglPresentationTimeANDROID ? |
+ // If not, remove |presentationTimestampUs|. |
+ eglBase.swapBuffers(); |
+ return true; |
+ } |
+ catch (RuntimeException e) { |
+ Logging.e(TAG, "encodeTexture failed", e); |
+ return false; |
+ } |
+ } |
+ |
void release() { |
Logging.d(TAG, "Java releaseEncoder"); |
checkOnMediaCodecThread(); |
@@ -356,6 +415,18 @@ public class MediaCodecVideoEncoder { |
mediaCodec = null; |
mediaCodecThread = null; |
+ if (drawer != null) { |
+ drawer.release(); |
+ drawer = null; |
+ } |
+ if (eglBase != null) { |
+ eglBase.release(); |
+ eglBase = null; |
+ } |
+ if (inputSurface != null) { |
+ inputSurface.release(); |
+ inputSurface = null; |
+ } |
runningInstance = null; |
Logging.d(TAG, "Java releaseEncoder done"); |
} |