Index: webrtc/sdk/android/api/org/webrtc/VideoFrameDrawer.java |
diff --git a/webrtc/sdk/android/api/org/webrtc/VideoFrameDrawer.java b/webrtc/sdk/android/api/org/webrtc/VideoFrameDrawer.java |
new file mode 100644 |
index 0000000000000000000000000000000000000000..4c38dc9b1e8e7d3fa7473c8a11b87ed300f03a78 |
--- /dev/null |
+++ b/webrtc/sdk/android/api/org/webrtc/VideoFrameDrawer.java |
@@ -0,0 +1,114 @@ |
+/* |
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved. |
+ * |
+ * Use of this source code is governed by a BSD-style license |
+ * that can be found in the LICENSE file in the root of the source |
+ * tree. An additional intellectual property rights grant can be found |
+ * in the file PATENTS. All contributing project authors may |
+ * be found in the AUTHORS file in the root of the source tree. |
+ */ |
+ |
+package org.webrtc; |
+ |
+import android.graphics.Matrix; |
+import android.graphics.Point; |
+ |
+/** |
+ * Helper class to draw VideoFrames. Calls either drawer.drawOes, drawer.drawRgb, or |
+ * drawer.drawYuv depending on the type of the buffer. The frame will be rendered with rotation |
+ * taken into account. You can supply an additional render matrix for custom transformations. |
+ */ |
+public class VideoFrameDrawer { |
+ private static int distance(float x0, float y0, float x1, float y1) { |
+ return (int) Math.round(Math.hypot(x1 - x0, y1 - y0)); |
+ } |
+ |
+ // These points are used to calculate the size of the part of the frame we are rendering. |
+ final static float[] srcPoints = |
+ new float[] {0f /* x0 */, 0f /* y0 */, 1f /* x1 */, 0f /* y1 */, 0f /* x2 */, 1f /* y2 */}; |
+ private final float[] dstPoints = new float[6]; |
+ private final Point renderSize = new Point(); |
+ |
+ // Get the frame size after |renderMatrix| is applied. |
+ private Point getTransformedSize(int frameWidth, int frameHeight, Matrix renderMatrix) { |
+ if (renderMatrix == null) { |
+ renderSize.x = frameWidth; |
+ renderSize.y = frameHeight; |
+ return renderSize; |
+ } |
+ // Transform the texture coordinates (in the range [0, 1]) according to |renderMatrix|. |
+ renderMatrix.mapPoints(dstPoints, srcPoints); |
+ |
+ // Multiply with the width and height to get the positions in terms of pixels. |
+ for (int i = 0; i < 3; ++i) { |
+ dstPoints[i * 2 + 0] *= frameWidth; |
+ dstPoints[i * 2 + 1] *= frameHeight; |
+ } |
+ |
+ // Get the length of the sides of the transformed rectangle in terms of pixels. |
+ renderSize.x = distance(dstPoints[0], dstPoints[1], dstPoints[2], dstPoints[3]); |
+ renderSize.y = distance(dstPoints[0], dstPoints[1], dstPoints[4], dstPoints[5]); |
+ return renderSize; |
+ } |
+ |
  // Uploads I420 pixel data into the OpenGL textures consumed by drawer.drawYuv().
  private final RendererCommon.YuvUploader yuvUploader = new RendererCommon.YuvUploader();
  // This variable will only be used for checking reference equality and is used for caching I420
  // textures.
  private VideoFrame lastI420Frame;
  // Scratch matrix combining rotation, vertical flip, and the caller-supplied transform;
  // reused on every drawFrame() call to avoid allocation.
  private final Matrix renderMatrix = new Matrix();
+ |
+ public void drawFrame(VideoFrame frame, RendererCommon.GlDrawer drawer) { |
+ drawFrame(frame, drawer, null /* additionalRenderMatrix */); |
+ } |
+ |
+ public void drawFrame( |
+ VideoFrame frame, RendererCommon.GlDrawer drawer, Matrix additionalRenderMatrix) { |
+ drawFrame(frame, drawer, additionalRenderMatrix, 0 /* viewportX */, 0 /* viewportY */, |
+ frame.getRotatedWidth(), frame.getRotatedHeight()); |
+ } |
+ |
+ public void drawFrame(VideoFrame frame, RendererCommon.GlDrawer drawer, |
+ Matrix additionalRenderMatrix, int viewportX, int viewportY, int viewportWidth, |
+ int viewportHeight) { |
+ final int width = frame.getRotatedWidth(); |
+ final int height = frame.getRotatedHeight(); |
+ |
+ final Point renderSize = getTransformedSize(width, height, additionalRenderMatrix); |
+ |
+ final boolean isTextureFrame = frame.getBuffer() instanceof VideoFrame.TextureBuffer; |
+ renderMatrix.reset(); |
+ renderMatrix.preTranslate(0.5f, 0.5f); |
+ if (!isTextureFrame) { |
+ renderMatrix.preScale(1f, -1f); // I420-frames are upside down |
+ } |
+ renderMatrix.preRotate(frame.getRotation()); |
+ renderMatrix.preTranslate(-0.5f, -0.5f); |
+ if (additionalRenderMatrix != null) { |
+ renderMatrix.preConcat(additionalRenderMatrix); |
+ } |
+ |
+ if (isTextureFrame) { |
+ lastI420Frame = null; |
+ RendererCommon.drawTexture(drawer, (VideoFrame.TextureBuffer) frame.getBuffer(), renderMatrix, |
sakal
2017/09/11 09:05:58
Maybe we should move RendererCommon.drawTexture to
magjed_webrtc
2017/09/11 12:45:48
Done, I also moved YuvUploader.
|
+ renderSize.x, renderSize.y, viewportX, viewportY, viewportWidth, viewportHeight); |
+ } else { |
+ // Only upload the I420 data to textures once per frame, if we are called multiple times |
+ // with the same frame. |
+ if (frame != lastI420Frame) { |
+ lastI420Frame = frame; |
+ final VideoFrame.I420Buffer i420Buffer = frame.getBuffer().toI420(); |
+ yuvUploader.uploadFromBuffer(i420Buffer); |
+ i420Buffer.release(); |
+ } |
+ |
+ drawer.drawYuv(yuvUploader.getYuvTextures(), |
+ RendererCommon.convertMatrixFromAndroidGraphicsMatrix(renderMatrix), renderSize.x, |
+ renderSize.y, viewportX, viewportY, viewportWidth, viewportHeight); |
+ } |
+ } |
+ |
+ public void release() { |
+ yuvUploader.release(); |
+ lastI420Frame = null; |
+ } |
+} |