| Index: talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java
|
| diff --git a/talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java b/talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java
|
| index 7e85b108b3bce9aca6752af779ec64a266145972..0c910f14c7b4a51456d4ed98cd8ec53b2e3fa570 100644
|
| --- a/talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java
|
| +++ b/talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java
|
| @@ -27,9 +27,6 @@
|
|
|
| package org.webrtc;
|
|
|
| -import java.nio.ByteBuffer;
|
| -import java.nio.ByteOrder;
|
| -import java.nio.FloatBuffer;
|
| import java.util.ArrayList;
|
| import java.util.concurrent.CountDownLatch;
|
| import java.util.concurrent.LinkedBlockingQueue;
|
| @@ -38,12 +35,14 @@ import javax.microedition.khronos.egl.EGLConfig;
|
| import javax.microedition.khronos.opengles.GL10;
|
|
|
| import android.annotation.SuppressLint;
|
| +import android.graphics.Point;
|
| +import android.graphics.Rect;
|
| import android.graphics.SurfaceTexture;
|
| import android.opengl.EGL14;
|
| import android.opengl.EGLContext;
|
| -import android.opengl.GLES11Ext;
|
| import android.opengl.GLES20;
|
| import android.opengl.GLSurfaceView;
|
| +import android.opengl.Matrix;
|
| import android.util.Log;
|
|
|
| import org.webrtc.VideoRenderer.I420Frame;
|
| @@ -70,65 +69,27 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
|
| private int screenHeight;
|
| // List of yuv renderers.
|
| private ArrayList<YuvImageRenderer> yuvImageRenderers;
|
| - private GlShader yuvShader;
|
| - private GlShader oesShader;
|
| + private GlRectDrawer drawer;
|
| + // The minimum fraction of the frame content that will be shown for |SCALE_ASPECT_BALANCED|.
|
| + // This limits excessive cropping when adjusting display size.
|
| + private static float BALANCED_VISIBLE_FRACTION = 0.56f;
|
| // Types of video scaling:
|
| // SCALE_ASPECT_FIT - video frame is scaled to fit the size of the view by
|
| // maintaining the aspect ratio (black borders may be displayed).
|
| // SCALE_ASPECT_FILL - video frame is scaled to fill the size of the view by
|
| // maintaining the aspect ratio. Some portion of the video frame may be
|
| // clipped.
|
| - // SCALE_FILL - video frame is scaled to to fill the size of the view. Video
|
| - // aspect ratio is changed if necessary.
|
| + // SCALE_ASPECT_BALANCED - Compromise between FIT and FILL. Video frame will fill as much as
|
| + // possible of the view while maintaining aspect ratio, under the constraint that at least
|
| + // |BALANCED_VISIBLE_FRACTION| of the frame content will be shown.
|
| public static enum ScalingType
|
| - { SCALE_ASPECT_FIT, SCALE_ASPECT_FILL, SCALE_FILL };
|
| + { SCALE_ASPECT_FIT, SCALE_ASPECT_FILL, SCALE_ASPECT_BALANCED }
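|
| To make the new |SCALE_ASPECT_BALANCED| mode concrete, the sketch below (illustrative only, not part of the patch) computes how much of a frame survives aspect-preserving cropping. For a portrait 9:16 frame shown in a landscape 16:9 view, |SCALE_ASPECT_FILL| keeps only about 32% of the content; that is the kind of excessive cropping the 0.56 floor of |SCALE_ASPECT_BALANCED| is meant to prevent.
|
| // Illustration only: fraction of frame content that remains visible when a
| // frame with |videoAspectRatio| is scaled to completely fill a view with
| // |displayAspectRatio| while keeping its aspect ratio.
| public class VisibleFractionExample {
|   static float visibleFraction(float videoAspectRatio, float displayAspectRatio) {
|     return (displayAspectRatio > videoAspectRatio)
|         ? videoAspectRatio / displayAspectRatio
|         : displayAspectRatio / videoAspectRatio;
|   }
|
|   public static void main(String[] args) {
|     final float portraitVideo = 9f / 16f;
|     final float landscapeView = 16f / 9f;
|     // Prints ~0.316: SCALE_ASPECT_FILL would crop away roughly 68% of the frame.
|     System.out.println(visibleFraction(portraitVideo, landscapeView));
|   }
| }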
|
| private static final int EGL14_SDK_VERSION =
|
| android.os.Build.VERSION_CODES.JELLY_BEAN_MR1;
|
| // Current SDK version.
|
| private static final int CURRENT_SDK_VERSION =
|
| android.os.Build.VERSION.SDK_INT;
|
|
|
| - private final String VERTEX_SHADER_STRING =
|
| - "varying vec2 interp_tc;\n" +
|
| - "attribute vec4 in_pos;\n" +
|
| - "attribute vec2 in_tc;\n" +
|
| - "\n" +
|
| - "void main() {\n" +
|
| - " gl_Position = in_pos;\n" +
|
| - " interp_tc = in_tc;\n" +
|
| - "}\n";
|
| -
|
| - private final String YUV_FRAGMENT_SHADER_STRING =
|
| - "precision mediump float;\n" +
|
| - "varying vec2 interp_tc;\n" +
|
| - "\n" +
|
| - "uniform sampler2D y_tex;\n" +
|
| - "uniform sampler2D u_tex;\n" +
|
| - "uniform sampler2D v_tex;\n" +
|
| - "\n" +
|
| - "void main() {\n" +
|
| - // CSC according to http://www.fourcc.org/fccyvrgb.php
|
| - " float y = texture2D(y_tex, interp_tc).r;\n" +
|
| - " float u = texture2D(u_tex, interp_tc).r - 0.5;\n" +
|
| - " float v = texture2D(v_tex, interp_tc).r - 0.5;\n" +
|
| - " gl_FragColor = vec4(y + 1.403 * v, " +
|
| - " y - 0.344 * u - 0.714 * v, " +
|
| - " y + 1.77 * u, 1);\n" +
|
| - "}\n";
|
| -
|
| -
|
| - private static final String OES_FRAGMENT_SHADER_STRING =
|
| - "#extension GL_OES_EGL_image_external : require\n" +
|
| - "precision mediump float;\n" +
|
| - "varying vec2 interp_tc;\n" +
|
| - "\n" +
|
| - "uniform samplerExternalOES oes_tex;\n" +
|
| - "\n" +
|
| - "void main() {\n" +
|
| - " gl_FragColor = texture2D(oes_tex, interp_tc);\n" +
|
| - "}\n";
|
| -
|
| -
|
| private VideoRendererGui(GLSurfaceView surface) {
|
| this.surface = surface;
|
| // Create an OpenGL ES 2.0 context.
|
| @@ -148,8 +109,6 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
|
| private static class YuvImageRenderer implements VideoRenderer.Callbacks {
|
| private GLSurfaceView surface;
|
| private int id;
|
| - private GlShader yuvShader;
|
| - private GlShader oesShader;
|
| private int[] yuvTextures = { -1, -1, -1 };
|
| private int oesTexture = -1;
|
|
|
| @@ -182,14 +141,13 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
|
| // Time in ns spent in renderFrame() function - including copying frame
|
| // data to rendering planes.
|
| private long copyTimeNs;
|
| - // Texture vertices.
|
| - private float texLeft;
|
| - private float texRight;
|
| - private float texTop;
|
| - private float texBottom;
|
| - private FloatBuffer textureVertices;
|
| - // Texture UV coordinates.
|
| - private FloatBuffer textureCoords;
|
| + // The allowed view area in percentage of screen size.
|
| + private final Rect layoutInPercentage;
|
| + // The actual view area in pixels. It is a centered subrectangle of the rectangle defined by
|
| + // |layoutInPercentage|.
|
| + private final Rect displayLayout = new Rect();
|
| + // Cached texture transformation matrix, calculated from current layout parameters.
|
| + private final float[] texMatrix = new float[16];
|
| // Flag if texture vertices or coordinates update is needed.
|
| private boolean updateTextureProperties;
|
| // Texture properties update lock.
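|
| The new |layoutInPercentage|, |displayLayout| and |texMatrix| fields replace the per-renderer texture vertex and UV coordinate buffers: |layoutInPercentage| stores the requested layout in percent of the GL surface, |displayLayout| is the centered pixel rectangle actually drawn into, and |texMatrix| carries rotation, mirroring and cropping into the drawer. A minimal sketch of the percent-to-pixel step (the helper name is hypothetical; the arithmetic mirrors checkAdjustTextureCoords() further down, which rounds inwards so the pixel rectangle never exceeds the requested area):
|
| import android.graphics.Rect;
|
| final class LayoutMath {
|   // Ceil on left/top, floor on right/bottom: stay inside the requested area.
|   static Rect percentToPixels(Rect layoutInPercentage, int screenWidth, int screenHeight) {
|     return new Rect(
|         (screenWidth * layoutInPercentage.left + 99) / 100,
|         (screenHeight * layoutInPercentage.top + 99) / 100,
|         (screenWidth * layoutInPercentage.right) / 100,
|         (screenHeight * layoutInPercentage.bottom) / 100);
|   }
| }
|
| For example, a quarter-screen request of Rect(0, 0, 50, 50) on a 1080 x 1920 surface becomes Rect(0, 0, 540, 960).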
|
| @@ -205,23 +163,6 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
|
| // it rendered up right.
|
| private int rotationDegree;
|
|
|
| - // Mapping array from original UV mapping to the rotated mapping. The number
|
| - // is the position where the original UV coordination should be mapped
|
| - // to. (0,1) is the top left coord. (2,3) is the bottom left. (4,5) is the
|
| - // top right. (6,7) is the bottom right.
|
| - private static int rotation_matrix[][] =
|
| - // 0 1 2 3 4 5 6 7 // arrays indices
|
| - { {4, 5, 0, 1, 6, 7, 2, 3}, // 90 degree (clockwise)
|
| - {6, 7, 4, 5, 2, 3, 0, 1}, // 180 degree (clockwise)
|
| - {2, 3, 6, 7, 0, 1, 4, 5} }; // 270 degree (clockwise)
|
| -
|
| - private static int mirror_matrix[][] =
|
| - // 0 1 2 3 4 5 6 7 // arrays indices
|
| - { {4, 1, 6, 3, 0, 5, 2, 7}, // 0 degree mirror - u swap
|
| - {0, 5, 2, 7, 4, 1, 6, 3}, // 90 degree mirror - v swap
|
| - {4, 1, 6, 3, 0, 5, 2, 7}, // 180 degree mirror - u swap
|
| - {0, 5, 2, 7, 4, 1, 6, 3} }; // 270 degree mirror - v swap
|
| -
|
| private YuvImageRenderer(
|
| GLSurfaceView surface, int id,
|
| int x, int y, int width, int height,
|
| @@ -232,40 +173,20 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
|
| this.scalingType = scalingType;
|
| this.mirror = mirror;
|
| frameToRenderQueue = new LinkedBlockingQueue<I420Frame>(1);
|
| - // Create texture vertices.
|
| - texLeft = (x - 50) / 50.0f;
|
| - texTop = (50 - y) / 50.0f;
|
| - texRight = Math.min(1.0f, (x + width - 50) / 50.0f);
|
| - texBottom = Math.max(-1.0f, (50 - y - height) / 50.0f);
|
| - float textureVeticesFloat[] = new float[] {
|
| - texLeft, texTop,
|
| - texLeft, texBottom,
|
| - texRight, texTop,
|
| - texRight, texBottom
|
| - };
|
| - textureVertices = GlUtil.createFloatBuffer(textureVeticesFloat);
|
| - // Create texture UV coordinates.
|
| - float textureCoordinatesFloat[] = new float[] {
|
| - 0, 0, 0, 1, 1, 0, 1, 1
|
| - };
|
| - textureCoords = GlUtil.createFloatBuffer(textureCoordinatesFloat);
|
| + layoutInPercentage = new Rect(x, y, Math.min(100, x + width), Math.min(100, y + height));
|
| updateTextureProperties = false;
|
| rotationDegree = 0;
|
| }
|
|
|
| - private void createTextures(GlShader yuvShader, GlShader oesShader) {
|
| + private void createTextures() {
|
| Log.d(TAG, " YuvImageRenderer.createTextures " + id + " on GL thread:" +
|
| Thread.currentThread().getId());
|
| - this.yuvShader = yuvShader;
|
| - this.oesShader = oesShader;
|
|
|
| // Generate 3 texture ids for Y/U/V and place them into |yuvTextures|.
|
| GLES20.glGenTextures(3, yuvTextures, 0);
|
| for (int i = 0; i < 3; i++) {
|
| GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
|
| GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
|
| - GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE,
|
| - 128, 128, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, null);
|
| GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
|
| GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
|
| GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
|
| @@ -278,144 +199,99 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
|
| GlUtil.checkNoGLES2Error("y/u/v glGenTextures");
|
| }
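|
| With the placeholder 128x128 glTexImage2D allocation removed from createTextures(), storage for the Y/U/V planes is now defined in draw() from the real frame dimensions: the Y plane at full resolution and the U/V planes at half resolution in each dimension. A small standalone illustration of those sizes for a 640x480 I420 frame (class name is illustrative, not part of the patch):
|
| public class I420PlaneSizes {
|   // Plane dimensions uploaded in draw() for a 640x480 I420 frame
|   // (i == 0 is the Y plane, i == 1 and 2 are the half-resolution U/V planes).
|   public static void main(String[] args) {
|     final int width = 640, height = 480;
|     int totalBytes = 0;
|     for (int i = 0; i < 3; ++i) {
|       int w = (i == 0) ? width : width / 2;
|       int h = (i == 0) ? height : height / 2;
|       totalBytes += w * h;  // GL_LUMINANCE: one byte per pixel.
|       System.out.println("plane " + i + ": " + w + " x " + h);
|     }
|     // 640*480 + 2 * 320*240 = 460800 bytes = width * height * 3 / 2.
|     System.out.println("total: " + totalBytes + " bytes");
|   }
| }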
|
|
|
| + private static float convertScalingTypeToVisibleFraction(ScalingType scalingType) {
|
| + switch (scalingType) {
|
| + case SCALE_ASPECT_FIT:
|
| + return 1.0f;
|
| + case SCALE_ASPECT_FILL:
|
| + return 0.0f;
|
| + case SCALE_ASPECT_BALANCED:
|
| + return BALANCED_VISIBLE_FRACTION;
|
| + default:
|
| + throw new IllegalArgumentException();
|
| + }
|
| + }
|
| +
|
| + private static Point getDisplaySize(float minVisibleFraction, float videoAspectRatio,
|
| + int maxDisplayWidth, int maxDisplayHeight) {
|
| + // If there is no constraint on the amount of cropping, fill the allowed display area.
|
| + if (minVisibleFraction == 0) {
|
| + return new Point(maxDisplayWidth, maxDisplayHeight);
|
| + }
|
| + // Each dimension is constrained on max display size and how much we are allowed to crop.
|
| + final int width = Math.min(maxDisplayWidth,
|
| + (int) (maxDisplayHeight / minVisibleFraction * videoAspectRatio));
|
| + final int height = Math.min(maxDisplayHeight,
|
| + (int) (maxDisplayWidth / minVisibleFraction / videoAspectRatio));
|
| + return new Point(width, height);
|
| + }
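|
| A worked example of getDisplaySize(), using the fractions produced by convertScalingTypeToVisibleFraction(): assume a 1920x1080 display area and a rotated (portrait) frame with aspect ratio 9:16. The helper is duplicated below only so the example is self-contained; it is not part of the patch.
|
| import android.graphics.Point;
|
| final class DisplaySizeExample {
|   // Same arithmetic as getDisplaySize() above, duplicated for the example.
|   static Point displaySize(float minVisibleFraction, float videoAspectRatio,
|       int maxDisplayWidth, int maxDisplayHeight) {
|     if (minVisibleFraction == 0) {
|       return new Point(maxDisplayWidth, maxDisplayHeight);
|     }
|     final int width = Math.min(maxDisplayWidth,
|         (int) (maxDisplayHeight / minVisibleFraction * videoAspectRatio));
|     final int height = Math.min(maxDisplayHeight,
|         (int) (maxDisplayWidth / minVisibleFraction / videoAspectRatio));
|     return new Point(width, height);
|   }
|
|   public static void main(String[] args) {
|     final float portraitAspect = 9f / 16f;
|     System.out.println(displaySize(1.0f, portraitAspect, 1920, 1080));   // 607 x 1080: whole frame, pillar-boxed (FIT).
|     System.out.println(displaySize(0.56f, portraitAspect, 1920, 1080));  // 1084 x 1080: ~56% of the frame visible (BALANCED).
|     System.out.println(displaySize(0.0f, portraitAspect, 1920, 1080));   // 1920 x 1080: only ~32% of the frame visible (FILL).
|   }
| }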
|
| +
|
| private void checkAdjustTextureCoords() {
|
| synchronized(updateTextureLock) {
|
| - if (!updateTextureProperties || scalingType == ScalingType.SCALE_FILL) {
|
| + if (!updateTextureProperties) {
|
| return;
|
| }
|
| - // Re - calculate texture vertices to preserve video aspect ratio.
|
| - float texRight = this.texRight;
|
| - float texLeft = this.texLeft;
|
| - float texTop = this.texTop;
|
| - float texBottom = this.texBottom;
|
| - float texOffsetU = 0;
|
| - float texOffsetV = 0;
|
| - float displayWidth = (texRight - texLeft) * screenWidth / 2;
|
| - float displayHeight = (texTop - texBottom) * screenHeight / 2;
|
| - Log.d(TAG, "ID: " + id + ". AdjustTextureCoords. Display: " + displayWidth +
|
| - " x " + displayHeight + ". Video: " + videoWidth +
|
| - " x " + videoHeight + ". Rotation: " + rotationDegree + ". Mirror: " + mirror);
|
| - if (displayWidth > 1 && displayHeight > 1 &&
|
| - videoWidth > 1 && videoHeight > 1) {
|
| - float displayAspectRatio = displayWidth / displayHeight;
|
| - // videoAspectRatio should be the one after rotation applied.
|
| - float videoAspectRatio = 0;
|
| - if (rotationDegree == 90 || rotationDegree == 270) {
|
| - videoAspectRatio = (float)videoHeight / videoWidth;
|
| - } else {
|
| - videoAspectRatio = (float)videoWidth / videoHeight;
|
| - }
|
| - if (scalingType == ScalingType.SCALE_ASPECT_FIT) {
|
| - // Need to re-adjust vertices width or height to match video AR.
|
| - if (displayAspectRatio > videoAspectRatio) {
|
| - float deltaX = (displayWidth - videoAspectRatio * displayHeight) /
|
| - instance.screenWidth;
|
| - texRight -= deltaX;
|
| - texLeft += deltaX;
|
| - } else {
|
| - float deltaY = (displayHeight - displayWidth / videoAspectRatio) /
|
| - instance.screenHeight;
|
| - texTop -= deltaY;
|
| - texBottom += deltaY;
|
| - }
|
| - }
|
| - if (scalingType == ScalingType.SCALE_ASPECT_FILL) {
|
| - // Need to re-adjust UV coordinates to match display AR.
|
| - boolean adjustU = true;
|
| - float ratio = 0;
|
| - if (displayAspectRatio > videoAspectRatio) {
|
| - ratio = (1.0f - videoAspectRatio / displayAspectRatio) /
|
| - 2.0f;
|
| - adjustU = (rotationDegree == 90 || rotationDegree == 270);
|
| - } else {
|
| - ratio = (1.0f - displayAspectRatio / videoAspectRatio) /
|
| - 2.0f;
|
| - adjustU = (rotationDegree == 0 || rotationDegree == 180);
|
| - }
|
| - if (adjustU) {
|
| - texOffsetU = ratio;
|
| - } else {
|
| - texOffsetV = ratio;
|
| - }
|
| - }
|
| - Log.d(TAG, " Texture vertices: (" + texLeft + "," + texBottom +
|
| - ") - (" + texRight + "," + texTop + ")");
|
| - float textureVeticesFloat[] = new float[] {
|
| - texLeft, texTop,
|
| - texLeft, texBottom,
|
| - texRight, texTop,
|
| - texRight, texBottom
|
| - };
|
| - textureVertices = GlUtil.createFloatBuffer(textureVeticesFloat);
|
| -
|
| - float uLeft = texOffsetU;
|
| - float uRight = 1.0f - texOffsetU;
|
| - float vTop = texOffsetV;
|
| - float vBottom = 1.0f - texOffsetV;
|
| - Log.d(TAG, " Texture UV: (" + uLeft + "," + vTop +
|
| - ") - (" + uRight + "," + vBottom + ")");
|
| - float textureCoordinatesFloat[] = new float[] {
|
| - uLeft, vTop, // top left
|
| - uLeft, vBottom, // bottom left
|
| - uRight, vTop, // top right
|
| - uRight, vBottom // bottom right
|
| - };
|
| -
|
| - // Rotation needs to be done before mirroring.
|
| - textureCoordinatesFloat = applyRotation(textureCoordinatesFloat,
|
| - rotationDegree);
|
| - textureCoordinatesFloat = applyMirror(textureCoordinatesFloat,
|
| - mirror);
|
| - textureCoords = GlUtil.createFloatBuffer(textureCoordinatesFloat);
|
| + // Initialize to maximum allowed area. Round to integer coordinates inwards the layout
|
| + // bounding box (ceil left/top and floor right/bottom) to not break constraints.
|
| + displayLayout.set(
|
| + (screenWidth * layoutInPercentage.left + 99) / 100,
|
| + (screenHeight * layoutInPercentage.top + 99) / 100,
|
| + (screenWidth * layoutInPercentage.right) / 100,
|
| + (screenHeight * layoutInPercentage.bottom) / 100);
|
| + Log.d(TAG, "ID: " + id + ". AdjustTextureCoords. Allowed display size: "
|
| + + displayLayout.width() + " x " + displayLayout.height() + ". Video: " + videoWidth
|
| + + " x " + videoHeight + ". Rotation: " + rotationDegree + ". Mirror: " + mirror);
|
| + final float videoAspectRatio = (rotationDegree % 180 == 0)
|
| + ? (float) videoWidth / videoHeight
|
| + : (float) videoHeight / videoWidth;
|
| + // Adjust display size based on |scalingType|.
|
| + final float minVisibleFraction = convertScalingTypeToVisibleFraction(scalingType);
|
| + final Point displaySize = getDisplaySize(minVisibleFraction, videoAspectRatio,
|
| + displayLayout.width(), displayLayout.height());
|
| + displayLayout.inset((displayLayout.width() - displaySize.x) / 2,
|
| + (displayLayout.height() - displaySize.y) / 2);
|
| + Log.d(TAG, " Adjusted display size: " + displayLayout.width() + " x "
|
| + + displayLayout.height());
|
| + // The matrix stack is using post-multiplication, which means that matrix operations:
|
| + // A; B; C; will end up as A * B * C. When you apply this to a vertex, it will result in:
|
| + // v' = A * B * C * v, i.e. the last matrix operation is the first thing that affects the
|
| + // vertex. This is the opposite of what you might expect.
|
| + Matrix.setIdentityM(texMatrix, 0);
|
| + // Move coordinates back to [0,1]x[0,1].
|
| + Matrix.translateM(texMatrix, 0, 0.5f, 0.5f, 0.0f);
|
| + // Rotate frame clockwise in the XY-plane (around the Z-axis).
|
| + Matrix.rotateM(texMatrix, 0, -rotationDegree, 0, 0, 1);
|
| + // Scale one dimension until video and display size have same aspect ratio.
|
| + final float displayAspectRatio = (float) displayLayout.width() / displayLayout.height();
|
| + if (displayAspectRatio > videoAspectRatio) {
|
| + Matrix.scaleM(texMatrix, 0, 1, videoAspectRatio / displayAspectRatio, 1);
|
| + } else {
|
| + Matrix.scaleM(texMatrix, 0, displayAspectRatio / videoAspectRatio, 1, 1);
|
| }
|
| + // TODO(magjed): We currently ignore the texture transform matrix from the SurfaceTexture.
|
| + // It contains a vertical flip that is hardcoded here instead.
|
| + Matrix.scaleM(texMatrix, 0, 1, -1, 1);
|
| + // Apply optional horizontal flip.
|
| + if (mirror) {
|
| + Matrix.scaleM(texMatrix, 0, -1, 1, 1);
|
| + }
|
| + // Center coordinates around origin.
|
| + Matrix.translateM(texMatrix, 0, -0.5f, -0.5f, 0.0f);
|
| updateTextureProperties = false;
|
| Log.d(TAG, " AdjustTextureCoords done");
|
| }
|
| }
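|
| The comment about post-multiplication is the key to reading the matrix block above: the last operation issued is the first one applied to a coordinate, so a texture coordinate is first centered around the origin, then mirrored, flipped and aspect-scaled, then rotated, and finally moved back into [0,1]x[0,1]. A minimal sketch of just the hardcoded vertical flip (no rotation, no mirroring, matching aspect ratios; the class is illustrative and not part of the patch):
|
| import android.opengl.Matrix;
|
| final class TexMatrixExample {
|   // Post-multiplied operations A; B; C; build A * B * C, so the bottom-most
|   // call is applied to the coordinate first. Rotation, aspect scaling and
|   // mirroring slot in between in exactly the same way.
|   public static void main(String[] args) {
|     final float[] texMatrix = new float[16];
|     Matrix.setIdentityM(texMatrix, 0);
|     Matrix.translateM(texMatrix, 0, 0.5f, 0.5f, 0.0f);    // Applied last.
|     Matrix.scaleM(texMatrix, 0, 1, -1, 1);                // Vertical flip.
|     Matrix.translateM(texMatrix, 0, -0.5f, -0.5f, 0.0f);  // Applied first.
|
|     final float[] in = {0f, 0f, 0f, 1f};  // Top-left texture coordinate.
|     final float[] out = new float[4];
|     Matrix.multiplyMV(out, 0, texMatrix, 0, in, 0);
|     System.out.println("(" + out[0] + ", " + out[1] + ")");  // (0.0, 1.0).
|   }
| }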
|
|
|
| -
|
| - private float[] applyMirror(float textureCoordinatesFloat[],
|
| - boolean mirror) {
|
| - if (!mirror) {
|
| - return textureCoordinatesFloat;
|
| - }
|
| -
|
| - int index = rotationDegree / 90;
|
| - return applyMatrixOperation(textureCoordinatesFloat,
|
| - mirror_matrix[index]);
|
| - }
|
| -
|
| - private float[] applyRotation(float textureCoordinatesFloat[],
|
| - int rotationDegree) {
|
| - if (rotationDegree == 0) {
|
| - return textureCoordinatesFloat;
|
| - }
|
| -
|
| - int index = rotationDegree / 90 - 1;
|
| - return applyMatrixOperation(textureCoordinatesFloat,
|
| - rotation_matrix[index]);
|
| - }
|
| -
|
| - private float[] applyMatrixOperation(float textureCoordinatesFloat[],
|
| - int matrix_operation[]) {
|
| - float textureCoordinatesModifiedFloat[] =
|
| - new float[textureCoordinatesFloat.length];
|
| -
|
| - for(int i = 0; i < textureCoordinatesFloat.length; i++) {
|
| - textureCoordinatesModifiedFloat[matrix_operation[i]] =
|
| - textureCoordinatesFloat[i];
|
| - }
|
| - return textureCoordinatesModifiedFloat;
|
| - }
|
| -
|
| - private void draw() {
|
| + private void draw(GlRectDrawer drawer) {
|
| if (!seenFrame) {
|
| // No frame received yet - nothing to render.
|
| return;
|
| }
|
| long now = System.nanoTime();
|
|
|
| - GlShader currentShader;
|
| + // OpenGL defaults to lower left origin.
|
| + GLES20.glViewport(displayLayout.left, screenHeight - displayLayout.bottom,
|
| + displayLayout.width(), displayLayout.height());
|
|
|
| I420Frame frameFromQueue;
|
| synchronized (frameToRenderQueue) {
|
| @@ -428,33 +304,22 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
|
| startTimeNs = now;
|
| }
|
|
|
| - if (rendererType == RendererType.RENDERER_YUV) {
|
| - // YUV textures rendering.
|
| - yuvShader.useProgram();
|
| - currentShader = yuvShader;
|
| -
|
| - for (int i = 0; i < 3; ++i) {
|
| - GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
|
| - GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
|
| - if (frameFromQueue != null) {
|
| - int w = (i == 0) ?
|
| - frameFromQueue.width : frameFromQueue.width / 2;
|
| - int h = (i == 0) ?
|
| - frameFromQueue.height : frameFromQueue.height / 2;
|
| + if (frameFromQueue != null) {
|
| + if (frameFromQueue.yuvFrame) {
|
| + // YUV textures rendering. Upload YUV data as textures.
|
| + for (int i = 0; i < 3; ++i) {
|
| + GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
|
| + GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
|
| + int w = (i == 0) ? frameFromQueue.width : frameFromQueue.width / 2;
|
| + int h = (i == 0) ? frameFromQueue.height : frameFromQueue.height / 2;
|
| GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE,
|
| w, h, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE,
|
| frameFromQueue.yuvPlanes[i]);
|
| }
|
| - }
|
| - GLES20.glUniform1i(yuvShader.getUniformLocation("y_tex"), 0);
|
| - GLES20.glUniform1i(yuvShader.getUniformLocation("u_tex"), 1);
|
| - GLES20.glUniform1i(yuvShader.getUniformLocation("v_tex"), 2);
|
| - } else {
|
| - // External texture rendering.
|
| - oesShader.useProgram();
|
| - currentShader = oesShader;
|
| -
|
| - if (frameFromQueue != null) {
|
| + } else {
|
| + // External texture rendering. Copy texture id and update texture image to latest.
|
| + // TODO(magjed): We should not make an unmanaged copy of texture id. Also, this is not
|
| + // the best place to call updateTexImage.
|
| oesTexture = frameFromQueue.textureId;
|
| if (frameFromQueue.textureObject instanceof SurfaceTexture) {
|
| SurfaceTexture surfaceTexture =
|
| @@ -462,31 +327,16 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
|
| surfaceTexture.updateTexImage();
|
| }
|
| }
|
| - GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
|
| - GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, oesTexture);
|
| - }
|
|
|
| - if (frameFromQueue != null) {
|
| frameToRenderQueue.poll();
|
| }
|
| }
|
|
|
| - int posLocation = currentShader.getAttribLocation("in_pos");
|
| - GLES20.glEnableVertexAttribArray(posLocation);
|
| - GLES20.glVertexAttribPointer(
|
| - posLocation, 2, GLES20.GL_FLOAT, false, 0, textureVertices);
|
| -
|
| - int texLocation = currentShader.getAttribLocation("in_tc");
|
| - GLES20.glEnableVertexAttribArray(texLocation);
|
| - GLES20.glVertexAttribPointer(
|
| - texLocation, 2, GLES20.GL_FLOAT, false, 0, textureCoords);
|
| -
|
| - GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
|
| -
|
| - GLES20.glDisableVertexAttribArray(posLocation);
|
| - GLES20.glDisableVertexAttribArray(texLocation);
|
| -
|
| - GlUtil.checkNoGLES2Error("draw done");
|
| + if (rendererType == RendererType.RENDERER_YUV) {
|
| + drawer.drawYuv(videoWidth, videoHeight, yuvTextures, texMatrix);
|
| + } else {
|
| + drawer.drawOes(oesTexture, texMatrix);
|
| + }
|
|
|
| if (frameFromQueue != null) {
|
| framesRendered++;
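|
| Two details in the rewritten draw() are easy to miss: the viewport is now set per renderer rather than once for the whole surface, and the top-left based |displayLayout| has to be converted to OpenGL's lower-left origin. A sketch of just that conversion (must run on the GL thread with a current context; the helper name is hypothetical):
|
| import android.graphics.Rect;
| import android.opengl.GLES20;
|
| final class ViewportMath {
|   // Same conversion as the glViewport() call at the top of draw(): Rect uses
|   // a top-left origin, glViewport expects the lower-left corner.
|   static void setViewport(Rect displayLayout, int screenHeight) {
|     GLES20.glViewport(displayLayout.left, screenHeight - displayLayout.bottom,
|         displayLayout.width(), displayLayout.height());
|   }
| }
|
| For a |displayLayout| of Rect(0, 0, 540, 960) on a surface 1920 pixels tall this is glViewport(0, 960, 540, 960). After the textures are updated, the quad itself is drawn by the shared GlRectDrawer, drawYuv() for I420 frames and drawOes() for SurfaceTexture frames, both using the |texMatrix| computed in checkAdjustTextureCoords().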
|
| @@ -526,23 +376,17 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
|
|
|
| public void setPosition(int x, int y, int width, int height,
|
| ScalingType scalingType, boolean mirror) {
|
| - float texLeft = (x - 50) / 50.0f;
|
| - float texTop = (50 - y) / 50.0f;
|
| - float texRight = Math.min(1.0f, (x + width - 50) / 50.0f);
|
| - float texBottom = Math.max(-1.0f, (50 - y - height) / 50.0f);
|
| + final Rect layoutInPercentage =
|
| + new Rect(x, y, Math.min(100, x + width), Math.min(100, y + height));
|
| synchronized(updateTextureLock) {
|
| - if (texLeft == this.texLeft && texTop == this.texTop && texRight == this.texRight &&
|
| - texBottom == this.texBottom && scalingType == this.scalingType &&
|
| - mirror == this.mirror) {
|
| + if (layoutInPercentage.equals(this.layoutInPercentage) && scalingType == this.scalingType
|
| + && mirror == this.mirror) {
|
| return;
|
| }
|
| Log.d(TAG, "ID: " + id + ". YuvImageRenderer.setPosition: (" + x + ", " + y +
|
| ") " + width + " x " + height + ". Scaling: " + scalingType +
|
| ". Mirror: " + mirror);
|
| - this.texLeft = texLeft;
|
| - this.texTop = texTop;
|
| - this.texRight = texRight;
|
| - this.texBottom = texBottom;
|
| + this.layoutInPercentage.set(layoutInPercentage);
|
| this.scalingType = scalingType;
|
| this.mirror = mirror;
|
| updateTextureProperties = true;
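|
| setPosition() now stores the requested layout directly as a percentage Rect, so a single Rect comparison replaces the four coordinate comparisons in the early return. Clamping the right and bottom edges keeps an oversized request on screen; an illustrative helper (not part of the patch) showing the same construction:
|
| import android.graphics.Rect;
|
| final class SetPositionExample {
|   // Same Rect construction as setPosition(): x/y/width/height are percentages
|   // of the surface, and the far edges are clamped to 100.
|   static Rect toLayoutInPercentage(int x, int y, int width, int height) {
|     return new Rect(x, y, Math.min(100, x + width), Math.min(100, y + height));
|   }
|
|   public static void main(String[] args) {
|     // A request for 60% of the height starting at y = 50 is clamped to the
|     // bottom half of the screen: left 0, top 50, right 100, bottom 100.
|     System.out.println(toLayoutInPercentage(0, 50, 100, 60));
|   }
| }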
|
| @@ -694,8 +538,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
|
| final CountDownLatch countDownLatch = new CountDownLatch(1);
|
| instance.surface.queueEvent(new Runnable() {
|
| public void run() {
|
| - yuvImageRenderer.createTextures(
|
| - instance.yuvShader, instance.oesShader);
|
| + yuvImageRenderer.createTextures();
|
| yuvImageRenderer.setScreenSize(
|
| instance.screenWidth, instance.screenHeight);
|
| countDownLatch.countDown();
|
| @@ -754,14 +597,13 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
|
| Log.d(TAG, "VideoRendererGui EGL Context: " + eglContext);
|
| }
|
|
|
| - // Create YUV and OES shaders.
|
| - yuvShader = new GlShader(VERTEX_SHADER_STRING, YUV_FRAGMENT_SHADER_STRING);
|
| - oesShader = new GlShader(VERTEX_SHADER_STRING, OES_FRAGMENT_SHADER_STRING);
|
| + // Create drawer for YUV/OES frames.
|
| + drawer = new GlRectDrawer();
|
|
|
| synchronized (yuvImageRenderers) {
|
| // Create textures for all images.
|
| for (YuvImageRenderer yuvImageRenderer : yuvImageRenderers) {
|
| - yuvImageRenderer.createTextures(yuvShader, oesShader);
|
| + yuvImageRenderer.createTextures();
|
| }
|
| onSurfaceCreatedCalled = true;
|
| }
|
| @@ -780,7 +622,6 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
|
| width + " x " + height + " ");
|
| screenWidth = width;
|
| screenHeight = height;
|
| - GLES20.glViewport(0, 0, width, height);
|
| synchronized (yuvImageRenderers) {
|
| for (YuvImageRenderer yuvImageRenderer : yuvImageRenderers) {
|
| yuvImageRenderer.setScreenSize(screenWidth, screenHeight);
|
| @@ -790,10 +631,11 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
|
|
|
| @Override
|
| public void onDrawFrame(GL10 unused) {
|
| + GLES20.glViewport(0, 0, screenWidth, screenHeight);
|
| GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
|
| synchronized (yuvImageRenderers) {
|
| for (YuvImageRenderer yuvImageRenderer : yuvImageRenderers) {
|
| - yuvImageRenderer.draw();
|
| + yuvImageRenderer.draw(drawer);
|
| }
|
| }
|
| }
|
|
|