Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 /* | 1 /* |
| 2 * libjingle | 2 * libjingle |
| 3 * Copyright 2014 Google Inc. | 3 * Copyright 2014 Google Inc. |
| 4 * | 4 * |
| 5 * Redistribution and use in source and binary forms, with or without | 5 * Redistribution and use in source and binary forms, with or without |
| 6 * modification, are permitted provided that the following conditions are met: | 6 * modification, are permitted provided that the following conditions are met: |
| 7 * | 7 * |
| 8 * 1. Redistributions of source code must retain the above copyright notice, | 8 * 1. Redistributions of source code must retain the above copyright notice, |
| 9 * this list of conditions and the following disclaimer. | 9 * this list of conditions and the following disclaimer. |
| 10 * 2. Redistributions in binary form must reproduce the above copyright notice, | 10 * 2. Redistributions in binary form must reproduce the above copyright notice, |
| (...skipping 84 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 95 // Create an OpenGL ES 2.0 context. | 95 // Create an OpenGL ES 2.0 context. |
| 96 surface.setPreserveEGLContextOnPause(true); | 96 surface.setPreserveEGLContextOnPause(true); |
| 97 surface.setEGLContextClientVersion(2); | 97 surface.setEGLContextClientVersion(2); |
| 98 surface.setRenderer(this); | 98 surface.setRenderer(this); |
| 99 surface.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY); | 99 surface.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY); |
| 100 | 100 |
| 101 yuvImageRenderers = new ArrayList<YuvImageRenderer>(); | 101 yuvImageRenderers = new ArrayList<YuvImageRenderer>(); |
| 102 } | 102 } |
| 103 | 103 |
| 104 /** | 104 /** |
| 105 * Each scaling type has a one-to-one correspondence to a numeric minimum fraction of the video | |
| 106 * that must remain visible. | |
| 107 */ | |
| 108 public static float convertScalingTypeToVisibleFraction(ScalingType scalingTyp e) { | |
|
AlexG
2015/08/05 00:47:11
I think these helpers need to be moved to a separate
magjed_webrtc
2015/08/07 17:14:50
Done.
| |
| 109 switch (scalingType) { | |
| 110 case SCALE_ASPECT_FIT: | |
| 111 return 1.0f; | |
| 112 case SCALE_ASPECT_FILL: | |
| 113 return 0.0f; | |
| 114 case SCALE_ASPECT_BALANCED: | |
| 115 return BALANCED_VISIBLE_FRACTION; | |
| 116 default: | |
| 117 throw new IllegalArgumentException(); | |
| 118 } | |
| 119 } | |
| 120 | |
| 121 /** | |
| 122 * Calculates display size based on minimum fraction of the video that must remain visible, | |
| 123 * video aspect ratio, and maximum display size. | |
| 124 */ | |
| 125 public static Point getDisplaySize(float minVisibleFraction, float videoAspect Ratio, | |
| 126 int maxDisplayWidth, int maxDisplayHeight) { | |
| 127 // If there is no constraint on the amount of cropping, fill the allowed display area. | |
| 128 if (minVisibleFraction == 0 || videoAspectRatio == 0) { | |
| 129 return new Point(maxDisplayWidth, maxDisplayHeight); | |
| 130 } | |
| 131 // Each dimension is constrained on max display size and how much we are allowed to crop. | |
| 132 final int width = Math.min(maxDisplayWidth, | |
| 133 (int) (maxDisplayHeight / minVisibleFraction * videoAspectRatio)); | |
| 134 final int height = Math.min(maxDisplayHeight, | |
| 135 (int) (maxDisplayWidth / minVisibleFraction / videoAspectRatio)); | |
| 136 return new Point(width, height); | |
| 137 } | |
| 138 | |
| 139 /** | |
| 140 * Calculates a texture transformation matrix based on rotation, mirror, and video vs display | |
| 141 * aspect ratio. | |
| 142 */ | |
| 143 public static void getTextureMatrix(float[] outputTextureMatrix, float rotatio nDegree, | |
| 144 boolean mirror, float videoAspectRatio, float displayAspectRatio) { | |
| 145 // The matrix stack is using post-multiplication, which means that matrix op erations: | |
| 146 // A; B; C; will end up as A * B * C. When you apply this to a vertex, it wi ll result in: | |
| 147 // v' = A * B * C * v, i.e. the last matrix operation is the first thing tha t affects the | |
| 148 // vertex. This is the opposite of what you might expect. | |
| 149 Matrix.setIdentityM(outputTextureMatrix, 0); | |
| 150 // Move coordinates back to [0,1]x[0,1]. | |
| 151 Matrix.translateM(outputTextureMatrix, 0, 0.5f, 0.5f, 0.0f); | |
| 152 // Rotate frame clockwise in the XY-plane (around the Z-axis). | |
| 153 Matrix.rotateM(outputTextureMatrix, 0, -rotationDegree, 0, 0, 1); | |
| 154 // Scale one dimension until video and display size have same aspect ratio. | |
| 155 if (displayAspectRatio > videoAspectRatio) { | |
| 156 Matrix.scaleM(outputTextureMatrix, 0, 1, videoAspectRatio / displayAspectR atio, 1); | |
| 157 } else { | |
| 158 Matrix.scaleM(outputTextureMatrix, 0, displayAspectRatio / videoAspectRati o, 1, 1); | |
| 159 } | |
| 160 // TODO(magjed): We currently ignore the texture transform matrix from the S urfaceTexture. | |
| 161 // It contains a vertical flip that is hardcoded here instead. | |
| 162 Matrix.scaleM(outputTextureMatrix, 0, 1, -1, 1); | |
| 163 // Apply optional horizontal flip. | |
| 164 if (mirror) { | |
| 165 Matrix.scaleM(outputTextureMatrix, 0, -1, 1, 1); | |
| 166 } | |
| 167 // Center coordinates around origin. | |
| 168 Matrix.translateM(outputTextureMatrix, 0, -0.5f, -0.5f, 0.0f); | |
| 169 } | |
| 170 | |
| 171 /** | |
| 105 * Class used to display stream of YUV420 frames at particular location | 172 * Class used to display stream of YUV420 frames at particular location |
| 106 * on a screen. New video frames are sent to display using renderFrame() | 173 * on a screen. New video frames are sent to display using renderFrame() |
| 107 * call. | 174 * call. |
| 108 */ | 175 */ |
| 109 private static class YuvImageRenderer implements VideoRenderer.Callbacks { | 176 private static class YuvImageRenderer implements VideoRenderer.Callbacks { |
| 110 private GLSurfaceView surface; | 177 private GLSurfaceView surface; |
| 111 private int id; | 178 private int id; |
| 112 private int[] yuvTextures = { -1, -1, -1 }; | 179 private int[] yuvTextures = { -1, -1, -1 }; |
| 113 private int oesTexture = -1; | 180 private int oesTexture = -1; |
| 114 | 181 |
| (...skipping 77 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 192 GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, | 259 GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, |
| 193 GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); | 260 GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); |
| 194 GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, | 261 GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, |
| 195 GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE); | 262 GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE); |
| 196 GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, | 263 GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, |
| 197 GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE); | 264 GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE); |
| 198 } | 265 } |
| 199 GlUtil.checkNoGLES2Error("y/u/v glGenTextures"); | 266 GlUtil.checkNoGLES2Error("y/u/v glGenTextures"); |
| 200 } | 267 } |
| 201 | 268 |
| 202 private static float convertScalingTypeToVisibleFraction(ScalingType scaling Type) { | |
| 203 switch (scalingType) { | |
| 204 case SCALE_ASPECT_FIT: | |
| 205 return 1.0f; | |
| 206 case SCALE_ASPECT_FILL: | |
| 207 return 0.0f; | |
| 208 case SCALE_ASPECT_BALANCED: | |
| 209 return BALANCED_VISIBLE_FRACTION; | |
| 210 default: | |
| 211 throw new IllegalArgumentException(); | |
| 212 } | |
| 213 } | |
| 214 | |
| 215 private static Point getDisplaySize(float minVisibleFraction, float videoAsp ectRatio, | |
| 216 int maxDisplayWidth, int maxDisplayHeight) { | |
| 217 // If there is no constraint on the amount of cropping, fill the allowed d isplay area. | |
| 218 if (minVisibleFraction == 0) { | |
| 219 return new Point(maxDisplayWidth, maxDisplayHeight); | |
| 220 } | |
| 221 // Each dimension is constrained on max display size and how much we are a llowed to crop. | |
| 222 final int width = Math.min(maxDisplayWidth, | |
| 223 (int) (maxDisplayHeight / minVisibleFraction * videoAspectRatio)); | |
| 224 final int height = Math.min(maxDisplayHeight, | |
| 225 (int) (maxDisplayWidth / minVisibleFraction / videoAspectRatio)); | |
| 226 return new Point(width, height); | |
| 227 } | |
| 228 | |
| 229 private void checkAdjustTextureCoords() { | 269 private void checkAdjustTextureCoords() { |
| 230 synchronized(updateTextureLock) { | 270 synchronized(updateTextureLock) { |
| 231 if (!updateTextureProperties) { | 271 if (!updateTextureProperties) { |
| 232 return; | 272 return; |
| 233 } | 273 } |
| 234 // Initialize to maximum allowed area. Round to integer coordinates inwa rds the layout | 274 // Initialize to maximum allowed area. Round to integer coordinates inwa rds the layout |
| 235 // bounding box (ceil left/top and floor right/bottom) to not break cons traints. | 275 // bounding box (ceil left/top and floor right/bottom) to not break cons traints. |
| 236 displayLayout.set( | 276 displayLayout.set( |
| 237 (screenWidth * layoutInPercentage.left + 99) / 100, | 277 (screenWidth * layoutInPercentage.left + 99) / 100, |
| 238 (screenHeight * layoutInPercentage.top + 99) / 100, | 278 (screenHeight * layoutInPercentage.top + 99) / 100, |
| 239 (screenWidth * layoutInPercentage.right) / 100, | 279 (screenWidth * layoutInPercentage.right) / 100, |
| 240 (screenHeight * layoutInPercentage.bottom) / 100); | 280 (screenHeight * layoutInPercentage.bottom) / 100); |
| 241 Log.d(TAG, "ID: " + id + ". AdjustTextureCoords. Allowed display size: " | 281 Log.d(TAG, "ID: " + id + ". AdjustTextureCoords. Allowed display size: " |
| 242 + displayLayout.width() + " x " + displayLayout.height() + ". Video: " + videoWidth | 282 + displayLayout.width() + " x " + displayLayout.height() + ". Video: " + videoWidth |
| 243 + " x " + videoHeight + ". Rotation: " + rotationDegree + ". Mirror: " + mirror); | 283 + " x " + videoHeight + ". Rotation: " + rotationDegree + ". Mirror: " + mirror); |
| 244 final float videoAspectRatio = (rotationDegree % 180 == 0) | 284 final float videoAspectRatio = (rotationDegree % 180 == 0) |
| 245 ? (float) videoWidth / videoHeight | 285 ? (float) videoWidth / videoHeight |
| 246 : (float) videoHeight / videoWidth; | 286 : (float) videoHeight / videoWidth; |
| 247 // Adjust display size based on |scalingType|. | 287 // Adjust display size based on |scalingType|. |
| 248 final float minVisibleFraction = convertScalingTypeToVisibleFraction(sca lingType); | 288 final float minVisibleFraction = convertScalingTypeToVisibleFraction(sca lingType); |
| 249 final Point displaySize = getDisplaySize(minVisibleFraction, videoAspect Ratio, | 289 final Point displaySize = getDisplaySize(minVisibleFraction, videoAspect Ratio, |
| 250 displayLayout.width(), displayLayout.height()); | 290 displayLayout.width(), displayLayout.height()); |
| 251 displayLayout.inset((displayLayout.width() - displaySize.x) / 2, | 291 displayLayout.inset((displayLayout.width() - displaySize.x) / 2, |
| 252 (displayLayout.height() - displaySize.y) / 2); | 292 (displayLayout.height() - displaySize.y) / 2); |
| 253 Log.d(TAG, " Adjusted display size: " + displayLayout.width() + " x " | 293 Log.d(TAG, " Adjusted display size: " + displayLayout.width() + " x " |
| 254 + displayLayout.height()); | 294 + displayLayout.height()); |
| 255 // The matrix stack is using post-multiplication, which means that matri x operations: | 295 getTextureMatrix(texMatrix, rotationDegree, mirror, videoAspectRatio, |
| 256 // A; B; C; will end up as A * B * C. When you apply this to a vertex, i t will result in: | 296 (float) displayLayout.width() / displayLayout.height()); |
| 257 // v' = A * B * C * v, i.e. the last matrix operation is the first thing that affects the | |
| 258 // vertex. This is the opposite of what you might expect. | |
| 259 Matrix.setIdentityM(texMatrix, 0); | |
| 260 // Move coordinates back to [0,1]x[0,1]. | |
| 261 Matrix.translateM(texMatrix, 0, 0.5f, 0.5f, 0.0f); | |
| 262 // Rotate frame clockwise in the XY-plane (around the Z-axis). | |
| 263 Matrix.rotateM(texMatrix, 0, -rotationDegree, 0, 0, 1); | |
| 264 // Scale one dimension until video and display size have same aspect rat io. | |
| 265 final float displayAspectRatio = (float) displayLayout.width() / display Layout.height(); | |
| 266 if (displayAspectRatio > videoAspectRatio) { | |
| 267 Matrix.scaleM(texMatrix, 0, 1, videoAspectRatio / displayAspectRatio , 1); | |
| 268 } else { | |
| 269 Matrix.scaleM(texMatrix, 0, displayAspectRatio / videoAspectRatio, 1 , 1); | |
| 270 } | |
| 271 // TODO(magjed): We currently ignore the texture transform matrix from t he SurfaceTexture. | |
| 272 // It contains a vertical flip that is hardcoded here instead. | |
| 273 Matrix.scaleM(texMatrix, 0, 1, -1, 1); | |
| 274 // Apply optional horizontal flip. | |
| 275 if (mirror) { | |
| 276 Matrix.scaleM(texMatrix, 0, -1, 1, 1); | |
| 277 } | |
| 278 // Center coordinates around origin. | |
| 279 Matrix.translateM(texMatrix, 0, -0.5f, -0.5f, 0.0f); | |
| 280 updateTextureProperties = false; | 297 updateTextureProperties = false; |
| 281 Log.d(TAG, " AdjustTextureCoords done"); | 298 Log.d(TAG, " AdjustTextureCoords done"); |
| 282 } | 299 } |
| 283 } | 300 } |
| 284 | 301 |
| 285 private void draw(GlRectDrawer drawer) { | 302 private void draw(GlRectDrawer drawer) { |
| 286 if (!seenFrame) { | 303 if (!seenFrame) { |
| 287 // No frame received yet - nothing to render. | 304 // No frame received yet - nothing to render. |
| 288 return; | 305 return; |
| 289 } | 306 } |
| (...skipping 118 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 408 | 425 |
| 409 this.videoWidth = videoWidth; | 426 this.videoWidth = videoWidth; |
| 410 this.videoHeight = videoHeight; | 427 this.videoHeight = videoHeight; |
| 411 rotationDegree = rotation; | 428 rotationDegree = rotation; |
| 412 int[] strides = { videoWidth, videoWidth / 2, videoWidth / 2 }; | 429 int[] strides = { videoWidth, videoWidth / 2, videoWidth / 2 }; |
| 413 | 430 |
| 414 // Clear rendering queue. | 431 // Clear rendering queue. |
| 415 frameToRenderQueue.poll(); | 432 frameToRenderQueue.poll(); |
| 416 // Re-allocate / allocate the frame. | 433 // Re-allocate / allocate the frame. |
| 417 yuvFrameToRender = new I420Frame(videoWidth, videoHeight, rotationDegree , | 434 yuvFrameToRender = new I420Frame(videoWidth, videoHeight, rotationDegree , |
| 418 strides, null); | 435 strides, null, 0); |
| 419 textureFrameToRender = new I420Frame(videoWidth, videoHeight, rotationDe gree, | 436 textureFrameToRender = new I420Frame(videoWidth, videoHeight, rotationDe gree, |
| 420 null, -1); | 437 null, -1, 0); |
| 421 updateTextureProperties = true; | 438 updateTextureProperties = true; |
| 422 Log.d(TAG, " YuvImageRenderer.setSize done."); | 439 Log.d(TAG, " YuvImageRenderer.setSize done."); |
| 423 } | 440 } |
| 424 } | 441 } |
| 425 | 442 |
| 426 @Override | 443 @Override |
| 427 public synchronized void renderFrame(I420Frame frame) { | 444 public synchronized void renderFrame(I420Frame frame) { |
| 428 setSize(frame.width, frame.height, frame.rotationDegree); | 445 setSize(frame.width, frame.height, frame.rotationDegree); |
| 429 long now = System.nanoTime(); | 446 long now = System.nanoTime(); |
| 430 framesReceived++; | 447 framesReceived++; |
| 431 // Skip rendering of this frame if setSize() was not called. | 448 // Skip rendering of this frame if setSize() was not called. |
| 432 if (yuvFrameToRender == null || textureFrameToRender == null) { | 449 if (yuvFrameToRender == null || textureFrameToRender == null) { |
| 433 framesDropped++; | 450 framesDropped++; |
| 451 VideoRenderer.renderFrameDone(frame); | |
| 434 return; | 452 return; |
| 435 } | 453 } |
| 436 // Check input frame parameters. | 454 // Check input frame parameters. |
| 437 if (frame.yuvFrame) { | 455 if (frame.yuvFrame) { |
| 438 if (frame.yuvStrides[0] < frame.width || | 456 if (frame.yuvStrides[0] < frame.width || |
| 439 frame.yuvStrides[1] < frame.width / 2 || | 457 frame.yuvStrides[1] < frame.width / 2 || |
| 440 frame.yuvStrides[2] < frame.width / 2) { | 458 frame.yuvStrides[2] < frame.width / 2) { |
| 441 Log.e(TAG, "Incorrect strides " + frame.yuvStrides[0] + ", " + | 459 Log.e(TAG, "Incorrect strides " + frame.yuvStrides[0] + ", " + |
| 442 frame.yuvStrides[1] + ", " + frame.yuvStrides[2]); | 460 frame.yuvStrides[1] + ", " + frame.yuvStrides[2]); |
| 461 VideoRenderer.renderFrameDone(frame); | |
| 443 return; | 462 return; |
| 444 } | 463 } |
| 445 // Check incoming frame dimensions. | 464 // Check incoming frame dimensions. |
| 446 if (frame.width != yuvFrameToRender.width || | 465 if (frame.width != yuvFrameToRender.width || |
| 447 frame.height != yuvFrameToRender.height) { | 466 frame.height != yuvFrameToRender.height) { |
| 448 throw new RuntimeException("Wrong frame size " + | 467 throw new RuntimeException("Wrong frame size " + |
| 449 frame.width + " x " + frame.height); | 468 frame.width + " x " + frame.height); |
| 450 } | 469 } |
| 451 } | 470 } |
| 452 | 471 |
| 453 if (frameToRenderQueue.size() > 0) { | 472 if (frameToRenderQueue.size() > 0) { |
| 454 // Skip rendering of this frame if previous frame was not rendered yet. | 473 // Skip rendering of this frame if previous frame was not rendered yet. |
| 455 framesDropped++; | 474 framesDropped++; |
| 475 VideoRenderer.renderFrameDone(frame); | |
| 456 return; | 476 return; |
| 457 } | 477 } |
| 458 | 478 |
| 459 // Create a local copy of the frame. | 479 // Create a local copy of the frame. |
| 460 if (frame.yuvFrame) { | 480 if (frame.yuvFrame) { |
| 461 yuvFrameToRender.copyFrom(frame); | 481 yuvFrameToRender.copyFrom(frame); |
| 462 rendererType = RendererType.RENDERER_YUV; | 482 rendererType = RendererType.RENDERER_YUV; |
| 463 frameToRenderQueue.offer(yuvFrameToRender); | 483 frameToRenderQueue.offer(yuvFrameToRender); |
| 464 } else { | 484 } else { |
| 465 textureFrameToRender.copyFrom(frame); | 485 textureFrameToRender.copyFrom(frame); |
| 466 rendererType = RendererType.RENDERER_TEXTURE; | 486 rendererType = RendererType.RENDERER_TEXTURE; |
| 467 frameToRenderQueue.offer(textureFrameToRender); | 487 frameToRenderQueue.offer(textureFrameToRender); |
| 468 } | 488 } |
| 469 copyTimeNs += (System.nanoTime() - now); | 489 copyTimeNs += (System.nanoTime() - now); |
| 470 seenFrame = true; | 490 seenFrame = true; |
| 491 VideoRenderer.renderFrameDone(frame); | |
| 471 | 492 |
| 472 // Request rendering. | 493 // Request rendering. |
| 473 surface.requestRender(); | 494 surface.requestRender(); |
| 474 } | 495 } |
| 475 | 496 |
| 476 // TODO(guoweis): Remove this once chrome code base is updated. | 497 // TODO(guoweis): Remove this once chrome code base is updated. |
| 477 @Override | 498 @Override |
| 478 public boolean canApplyRotation() { | 499 public boolean canApplyRotation() { |
| 479 return true; | 500 return true; |
| 480 } | 501 } |
| (...skipping 153 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 634 GLES20.glViewport(0, 0, screenWidth, screenHeight); | 655 GLES20.glViewport(0, 0, screenWidth, screenHeight); |
| 635 GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); | 656 GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); |
| 636 synchronized (yuvImageRenderers) { | 657 synchronized (yuvImageRenderers) { |
| 637 for (YuvImageRenderer yuvImageRenderer : yuvImageRenderers) { | 658 for (YuvImageRenderer yuvImageRenderer : yuvImageRenderers) { |
| 638 yuvImageRenderer.draw(drawer); | 659 yuvImageRenderer.draw(drawer); |
| 639 } | 660 } |
| 640 } | 661 } |
| 641 } | 662 } |
| 642 | 663 |
| 643 } | 664 } |
| OLD | NEW |