OLD | NEW |
(Empty) | |
| 1 /* |
| 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
| 3 * |
| 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ |
| 10 |
| 11 #include <GLES2/gl2.h> |
| 12 #include <GLES2/gl2ext.h> |
| 13 |
| 14 #include <stdio.h> |
| 15 #include <stdlib.h> |
| 16 |
| 17 #include "webrtc/modules/video_render/android/video_render_opengles20.h" |
| 18 |
| 19 //#define ANDROID_LOG |
| 20 |
| 21 #ifdef ANDROID_LOG |
| 22 #include <android/log.h> |
| 23 #include <stdio.h> |
| 24 |
| 25 #undef WEBRTC_TRACE |
| 26 #define WEBRTC_TRACE(a,b,c,...) __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTC*", __VA_ARGS__) |
| 27 #else |
| 28 #include "webrtc/system_wrappers/include/trace.h" |
| 29 #endif |
| 30 |
| 31 namespace webrtc { |
| 32 |
// Index list that draws the quad in _vertices as two triangles
// (vertices 0-3-2 and 0-2-1); consumed by glDrawElements as GL_UNSIGNED_BYTE.
const char VideoRenderOpenGles20::g_indices[] = { 0, 3, 2, 0, 2, 1 };
| 34 |
// Vertex shader: passes aPosition through unchanged (the quad's positions
// are already in normalized device coordinates, see the constructor) and
// forwards aTextureCoord to the fragment shader.
// NOTE(review): "g_vertextShader" (sic) is the declared name in the header;
// renaming would break the class declaration.
const char VideoRenderOpenGles20::g_vertextShader[] = {
    "attribute vec4 aPosition;\n"
    "attribute vec2 aTextureCoord;\n"
    "varying vec2 vTextureCoord;\n"
    "void main() {\n"
    "  gl_Position = aPosition;\n"
    "  vTextureCoord = aTextureCoord;\n"
    "}\n" };
| 43 |
// The fragment shader.
// Do YUV to RGB565 conversion.
// Samples the Y, U and V planes from three single-channel (luminance)
// textures and converts to RGB. The constants (1.1643 luma scale, 0.0625
// luma offset, 1.5958 / 0.39173 / 0.81290 / 2.017 chroma weights) match a
// limited-range BT.601 YUV->RGB transform -- NOTE(review): confirm BT.601
// is the intended colorspace for the capture pipeline.
// NOTE(review): txl, ux and vx are declared but never used.
const char VideoRenderOpenGles20::g_fragmentShader[] = {
    "precision mediump float;\n"
    "uniform sampler2D Ytex;\n"
    "uniform sampler2D Utex,Vtex;\n"
    "varying vec2 vTextureCoord;\n"
    "void main(void) {\n"
    "  float nx,ny,r,g,b,y,u,v;\n"
    "  mediump vec4 txl,ux,vx;"
    "  nx=vTextureCoord[0];\n"
    "  ny=vTextureCoord[1];\n"
    "  y=texture2D(Ytex,vec2(nx,ny)).r;\n"
    "  u=texture2D(Utex,vec2(nx,ny)).r;\n"
    "  v=texture2D(Vtex,vec2(nx,ny)).r;\n"

    //"  y = v;\n"+
    "  y=1.1643*(y-0.0625);\n"
    "  u=u-0.5;\n"
    "  v=v-0.5;\n"

    "  r=y+1.5958*v;\n"
    "  g=y-0.39173*u-0.81290*v;\n"
    "  b=y+2.017*u;\n"
    "  gl_FragColor=vec4(r,g,b,1.0);\n"
    "}\n" };
| 70 |
| 71 VideoRenderOpenGles20::VideoRenderOpenGles20(int32_t id) : |
| 72 _id(id), |
| 73 _textureWidth(-1), |
| 74 _textureHeight(-1) { |
| 75 WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: id %d", |
| 76 __FUNCTION__, (int) _id); |
| 77 |
| 78 const GLfloat vertices[20] = { |
| 79 // X, Y, Z, U, V |
| 80 -1, -1, 0, 0, 1, // Bottom Left |
| 81 1, -1, 0, 1, 1, //Bottom Right |
| 82 1, 1, 0, 1, 0, //Top Right |
| 83 -1, 1, 0, 0, 0 }; //Top Left |
| 84 |
| 85 memcpy(_vertices, vertices, sizeof(_vertices)); |
| 86 } |
| 87 |
// NOTE(review): the GL program and the three textures allocated in Setup()
// and SetupTextures() are not deleted here; presumably they are reclaimed
// when the owning GL context is torn down -- confirm against the surface
// teardown path before relying on it.
VideoRenderOpenGles20::~VideoRenderOpenGles20() {
}
| 90 |
| 91 int32_t VideoRenderOpenGles20::Setup(int32_t width, int32_t height) { |
| 92 WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, |
| 93 "%s: width %d, height %d", __FUNCTION__, (int) width, |
| 94 (int) height); |
| 95 |
| 96 printGLString("Version", GL_VERSION); |
| 97 printGLString("Vendor", GL_VENDOR); |
| 98 printGLString("Renderer", GL_RENDERER); |
| 99 printGLString("Extensions", GL_EXTENSIONS); |
| 100 |
| 101 int maxTextureImageUnits[2]; |
| 102 int maxTextureSize[2]; |
| 103 glGetIntegerv(GL_MAX_TEXTURE_IMAGE_UNITS, maxTextureImageUnits); |
| 104 glGetIntegerv(GL_MAX_TEXTURE_SIZE, maxTextureSize); |
| 105 |
| 106 WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, |
| 107 "%s: number of textures %d, size %d", __FUNCTION__, |
| 108 (int) maxTextureImageUnits[0], (int) maxTextureSize[0]); |
| 109 |
| 110 _program = createProgram(g_vertextShader, g_fragmentShader); |
| 111 if (!_program) { |
| 112 WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, |
| 113 "%s: Could not create program", __FUNCTION__); |
| 114 return -1; |
| 115 } |
| 116 |
| 117 int positionHandle = glGetAttribLocation(_program, "aPosition"); |
| 118 checkGlError("glGetAttribLocation aPosition"); |
| 119 if (positionHandle == -1) { |
| 120 WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, |
| 121 "%s: Could not get aPosition handle", __FUNCTION__); |
| 122 return -1; |
| 123 } |
| 124 |
| 125 int textureHandle = glGetAttribLocation(_program, "aTextureCoord"); |
| 126 checkGlError("glGetAttribLocation aTextureCoord"); |
| 127 if (textureHandle == -1) { |
| 128 WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, |
| 129 "%s: Could not get aTextureCoord handle", __FUNCTION__); |
| 130 return -1; |
| 131 } |
| 132 |
| 133 // set the vertices array in the shader |
| 134 // _vertices contains 4 vertices with 5 coordinates. |
| 135 // 3 for (xyz) for the vertices and 2 for the texture |
| 136 glVertexAttribPointer(positionHandle, 3, GL_FLOAT, false, |
| 137 5 * sizeof(GLfloat), _vertices); |
| 138 checkGlError("glVertexAttribPointer aPosition"); |
| 139 |
| 140 glEnableVertexAttribArray(positionHandle); |
| 141 checkGlError("glEnableVertexAttribArray positionHandle"); |
| 142 |
| 143 // set the texture coordinate array in the shader |
| 144 // _vertices contains 4 vertices with 5 coordinates. |
| 145 // 3 for (xyz) for the vertices and 2 for the texture |
| 146 glVertexAttribPointer(textureHandle, 2, GL_FLOAT, false, 5 |
| 147 * sizeof(GLfloat), &_vertices[3]); |
| 148 checkGlError("glVertexAttribPointer maTextureHandle"); |
| 149 glEnableVertexAttribArray(textureHandle); |
| 150 checkGlError("glEnableVertexAttribArray textureHandle"); |
| 151 |
| 152 glUseProgram(_program); |
| 153 int i = glGetUniformLocation(_program, "Ytex"); |
| 154 checkGlError("glGetUniformLocation"); |
| 155 glUniform1i(i, 0); /* Bind Ytex to texture unit 0 */ |
| 156 checkGlError("glUniform1i Ytex"); |
| 157 |
| 158 i = glGetUniformLocation(_program, "Utex"); |
| 159 checkGlError("glGetUniformLocation Utex"); |
| 160 glUniform1i(i, 1); /* Bind Utex to texture unit 1 */ |
| 161 checkGlError("glUniform1i Utex"); |
| 162 |
| 163 i = glGetUniformLocation(_program, "Vtex"); |
| 164 checkGlError("glGetUniformLocation"); |
| 165 glUniform1i(i, 2); /* Bind Vtex to texture unit 2 */ |
| 166 checkGlError("glUniform1i"); |
| 167 |
| 168 glViewport(0, 0, width, height); |
| 169 checkGlError("glViewport"); |
| 170 return 0; |
| 171 } |
| 172 |
| 173 // SetCoordinates |
| 174 // Sets the coordinates where the stream shall be rendered. |
| 175 // Values must be between 0 and 1. |
| 176 int32_t VideoRenderOpenGles20::SetCoordinates(int32_t zOrder, |
| 177 const float left, |
| 178 const float top, |
| 179 const float right, |
| 180 const float bottom) { |
| 181 if ((top > 1 || top < 0) || (right > 1 || right < 0) || |
| 182 (bottom > 1 || bottom < 0) || (left > 1 || left < 0)) { |
| 183 WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, |
| 184 "%s: Wrong coordinates", __FUNCTION__); |
| 185 return -1; |
| 186 } |
| 187 |
| 188 // X, Y, Z, U, V |
| 189 // -1, -1, 0, 0, 1, // Bottom Left |
| 190 // 1, -1, 0, 1, 1, //Bottom Right |
| 191 // 1, 1, 0, 1, 0, //Top Right |
| 192 // -1, 1, 0, 0, 0 //Top Left |
| 193 |
| 194 // Bottom Left |
| 195 _vertices[0] = (left * 2) - 1; |
| 196 _vertices[1] = -1 * (2 * bottom) + 1; |
| 197 _vertices[2] = zOrder; |
| 198 |
| 199 //Bottom Right |
| 200 _vertices[5] = (right * 2) - 1; |
| 201 _vertices[6] = -1 * (2 * bottom) + 1; |
| 202 _vertices[7] = zOrder; |
| 203 |
| 204 //Top Right |
| 205 _vertices[10] = (right * 2) - 1; |
| 206 _vertices[11] = -1 * (2 * top) + 1; |
| 207 _vertices[12] = zOrder; |
| 208 |
| 209 //Top Left |
| 210 _vertices[15] = (left * 2) - 1; |
| 211 _vertices[16] = -1 * (2 * top) + 1; |
| 212 _vertices[17] = zOrder; |
| 213 |
| 214 return 0; |
| 215 } |
| 216 |
| 217 int32_t VideoRenderOpenGles20::Render(const VideoFrame& frameToRender) { |
| 218 if (frameToRender.IsZeroSize()) { |
| 219 return -1; |
| 220 } |
| 221 |
| 222 WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: id %d", |
| 223 __FUNCTION__, (int) _id); |
| 224 |
| 225 glUseProgram(_program); |
| 226 checkGlError("glUseProgram"); |
| 227 |
| 228 if (_textureWidth != (GLsizei) frameToRender.width() || |
| 229 _textureHeight != (GLsizei) frameToRender.height()) { |
| 230 SetupTextures(frameToRender); |
| 231 } |
| 232 UpdateTextures(frameToRender); |
| 233 |
| 234 glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_BYTE, g_indices); |
| 235 checkGlError("glDrawArrays"); |
| 236 |
| 237 return 0; |
| 238 } |
| 239 |
| 240 GLuint VideoRenderOpenGles20::loadShader(GLenum shaderType, |
| 241 const char* pSource) { |
| 242 GLuint shader = glCreateShader(shaderType); |
| 243 if (shader) { |
| 244 glShaderSource(shader, 1, &pSource, NULL); |
| 245 glCompileShader(shader); |
| 246 GLint compiled = 0; |
| 247 glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled); |
| 248 if (!compiled) { |
| 249 GLint infoLen = 0; |
| 250 glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLen); |
| 251 if (infoLen) { |
| 252 char* buf = (char*) malloc(infoLen); |
| 253 if (buf) { |
| 254 glGetShaderInfoLog(shader, infoLen, NULL, buf); |
| 255 WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, |
| 256 "%s: Could not compile shader %d: %s", |
| 257 __FUNCTION__, shaderType, buf); |
| 258 free(buf); |
| 259 } |
| 260 glDeleteShader(shader); |
| 261 shader = 0; |
| 262 } |
| 263 } |
| 264 } |
| 265 return shader; |
| 266 } |
| 267 |
| 268 GLuint VideoRenderOpenGles20::createProgram(const char* pVertexSource, |
| 269 const char* pFragmentSource) { |
| 270 GLuint vertexShader = loadShader(GL_VERTEX_SHADER, pVertexSource); |
| 271 if (!vertexShader) { |
| 272 return 0; |
| 273 } |
| 274 |
| 275 GLuint pixelShader = loadShader(GL_FRAGMENT_SHADER, pFragmentSource); |
| 276 if (!pixelShader) { |
| 277 return 0; |
| 278 } |
| 279 |
| 280 GLuint program = glCreateProgram(); |
| 281 if (program) { |
| 282 glAttachShader(program, vertexShader); |
| 283 checkGlError("glAttachShader"); |
| 284 glAttachShader(program, pixelShader); |
| 285 checkGlError("glAttachShader"); |
| 286 glLinkProgram(program); |
| 287 GLint linkStatus = GL_FALSE; |
| 288 glGetProgramiv(program, GL_LINK_STATUS, &linkStatus); |
| 289 if (linkStatus != GL_TRUE) { |
| 290 GLint bufLength = 0; |
| 291 glGetProgramiv(program, GL_INFO_LOG_LENGTH, &bufLength); |
| 292 if (bufLength) { |
| 293 char* buf = (char*) malloc(bufLength); |
| 294 if (buf) { |
| 295 glGetProgramInfoLog(program, bufLength, NULL, buf); |
| 296 WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, |
| 297 "%s: Could not link program: %s", |
| 298 __FUNCTION__, buf); |
| 299 free(buf); |
| 300 } |
| 301 } |
| 302 glDeleteProgram(program); |
| 303 program = 0; |
| 304 } |
| 305 } |
| 306 return program; |
| 307 } |
| 308 |
| 309 void VideoRenderOpenGles20::printGLString(const char *name, GLenum s) { |
| 310 const char *v = (const char *) glGetString(s); |
| 311 WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "GL %s = %s\n", |
| 312 name, v); |
| 313 } |
| 314 |
// Logs all pending GL errors recorded since the last check, tagged with
// the operation name |op|.
// Only active in ANDROID_LOG builds, where the loop also drains the GL
// error queue; otherwise this is a no-op and any pending errors are left
// queued for the next caller of glGetError().
void VideoRenderOpenGles20::checkGlError(const char* op) {
#ifdef ANDROID_LOG
  for (GLint error = glGetError(); error; error = glGetError()) {
    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                 "after %s() glError (0x%x)\n", op, error);
  }
#else
  return;
#endif
}
| 325 |
| 326 static void InitializeTexture(int name, int id, int width, int height) { |
| 327 glActiveTexture(name); |
| 328 glBindTexture(GL_TEXTURE_2D, id); |
| 329 glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST); |
| 330 glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); |
| 331 glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); |
| 332 glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); |
| 333 glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width, height, 0, |
| 334 GL_LUMINANCE, GL_UNSIGNED_BYTE, NULL); |
| 335 } |
| 336 |
| 337 void VideoRenderOpenGles20::SetupTextures(const VideoFrame& frameToRender) { |
| 338 WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, |
| 339 "%s: width %d, height %d", __FUNCTION__, |
| 340 frameToRender.width(), frameToRender.height()); |
| 341 |
| 342 const GLsizei width = frameToRender.width(); |
| 343 const GLsizei height = frameToRender.height(); |
| 344 |
| 345 glGenTextures(3, _textureIds); //Generate the Y, U and V texture |
| 346 InitializeTexture(GL_TEXTURE0, _textureIds[0], width, height); |
| 347 InitializeTexture(GL_TEXTURE1, _textureIds[1], width / 2, height / 2); |
| 348 InitializeTexture(GL_TEXTURE2, _textureIds[2], width / 2, height / 2); |
| 349 |
| 350 checkGlError("SetupTextures"); |
| 351 |
| 352 _textureWidth = width; |
| 353 _textureHeight = height; |
| 354 } |
| 355 |
| 356 // Uploads a plane of pixel data, accounting for stride != width*bpp. |
| 357 static void GlTexSubImage2D(GLsizei width, GLsizei height, int stride, |
| 358 const uint8_t* plane) { |
| 359 if (stride == width) { |
| 360 // Yay! We can upload the entire plane in a single GL call. |
| 361 glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height, GL_LUMINANCE, |
| 362 GL_UNSIGNED_BYTE, |
| 363 static_cast<const GLvoid*>(plane)); |
| 364 } else { |
| 365 // Boo! Since GLES2 doesn't have GL_UNPACK_ROW_LENGTH and Android doesn't |
| 366 // have GL_EXT_unpack_subimage we have to upload a row at a time. Ick. |
| 367 for (int row = 0; row < height; ++row) { |
| 368 glTexSubImage2D(GL_TEXTURE_2D, 0, 0, row, width, 1, GL_LUMINANCE, |
| 369 GL_UNSIGNED_BYTE, |
| 370 static_cast<const GLvoid*>(plane + (row * stride))); |
| 371 } |
| 372 } |
| 373 } |
| 374 |
| 375 void VideoRenderOpenGles20::UpdateTextures(const VideoFrame& frameToRender) { |
| 376 const GLsizei width = frameToRender.width(); |
| 377 const GLsizei height = frameToRender.height(); |
| 378 |
| 379 glActiveTexture(GL_TEXTURE0); |
| 380 glBindTexture(GL_TEXTURE_2D, _textureIds[0]); |
| 381 GlTexSubImage2D(width, height, frameToRender.stride(kYPlane), |
| 382 frameToRender.buffer(kYPlane)); |
| 383 |
| 384 glActiveTexture(GL_TEXTURE1); |
| 385 glBindTexture(GL_TEXTURE_2D, _textureIds[1]); |
| 386 GlTexSubImage2D(width / 2, height / 2, frameToRender.stride(kUPlane), |
| 387 frameToRender.buffer(kUPlane)); |
| 388 |
| 389 glActiveTexture(GL_TEXTURE2); |
| 390 glBindTexture(GL_TEXTURE_2D, _textureIds[2]); |
| 391 GlTexSubImage2D(width / 2, height / 2, frameToRender.stride(kVPlane), |
| 392 frameToRender.buffer(kVPlane)); |
| 393 |
| 394 checkGlError("UpdateTextures"); |
| 395 } |
| 396 |
| 397 } // namespace webrtc |
OLD | NEW |