/*
 * Copyright 2016 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#import "RTCShader.h"

#include <vector>

#import "RTCShader+Private.h"
#import "WebRTC/RTCVideoFrame.h"

// |kNumTextures| must not exceed 8, which is the limit in OpenGLES2. Two sets
// of 3 textures are used here, one for each of the Y, U and V planes. Having
// two sets alleviates CPU blockage in the event that the GPU is asked to render
// to a texture that is already in use.
static const GLsizei kNumTextureSets = 2;
static const GLsizei kNumTexturesPerSet = 3;
static const GLsizei kNumTextures = kNumTexturesPerSet * kNumTextureSets;

// Fragment shader converts YUV values from input textures into a final RGB
// pixel. The conversion formula is from http://www.fourcc.org/fccyvrgb.php.
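// U and V are re-centered from [0, 1] to [-0.5, 0.5] before the weighted sums
// below are applied; the weights (1.403, 0.344, 0.714, 1.770) are the commonly
// used BT.601-style YUV-to-RGB coefficients.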
static const char kI420FragmentShaderSource[] =
  SHADER_VERSION
  "precision highp float;"
  FRAGMENT_SHADER_IN " vec2 v_texcoord;\n"
  "uniform lowp sampler2D s_textureY;\n"
  "uniform lowp sampler2D s_textureU;\n"
  "uniform lowp sampler2D s_textureV;\n"
  FRAGMENT_SHADER_OUT
  "void main() {\n"
  "  float y, u, v, r, g, b;\n"
  "  y = " FRAGMENT_SHADER_TEXTURE "(s_textureY, v_texcoord).r;\n"
  "  u = " FRAGMENT_SHADER_TEXTURE "(s_textureU, v_texcoord).r;\n"
  "  v = " FRAGMENT_SHADER_TEXTURE "(s_textureV, v_texcoord).r;\n"
  "  u = u - 0.5;\n"
  "  v = v - 0.5;\n"
  "  r = y + 1.403 * v;\n"
  "  g = y - 0.344 * u - 0.714 * v;\n"
  "  b = y + 1.770 * u;\n"
  "  " FRAGMENT_SHADER_COLOR " = vec4(r, g, b, 1.0);\n"
  "}\n";

@implementation RTCI420Shader {
  BOOL _hasUnpackRowLength;
  GLint _currentTextureSet;
  // Handles for OpenGL constructs.
  GLuint _textures[kNumTextures];
  GLuint _i420Program;
  GLuint _vertexArray;
  GLuint _vertexBuffer;
  GLint _ySampler;
  GLint _uSampler;
  GLint _vSampler;
  // Used to create a non-padded plane for GPU upload when we receive padded
  // frames.
  std::vector<uint8_t> _planeBuffer;
}

- (instancetype)initWithContext:(GlContextType *)context {
  if (self = [super init]) {
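    // GL_UNPACK_ROW_LENGTH is only available with the OpenGL ES 3 API on iOS;
    // desktop OpenGL always supports it, hence the unconditional YES below.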
#if TARGET_OS_IPHONE
    _hasUnpackRowLength = (context.API == kEAGLRenderingAPIOpenGLES3);
#else
    _hasUnpackRowLength = YES;
#endif
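    // Use 1-byte unpack alignment: plane strides are not guaranteed to be
    // multiples of the default 4-byte alignment.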
    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
    if (![self setupI420Program] || ![self setupTextures] ||
        !RTCSetupVerticesForProgram(_i420Program, &_vertexBuffer, &_vertexArray)) {
      self = nil;
    }
  }
  return self;
}

- (void)dealloc {
  glDeleteProgram(_i420Program);
  glDeleteTextures(kNumTextures, _textures);
  glDeleteBuffers(1, &_vertexBuffer);
  glDeleteVertexArrays(1, &_vertexArray);
}

- (BOOL)setupI420Program {
  _i420Program = RTCCreateProgramFromFragmentSource(kI420FragmentShaderSource);
  if (!_i420Program) {
    return NO;
  }
  _ySampler = glGetUniformLocation(_i420Program, "s_textureY");
  _uSampler = glGetUniformLocation(_i420Program, "s_textureU");
  _vSampler = glGetUniformLocation(_i420Program, "s_textureV");

  return (_ySampler >= 0 && _uSampler >= 0 && _vSampler >= 0);
}

- (BOOL)setupTextures {
  glGenTextures(kNumTextures, _textures);
  // Set parameters for each of the textures we created.
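  // Linear filtering without mipmaps and CLAMP_TO_EDGE wrapping are the
  // parameters required for non-power-of-two textures in OpenGL ES 2.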
  for (GLsizei i = 0; i < kNumTextures; i++) {
    glBindTexture(GL_TEXTURE_2D, _textures[i]);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
  }
  return YES;
}

- (BOOL)drawFrame:(RTCVideoFrame *)frame {
  glUseProgram(_i420Program);
  if (![self updateTextureDataForFrame:frame]) {
    return NO;
  }
#if !TARGET_OS_IPHONE
  glBindVertexArray(_vertexArray);
#endif
  glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
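  // Draw the quad in the vertex buffer as a four-vertex triangle fan.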
  glDrawArrays(GL_TRIANGLE_FAN, 0, 4);

  return YES;
}

- (void)uploadPlane:(const uint8_t *)plane
            sampler:(GLint)sampler
             offset:(GLint)offset
              width:(size_t)width
             height:(size_t)height
             stride:(int32_t)stride {
  glActiveTexture(static_cast<GLenum>(GL_TEXTURE0 + offset));
  glBindTexture(GL_TEXTURE_2D, _textures[offset]);

  // When setting texture sampler uniforms, the texture index is used, not
  // the texture handle.
  glUniform1i(sampler, offset);
  const uint8_t *uploadPlane = plane;
  if ((size_t)stride != width) {
    if (_hasUnpackRowLength) {
      // GLES3 allows us to specify stride.
      glPixelStorei(GL_UNPACK_ROW_LENGTH, stride);
      glTexImage2D(GL_TEXTURE_2D,
                   0,
                   RTC_PIXEL_FORMAT,
                   static_cast<GLsizei>(width),
                   static_cast<GLsizei>(height),
                   0,
                   RTC_PIXEL_FORMAT,
                   GL_UNSIGNED_BYTE,
                   uploadPlane);
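      // Restore the default unpack row length so later uploads are unaffected.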
      glPixelStorei(GL_UNPACK_ROW_LENGTH, 0);
      return;
    } else {
      // Make an unpadded copy and upload that instead. Quick profiling showed
      // that this is faster than uploading row by row using glTexSubImage2D.
      uint8_t *unpaddedPlane = _planeBuffer.data();
      for (size_t y = 0; y < height; ++y) {
        memcpy(unpaddedPlane + y * width, plane + y * stride, width);
      }
      uploadPlane = unpaddedPlane;
    }
  }
  glTexImage2D(GL_TEXTURE_2D,
               0,
               RTC_PIXEL_FORMAT,
               static_cast<GLsizei>(width),
               static_cast<GLsizei>(height),
               0,
               RTC_PIXEL_FORMAT,
               GL_UNSIGNED_BYTE,
               uploadPlane);
}

- (BOOL)updateTextureDataForFrame:(RTCVideoFrame *)frame {
  GLint textureOffset = _currentTextureSet * 3;
  NSAssert(textureOffset + 3 <= kNumTextures, @"invalid offset");

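  // If any plane is padded (its pitch exceeds the visible width), size the
  // scratch buffer for the largest plane (Y) so uploadPlane can repack rows.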
  if (frame.yPitch != static_cast<int32_t>(frame.width) ||
      frame.uPitch != static_cast<int32_t>(frame.chromaWidth) ||
      frame.vPitch != static_cast<int32_t>(frame.chromaWidth)) {
    _planeBuffer.resize(frame.width * frame.height);
  }

  [self uploadPlane:frame.yPlane
            sampler:_ySampler
             offset:textureOffset
              width:frame.width
             height:frame.height
             stride:frame.yPitch];

  [self uploadPlane:frame.uPlane
            sampler:_uSampler
             offset:textureOffset + 1
              width:frame.chromaWidth
             height:frame.chromaHeight
             stride:frame.uPitch];

  [self uploadPlane:frame.vPlane
            sampler:_vSampler
             offset:textureOffset + 2
              width:frame.chromaWidth
             height:frame.chromaHeight
             stride:frame.vPitch];

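  // Alternate between the two texture sets so the next frame does not upload
  // into textures the GPU may still be reading from.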
  _currentTextureSet = (_currentTextureSet + 1) % kNumTextureSets;
  return YES;
}

@end