| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright 2016 The WebRTC project authors. All Rights Reserved. | 2 * Copyright 2017 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| 11 #import "RTCShader.h" | 11 #import "RTCI420TextureCache.h" |
| | 12 |
| | 13 #import "RTCShader+Private.h" |
| 12 | 14 |
| 13 #include <vector> | 15 #include <vector> |
| 14 | 16 |
| 15 #import "RTCShader+Private.h" | 17 // Two sets of 3 textures are used here, one for each of the Y, U and V planes.
Having two sets |
| 16 #import "WebRTC/RTCLogging.h" | 18 // alleviates CPU blockage in the event that the GPU is asked to render to a tex
ture that is already |
| 17 #import "WebRTC/RTCVideoFrame.h" | 19 // in use. |
| 18 | |
| 19 #include "webrtc/base/optional.h" | |
| 20 | |
| 21 // |kNumTextures| must not exceed 8, which is the limit in OpenGLES2. Two sets | |
| 22 // of 3 textures are used here, one for each of the Y, U and V planes. Having | |
| 23 // two sets alleviates CPU blockage in the event that the GPU is asked to render | |
| 24 // to a texture that is already in use. | |
| 25 static const GLsizei kNumTextureSets = 2; | 20 static const GLsizei kNumTextureSets = 2; |
| 26 static const GLsizei kNumTexturesPerSet = 3; | 21 static const GLsizei kNumTexturesPerSet = 3; |
| 27 static const GLsizei kNumTextures = kNumTexturesPerSet * kNumTextureSets; | 22 static const GLsizei kNumTextures = kNumTexturesPerSet * kNumTextureSets; |
| 28 | 23 |
| 29 // Fragment shader converts YUV values from input textures into a final RGB | 24 @implementation RTCI420TextureCache { |
| 30 // pixel. The conversion formula is from http://www.fourcc.org/fccyvrgb.php. | |
| 31 static const char kI420FragmentShaderSource[] = | |
| 32 SHADER_VERSION | |
| 33 "precision highp float;" | |
| 34 FRAGMENT_SHADER_IN " vec2 v_texcoord;\n" | |
| 35 "uniform lowp sampler2D s_textureY;\n" | |
| 36 "uniform lowp sampler2D s_textureU;\n" | |
| 37 "uniform lowp sampler2D s_textureV;\n" | |
| 38 FRAGMENT_SHADER_OUT | |
| 39 "void main() {\n" | |
| 40 " float y, u, v, r, g, b;\n" | |
| 41 " y = " FRAGMENT_SHADER_TEXTURE "(s_textureY, v_texcoord).r;\n" | |
| 42 " u = " FRAGMENT_SHADER_TEXTURE "(s_textureU, v_texcoord).r;\n" | |
| 43 " v = " FRAGMENT_SHADER_TEXTURE "(s_textureV, v_texcoord).r;\n" | |
| 44 " u = u - 0.5;\n" | |
| 45 " v = v - 0.5;\n" | |
| 46 " r = y + 1.403 * v;\n" | |
| 47 " g = y - 0.344 * u - 0.714 * v;\n" | |
| 48 " b = y + 1.770 * u;\n" | |
| 49 " " FRAGMENT_SHADER_COLOR " = vec4(r, g, b, 1.0);\n" | |
| 50 " }\n"; | |
| 51 | |
| 52 @implementation RTCI420Shader { | |
| 53 BOOL _hasUnpackRowLength; | 25 BOOL _hasUnpackRowLength; |
| 54 GLint _currentTextureSet; | 26 GLint _currentTextureSet; |
| 55 // Handles for OpenGL constructs. | 27 // Handles for OpenGL constructs. |
| 56 GLuint _textures[kNumTextures]; | 28 GLuint _textures[kNumTextures]; |
| 57 GLuint _i420Program; | 29 // Used to create a non-padded plane for GPU upload when we receive padded frames. |
| 58 GLuint _vertexArray; | |
| 59 GLuint _vertexBuffer; | |
| 60 GLint _ySampler; | |
| 61 GLint _uSampler; | |
| 62 GLint _vSampler; | |
| 63 // Store current rotation and only upload new vertex data when rotation | |
| 64 // changes. | |
| 65 rtc::Optional<RTCVideoRotation> _currentRotation; | |
| 66 // Used to create a non-padded plane for GPU upload when we receive padded | |
| 67 // frames. | |
| 68 std::vector<uint8_t> _planeBuffer; | 30 std::vector<uint8_t> _planeBuffer; |
| 69 } | 31 } |
| 70 | 32 |
| | 33 - (GLuint)yTexture { |
| | 34 return _textures[_currentTextureSet * kNumTexturesPerSet]; |
| | 35 } |
| | 36 |
| | 37 - (GLuint)uTexture { |
| | 38 return _textures[_currentTextureSet * kNumTexturesPerSet + 1]; |
| | 39 } |
| | 40 |
| | 41 - (GLuint)vTexture { |
| | 42 return _textures[_currentTextureSet * kNumTexturesPerSet + 2]; |
| | 43 } |
| | 44 |
| 71 - (instancetype)initWithContext:(GlContextType *)context { | 45 - (instancetype)initWithContext:(GlContextType *)context { |
| 72 if (self = [super init]) { | 46 if (self = [super init]) { |
| 73 #if TARGET_OS_IPHONE | 47 #if TARGET_OS_IPHONE |
| 74 _hasUnpackRowLength = (context.API == kEAGLRenderingAPIOpenGLES3); | 48 _hasUnpackRowLength = (context.API == kEAGLRenderingAPIOpenGLES3); |
| 75 #else | 49 #else |
| 76 _hasUnpackRowLength = YES; | 50 _hasUnpackRowLength = YES; |
| 77 #endif | 51 #endif |
| 78 glPixelStorei(GL_UNPACK_ALIGNMENT, 1); | 52 glPixelStorei(GL_UNPACK_ALIGNMENT, 1); |
| 79 if (![self setupI420Program] || ![self setupTextures] || | 53 |
| 80 !RTCSetupVerticesForProgram(_i420Program, &_vertexBuffer, &_vertexArray)) { | 54 [self setupTextures]; |
| 81 RTCLog(@"Failed to initialize RTCI420Shader."); | |
| 82 self = nil; | |
| 83 } | |
| 84 } | 55 } |
| 85 return self; | 56 return self; |
| 86 } | 57 } |
| 87 | 58 |
| 88 - (void)dealloc { | 59 - (void)dealloc { |
| 89 glDeleteProgram(_i420Program); | |
| 90 glDeleteTextures(kNumTextures, _textures); | 60 glDeleteTextures(kNumTextures, _textures); |
| 91 glDeleteBuffers(1, &_vertexBuffer); | |
| 92 glDeleteVertexArrays(1, &_vertexArray); | |
| 93 } | 61 } |
| 94 | 62 |
| 95 - (BOOL)setupI420Program { | 63 - (void)setupTextures { |
| 96 _i420Program = RTCCreateProgramFromFragmentSource(kI420FragmentShaderSource); | |
| 97 if (!_i420Program) { | |
| 98 return NO; | |
| 99 } | |
| 100 _ySampler = glGetUniformLocation(_i420Program, "s_textureY"); | |
| 101 _uSampler = glGetUniformLocation(_i420Program, "s_textureU"); | |
| 102 _vSampler = glGetUniformLocation(_i420Program, "s_textureV"); | |
| 103 | |
| 104 return (_ySampler >= 0 && _uSampler >= 0 && _vSampler >= 0); | |
| 105 } | |
| 106 | |
| 107 - (BOOL)setupTextures { | |
| 108 glGenTextures(kNumTextures, _textures); | 64 glGenTextures(kNumTextures, _textures); |
| 109 // Set parameters for each of the textures we created. | 65 // Set parameters for each of the textures we created. |
| 110 for (GLsizei i = 0; i < kNumTextures; i++) { | 66 for (GLsizei i = 0; i < kNumTextures; i++) { |
| 111 glBindTexture(GL_TEXTURE_2D, _textures[i]); | 67 glBindTexture(GL_TEXTURE_2D, _textures[i]); |
| 112 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); | 68 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); |
| 113 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); | 69 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); |
| 114 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); | 70 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); |
| 115 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); | 71 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); |
| 116 } | 72 } |
| 117 return YES; | |
| 118 } | |
| 119 | |
| 120 - (BOOL)drawFrame:(RTCVideoFrame*)frame { | |
| 121 glUseProgram(_i420Program); | |
| 122 if (![self updateTextureDataForFrame:frame]) { | |
| 123 return NO; | |
| 124 } | |
| 125 #if !TARGET_OS_IPHONE | |
| 126 glBindVertexArray(_vertexArray); | |
| 127 #endif | |
| 128 glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer); | |
| 129 if (!_currentRotation || frame.rotation != *_currentRotation) { | |
| 130 _currentRotation = rtc::Optional<RTCVideoRotation>(frame.rotation); | |
| 131 RTCSetVertexData(*_currentRotation); | |
| 132 } | |
| 133 glDrawArrays(GL_TRIANGLE_FAN, 0, 4); | |
| 134 | |
| 135 return YES; | |
| 136 } | 73 } |
| 137 | 74 |
| 138 - (void)uploadPlane:(const uint8_t *)plane | 75 - (void)uploadPlane:(const uint8_t *)plane |
| 139 sampler:(GLint)sampler | 76 texture:(GLuint)texture |
| 140 offset:(GLint)offset | |
| 141 width:(size_t)width | 77 width:(size_t)width |
| 142 height:(size_t)height | 78 height:(size_t)height |
| 143 stride:(int32_t)stride { | 79 stride:(int32_t)stride { |
| 144 glActiveTexture(static_cast<GLenum>(GL_TEXTURE0 + offset)); | 80 glBindTexture(GL_TEXTURE_2D, texture); |
| 145 glBindTexture(GL_TEXTURE_2D, _textures[offset]); | |
| 146 | 81 |
| 147 // When setting texture sampler uniforms, the texture index is used not | |
| 148 // the texture handle. | |
| 149 glUniform1i(sampler, offset); | |
| 150 const uint8_t *uploadPlane = plane; | 82 const uint8_t *uploadPlane = plane; |
| 151 if ((size_t)stride != width) { | 83 if ((size_t)stride != width) { |
| 152 if (_hasUnpackRowLength) { | 84 if (_hasUnpackRowLength) { |
| 153 // GLES3 allows us to specify stride. | 85 // GLES3 allows us to specify stride. |
| 154 glPixelStorei(GL_UNPACK_ROW_LENGTH, stride); | 86 glPixelStorei(GL_UNPACK_ROW_LENGTH, stride); |
| 155 glTexImage2D(GL_TEXTURE_2D, | 87 glTexImage2D(GL_TEXTURE_2D, |
| 156 0, | 88 0, |
| 157 RTC_PIXEL_FORMAT, | 89 RTC_PIXEL_FORMAT, |
| 158 static_cast<GLsizei>(width), | 90 static_cast<GLsizei>(width), |
| 159 static_cast<GLsizei>(height), | 91 static_cast<GLsizei>(height), |
| (...skipping 17 matching lines...) | |
| 177 0, | 109 0, |
| 178 RTC_PIXEL_FORMAT, | 110 RTC_PIXEL_FORMAT, |
| 179 static_cast<GLsizei>(width), | 111 static_cast<GLsizei>(width), |
| 180 static_cast<GLsizei>(height), | 112 static_cast<GLsizei>(height), |
| 181 0, | 113 0, |
| 182 RTC_PIXEL_FORMAT, | 114 RTC_PIXEL_FORMAT, |
| 183 GL_UNSIGNED_BYTE, | 115 GL_UNSIGNED_BYTE, |
| 184 uploadPlane); | 116 uploadPlane); |
| 185 } | 117 } |
| 186 | 118 |
| 187 - (BOOL)updateTextureDataForFrame:(RTCVideoFrame *)frame { | 119 - (void)uploadFrameToTextures:(RTCVideoFrame *)frame { |
| 188 GLint textureOffset = _currentTextureSet * 3; | 120 _currentTextureSet = (_currentTextureSet + 1) % kNumTextureSets; |
| 189 NSAssert(textureOffset + 3 <= kNumTextures, @"invalid offset"); | |
| 190 | 121 |
| 191 const int chromaWidth = (frame.width + 1) / 2; | 122 const int chromaWidth = (frame.width + 1) / 2; |
| 192 const int chromaHeight = (frame.height + 1) / 2; | 123 const int chromaHeight = (frame.height + 1) / 2; |
| 193 if (frame.strideY != frame.width || | 124 if (frame.strideY != frame.width || |
| 194 frame.strideU != chromaWidth || | 125 frame.strideU != chromaWidth || |
| 195 frame.strideV != chromaWidth) { | 126 frame.strideV != chromaWidth) { |
| 196 _planeBuffer.resize(frame.width * frame.height); | 127 _planeBuffer.resize(frame.width * frame.height); |
| 197 } | 128 } |
| 198 | 129 |
| 199 [self uploadPlane:frame.dataY | 130 [self uploadPlane:frame.dataY |
| 200 sampler:_ySampler | 131 texture:self.yTexture |
| 201 offset:textureOffset | |
| 202 width:frame.width | 132 width:frame.width |
| 203 height:frame.height | 133 height:frame.height |
| 204 stride:frame.strideY]; | 134 stride:frame.strideY]; |
| 205 | 135 |
| 206 [self uploadPlane:frame.dataU | 136 [self uploadPlane:frame.dataU |
| 207 sampler:_uSampler | 137 texture:self.uTexture |
| 208 offset:textureOffset + 1 | |
| 209 width:chromaWidth | 138 width:chromaWidth |
| 210 height:chromaHeight | 139 height:chromaHeight |
| 211 stride:frame.strideU]; | 140 stride:frame.strideU]; |
| 212 | 141 |
| 213 [self uploadPlane:frame.dataV | 142 [self uploadPlane:frame.dataV |
| 214 sampler:_vSampler | 143 texture:self.vTexture |
| 215 offset:textureOffset + 2 | |
| 216 width:chromaWidth | 144 width:chromaWidth |
| 217 height:chromaHeight | 145 height:chromaHeight |
| 218 stride:frame.strideV]; | 146 stride:frame.strideV]; |
| 219 | |
| 220 _currentTextureSet = (_currentTextureSet + 1) % kNumTextureSets; | |
| 221 return YES; | |
| 222 } | 147 } |
| 223 | 148 |
| 224 @end | 149 @end |
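
For reference, below is a minimal sketch (not part of this CL) of how a renderer could drive the new RTCI420TextureCache. It assumes the class's header exposes initWithContext:, uploadFrameToTextures:, and property-style yTexture/uTexture/vTexture accessors matching the implementation above, and that the caller owns a linked GL program with s_textureY/s_textureU/s_textureV sampler uniforms like the fragment shader removed from the old RTCI420Shader. The helper name drawI420Frame, the fixed texture units 0-2, and the iOS-style OpenGL ES include are illustrative assumptions.

```objc
// Sketch only. drawI420Frame, the program handle, and the texture unit
// assignments below are assumptions made for illustration; they are not
// part of this CL.
#import <OpenGLES/ES3/gl.h>

#import "RTCI420TextureCache.h"
#import "WebRTC/RTCVideoFrame.h"

static void drawI420Frame(RTCI420TextureCache *cache,
                          RTCVideoFrame *frame,
                          GLuint program) {
  // Flips to the other texture set and uploads the frame's Y/U/V planes, so
  // the GPU can keep reading the previously uploaded set in the meantime.
  [cache uploadFrameToTextures:frame];

  glUseProgram(program);

  // Sampler uniforms take the texture unit index, not the texture handle.
  glActiveTexture(GL_TEXTURE0);
  glBindTexture(GL_TEXTURE_2D, cache.yTexture);
  glUniform1i(glGetUniformLocation(program, "s_textureY"), 0);

  glActiveTexture(GL_TEXTURE1);
  glBindTexture(GL_TEXTURE_2D, cache.uTexture);
  glUniform1i(glGetUniformLocation(program, "s_textureU"), 1);

  glActiveTexture(GL_TEXTURE2);
  glBindTexture(GL_TEXTURE_2D, cache.vTexture);
  glUniform1i(glGetUniformLocation(program, "s_textureV"), 2);

  // Quad geometry and rotation handling stay with the shader/vertex code and
  // are assumed to have been bound by the caller already.
  glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
}
```

Because uploadFrameToTextures: advances _currentTextureSet before uploading, the yTexture/uTexture/vTexture getters always refer to the set holding the most recently uploaded frame, while the GPU may still be sampling from the previous set.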