OLD | NEW |
---|---|
1 /* | 1 /* |
2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. | 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
11 #import "RTCOpenGLVideoRenderer.h" | 11 #import "RTCOpenGLVideoRenderer.h" |
12 | 12 |
13 #if TARGET_OS_IPHONE | 13 #if TARGET_OS_IPHONE |
14 #import <CoreVideo/CVOpenGLESTextureCache.h> | |
14 #import <OpenGLES/ES3/gl.h> | 15 #import <OpenGLES/ES3/gl.h> |
15 #else | 16 #else |
16 #import <OpenGL/gl3.h> | 17 #import <OpenGL/gl3.h> |
17 #endif | 18 #endif |
19 #include <memory> | |
18 #include <string.h> | 20 #include <string.h> |
19 #include <memory> | 21 #include <vector> |
20 | 22 |
21 #import "WebRTC/RTCVideoFrame.h" | 23 #import "WebRTC/RTCVideoFrame.h" |
22 | 24 |
25 #include "webrtc/base/checks.h" | |
26 #include "webrtc/base/logging.h" | |
23 | 27 |
24 // TODO(tkchin): check and log openGL errors. Methods here return BOOLs in | 28 // TODO(tkchin): check and log openGL errors. Methods here return BOOLs in |
25 // anticipation of that happening in the future. | 29 // anticipation of that happening in the future. |
26 | 30 |
27 #if TARGET_OS_IPHONE | 31 #if TARGET_OS_IPHONE |
28 #define RTC_PIXEL_FORMAT GL_LUMINANCE | 32 #define RTC_PIXEL_FORMAT GL_LUMINANCE |
29 #define SHADER_VERSION | 33 #define SHADER_VERSION |
30 #define VERTEX_SHADER_IN "attribute" | 34 #define VERTEX_SHADER_IN "attribute" |
31 #define VERTEX_SHADER_OUT "varying" | 35 #define VERTEX_SHADER_OUT "varying" |
32 #define FRAGMENT_SHADER_IN "varying" | 36 #define FRAGMENT_SHADER_IN "varying" |
33 #define FRAGMENT_SHADER_OUT | 37 #define FRAGMENT_SHADER_OUT |
34 #define FRAGMENT_SHADER_COLOR "gl_FragColor" | 38 #define FRAGMENT_SHADER_COLOR "gl_FragColor" |
35 #define FRAGMENT_SHADER_TEXTURE "texture2D" | 39 #define FRAGMENT_SHADER_TEXTURE "texture2D" |
40 | |
41 typedef EAGLContext GlContextType; | |
36 #else | 42 #else |
37 #define RTC_PIXEL_FORMAT GL_RED | 43 #define RTC_PIXEL_FORMAT GL_RED |
38 #define SHADER_VERSION "#version 150\n" | 44 #define SHADER_VERSION "#version 150\n" |
39 #define VERTEX_SHADER_IN "in" | 45 #define VERTEX_SHADER_IN "in" |
40 #define VERTEX_SHADER_OUT "out" | 46 #define VERTEX_SHADER_OUT "out" |
41 #define FRAGMENT_SHADER_IN "in" | 47 #define FRAGMENT_SHADER_IN "in" |
42 #define FRAGMENT_SHADER_OUT "out vec4 fragColor;\n" | 48 #define FRAGMENT_SHADER_OUT "out vec4 fragColor;\n" |
43 #define FRAGMENT_SHADER_COLOR "fragColor" | 49 #define FRAGMENT_SHADER_COLOR "fragColor" |
44 #define FRAGMENT_SHADER_TEXTURE "texture" | 50 #define FRAGMENT_SHADER_TEXTURE "texture" |
51 | |
52 typedef NSOpenGLContext GlContextType; | |
45 #endif | 53 #endif |
46 | 54 |
55 @protocol Shader | |
tkchin_webrtc
2016/08/02 00:17:18
nit: everything in ObjC must be prefixed with the RTC prefix.
| |
56 - (BOOL)drawFrame:(RTCVideoFrame*)frame; | |
tkchin_webrtc
2016/08/02 00:17:18
nit: (RTCVideoFrame *)
| |
57 @end | |
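A minimal sketch of what the naming nit above implies; the RTCShader name is an assumption taken from the reviewer's later `id<RTCShader>` comment, not from the patch itself:

```objc
// Hypothetical prefixed declaration of the protocol (RTCShader is assumed).
#import "WebRTC/RTCVideoFrame.h"

@protocol RTCShader <NSObject>
- (BOOL)drawFrame:(RTCVideoFrame *)frame;
@end
```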
58 | |
47 // Vertex shader doesn't do anything except pass coordinates through. | 59 // Vertex shader doesn't do anything except pass coordinates through. |
48 static const char kVertexShaderSource[] = | 60 static const char kVertexShaderSource[] = |
49 SHADER_VERSION | 61 SHADER_VERSION |
50 VERTEX_SHADER_IN " vec2 position;\n" | 62 VERTEX_SHADER_IN " vec2 position;\n" |
51 VERTEX_SHADER_IN " vec2 texcoord;\n" | 63 VERTEX_SHADER_IN " vec2 texcoord;\n" |
52 VERTEX_SHADER_OUT " vec2 v_texcoord;\n" | 64 VERTEX_SHADER_OUT " vec2 v_texcoord;\n" |
53 "void main() {\n" | 65 "void main() {\n" |
54 " gl_Position = vec4(position.x, position.y, 0.0, 1.0);\n" | 66 " gl_Position = vec4(position.x, position.y, 0.0, 1.0);\n" |
55 " v_texcoord = texcoord;\n" | 67 " v_texcoord = texcoord;\n" |
56 "}\n"; | 68 "}\n"; |
57 | 69 |
58 // Fragment shader converts YUV values from input textures into a final RGB | 70 // Fragment shader converts YUV values from input textures into a final RGB |
59 // pixel. The conversion formula is from http://www.fourcc.org/fccyvrgb.php. | 71 // pixel. The conversion formula is from http://www.fourcc.org/fccyvrgb.php. |
60 static const char kFragmentShaderSource[] = | 72 static const char kI420FragmentShaderSource[] = |
61 SHADER_VERSION | 73 SHADER_VERSION |
62 "precision highp float;" | 74 "precision highp float;" |
63 FRAGMENT_SHADER_IN " vec2 v_texcoord;\n" | 75 FRAGMENT_SHADER_IN " vec2 v_texcoord;\n" |
64 "uniform lowp sampler2D s_textureY;\n" | 76 "uniform lowp sampler2D s_textureY;\n" |
65 "uniform lowp sampler2D s_textureU;\n" | 77 "uniform lowp sampler2D s_textureU;\n" |
66 "uniform lowp sampler2D s_textureV;\n" | 78 "uniform lowp sampler2D s_textureV;\n" |
67 FRAGMENT_SHADER_OUT | 79 FRAGMENT_SHADER_OUT |
68 "void main() {\n" | 80 "void main() {\n" |
69 " float y, u, v, r, g, b;\n" | 81 " float y, u, v, r, g, b;\n" |
70 " y = " FRAGMENT_SHADER_TEXTURE "(s_textureY, v_texcoord).r;\n" | 82 " y = " FRAGMENT_SHADER_TEXTURE "(s_textureY, v_texcoord).r;\n" |
(...skipping 12 matching lines...) | |
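For reference, the skipped shader lines apply the YCbCr-to-RGB formula from the cited fourcc.org page. The following is a hedged reconstruction using the same coefficients that appear in the NV12 shader later in this patch; it is illustrative only and not the verbatim elided diff text:

```objc
// Assumed continuation of kI420FragmentShaderSource (illustrative sketch).
"  u = " FRAGMENT_SHADER_TEXTURE "(s_textureU, v_texcoord).r - 0.5;\n"
"  v = " FRAGMENT_SHADER_TEXTURE "(s_textureV, v_texcoord).r - 0.5;\n"
"  r = y + 1.403 * v;\n"
"  g = y - 0.344 * u - 0.714 * v;\n"
"  b = y + 1.770 * u;\n"
"  " FRAGMENT_SHADER_COLOR " = vec4(r, g, b, 1.0);\n"
"}\n";
```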
83 GLuint CreateShader(GLenum type, const GLchar *source) { | 95 GLuint CreateShader(GLenum type, const GLchar *source) { |
84 GLuint shader = glCreateShader(type); | 96 GLuint shader = glCreateShader(type); |
85 if (!shader) { | 97 if (!shader) { |
86 return 0; | 98 return 0; |
87 } | 99 } |
88 glShaderSource(shader, 1, &source, NULL); | 100 glShaderSource(shader, 1, &source, NULL); |
89 glCompileShader(shader); | 101 glCompileShader(shader); |
90 GLint compileStatus = GL_FALSE; | 102 GLint compileStatus = GL_FALSE; |
91 glGetShaderiv(shader, GL_COMPILE_STATUS, &compileStatus); | 103 glGetShaderiv(shader, GL_COMPILE_STATUS, &compileStatus); |
92 if (compileStatus == GL_FALSE) { | 104 if (compileStatus == GL_FALSE) { |
105 GLint logLength = 0; | |
106 // The null termination character is included in the returned log length. | |
107 glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &logLength); | |
108 if (logLength > 0) { | |
109 std::unique_ptr<char[]> compileLog(new char[logLength]); | |
110 // The returned string is null terminated. | |
111 glGetShaderInfoLog(shader, logLength, NULL, compileLog.get()); | |
112 LOG(LS_ERROR) << "Shader compile error: " << compileLog.get(); | |
113 } | |
93 glDeleteShader(shader); | 114 glDeleteShader(shader); |
94 shader = 0; | 115 shader = 0; |
95 } | 116 } |
96 return shader; | 117 return shader; |
97 } | 118 } |
98 | 119 |
99 // Links a shader program with the given vertex and fragment shaders and | 120 // Links a shader program with the given vertex and fragment shaders and |
100 // returns the program handle or 0 on error. | 121 // returns the program handle or 0 on error. |
101 GLuint CreateProgram(GLuint vertexShader, GLuint fragmentShader) { | 122 GLuint CreateProgram(GLuint vertexShader, GLuint fragmentShader) { |
102 if (vertexShader == 0 || fragmentShader == 0) { | 123 if (vertexShader == 0 || fragmentShader == 0) { |
103 return 0; | 124 return 0; |
104 } | 125 } |
105 GLuint program = glCreateProgram(); | 126 GLuint program = glCreateProgram(); |
106 if (!program) { | 127 if (!program) { |
107 return 0; | 128 return 0; |
108 } | 129 } |
109 glAttachShader(program, vertexShader); | 130 glAttachShader(program, vertexShader); |
110 glAttachShader(program, fragmentShader); | 131 glAttachShader(program, fragmentShader); |
111 glLinkProgram(program); | 132 glLinkProgram(program); |
112 GLint linkStatus = GL_FALSE; | 133 GLint linkStatus = GL_FALSE; |
113 glGetProgramiv(program, GL_LINK_STATUS, &linkStatus); | 134 glGetProgramiv(program, GL_LINK_STATUS, &linkStatus); |
114 if (linkStatus == GL_FALSE) { | 135 if (linkStatus == GL_FALSE) { |
115 glDeleteProgram(program); | 136 glDeleteProgram(program); |
116 program = 0; | 137 program = 0; |
117 } | 138 } |
118 return program; | 139 return program; |
119 } | 140 } |
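The patch adds logging for shader compile failures, but the link failure branch above still fails silently. A hedged sketch of the matching link log, not part of this patch, could look like:

```objc
// Hypothetical logging inside CreateProgram's failure branch, mirroring the
// compile-log handling added to CreateShader in this patch.
if (linkStatus == GL_FALSE) {
  GLint logLength = 0;
  glGetProgramiv(program, GL_INFO_LOG_LENGTH, &logLength);
  if (logLength > 0) {
    std::unique_ptr<char[]> linkLog(new char[logLength]);
    glGetProgramInfoLog(program, logLength, NULL, linkLog.get());
    LOG(LS_ERROR) << "Program link error: " << linkLog.get();
  }
  glDeleteProgram(program);
  program = 0;
}
```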
120 | 141 |
142 // Creates and links a shader program with the given fragment shader source and | |
143 // a plain vertex shader. Returns the program handle or 0 on error. | |
144 GLuint CreateProgramFromFragmentSource(const char fragmentShaderSource[]) { | |
145 GLuint vertexShader = CreateShader(GL_VERTEX_SHADER, kVertexShaderSource); | |
146 RTC_CHECK(vertexShader) << "failed to create vertex shader"; | |
147 GLuint fragmentShader = | |
148 CreateShader(GL_FRAGMENT_SHADER, fragmentShaderSource); | |
149 RTC_CHECK(fragmentShader) << "failed to create fragment shader"; | |
150 GLuint program = CreateProgram(vertexShader, fragmentShader); | |
151 // Shaders are created only to generate program. | |
152 if (vertexShader) { | |
153 glDeleteShader(vertexShader); | |
154 } | |
155 if (fragmentShader) { | |
156 glDeleteShader(fragmentShader); | |
157 } | |
158 return program; | |
159 } | |
160 | |
121 // When modelview and projection matrices are identity (default) the world is | 161 // When modelview and projection matrices are identity (default) the world is |
122 // contained in the square around origin with unit size 2. Drawing to these | 162 // contained in the square around origin with unit size 2. Drawing to these |
123 // coordinates is equivalent to drawing to the entire screen. The texture is | 163 // coordinates is equivalent to drawing to the entire screen. The texture is |
124 // stretched over that square using texture coordinates (u, v) that range | 164 // stretched over that square using texture coordinates (u, v) that range |
125 // from (0, 0) to (1, 1) inclusive. Texture coordinates are flipped vertically | 165 // from (0, 0) to (1, 1) inclusive. Texture coordinates are flipped vertically |
126 // here because the incoming frame has origin in upper left hand corner but | 166 // here because the incoming frame has origin in upper left hand corner but |
127 // OpenGL expects origin in bottom left corner. | 167 // OpenGL expects origin in bottom left corner. |
128 const GLfloat gVertices[] = { | 168 static const GLfloat gVertices[] = { |
129 // X, Y, U, V. | 169 // X, Y, U, V. |
130 -1, -1, 0, 1, // Bottom left. | 170 -1, -1, 0, 1, // Bottom left. |
131 1, -1, 1, 1, // Bottom right. | 171 1, -1, 1, 1, // Bottom right. |
132 1, 1, 1, 0, // Top right. | 172 1, 1, 1, 0, // Top right. |
133 -1, 1, 0, 0, // Top left. | 173 -1, 1, 0, 0, // Top left. |
134 }; | 174 }; |
135 | 175 |
176 // Set vertex shader variables 'position' and 'texcoord' in |program| to the | |
177 // |gVertices| array above. It will use |vertexBuffer| and |vertexArray| to | |
178 // store the vertex data. | |
179 BOOL setupVerticesForProgram(GLuint program, GLuint* vertexBuffer, GLuint* vertexArray) { | |
180 GLint position = glGetAttribLocation(program, "position"); | |
181 GLint texcoord = glGetAttribLocation(program, "texcoord"); | |
182 if (position < 0 || texcoord < 0) { | |
183 return NO; | |
184 } | |
185 #if !TARGET_OS_IPHONE | |
186 glGenVertexArrays(1, vertexArray); | |
187 if (*vertexArray == 0) { | |
188 return NO; | |
189 } | |
190 glBindVertexArray(*vertexArray); | |
191 #endif | |
192 glGenBuffers(1, vertexBuffer); | |
193 if (*vertexBuffer == 0) { | |
194 return NO; | |
195 } | |
196 glBindBuffer(GL_ARRAY_BUFFER, *vertexBuffer); | |
197 glBufferData(GL_ARRAY_BUFFER, sizeof(gVertices), gVertices, GL_DYNAMIC_DRAW); | |
198 | |
199 // Read position attribute from |gVertices| with size of 2 and stride of 4 | |
200 // beginning at the start of the array. The last argument indicates offset | |
201 // of data within |gVertices| as supplied to the vertex buffer. | |
202 glVertexAttribPointer(position, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat), | |
203 (void*)0); | |
tkchin_webrtc
2016/08/02 00:17:18
nit: (void *)0
since we're in an ObjC++ file we can use nullptr instead.
| |
204 glEnableVertexAttribArray(position); | |
205 | |
206 // Read texcoord attribute from |gVertices| with size of 2 and stride of 4 | |
207 // beginning at the first texcoord in the array. The last argument indicates | |
208 // offset of data within |gVertices| as supplied to the vertex buffer. | |
209 glVertexAttribPointer(texcoord, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat), | |
210 (void*)(2 * sizeof(GLfloat))); | |
tkchin_webrtc
2016/08/02 00:17:19
ditto
| |
211 glEnableVertexAttribArray(texcoord); | |
212 | |
213 return YES; | |
214 } | |
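A sketch of the spelling the two (void *) nits above seem to be asking for, assuming the intent is to use the C++ forms available in an ObjC++ file:

```objc
// Zero offset: nullptr works for the final const GLvoid * argument.
glVertexAttribPointer(position, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat),
                      nullptr);
// Non-zero offset into the bound buffer, without a C-style cast.
glVertexAttribPointer(texcoord, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat),
                      reinterpret_cast<void *>(2 * sizeof(GLfloat)));
```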
215 | |
136 // |kNumTextures| must not exceed 8, which is the limit in OpenGLES2. Two sets | 216 // |kNumTextures| must not exceed 8, which is the limit in OpenGLES2. Two sets |
137 // of 3 textures are used here, one for each of the Y, U and V planes. Having | 217 // of 3 textures are used here, one for each of the Y, U and V planes. Having |
138 // two sets alleviates CPU blockage in the event that the GPU is asked to render | 218 // two sets alleviates CPU blockage in the event that the GPU is asked to render |
139 // to a texture that is already in use. | 219 // to a texture that is already in use. |
140 static const GLsizei kNumTextureSets = 2; | 220 static const GLsizei kNumTextureSets = 2; |
141 static const GLsizei kNumTextures = 3 * kNumTextureSets; | 221 static const GLsizei kNumTextures = 3 * kNumTextureSets; |
142 | 222 |
143 @implementation RTCOpenGLVideoRenderer { | 223 // Shader for non-native I420 frames. |
144 #if TARGET_OS_IPHONE | 224 @interface I420Shader : NSObject<Shader> |
tkchin_webrtc
2016/08/02 00:17:18
nit: style is NSObject <Shader>
together with the RTC prefix.
| |
145 EAGLContext *_context; | 225 - (BOOL)drawFrame:(RTCVideoFrame*)frame; |
tkchin_webrtc
2016/08/02 00:17:19
don't need to redeclare methods from protocol
| |
146 #else | 226 @end |
147 NSOpenGLContext *_context; | 227 |
148 #endif | 228 @implementation I420Shader { |
tkchin_webrtc
2016/08/02 00:17:17
Can we place the various shaders in their own files?
| |
149 BOOL _isInitialized; | 229 BOOL _hasUnpackRowLength; |
150 GLint _currentTextureSet; | 230 GLint _currentTextureSet; |
151 // Handles for OpenGL constructs. | 231 // Handles for OpenGL constructs. |
152 GLuint _textures[kNumTextures]; | 232 GLuint _textures[kNumTextures]; |
153 GLuint _program; | 233 GLuint _i420Program; |
154 #if !TARGET_OS_IPHONE | |
155 GLuint _vertexArray; | 234 GLuint _vertexArray; |
156 #endif | |
157 GLuint _vertexBuffer; | 235 GLuint _vertexBuffer; |
158 GLint _position; | |
159 GLint _texcoord; | |
160 GLint _ySampler; | 236 GLint _ySampler; |
161 GLint _uSampler; | 237 GLint _uSampler; |
162 GLint _vSampler; | 238 GLint _vSampler; |
163 // Used to create a non-padded plane for GPU upload when we receive padded | 239 // Used to create a non-padded plane for GPU upload when we receive padded |
164 // frames. | 240 // frames. |
165 std::unique_ptr<uint8_t[]> _planeBuffer; | 241 std::vector<uint8_t> _planeBuffer; |
166 } | 242 } |
167 | 243 |
168 @synthesize lastDrawnFrame = _lastDrawnFrame; | 244 - (instancetype)initWithContext:(GlContextType *)context { |
169 | 245 if (self = [super init]) { |
170 + (void)initialize { | |
171 // Disable dithering for performance. | |
172 glDisable(GL_DITHER); | |
173 } | |
174 | |
175 #if TARGET_OS_IPHONE | 246 #if TARGET_OS_IPHONE |
176 - (instancetype)initWithContext:(EAGLContext *)context { | 247 _hasUnpackRowLength = (context.API == kEAGLRenderingAPIOpenGLES3); |
177 #else | 248 #else |
178 - (instancetype)initWithContext:(NSOpenGLContext *)context { | 249 _hasUnpackRowLength = YES; |
179 #endif | 250 #endif |
180 NSAssert(context != nil, @"context cannot be nil"); | 251 glPixelStorei(GL_UNPACK_ALIGNMENT, 1); |
181 if (self = [super init]) { | 252 if (![self setupI420Program] || ![self setupTextures] || |
182 _context = context; | 253 !setupVerticesForProgram(_i420Program, &_vertexBuffer, &_vertexArray)) { |
254 self = nil; | |
255 } | |
183 } | 256 } |
184 return self; | 257 return self; |
185 } | 258 } |
186 | 259 |
187 - (BOOL)drawFrame:(RTCVideoFrame *)frame { | 260 - (void)dealloc { |
188 if (!_isInitialized) { | 261 if (_i420Program != 0) { |
262 glDeleteProgram(_i420Program); | |
263 } | |
264 if (_textures[0] != 0) { | |
265 glDeleteTextures(kNumTextures, _textures); | |
266 } | |
267 if (_vertexBuffer != 0) { | |
268 glDeleteBuffers(1, &_vertexBuffer); | |
269 } | |
270 if (_vertexArray != 0) { | |
271 glDeleteVertexArrays(1, &_vertexArray); | |
272 } | |
273 } | |
274 | |
275 - (BOOL)setupI420Program { | |
276 _i420Program = CreateProgramFromFragmentSource(kI420FragmentShaderSource); | |
277 if (!_i420Program) { | |
189 return NO; | 278 return NO; |
190 } | 279 } |
191 if (_lastDrawnFrame == frame) { | 280 _ySampler = glGetUniformLocation(_i420Program, "s_textureY"); |
192 return NO; | 281 _uSampler = glGetUniformLocation(_i420Program, "s_textureU"); |
193 } | 282 _vSampler = glGetUniformLocation(_i420Program, "s_textureV"); |
194 [self ensureGLContext]; | |
195 glClear(GL_COLOR_BUFFER_BIT); | |
196 if (frame) { | |
197 if (![self updateTextureSizesForFrame:frame] || | |
198 ![self updateTextureDataForFrame:frame]) { | |
199 return NO; | |
200 } | |
201 #if !TARGET_OS_IPHONE | |
202 glBindVertexArray(_vertexArray); | |
203 #endif | |
204 glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer); | |
205 glDrawArrays(GL_TRIANGLE_FAN, 0, 4); | |
206 } | |
207 #if !TARGET_OS_IPHONE | |
208 [_context flushBuffer]; | |
209 #endif | |
210 _lastDrawnFrame = frame; | |
211 return YES; | |
212 } | |
213 | 283 |
214 - (void)setupGL { | 284 return (_ySampler >= 0 && _uSampler >= 0 && _vSampler >= 0); |
tkchin_webrtc
2016/08/02 00:17:19
these ivars are init-ed to zero by default after init.
| |
215 if (_isInitialized) { | |
216 return; | |
217 } | |
218 [self ensureGLContext]; | |
219 if (![self setupProgram]) { | |
220 return; | |
221 } | |
222 if (![self setupTextures]) { | |
223 return; | |
224 } | |
225 if (![self setupVertices]) { | |
226 return; | |
227 } | |
228 glUseProgram(_program); | |
229 glPixelStorei(GL_UNPACK_ALIGNMENT, 1); | |
230 _isInitialized = YES; | |
231 } | |
232 | |
233 - (void)teardownGL { | |
234 if (!_isInitialized) { | |
235 return; | |
236 } | |
237 [self ensureGLContext]; | |
238 glDeleteProgram(_program); | |
239 _program = 0; | |
240 glDeleteTextures(kNumTextures, _textures); | |
241 glDeleteBuffers(1, &_vertexBuffer); | |
242 _vertexBuffer = 0; | |
243 #if !TARGET_OS_IPHONE | |
244 glDeleteVertexArrays(1, &_vertexArray); | |
245 #endif | |
246 _isInitialized = NO; | |
247 } | |
248 | |
249 #pragma mark - Private | |
250 | |
251 - (void)ensureGLContext { | |
252 NSAssert(_context, @"context shouldn't be nil"); | |
253 #if TARGET_OS_IPHONE | |
254 if ([EAGLContext currentContext] != _context) { | |
255 [EAGLContext setCurrentContext:_context]; | |
256 } | |
257 #else | |
258 if ([NSOpenGLContext currentContext] != _context) { | |
259 [_context makeCurrentContext]; | |
260 } | |
261 #endif | |
262 } | |
263 | |
264 - (BOOL)setupProgram { | |
265 NSAssert(!_program, @"program already set up"); | |
266 GLuint vertexShader = CreateShader(GL_VERTEX_SHADER, kVertexShaderSource); | |
267 NSAssert(vertexShader, @"failed to create vertex shader"); | |
268 GLuint fragmentShader = | |
269 CreateShader(GL_FRAGMENT_SHADER, kFragmentShaderSource); | |
270 NSAssert(fragmentShader, @"failed to create fragment shader"); | |
271 _program = CreateProgram(vertexShader, fragmentShader); | |
272 // Shaders are created only to generate program. | |
273 if (vertexShader) { | |
274 glDeleteShader(vertexShader); | |
275 } | |
276 if (fragmentShader) { | |
277 glDeleteShader(fragmentShader); | |
278 } | |
279 if (!_program) { | |
280 return NO; | |
281 } | |
282 _position = glGetAttribLocation(_program, "position"); | |
283 _texcoord = glGetAttribLocation(_program, "texcoord"); | |
284 _ySampler = glGetUniformLocation(_program, "s_textureY"); | |
285 _uSampler = glGetUniformLocation(_program, "s_textureU"); | |
286 _vSampler = glGetUniformLocation(_program, "s_textureV"); | |
287 if (_position < 0 || _texcoord < 0 || _ySampler < 0 || _uSampler < 0 || | |
288 _vSampler < 0) { | |
289 return NO; | |
290 } | |
291 return YES; | |
292 } | 285 } |
293 | 286 |
294 - (BOOL)setupTextures { | 287 - (BOOL)setupTextures { |
295 glGenTextures(kNumTextures, _textures); | 288 glGenTextures(kNumTextures, _textures); |
296 // Set parameters for each of the textures we created. | 289 // Set parameters for each of the textures we created. |
297 for (GLsizei i = 0; i < kNumTextures; i++) { | 290 for (GLsizei i = 0; i < kNumTextures; i++) { |
298 glActiveTexture(GL_TEXTURE0 + i); | 291 glActiveTexture(GL_TEXTURE0 + i); |
299 glBindTexture(GL_TEXTURE_2D, _textures[i]); | 292 glBindTexture(GL_TEXTURE_2D, _textures[i]); |
300 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); | 293 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); |
301 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); | 294 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); |
302 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); | 295 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); |
303 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); | 296 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); |
304 } | 297 } |
305 return YES; | 298 return YES; |
306 } | 299 } |
307 | 300 |
308 - (BOOL)updateTextureSizesForFrame:(RTCVideoFrame *)frame { | 301 - (BOOL)drawFrame:(RTCVideoFrame*)frame { |
309 if (frame.height == _lastDrawnFrame.height && | 302 glUseProgram(_i420Program); |
310 frame.width == _lastDrawnFrame.width && | 303 if (![self updateTextureDataForFrame:frame]) { |
311 frame.chromaWidth == _lastDrawnFrame.chromaWidth && | 304 return NO; |
312 frame.chromaHeight == _lastDrawnFrame.chromaHeight) { | |
313 return YES; | |
314 } | 305 } |
315 GLsizei lumaWidth = static_cast<GLsizei>(frame.width); | 306 #if !TARGET_OS_IPHONE |
316 GLsizei lumaHeight = static_cast<GLsizei>(frame.height); | 307 glBindVertexArray(_vertexArray); |
317 GLsizei chromaWidth = static_cast<GLsizei>(frame.chromaWidth); | 308 #endif |
318 GLsizei chromaHeight = static_cast<GLsizei>(frame.chromaHeight); | 309 glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer); |
319 for (GLint i = 0; i < kNumTextureSets; i++) { | 310 glDrawArrays(GL_TRIANGLE_FAN, 0, 4); |
320 glActiveTexture(GL_TEXTURE0 + i * 3); | 311 |
321 glTexImage2D(GL_TEXTURE_2D, | |
322 0, | |
323 RTC_PIXEL_FORMAT, | |
324 lumaWidth, | |
325 lumaHeight, | |
326 0, | |
327 RTC_PIXEL_FORMAT, | |
328 GL_UNSIGNED_BYTE, | |
329 0); | |
330 glActiveTexture(GL_TEXTURE0 + i * 3 + 1); | |
331 glTexImage2D(GL_TEXTURE_2D, | |
332 0, | |
333 RTC_PIXEL_FORMAT, | |
334 chromaWidth, | |
335 chromaHeight, | |
336 0, | |
337 RTC_PIXEL_FORMAT, | |
338 GL_UNSIGNED_BYTE, | |
339 0); | |
340 glActiveTexture(GL_TEXTURE0 + i * 3 + 2); | |
341 glTexImage2D(GL_TEXTURE_2D, | |
342 0, | |
343 RTC_PIXEL_FORMAT, | |
344 chromaWidth, | |
345 chromaHeight, | |
346 0, | |
347 RTC_PIXEL_FORMAT, | |
348 GL_UNSIGNED_BYTE, | |
349 0); | |
350 } | |
351 if ((NSUInteger)frame.yPitch != frame.width || | |
352 (NSUInteger)frame.uPitch != frame.chromaWidth || | |
353 (NSUInteger)frame.vPitch != frame.chromaWidth) { | |
354 _planeBuffer.reset(new uint8_t[frame.width * frame.height]); | |
355 } else { | |
356 _planeBuffer.reset(); | |
357 } | |
358 return YES; | 312 return YES; |
359 } | 313 } |
360 | 314 |
361 - (void)uploadPlane:(const uint8_t *)plane | 315 - (void)uploadPlane:(const uint8_t *)plane |
362 sampler:(GLint)sampler | 316 sampler:(GLint)sampler |
363 offset:(GLint)offset | 317 offset:(GLint)offset |
364 width:(size_t)width | 318 width:(size_t)width |
365 height:(size_t)height | 319 height:(size_t)height |
366 stride:(int32_t)stride { | 320 stride:(int32_t)stride { |
367 glActiveTexture(static_cast<GLenum>(GL_TEXTURE0 + offset)); | 321 glActiveTexture(static_cast<GLenum>(GL_TEXTURE0 + offset)); |
322 glBindTexture(GL_TEXTURE_2D, _textures[offset]); | |
323 | |
368 // When setting texture sampler uniforms, the texture index is used not | 324 // When setting texture sampler uniforms, the texture index is used not |
369 // the texture handle. | 325 // the texture handle. |
370 glUniform1i(sampler, offset); | 326 glUniform1i(sampler, offset); |
371 #if TARGET_OS_IPHONE | |
372 BOOL hasUnpackRowLength = _context.API == kEAGLRenderingAPIOpenGLES3; | |
373 #else | |
374 BOOL hasUnpackRowLength = YES; | |
375 #endif | |
376 const uint8_t *uploadPlane = plane; | 327 const uint8_t *uploadPlane = plane; |
377 if ((size_t)stride != width) { | 328 if ((size_t)stride != width) { |
378 if (hasUnpackRowLength) { | 329 if (_hasUnpackRowLength) { |
379 // GLES3 allows us to specify stride. | 330 // GLES3 allows us to specify stride. |
380 glPixelStorei(GL_UNPACK_ROW_LENGTH, stride); | 331 glPixelStorei(GL_UNPACK_ROW_LENGTH, stride); |
381 glTexImage2D(GL_TEXTURE_2D, | 332 glTexImage2D(GL_TEXTURE_2D, |
382 0, | 333 0, |
383 RTC_PIXEL_FORMAT, | 334 RTC_PIXEL_FORMAT, |
384 static_cast<GLsizei>(width), | 335 static_cast<GLsizei>(width), |
385 static_cast<GLsizei>(height), | 336 static_cast<GLsizei>(height), |
386 0, | 337 0, |
387 RTC_PIXEL_FORMAT, | 338 RTC_PIXEL_FORMAT, |
388 GL_UNSIGNED_BYTE, | 339 GL_UNSIGNED_BYTE, |
389 uploadPlane); | 340 uploadPlane); |
390 glPixelStorei(GL_UNPACK_ROW_LENGTH, 0); | 341 glPixelStorei(GL_UNPACK_ROW_LENGTH, 0); |
391 return; | 342 return; |
392 } else { | 343 } else { |
393 // Make an unpadded copy and upload that instead. Quick profiling showed | 344 // Make an unpadded copy and upload that instead. Quick profiling showed |
394 // that this is faster than uploading row by row using glTexSubImage2D. | 345 // that this is faster than uploading row by row using glTexSubImage2D. |
395 uint8_t *unpaddedPlane = _planeBuffer.get(); | 346 uint8_t *unpaddedPlane = _planeBuffer.data(); |
396 for (size_t y = 0; y < height; ++y) { | 347 for (size_t y = 0; y < height; ++y) { |
397 memcpy(unpaddedPlane + y * width, plane + y * stride, width); | 348 memcpy(unpaddedPlane + y * width, plane + y * stride, width); |
398 } | 349 } |
399 uploadPlane = unpaddedPlane; | 350 uploadPlane = unpaddedPlane; |
400 } | 351 } |
401 } | 352 } |
402 glTexImage2D(GL_TEXTURE_2D, | 353 glTexImage2D(GL_TEXTURE_2D, |
403 0, | 354 0, |
404 RTC_PIXEL_FORMAT, | 355 RTC_PIXEL_FORMAT, |
405 static_cast<GLsizei>(width), | 356 static_cast<GLsizei>(width), |
406 static_cast<GLsizei>(height), | 357 static_cast<GLsizei>(height), |
407 0, | 358 0, |
408 RTC_PIXEL_FORMAT, | 359 RTC_PIXEL_FORMAT, |
409 GL_UNSIGNED_BYTE, | 360 GL_UNSIGNED_BYTE, |
410 uploadPlane); | 361 uploadPlane); |
411 } | 362 } |
412 | 363 |
413 - (BOOL)updateTextureDataForFrame:(RTCVideoFrame *)frame { | 364 - (BOOL)updateTextureDataForFrame:(RTCVideoFrame *)frame { |
414 GLint textureOffset = _currentTextureSet * 3; | 365 GLint textureOffset = _currentTextureSet * 3; |
415 NSAssert(textureOffset + 3 <= kNumTextures, @"invalid offset"); | 366 NSAssert(textureOffset + 3 <= kNumTextures, @"invalid offset"); |
416 | 367 |
368 if (frame.yPitch != static_cast<int32_t>(frame.width) || | |
369 frame.uPitch != static_cast<int32_t>(frame.chromaWidth) || | |
370 frame.vPitch != static_cast<int32_t>(frame.chromaWidth)) { | |
371 _planeBuffer.resize(frame.width * frame.height); | |
372 } | |
373 | |
417 [self uploadPlane:frame.yPlane | 374 [self uploadPlane:frame.yPlane |
418 sampler:_ySampler | 375 sampler:_ySampler |
419 offset:textureOffset | 376 offset:textureOffset |
420 width:frame.width | 377 width:frame.width |
421 height:frame.height | 378 height:frame.height |
422 stride:frame.yPitch]; | 379 stride:frame.yPitch]; |
423 | 380 |
424 [self uploadPlane:frame.uPlane | 381 [self uploadPlane:frame.uPlane |
425 sampler:_uSampler | 382 sampler:_uSampler |
426 offset:textureOffset + 1 | 383 offset:textureOffset + 1 |
427 width:frame.chromaWidth | 384 width:frame.chromaWidth |
428 height:frame.chromaHeight | 385 height:frame.chromaHeight |
429 stride:frame.uPitch]; | 386 stride:frame.uPitch]; |
430 | 387 |
431 [self uploadPlane:frame.vPlane | 388 [self uploadPlane:frame.vPlane |
432 sampler:_vSampler | 389 sampler:_vSampler |
433 offset:textureOffset + 2 | 390 offset:textureOffset + 2 |
434 width:frame.chromaWidth | 391 width:frame.chromaWidth |
435 height:frame.chromaHeight | 392 height:frame.chromaHeight |
436 stride:frame.vPitch]; | 393 stride:frame.vPitch]; |
437 | 394 |
438 _currentTextureSet = (_currentTextureSet + 1) % kNumTextureSets; | 395 _currentTextureSet = (_currentTextureSet + 1) % kNumTextureSets; |
439 return YES; | 396 return YES; |
440 } | 397 } |
441 | 398 |
442 - (BOOL)setupVertices { | 399 @end |
400 | |
401 // Native CVPixelBufferRef rendering is only supported on iPhone because it | |
402 // depends on CVOpenGLESTextureCacheCreate. | |
403 #if TARGET_OS_IPHONE | |
404 static const char kNV12FragmentShaderSource[] = | |
405 SHADER_VERSION | |
406 "precision mediump float;" | |
407 FRAGMENT_SHADER_IN " vec2 v_texcoord;\n" | |
408 "uniform lowp sampler2D s_textureY;\n" | |
409 "uniform lowp sampler2D s_textureUV;\n" | |
410 FRAGMENT_SHADER_OUT | |
411 "void main() {\n" | |
412 " mediump float y;\n" | |
413 " mediump vec2 uv;\n" | |
414 " y = " FRAGMENT_SHADER_TEXTURE "(s_textureY, v_texcoord).r;\n" | |
415 " uv = " FRAGMENT_SHADER_TEXTURE "(s_textureUV, v_texcoord).ra -\n" | |
416 " vec2(0.5, 0.5);\n" | |
417 " " FRAGMENT_SHADER_COLOR " = vec4(y + 1.403 * uv.y,\n" | |
418 " y - 0.344 * uv.x - 0.714 * uv.y,\n" | |
419 " y + 1.770 * uv.x,\n" | |
420 " 1.0);\n" | |
421 " }\n"; | |
422 | |
423 // Shader for native NV12 frames. | |
424 @interface NativeNV12Shader : NSObject<Shader> | |
425 - (BOOL)drawFrame:(RTCVideoFrame*)frame; | |
426 @end | |
427 | |
428 @implementation NativeNV12Shader { | |
429 GLuint _vertexBuffer; | |
430 GLuint _nv12Program; | |
431 GLint _ySampler; | |
432 GLint _uvSampler; | |
433 CVOpenGLESTextureCacheRef _textureCache; | |
434 } | |
435 | |
436 - (instancetype)initWithContext:(GlContextType *)context { | |
437 if (self = [super init]) { | |
438 if (![self setupNV12Program] || ![self setupTextureCacheWithContext:context] || | |
439 !setupVerticesForProgram(_nv12Program, &_vertexBuffer, nullptr)) { | |
440 self = nil; | |
441 } | |
442 } | |
443 return self; | |
444 } | |
445 | |
446 - (void)dealloc { | |
447 if (_nv12Program != 0) { | |
448 glDeleteProgram(_nv12Program); | |
449 } | |
450 if (_vertexBuffer != 0) { | |
451 glDeleteBuffers(1, &_vertexBuffer); | |
452 } | |
453 if (_textureCache) { | |
454 CFRelease(_textureCache); | |
tkchin_webrtc
2016/08/02 00:17:18
nit: assign nullptr after release
| |
455 } | |
456 } | |
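Sketch of the follow-up the "assign nullptr after release" nit asks for; this is an assumption about the intended change, not part of the patch as written:

```objc
// Hypothetical dealloc with the cache reference cleared after release.
- (void)dealloc {
  if (_nv12Program != 0) {
    glDeleteProgram(_nv12Program);
  }
  if (_vertexBuffer != 0) {
    glDeleteBuffers(1, &_vertexBuffer);
  }
  if (_textureCache) {
    CFRelease(_textureCache);
    _textureCache = nullptr;
  }
}
```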
457 | |
458 - (BOOL)setupNV12Program { | |
459 _nv12Program = CreateProgramFromFragmentSource(kNV12FragmentShaderSource); | |
460 if (!_nv12Program) { | |
461 return NO; | |
462 } | |
463 _ySampler = glGetUniformLocation(_nv12Program, "s_textureY"); | |
464 _uvSampler = glGetUniformLocation(_nv12Program, "s_textureUV"); | |
465 | |
466 return (_ySampler >= 0 && _uvSampler >= 0); | |
467 } | |
468 | |
469 - (BOOL)setupTextureCacheWithContext:(GlContextType *)context { | |
470 CVReturn err = CVOpenGLESTextureCacheCreate( | |
471 kCFAllocatorDefault, NULL, | |
472 #if COREVIDEO_USE_EAGLCONTEXT_CLASS_IN_API | |
473 context, | |
474 #else | |
475 (__bridge void*)context, | |
tkchin_webrtc
2016/08/02 00:17:17
nit: void *)
| |
476 #endif | |
477 NULL, &_textureCache); | |
478 return !err; | |
tkchin_webrtc
2016/08/02 00:17:17
err == noErr or CVReturnTrue ? I remember there wa
| |
479 } | |
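The constant the reviewer is reaching for is most likely kCVReturnSuccess; a hedged sketch of the explicit comparison:

```objc
// Hypothetical variant of setupTextureCacheWithContext: with an explicit
// success check instead of !err (CVReturn reports success as kCVReturnSuccess).
- (BOOL)setupTextureCacheWithContext:(GlContextType *)context {
  CVReturn err = CVOpenGLESTextureCacheCreate(
      kCFAllocatorDefault, NULL,
#if COREVIDEO_USE_EAGLCONTEXT_CLASS_IN_API
      context,
#else
      (__bridge void *)context,
#endif
      NULL, &_textureCache);
  return err == kCVReturnSuccess;
}
```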
480 | |
481 - (BOOL)drawFrame:(RTCVideoFrame*)frame { | |
tkchin_webrtc
2016/08/02 00:17:17
nit: RTCVideoFrame *)
| |
482 CVPixelBufferRef pixelBuffer = frame.nativeHandle; | |
483 RTC_CHECK(pixelBuffer); | |
484 glUseProgram(_nv12Program); | |
485 const OSType pixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer); | |
486 RTC_CHECK(pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange || | |
487 pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange) | |
488 << "Unsupported native pixel format: " << pixelFormat; | |
489 | |
490 // Y-plane. | |
491 const int luma_width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0); | |
tkchin_webrtc
2016/08/02 00:17:18
nit: camel case instead of underscore for ObjC functions.
| |
492 const int luma_height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0); | |
493 | |
494 CVOpenGLESTextureRef lumaTexture; | |
tkchin_webrtc
2016/08/02 00:17:18
nit: init to nullptr
| |
495 glActiveTexture(GL_TEXTURE0); | |
496 glUniform1i(_ySampler, 0); | |
497 CVReturn err = CVOpenGLESTextureCacheCreateTextureFromImage( | |
498 kCFAllocatorDefault, _textureCache, pixelBuffer, NULL, GL_TEXTURE_2D, | |
499 RTC_PIXEL_FORMAT, luma_width, luma_height, RTC_PIXEL_FORMAT, | |
500 GL_UNSIGNED_BYTE, 0, &lumaTexture); | |
501 if (err) { | |
502 CFRelease(lumaTexture); | |
503 return NO; | |
504 } | |
505 | |
506 RTC_CHECK_EQ(static_cast<GLenum>(GL_TEXTURE_2D), | |
507 CVOpenGLESTextureGetTarget(lumaTexture)); | |
508 glBindTexture(GL_TEXTURE_2D, CVOpenGLESTextureGetName(lumaTexture)); | |
509 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); | |
510 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); | |
511 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); | |
512 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); | |
513 | |
514 // UV-plane. | |
515 const int chroma_width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 1); | |
tkchin_webrtc
2016/08/02 00:17:19
ditto chromaWidth / chromaHeight
| |
516 const int chroma_height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 1); | |
517 | |
518 CVOpenGLESTextureRef chromaTexture; | |
519 glActiveTexture(GL_TEXTURE1); | |
520 glUniform1i(_uvSampler, 1); | |
521 err = CVOpenGLESTextureCacheCreateTextureFromImage( | |
522 kCFAllocatorDefault, _textureCache, pixelBuffer, NULL, GL_TEXTURE_2D, | |
523 GL_LUMINANCE_ALPHA, chroma_width, chroma_height, GL_LUMINANCE_ALPHA, | |
524 GL_UNSIGNED_BYTE, 1, &chromaTexture); | |
525 if (err) { | |
526 CFRelease(chromaTexture); | |
527 CFRelease(lumaTexture); | |
528 return NO; | |
529 } | |
530 | |
531 RTC_CHECK_EQ(static_cast<GLenum>(GL_TEXTURE_2D), | |
532 CVOpenGLESTextureGetTarget(chromaTexture)); | |
533 glBindTexture(GL_TEXTURE_2D, CVOpenGLESTextureGetName(chromaTexture)); | |
534 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); | |
535 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); | |
536 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); | |
537 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); | |
538 | |
539 glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer); | |
540 glDrawArrays(GL_TRIANGLE_FAN, 0, 4); | |
541 | |
542 CFRelease(chromaTexture); | |
543 CFRelease(lumaTexture); | |
544 | |
545 return YES; | |
546 } | |
547 | |
548 @end | |
549 #endif // TARGET_OS_IPHONE | |
550 | |
551 @implementation RTCOpenGLVideoRenderer { | |
552 GlContextType* _context; | |
tkchin_webrtc
2016/08/02 00:17:19
nit: GLContextType *_context
| |
553 BOOL _isInitialized; | |
554 NSObject<Shader>* _i420Shader; | |
tkchin_webrtc
2016/08/02 00:17:17
typically we declare id instead of saying that it is an NSObject subclass.
| |
555 NSObject<Shader>* _nv12Shader; | |
556 } | |
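A sketch of the ivar block with the two reviewer suggestions above applied (id plus protocol; the RTCShader name is assumed from the later comment):

```objc
// Hypothetical restatement of the renderer's ivars; not the patch as landed.
@implementation RTCOpenGLVideoRenderer {
  GlContextType *_context;
  BOOL _isInitialized;
  id<RTCShader> _i420Shader;
  id<RTCShader> _nv12Shader;
}
// Methods as in the patch.
@end
```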
557 | |
558 @synthesize lastDrawnFrame = _lastDrawnFrame; | |
559 | |
560 + (void)initialize { | |
561 // Disable dithering for performance. | |
562 glDisable(GL_DITHER); | |
563 } | |
564 | |
565 - (instancetype)initWithContext:(GlContextType *)context { | |
566 NSAssert(context != nil, @"context cannot be nil"); | |
567 if (self = [super init]) { | |
568 _context = context; | |
569 } | |
570 return self; | |
571 } | |
572 | |
573 - (BOOL)drawFrame:(RTCVideoFrame*)frame { | |
574 if (!_isInitialized || !frame || frame == _lastDrawnFrame) { | |
575 return NO; | |
576 } | |
577 [self ensureGLContext]; | |
578 glClear(GL_COLOR_BUFFER_BIT); | |
579 NSObject<Shader>* shader; | |
tkchin_webrtc
2016/08/02 00:17:19
id<RTCShader> shader = nil;
| |
580 #if TARGET_OS_IPHONE | |
581 if (frame.nativeHandle) { | |
582 if (!_nv12Shader) { | |
583 _nv12Shader = [[NativeNV12Shader alloc] initWithContext:_context]; | |
584 } | |
585 shader = _nv12Shader; | |
586 #else | |
587 // Rendering native CVPixelBuffer is not supported on OS X. | |
588 if (false) { | |
589 #endif | |
590 } else { | |
591 if (!_i420Shader) { | |
592 _i420Shader = [[I420Shader alloc] initWithContext:_context]; | |
593 } | |
594 shader = _i420Shader; | |
595 } | |
596 if (!shader || ![shader drawFrame:frame]) { | |
597 return NO; | |
598 } | |
599 | |
443 #if !TARGET_OS_IPHONE | 600 #if !TARGET_OS_IPHONE |
444 NSAssert(!_vertexArray, @"vertex array already set up"); | 601 [_context flushBuffer]; |
445 glGenVertexArrays(1, &_vertexArray); | 602 #endif |
446 if (!_vertexArray) { | 603 _lastDrawnFrame = frame; |
447 return NO; | |
448 } | |
449 glBindVertexArray(_vertexArray); | |
450 #endif | |
451 NSAssert(!_vertexBuffer, @"vertex buffer already set up"); | |
452 glGenBuffers(1, &_vertexBuffer); | |
453 if (!_vertexBuffer) { | |
454 #if !TARGET_OS_IPHONE | |
455 glDeleteVertexArrays(1, &_vertexArray); | |
456 _vertexArray = 0; | |
457 #endif | |
458 return NO; | |
459 } | |
460 glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer); | |
461 glBufferData(GL_ARRAY_BUFFER, sizeof(gVertices), gVertices, GL_DYNAMIC_DRAW); | |
462 | |
463 // Read position attribute from |gVertices| with size of 2 and stride of 4 | |
464 // beginning at the start of the array. The last argument indicates offset | |
465 // of data within |gVertices| as supplied to the vertex buffer. | |
466 glVertexAttribPointer( | |
467 _position, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat), (void *)0); | |
468 glEnableVertexAttribArray(_position); | |
469 | |
470 // Read texcoord attribute from |gVertices| with size of 2 and stride of 4 | |
471 // beginning at the first texcoord in the array. The last argument indicates | |
472 // offset of data within |gVertices| as supplied to the vertex buffer. | |
473 glVertexAttribPointer(_texcoord, | |
474 2, | |
475 GL_FLOAT, | |
476 GL_FALSE, | |
477 4 * sizeof(GLfloat), | |
478 (void *)(2 * sizeof(GLfloat))); | |
479 glEnableVertexAttribArray(_texcoord); | |
480 | 604 |
481 return YES; | 605 return YES; |
482 } | 606 } |
483 | 607 |
484 @end | 608 - (void)setupGL { |
609 if (_isInitialized) { | |
610 return; | |
611 } | |
612 [self ensureGLContext]; | |
613 _isInitialized = YES; | |
614 } | |
615 | |
616 - (void)teardownGL { | |
617 if (!_isInitialized) { | |
618 return; | |
619 } | |
620 [self ensureGLContext]; | |
621 _i420Shader = nil; | |
622 _nv12Shader = nil; | |
623 _isInitialized = NO; | |
624 } | |
625 | |
626 #pragma mark - Private | |
627 | |
628 - (void)ensureGLContext { | |
629 NSAssert(_context, @"context shouldn't be nil"); | |
630 #if TARGET_OS_IPHONE | |
631 if ([EAGLContext currentContext] != _context) { | |
632 [EAGLContext setCurrentContext:_context]; | |
633 } | |
634 #else | |
635 if ([NSOpenGLContext currentContext] != _context) { | |
636 [_context makeCurrentContext]; | |
637 } | |
638 #endif | |
639 } | |
640 | |
641 @end | |
OLD | NEW |
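Outside the diff itself, a hedged usage sketch of the public surface this patch keeps (setupGL, drawFrame:, teardownGL). The `context` and `latestFrame` variables and the display-link driver are assumptions for illustration, not WebRTC API from this file:

```objc
// Minimal driving loop, assuming a current GL context and incoming frames.
RTCOpenGLVideoRenderer *renderer =
    [[RTCOpenGLVideoRenderer alloc] initWithContext:context];
[renderer setupGL];
// Called on each display-link tick with the most recent decoded frame:
if (![renderer drawFrame:latestFrame]) {
  // Frame was nil, unchanged since the last draw, or the shader failed.
}
[renderer teardownGL];
```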