OLD | NEW |
---|---|
1 /* | 1 /* |
2 * Copyright 2016 The WebRTC project authors. All Rights Reserved. | 2 * Copyright 2017 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
11 #import "RTCShader.h" | 11 #import "RTCNV12TextureCache.h" |
12 | |
13 // Native CVPixelBufferRef rendering is only supported on iPhone because it | |
14 // depends on CVOpenGLESTextureCacheCreate. | |
15 #if TARGET_OS_IPHONE | |
16 | |
17 #import <CoreVideo/CVOpenGLESTextureCache.h> | |
18 | 12 |
19 #import "RTCShader+Private.h" | 13 #import "RTCShader+Private.h" |
daniela-webrtc (2017/04/26 08:56:23): Is this needed?
magjed_webrtc (2017/04/26 13:14:41): It's needed for RTC_PIXEL_FORMAT, but since this f
| |
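For context on the thread above: RTC_PIXEL_FORMAT comes from RTCShader+Private.h and names the single-channel GL texture format used for the Y plane. A minimal sketch of that kind of macro, assuming the usual iOS/desktop split (the exact definition is not shown in this CL):

```objc
// Hypothetical sketch, not the verbatim WebRTC header: desktop
// core-profile GL dropped GL_LUMINANCE, so a one-channel texture
// format macro has to branch on the platform.
#if TARGET_OS_IPHONE
#define RTC_PIXEL_FORMAT GL_LUMINANCE
#else
#define RTC_PIXEL_FORMAT GL_RED
#endif
```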
20 #import "WebRTC/RTCLogging.h" | |
21 #import "WebRTC/RTCVideoFrame.h" | |
22 | 14 |
23 #include "webrtc/base/checks.h" | 15 @implementation RTCNV12TextureCache { |
24 #include "webrtc/base/optional.h" | |
25 | |
26 static const char kNV12FragmentShaderSource[] = | |
27 SHADER_VERSION | |
28 "precision mediump float;" | |
29 FRAGMENT_SHADER_IN " vec2 v_texcoord;\n" | |
30 "uniform lowp sampler2D s_textureY;\n" | |
31 "uniform lowp sampler2D s_textureUV;\n" | |
32 FRAGMENT_SHADER_OUT | |
33 "void main() {\n" | |
34 " mediump float y;\n" | |
35 " mediump vec2 uv;\n" | |
36 " y = " FRAGMENT_SHADER_TEXTURE "(s_textureY, v_texcoord).r;\n" | |
37 " uv = " FRAGMENT_SHADER_TEXTURE "(s_textureUV, v_texcoord).ra -\n" | |
38 " vec2(0.5, 0.5);\n" | |
39 " " FRAGMENT_SHADER_COLOR " = vec4(y + 1.403 * uv.y,\n" | |
40 " y - 0.344 * uv.x - 0.714 * uv.y,\n" | |
41 " y + 1.770 * uv.x,\n" | |
42 " 1.0);\n" | |
43 " }\n"; | |
44 | |
45 @implementation RTCNativeNV12Shader { | |
46 GLuint _vertexBuffer; | |
47 GLuint _nv12Program; | |
48 GLint _ySampler; | |
49 GLint _uvSampler; | |
50 CVOpenGLESTextureCacheRef _textureCache; | 16 CVOpenGLESTextureCacheRef _textureCache; |
51 // Store current rotation and only upload new vertex data when rotation | 17 CVOpenGLESTextureRef _yTexture; |
daniela-webrtc (2017/04/26 08:56:23): It's a bit weird that the ivar and the property have
magjed_webrtc (2017/04/26 13:14:41): I agree it's a bit weird, and I don't know the con
daniela-webrtc (2017/04/26 13:31:02): Yes this is better. And yes, you are right exposi
| |
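The naming issue discussed above: the _yTexture/_uvTexture ivars hold CVOpenGLESTextureRef objects that keep the textures alive, while the like-named public properties expose only the GLuint texture names. A sketch of what the matching header could declare (the header is not part of this diff, so these declarations are assumptions beyond the methods visible in the implementation):

```objc
// Assumed shape of RTCNV12TextureCache.h; the GLuint properties are
// backed by CVOpenGLESTextureRef ivars in the implementation.
#import <GLKit/GLKit.h>

@class RTCVideoFrame;

@interface RTCNV12TextureCache : NSObject

// GL texture names for the Y and UV planes of the last uploaded frame.
@property(nonatomic, readonly) GLuint yTexture;
@property(nonatomic, readonly) GLuint uvTexture;

- (instancetype)initWithContext:(EAGLContext *)context;
- (BOOL)uploadFrameToTextures:(RTCVideoFrame *)frame;
- (void)releaseTextures;

@end
```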
52 // changes. | 18 CVOpenGLESTextureRef _uvTexture; |
53 rtc::Optional<RTCVideoRotation> _currentRotation; | |
54 } | 19 } |
55 | 20 |
56 - (instancetype)initWithContext:(GlContextType *)context { | 21 - (GLuint)yTexture { |
22 return CVOpenGLESTextureGetName(_yTexture); | |
23 } | |
24 | |
25 - (GLuint)uvTexture { | |
26 return CVOpenGLESTextureGetName(_uvTexture); | |
27 } | |
28 | |
29 - (instancetype)initWithContext:(EAGLContext *)context { | |
57 if (self = [super init]) { | 30 if (self = [super init]) { |
58 if (![self setupNV12Program] || ![self setupTextureCacheWithContext:context] || | 31 CVReturn ret = CVOpenGLESTextureCacheCreate( |
59 !RTCSetupVerticesForProgram(_nv12Program, &_vertexBuffer, nullptr)) { | 32 kCFAllocatorDefault, NULL, |
60 RTCLog(@"Failed to initialize RTCNativeNV12Shader."); | 33 #if COREVIDEO_USE_EAGLCONTEXT_CLASS_IN_API |
34 context, | |
35 #else | |
36 (__bridge void *)context, | |
37 #endif | |
38 NULL, &_textureCache); | |
39 if (ret != kCVReturnSuccess) { | |
61 self = nil; | 40 self = nil; |
62 } | 41 } |
63 } | 42 } |
64 return self; | 43 return self; |
65 } | 44 } |
66 | 45 |
67 - (void)dealloc { | 46 - (BOOL)uploadFrameToTextures:(RTCVideoFrame *)frame { |
68 glDeleteProgram(_nv12Program); | |
69 glDeleteBuffers(1, &_vertexBuffer); | |
70 if (_textureCache) { | |
71 CFRelease(_textureCache); | |
72 _textureCache = nullptr; | |
73 } | |
74 } | |
75 | |
76 - (BOOL)setupNV12Program { | |
77 _nv12Program = RTCCreateProgramFromFragmentSource(kNV12FragmentShaderSource); | |
78 if (!_nv12Program) { | |
79 return NO; | |
80 } | |
81 _ySampler = glGetUniformLocation(_nv12Program, "s_textureY"); | |
82 _uvSampler = glGetUniformLocation(_nv12Program, "s_textureUV"); | |
83 | |
84 return (_ySampler >= 0 && _uvSampler >= 0); | |
85 } | |
86 | |
87 - (BOOL)setupTextureCacheWithContext:(GlContextType *)context { | |
88 CVReturn ret = CVOpenGLESTextureCacheCreate( | |
89 kCFAllocatorDefault, NULL, | |
90 #if COREVIDEO_USE_EAGLCONTEXT_CLASS_IN_API | |
91 context, | |
92 #else | |
93 (__bridge void *)context, | |
94 #endif | |
95 NULL, &_textureCache); | |
96 return ret == kCVReturnSuccess; | |
97 } | |
98 | |
99 - (BOOL)drawFrame:(RTCVideoFrame *)frame { | |
100 CVPixelBufferRef pixelBuffer = frame.nativeHandle; | 47 CVPixelBufferRef pixelBuffer = frame.nativeHandle; |
101 RTC_CHECK(pixelBuffer); | 48 NSParameterAssert(pixelBuffer); |
102 glUseProgram(_nv12Program); | |
103 const OSType pixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer); | |
104 RTC_CHECK(pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange || | |
daniela-webrtc (2017/04/26 08:56:23): We no longer want to check the pixel format?
magjed_webrtc (2017/04/26 13:14:41): I did a straight-forward conversion to NSAssert (I
daniela-webrtc (2017/04/26 13:31:02): Acknowledged.
| |
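As the thread notes, the RTC_CHECK on the pixel format was dropped rather than converted. Had the conversion been kept, an NSAssert version could look like this (an illustration, not code from this CL):

```objc
// Hypothetical NSAssert equivalent of the removed RTC_CHECK. Note that
// NSAssert compiles out in release builds, so this would only guard
// debug runs, unlike the always-on RTC_CHECK it replaces.
OSType pixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer);
NSAssert(pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange ||
             pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
         @"Unsupported native pixel format: %u", (unsigned)pixelFormat);
```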
105 pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange) | |
106 << "Unsupported native pixel format: " << pixelFormat; | |
107 | 49 |
108 // Y-plane. | 50 // Y-plane. |
109 const int lumaWidth = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0); | 51 const int lumaWidth = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0); |
110 const int lumaHeight = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0); | 52 const int lumaHeight = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0); |
111 | 53 |
112 CVOpenGLESTextureRef lumaTexture = nullptr; | 54 _yTexture = nil; |
113 glActiveTexture(GL_TEXTURE0); | |
114 glUniform1i(_ySampler, 0); | |
115 CVReturn ret = CVOpenGLESTextureCacheCreateTextureFromImage( | 55 CVReturn ret = CVOpenGLESTextureCacheCreateTextureFromImage( |
116 kCFAllocatorDefault, _textureCache, pixelBuffer, NULL, GL_TEXTURE_2D, | 56 kCFAllocatorDefault, _textureCache, pixelBuffer, NULL, GL_TEXTURE_2D, |
117 RTC_PIXEL_FORMAT, lumaWidth, lumaHeight, RTC_PIXEL_FORMAT, | 57 RTC_PIXEL_FORMAT, lumaWidth, lumaHeight, RTC_PIXEL_FORMAT, |
118 GL_UNSIGNED_BYTE, 0, &lumaTexture); | 58 GL_UNSIGNED_BYTE, 0, &_yTexture); |
119 if (ret != kCVReturnSuccess) { | 59 if (ret != kCVReturnSuccess) { |
120 CFRelease(lumaTexture); | 60 CFRelease(_yTexture); |
121 return NO; | 61 return NO; |
122 } | 62 } |
123 | 63 NSAssert(CVOpenGLESTextureGetTarget(_yTexture) == GL_TEXTURE_2D, |
124 RTC_CHECK_EQ(static_cast<GLenum>(GL_TEXTURE_2D), | 64 @"Unexpected GLES texture target"); |
125 CVOpenGLESTextureGetTarget(lumaTexture)); | 65 glBindTexture(GL_TEXTURE_2D, self.yTexture); |
126 glBindTexture(GL_TEXTURE_2D, CVOpenGLESTextureGetName(lumaTexture)); | |
127 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); | 66 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); |
128 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); | 67 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); |
129 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); | 68 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); |
130 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); | 69 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); |
131 | 70 |
132 // UV-plane. | 71 // UV-plane. |
133 const int chromaWidth = CVPixelBufferGetWidthOfPlane(pixelBuffer, 1); | 72 const int chromaWidth = CVPixelBufferGetWidthOfPlane(pixelBuffer, 1); |
134 const int chromaHeight = CVPixelBufferGetHeightOfPlane(pixelBuffer, 1); | 73 const int chromaHeight = CVPixelBufferGetHeightOfPlane(pixelBuffer, 1); |
135 | 74 |
136 CVOpenGLESTextureRef chromaTexture = nullptr; | 75 _uvTexture = nil; |
137 glActiveTexture(GL_TEXTURE1); | |
138 glUniform1i(_uvSampler, 1); | |
139 ret = CVOpenGLESTextureCacheCreateTextureFromImage( | 76 ret = CVOpenGLESTextureCacheCreateTextureFromImage( |
140 kCFAllocatorDefault, _textureCache, pixelBuffer, NULL, GL_TEXTURE_2D, | 77 kCFAllocatorDefault, _textureCache, pixelBuffer, NULL, GL_TEXTURE_2D, |
141 GL_LUMINANCE_ALPHA, chromaWidth, chromaHeight, GL_LUMINANCE_ALPHA, | 78 GL_LUMINANCE_ALPHA, chromaWidth, chromaHeight, GL_LUMINANCE_ALPHA, |
142 GL_UNSIGNED_BYTE, 1, &chromaTexture); | 79 GL_UNSIGNED_BYTE, 1, &_uvTexture); |
143 if (ret != kCVReturnSuccess) { | 80 if (ret != kCVReturnSuccess) { |
144 CFRelease(chromaTexture); | 81 CFRelease(_uvTexture); |
145 CFRelease(lumaTexture); | 82 CFRelease(_yTexture); |
146 return NO; | 83 return NO; |
147 } | 84 } |
148 | 85 NSAssert(CVOpenGLESTextureGetTarget(_uvTexture) == GL_TEXTURE_2D, |
149 RTC_CHECK_EQ(static_cast<GLenum>(GL_TEXTURE_2D), | 86 @"Unexpected GLES texture target"); |
150 CVOpenGLESTextureGetTarget(chromaTexture)); | 87 glBindTexture(GL_TEXTURE_2D, self.uvTexture); |
151 glBindTexture(GL_TEXTURE_2D, CVOpenGLESTextureGetName(chromaTexture)); | |
152 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); | 88 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); |
153 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); | 89 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); |
154 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); | 90 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); |
155 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); | 91 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); |
156 | 92 |
157 glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer); | |
158 if (!_currentRotation || frame.rotation != *_currentRotation) { | |
159 _currentRotation = rtc::Optional<RTCVideoRotation>(frame.rotation); | |
160 RTCSetVertexData(*_currentRotation); | |
161 } | |
162 glDrawArrays(GL_TRIANGLE_FAN, 0, 4); | |
163 | |
164 CFRelease(chromaTexture); | |
165 CFRelease(lumaTexture); | |
166 | |
167 return YES; | 93 return YES; |
168 } | 94 } |
169 | 95 |
96 - (void)releaseTextures { | |
97 if (_uvTexture) { | |
98 CFRelease(_uvTexture); | |
99 _uvTexture = nil; | |
100 } | |
101 if (_yTexture) { | |
102 CFRelease(_yTexture); | |
103 _yTexture = nil; | |
104 } | |
105 } | |
106 | |
107 - (void)dealloc { | |
108 [self releaseTextures]; | |
109 if (_textureCache) { | |
110 CFRelease(_textureCache); | |
111 _textureCache = nil; | |
112 } | |
113 } | |
114 | |
170 @end | 115 @end |
171 #endif // TARGET_OS_IPHONE | 116 |
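Taken together, the new class splits texture upload out of the shader: a renderer creates one cache per GL context, uploads each frame, binds the two textures to its own sampler units, draws, and then releases the per-frame CF references. A sketch of that flow (the texture-unit assignments and the draw call belong to the caller's shader code, which is outside this CL):

```objc
// Per-frame usage sketch; `context`, `frame`, and the NV12 shader
// program are assumed to exist in the surrounding renderer.
RTCNV12TextureCache *nv12Cache =
    [[RTCNV12TextureCache alloc] initWithContext:context];
if ([nv12Cache uploadFrameToTextures:frame]) {
  glActiveTexture(GL_TEXTURE0);
  glBindTexture(GL_TEXTURE_2D, nv12Cache.yTexture);
  glActiveTexture(GL_TEXTURE1);
  glBindTexture(GL_TEXTURE_2D, nv12Cache.uvTexture);
  // ... set sampler uniforms and issue the draw call here ...
  [nv12Cache releaseTextures];  // drop the CF refs once the draw is queued
}
```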