Index: webrtc/sdk/objc/Framework/Classes/RTCNV12TextureCache.m |
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCNativeNV12Shader.mm b/webrtc/sdk/objc/Framework/Classes/RTCNV12TextureCache.m |
similarity index 35% |
copy from webrtc/sdk/objc/Framework/Classes/RTCNativeNV12Shader.mm |
copy to webrtc/sdk/objc/Framework/Classes/RTCNV12TextureCache.m |
index 0e221bfd112606090915f69862611e8a7223f50d..d613c8b211c5bbcd3da037424a5d7e007b87244b 100644 |
--- a/webrtc/sdk/objc/Framework/Classes/RTCNativeNV12Shader.mm |
+++ b/webrtc/sdk/objc/Framework/Classes/RTCNV12TextureCache.m |
@@ -1,5 +1,5 @@ |
/* |
- * Copyright 2016 The WebRTC project authors. All Rights Reserved. |
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved. |
* |
* Use of this source code is governed by a BSD-style license |
* that can be found in the LICENSE file in the root of the source |
@@ -8,122 +8,61 @@ |
* be found in the AUTHORS file in the root of the source tree. |
*/ |
-#import "RTCShader.h" |
- |
-// Native CVPixelBufferRef rendering is only supported on iPhone because it |
-// depends on CVOpenGLESTextureCacheCreate. |
-#if TARGET_OS_IPHONE |
- |
-#import <CoreVideo/CVOpenGLESTextureCache.h> |
+#import "RTCNV12TextureCache.h" |
#import "RTCShader+Private.h" |
daniela-webrtc 2017/04/26 08:56:23: Is this needed?
magjed_webrtc 2017/04/26 13:14:41: It's needed for RTC_PIXEL_FORMAT, but since this f
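(For context: RTC_PIXEL_FORMAT is the single-channel texture format macro pulled in through RTCShader+Private.h. That header is not part of this diff, so the following is only an assumed sketch of the kind of definition being referenced, not its verbatim contents.)

#if TARGET_OS_IPHONE
// OpenGL ES 2 has no GL_RED, so a luminance texture carries the single plane.
#define RTC_PIXEL_FORMAT GL_LUMINANCE
#else
// Desktop GL deprecates GL_LUMINANCE; GL_RED is used instead.
#define RTC_PIXEL_FORMAT GL_RED
#endif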
-#import "WebRTC/RTCLogging.h" |
-#import "WebRTC/RTCVideoFrame.h" |
- |
-#include "webrtc/base/checks.h" |
-#include "webrtc/base/optional.h" |
- |
-static const char kNV12FragmentShaderSource[] = |
- SHADER_VERSION |
- "precision mediump float;" |
- FRAGMENT_SHADER_IN " vec2 v_texcoord;\n" |
- "uniform lowp sampler2D s_textureY;\n" |
- "uniform lowp sampler2D s_textureUV;\n" |
- FRAGMENT_SHADER_OUT |
- "void main() {\n" |
- " mediump float y;\n" |
- " mediump vec2 uv;\n" |
- " y = " FRAGMENT_SHADER_TEXTURE "(s_textureY, v_texcoord).r;\n" |
- " uv = " FRAGMENT_SHADER_TEXTURE "(s_textureUV, v_texcoord).ra -\n" |
- " vec2(0.5, 0.5);\n" |
- " " FRAGMENT_SHADER_COLOR " = vec4(y + 1.403 * uv.y,\n" |
- " y - 0.344 * uv.x - 0.714 * uv.y,\n" |
- " y + 1.770 * uv.x,\n" |
- " 1.0);\n" |
- " }\n"; |
- |
-@implementation RTCNativeNV12Shader { |
- GLuint _vertexBuffer; |
- GLuint _nv12Program; |
- GLint _ySampler; |
- GLint _uvSampler; |
- CVOpenGLESTextureCacheRef _textureCache; |
- // Store current rotation and only upload new vertex data when rotation |
- // changes. |
- rtc::Optional<RTCVideoRotation> _currentRotation; |
-} |
-- (instancetype)initWithContext:(GlContextType *)context { |
- if (self = [super init]) { |
- if (![self setupNV12Program] || ![self setupTextureCacheWithContext:context] || |
- !RTCSetupVerticesForProgram(_nv12Program, &_vertexBuffer, nullptr)) { |
- RTCLog(@"Failed to initialize RTCNativeNV12Shader."); |
- self = nil; |
- } |
- } |
- return self; |
+@implementation RTCNV12TextureCache { |
+ CVOpenGLESTextureCacheRef _textureCache; |
+ CVOpenGLESTextureRef _yTexture; |
daniela-webrtc 2017/04/26 08:56:23: It's a bit weird that the ivar and the property have
magjed_webrtc 2017/04/26 13:14:41: I agree it's a bit weird, and I don't know the con
daniela-webrtc 2017/04/26 13:31:02: Yes, this is better. And yes, you are right exposi
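(The naming works out because the readonly properties return plain GLuint texture names, while the ivars own the backing CVOpenGLESTextureRef objects. The public header is not included in this diff; the following is an illustrative sketch of the interface implied by the implementation, not the verbatim header.)

#import <GLKit/GLKit.h>

@class RTCVideoFrame;

@interface RTCNV12TextureCache : NSObject

// GL texture names for the Y and UV planes of the most recently uploaded frame.
@property(nonatomic, readonly) GLuint yTexture;
@property(nonatomic, readonly) GLuint uvTexture;

- (instancetype)init NS_UNAVAILABLE;
- (instancetype)initWithContext:(EAGLContext *)context NS_DESIGNATED_INITIALIZER;

// Creates/updates the plane textures from the frame's CVPixelBuffer. Returns NO on failure.
- (BOOL)uploadFrameToTextures:(RTCVideoFrame *)frame;

// Releases the CoreVideo texture refs once the GL draw that samples them has been issued.
- (void)releaseTextures;

@end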
+ CVOpenGLESTextureRef _uvTexture; |
} |
-- (void)dealloc { |
- glDeleteProgram(_nv12Program); |
- glDeleteBuffers(1, &_vertexBuffer); |
- if (_textureCache) { |
- CFRelease(_textureCache); |
- _textureCache = nullptr; |
- } |
+- (GLuint)yTexture { |
+ return CVOpenGLESTextureGetName(_yTexture); |
} |
-- (BOOL)setupNV12Program { |
- _nv12Program = RTCCreateProgramFromFragmentSource(kNV12FragmentShaderSource); |
- if (!_nv12Program) { |
- return NO; |
- } |
- _ySampler = glGetUniformLocation(_nv12Program, "s_textureY"); |
- _uvSampler = glGetUniformLocation(_nv12Program, "s_textureUV"); |
- |
- return (_ySampler >= 0 && _uvSampler >= 0); |
+- (GLuint)uvTexture { |
+ return CVOpenGLESTextureGetName(_uvTexture); |
} |
-- (BOOL)setupTextureCacheWithContext:(GlContextType *)context { |
- CVReturn ret = CVOpenGLESTextureCacheCreate( |
- kCFAllocatorDefault, NULL, |
+- (instancetype)initWithContext:(EAGLContext *)context { |
+ if (self = [super init]) { |
+ CVReturn ret = CVOpenGLESTextureCacheCreate( |
+ kCFAllocatorDefault, NULL, |
#if COREVIDEO_USE_EAGLCONTEXT_CLASS_IN_API |
- context, |
+ context, |
#else |
- (__bridge void *)context, |
+ (__bridge void *)context, |
#endif |
- NULL, &_textureCache); |
- return ret == kCVReturnSuccess; |
+ NULL, &_textureCache); |
+ if (ret != kCVReturnSuccess) { |
+ self = nil; |
+ } |
+ } |
+ return self; |
} |
-- (BOOL)drawFrame:(RTCVideoFrame *)frame { |
+- (BOOL)uploadFrameToTextures:(RTCVideoFrame *)frame { |
CVPixelBufferRef pixelBuffer = frame.nativeHandle; |
- RTC_CHECK(pixelBuffer); |
- glUseProgram(_nv12Program); |
- const OSType pixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer); |
- RTC_CHECK(pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange || |
daniela-webrtc 2017/04/26 08:56:23: We no longer want to check the pixel format?
magjed_webrtc 2017/04/26 13:14:41: I did a straight-forward conversion to NSAssert (I
daniela-webrtc 2017/04/26 13:31:02: Acknowledged.
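(If the format check were kept, a direct NSAssert conversion of the removed RTC_CHECK could look like the snippet below; this is hypothetical, since the diff as written drops the check entirely.)

  OSType pixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer);
  NSAssert(pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange ||
               pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
           @"Unsupported native pixel format: %u", (unsigned)pixelFormat);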
- pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange) |
- << "Unsupported native pixel format: " << pixelFormat; |
+ NSParameterAssert(pixelBuffer); |
// Y-plane. |
const int lumaWidth = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0); |
const int lumaHeight = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0); |
- CVOpenGLESTextureRef lumaTexture = nullptr; |
- glActiveTexture(GL_TEXTURE0); |
- glUniform1i(_ySampler, 0); |
+ _yTexture = nil; |
CVReturn ret = CVOpenGLESTextureCacheCreateTextureFromImage( |
kCFAllocatorDefault, _textureCache, pixelBuffer, NULL, GL_TEXTURE_2D, |
RTC_PIXEL_FORMAT, lumaWidth, lumaHeight, RTC_PIXEL_FORMAT, |
- GL_UNSIGNED_BYTE, 0, &lumaTexture); |
+ GL_UNSIGNED_BYTE, 0, &_yTexture); |
if (ret != kCVReturnSuccess) { |
- CFRelease(lumaTexture); |
+ CFRelease(_yTexture); |
return NO; |
} |
- |
- RTC_CHECK_EQ(static_cast<GLenum>(GL_TEXTURE_2D), |
- CVOpenGLESTextureGetTarget(lumaTexture)); |
- glBindTexture(GL_TEXTURE_2D, CVOpenGLESTextureGetName(lumaTexture)); |
+ NSAssert(CVOpenGLESTextureGetTarget(_yTexture) == GL_TEXTURE_2D, |
+ @"Unexpected GLES texture target"); |
+ glBindTexture(GL_TEXTURE_2D, self.yTexture); |
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); |
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); |
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); |
@@ -133,39 +72,45 @@ static const char kNV12FragmentShaderSource[] = |
const int chromaWidth = CVPixelBufferGetWidthOfPlane(pixelBuffer, 1); |
const int chromeHeight = CVPixelBufferGetHeightOfPlane(pixelBuffer, 1); |
- CVOpenGLESTextureRef chromaTexture = nullptr; |
- glActiveTexture(GL_TEXTURE1); |
- glUniform1i(_uvSampler, 1); |
+ _uvTexture = nil; |
ret = CVOpenGLESTextureCacheCreateTextureFromImage( |
kCFAllocatorDefault, _textureCache, pixelBuffer, NULL, GL_TEXTURE_2D, |
GL_LUMINANCE_ALPHA, chromaWidth, chromeHeight, GL_LUMINANCE_ALPHA, |
- GL_UNSIGNED_BYTE, 1, &chromaTexture); |
+ GL_UNSIGNED_BYTE, 1, &_uvTexture); |
if (ret != kCVReturnSuccess) { |
- CFRelease(chromaTexture); |
- CFRelease(lumaTexture); |
+ CFRelease(_uvTexture); |
+ CFRelease(_yTexture); |
return NO; |
} |
- |
- RTC_CHECK_EQ(static_cast<GLenum>(GL_TEXTURE_2D), |
- CVOpenGLESTextureGetTarget(chromaTexture)); |
- glBindTexture(GL_TEXTURE_2D, CVOpenGLESTextureGetName(chromaTexture)); |
+ NSAssert(CVOpenGLESTextureGetTarget(_uvTexture) == GL_TEXTURE_2D, |
+ @"Unexpected GLES texture target"); |
+ glBindTexture(GL_TEXTURE_2D, self.uvTexture); |
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); |
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); |
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); |
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); |
- glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer); |
- if (!_currentRotation || frame.rotation != *_currentRotation) { |
- _currentRotation = rtc::Optional<RTCVideoRotation>(frame.rotation); |
- RTCSetVertexData(*_currentRotation); |
- } |
- glDrawArrays(GL_TRIANGLE_FAN, 0, 4); |
+ return YES; |
+} |
- CFRelease(chromaTexture); |
- CFRelease(lumaTexture); |
+- (void)releaseTextures { |
+ if (_uvTexture) { |
+ CFRelease(_uvTexture); |
+ _uvTexture = nil; |
+ } |
+ if (_yTexture) { |
+ CFRelease(_yTexture); |
+ _yTexture = nil; |
+ } |
+} |
- return YES; |
+- (void)dealloc { |
+ [self releaseTextures]; |
+ if (_textureCache) { |
+ CFRelease(_textureCache); |
+ _textureCache = nil; |
+ } |
} |
@end |
-#endif // TARGET_OS_IPHONE |
+ |
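For reference, a caller (e.g. the NV12 shader that used to own this logic) would be expected to drive the cache roughly as follows. This is an illustrative usage sketch only; glContext and frame stand in for whatever the renderer already has on hand.

  RTCNV12TextureCache *nv12Cache = [[RTCNV12TextureCache alloc] initWithContext:glContext];
  if ([nv12Cache uploadFrameToTextures:frame]) {
    // Bind the per-plane textures to the units the NV12 fragment shader samples from.
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, nv12Cache.yTexture);
    glActiveTexture(GL_TEXTURE1);
    glBindTexture(GL_TEXTURE_2D, nv12Cache.uvTexture);
    // ... issue the draw call ...
    // Drop the CoreVideo texture refs once the GL commands referencing them are queued.
    [nv12Cache releaseTextures];
  }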