| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright 2016 The WebRTC project authors. All Rights Reserved. | 2 * Copyright 2016 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| 11 #import "RTCShader.h" | 11 #import "RTCShader.h" |
| 12 | 12 |
| 13 // Native CVPixelBufferRef rendering is only supported on iPhone because it | 13 #import "RTCNV12TextureCache.h" |
| 14 // depends on CVOpenGLESTextureCacheCreate. | |
| 15 #if TARGET_OS_IPHONE | |
| 16 | |
| 17 #import <CoreVideo/CVOpenGLESTextureCache.h> | |
| 18 | |
| 19 #import "RTCShader+Private.h" | 14 #import "RTCShader+Private.h" |
| 20 #import "WebRTC/RTCLogging.h" | 15 #import "WebRTC/RTCLogging.h" |
| 21 #import "WebRTC/RTCVideoFrame.h" | 16 #import "WebRTC/RTCVideoFrame.h" |
| 22 | 17 |
| 23 #include "webrtc/base/checks.h" | 18 #include "webrtc/base/checks.h" |
| 24 #include "webrtc/base/optional.h" | 19 #include "webrtc/base/optional.h" |
| 25 | 20 |
| 26 static const char kNV12FragmentShaderSource[] = | 21 static const char kNV12FragmentShaderSource[] = |
| 27 SHADER_VERSION | 22 SHADER_VERSION |
| 28 "precision mediump float;" | 23 "precision mediump float;" |
| (...skipping 11 matching lines...) |
| 40 " y - 0.344 * uv.x - 0.714 * uv.y,\n" | 35 " y - 0.344 * uv.x - 0.714 * uv.y,\n" |
| 41 " y + 1.770 * uv.x,\n" | 36 " y + 1.770 * uv.x,\n" |
| 42 " 1.0);\n" | 37 " 1.0);\n" |
| 43 " }\n"; | 38 " }\n"; |
| 44 | 39 |
| 45 @implementation RTCNativeNV12Shader { | 40 @implementation RTCNativeNV12Shader { |
| 46 GLuint _vertexBuffer; | 41 GLuint _vertexBuffer; |
| 47 GLuint _nv12Program; | 42 GLuint _nv12Program; |
| 48 GLint _ySampler; | 43 GLint _ySampler; |
| 49 GLint _uvSampler; | 44 GLint _uvSampler; |
| 50 CVOpenGLESTextureCacheRef _textureCache; | 45 RTCNV12TextureCache *_textureCache; |
| 51 // Store current rotation and only upload new vertex data when rotation | 46 // Store current rotation and only upload new vertex data when rotation |
| 52 // changes. | 47 // changes. |
| 53 rtc::Optional<RTCVideoRotation> _currentRotation; | 48 rtc::Optional<RTCVideoRotation> _currentRotation; |
| 54 } | 49 } |
| 55 | 50 |
| 56 - (instancetype)initWithContext:(GlContextType *)context { | 51 - (instancetype)initWithContext:(GlContextType *)context { |
| 57 if (self = [super init]) { | 52 if (self = [super init]) { |
| 58 if (![self setupNV12Program] || ![self setupTextureCacheWithContext:context] || | 53 _textureCache = [[RTCNV12TextureCache alloc] initWithContext:context]; |
| | 54 if (!_textureCache || ![self setupNV12Program] || |
| 59 !RTCSetupVerticesForProgram(_nv12Program, &_vertexBuffer, nullptr)) { | 55 !RTCSetupVerticesForProgram(_nv12Program, &_vertexBuffer, nullptr)) { |
| 60 RTCLog(@"Failed to initialize RTCNativeNV12Shader."); | 56 RTCLog(@"Failed to initialize RTCNativeNV12Shader."); |
| 61 self = nil; | 57 self = nil; |
| 62 } | 58 } |
| 63 } | 59 } |
| 64 return self; | 60 return self; |
| 65 } | 61 } |
| 66 | 62 |
| 67 - (void)dealloc { | 63 - (void)dealloc { |
| 68 glDeleteProgram(_nv12Program); | 64 glDeleteProgram(_nv12Program); |
| 69 glDeleteBuffers(1, &_vertexBuffer); | 65 glDeleteBuffers(1, &_vertexBuffer); |
| 70 if (_textureCache) { | |
| 71 CFRelease(_textureCache); | |
| 72 _textureCache = nullptr; | |
| 73 } | |
| 74 } | 66 } |
| 75 | 67 |
| 76 - (BOOL)setupNV12Program { | 68 - (BOOL)setupNV12Program { |
| 77 _nv12Program = RTCCreateProgramFromFragmentSource(kNV12FragmentShaderSource); | 69 _nv12Program = RTCCreateProgramFromFragmentSource(kNV12FragmentShaderSource); |
| 78 if (!_nv12Program) { | 70 if (!_nv12Program) { |
| 79 return NO; | 71 return NO; |
| 80 } | 72 } |
| 81 _ySampler = glGetUniformLocation(_nv12Program, "s_textureY"); | 73 _ySampler = glGetUniformLocation(_nv12Program, "s_textureY"); |
| 82 _uvSampler = glGetUniformLocation(_nv12Program, "s_textureUV"); | 74 _uvSampler = glGetUniformLocation(_nv12Program, "s_textureUV"); |
| 83 | 75 |
| 84 return (_ySampler >= 0 && _uvSampler >= 0); | 76 return (_ySampler >= 0 && _uvSampler >= 0); |
| 85 } | 77 } |
| 86 | 78 |
| 87 - (BOOL)setupTextureCacheWithContext:(GlContextType *)context { | |
| 88 CVReturn ret = CVOpenGLESTextureCacheCreate( | |
| 89 kCFAllocatorDefault, NULL, | |
| 90 #if COREVIDEO_USE_EAGLCONTEXT_CLASS_IN_API | |
| 91 context, | |
| 92 #else | |
| 93 (__bridge void *)context, | |
| 94 #endif | |
| 95 NULL, &_textureCache); | |
| 96 return ret == kCVReturnSuccess; | |
| 97 } | |
| 98 | |
| 99 - (BOOL)drawFrame:(RTCVideoFrame *)frame { | 79 - (BOOL)drawFrame:(RTCVideoFrame *)frame { |
| 100 CVPixelBufferRef pixelBuffer = frame.nativeHandle; | |
| 101 RTC_CHECK(pixelBuffer); | |
| 102 glUseProgram(_nv12Program); | 80 glUseProgram(_nv12Program); |
| 103 const OSType pixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer); | 81 if (![_textureCache uploadFrameToTextures:frame]) { |
| 104 RTC_CHECK(pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange || | |
| 105 pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange) | |
| 106 << "Unsupported native pixel format: " << pixelFormat; | |
| 107 | |
| 108 // Y-plane. | |
| 109 const int lumaWidth = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0); | |
| 110 const int lumaHeight = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0); | |
| 111 | |
| 112 CVOpenGLESTextureRef lumaTexture = nullptr; | |
| 113 glActiveTexture(GL_TEXTURE0); | |
| 114 glUniform1i(_ySampler, 0); | |
| 115 CVReturn ret = CVOpenGLESTextureCacheCreateTextureFromImage( | |
| 116 kCFAllocatorDefault, _textureCache, pixelBuffer, NULL, GL_TEXTURE_2D, | |
| 117 RTC_PIXEL_FORMAT, lumaWidth, lumaHeight, RTC_PIXEL_FORMAT, | |
| 118 GL_UNSIGNED_BYTE, 0, &lumaTexture); | |
| 119 if (ret != kCVReturnSuccess) { | |
| 120 CFRelease(lumaTexture); | |
| 121 return NO; | 82 return NO; |
| 122 } | 83 } |
| 123 | 84 |
| 124 RTC_CHECK_EQ(static_cast<GLenum>(GL_TEXTURE_2D), | 85 // Y-plane. |
| 125 CVOpenGLESTextureGetTarget(lumaTexture)); | 86 glActiveTexture(GL_TEXTURE0); |
| 126 glBindTexture(GL_TEXTURE_2D, CVOpenGLESTextureGetName(lumaTexture)); | 87 glUniform1i(_ySampler, 0); |
| 127 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); | 88 glBindTexture(GL_TEXTURE_2D, _textureCache.yTexture); |
| 128 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); | |
| 129 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); | |
| 130 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); | |
| 131 | 89 |
| 132 // UV-plane. | 90 // UV-plane. |
| 133 const int chromaWidth = CVPixelBufferGetWidthOfPlane(pixelBuffer, 1); | |
| 134 const int chromeHeight = CVPixelBufferGetHeightOfPlane(pixelBuffer, 1); | |
| 135 | |
| 136 CVOpenGLESTextureRef chromaTexture = nullptr; | |
| 137 glActiveTexture(GL_TEXTURE1); | 91 glActiveTexture(GL_TEXTURE1); |
| 138 glUniform1i(_uvSampler, 1); | 92 glUniform1i(_uvSampler, 1); |
| 139 ret = CVOpenGLESTextureCacheCreateTextureFromImage( | 93 glBindTexture(GL_TEXTURE_2D, _textureCache.uvTexture); |
| 140 kCFAllocatorDefault, _textureCache, pixelBuffer, NULL, GL_TEXTURE_2D, | |
| 141 GL_LUMINANCE_ALPHA, chromaWidth, chromeHeight, GL_LUMINANCE_ALPHA, | |
| 142 GL_UNSIGNED_BYTE, 1, &chromaTexture); | |
| 143 if (ret != kCVReturnSuccess) { | |
| 144 CFRelease(chromaTexture); | |
| 145 CFRelease(lumaTexture); | |
| 146 return NO; | |
| 147 } | |
| 148 | |
| 149 RTC_CHECK_EQ(static_cast<GLenum>(GL_TEXTURE_2D), | |
| 150 CVOpenGLESTextureGetTarget(chromaTexture)); | |
| 151 glBindTexture(GL_TEXTURE_2D, CVOpenGLESTextureGetName(chromaTexture)); | |
| 152 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); | |
| 153 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); | |
| 154 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); | |
| 155 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); | |
| 156 | 94 |
| 157 glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer); | 95 glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer); |
| 158 if (!_currentRotation || frame.rotation != *_currentRotation) { | 96 if (!_currentRotation || frame.rotation != *_currentRotation) { |
| 159 _currentRotation = rtc::Optional<RTCVideoRotation>(frame.rotation); | 97 _currentRotation = rtc::Optional<RTCVideoRotation>(frame.rotation); |
| 160 RTCSetVertexData(*_currentRotation); | 98 RTCSetVertexData(*_currentRotation); |
| 161 } | 99 } |
| 162 glDrawArrays(GL_TRIANGLE_FAN, 0, 4); | 100 glDrawArrays(GL_TRIANGLE_FAN, 0, 4); |
| 163 | 101 |
| 164 CFRelease(chromaTexture); | 102 [_textureCache releaseTextures]; |
| 165 CFRelease(lumaTexture); | |
| 166 | 103 |
| 167 return YES; | 104 return YES; |
| 168 } | 105 } |
| 169 | 106 |
| 170 @end | 107 @end |
| 171 #endif // TARGET_OS_IPHONE | |
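
The rewritten shader delegates all CVPixelBufferRef / CVOpenGLESTextureCache handling to the new RTCNV12TextureCache class, which also lets this file drop the iPhone-only `#if TARGET_OS_IPHONE` guard and the manual CFRelease bookkeeping. Based purely on the calls visible in the NEW column (`initWithContext:`, `uploadFrameToTextures:`, `yTexture`, `uvTexture`, `releaseTextures`), its public interface is presumably close to the sketch below; the imports, context type, and comments are assumptions rather than the actual header.

```objc
// RTCNV12TextureCache.h -- sketch inferred from its usage in this diff.
// Method and property names come from the NEW column; everything else is assumed.
#import <Foundation/Foundation.h>
#import <OpenGLES/EAGL.h>
#import <OpenGLES/ES2/gl.h>

@class RTCVideoFrame;

@interface RTCNV12TextureCache : NSObject

// GL texture names for the luma (Y) and chroma (UV) planes of the last uploaded frame.
@property(nonatomic, readonly) GLuint yTexture;
@property(nonatomic, readonly) GLuint uvTexture;

// Presumably wraps a CVOpenGLESTextureCache created for the given GL context
// (EAGLContext assumed here; the shader passes along its GlContextType context).
- (instancetype)initWithContext:(EAGLContext *)context;

// Presumably creates Y and UV textures from the frame's native CVPixelBufferRef,
// returning NO if the upload fails.
- (BOOL)uploadFrameToTextures:(RTCVideoFrame *)frame;

// Releases the texture references once drawing is finished.
- (void)releaseTextures;

@end
```

From the caller's side the shader is driven as before, e.g. (hypothetical caller, not part of this CL):

```objc
RTCNativeNV12Shader *shader = [[RTCNativeNV12Shader alloc] initWithContext:glContext];
if (shader && ![shader drawFrame:videoFrame]) {
  RTCLog(@"Failed to draw NV12 frame.");
}
```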