Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(99)

Unified Diff: webrtc/sdk/objc/Framework/Classes/RTCOpenGLVideoRenderer.mm

Issue 2154243002: iOS: Add support for rendering native CVPixelBuffers directly (Closed) Base URL: https://chromium.googlesource.com/external/webrtc.git@master
Patch Set: Make CVOpenGLESTextures local variables Created 4 years, 5 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View side-by-side diff with in-line comments
Download patch
« no previous file with comments | « webrtc/sdk/objc/Framework/Classes/RTCEAGLVideoView.m ('k') | no next file » | no next file with comments »
Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
Index: webrtc/sdk/objc/Framework/Classes/RTCOpenGLVideoRenderer.mm
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCOpenGLVideoRenderer.mm b/webrtc/sdk/objc/Framework/Classes/RTCOpenGLVideoRenderer.mm
index 7d7b416b888843e4028d5068de1362c57fc5f383..5c082f0fb8bc0f1871d91cf6305896eaba022d0c 100644
--- a/webrtc/sdk/objc/Framework/Classes/RTCOpenGLVideoRenderer.mm
+++ b/webrtc/sdk/objc/Framework/Classes/RTCOpenGLVideoRenderer.mm
@@ -11,15 +11,19 @@
#import "RTCOpenGLVideoRenderer.h"
#if TARGET_OS_IPHONE
+#import <CoreVideo/CVOpenGLESTextureCache.h>
#import <OpenGLES/ES3/gl.h>
#else
#import <OpenGL/gl3.h>
#endif
-#include <string.h>
#include <memory>
+#include <string.h>
+#include <vector>
#import "WebRTC/RTCVideoFrame.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
// TODO(tkchin): check and log openGL errors. Methods here return BOOLs in
// anticipation of that happening in the future.
@@ -33,6 +37,8 @@
#define FRAGMENT_SHADER_OUT
#define FRAGMENT_SHADER_COLOR "gl_FragColor"
#define FRAGMENT_SHADER_TEXTURE "texture2D"
+
+typedef EAGLContext GlContextType;
#else
#define RTC_PIXEL_FORMAT GL_RED
#define SHADER_VERSION "#version 150\n"
@@ -42,8 +48,14 @@
#define FRAGMENT_SHADER_OUT "out vec4 fragColor;\n"
#define FRAGMENT_SHADER_COLOR "fragColor"
#define FRAGMENT_SHADER_TEXTURE "texture"
+
+typedef NSOpenGLContext GlContextType;
#endif
+@protocol Shader
tkchin_webrtc 2016/08/02 00:17:18 nit: everything in ObjC must be prefixed with the
+- (BOOL)drawFrame:(RTCVideoFrame*)frame;
tkchin_webrtc 2016/08/02 00:17:18 nit: (RTCVideoFrame *)
+@end
+
// Vertex shader doesn't do anything except pass coordinates through.
static const char kVertexShaderSource[] =
SHADER_VERSION
@@ -57,7 +69,7 @@ static const char kVertexShaderSource[] =
// Fragment shader converts YUV values from input textures into a final RGB
// pixel. The conversion formula is from http://www.fourcc.org/fccyvrgb.php.
-static const char kFragmentShaderSource[] =
+static const char kI420FragmentShaderSource[] =
SHADER_VERSION
"precision highp float;"
FRAGMENT_SHADER_IN " vec2 v_texcoord;\n"
@@ -90,6 +102,15 @@ GLuint CreateShader(GLenum type, const GLchar *source) {
GLint compileStatus = GL_FALSE;
glGetShaderiv(shader, GL_COMPILE_STATUS, &compileStatus);
if (compileStatus == GL_FALSE) {
+ GLint logLength = 0;
+ // The null termination character is included in the returned log length.
+ glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &logLength);
+ if (logLength > 0) {
+ std::unique_ptr<char[]> compileLog(new char[logLength]);
+ // The returned string is null terminated.
+ glGetShaderInfoLog(shader, logLength, NULL, compileLog.get());
+ LOG(LS_ERROR) << "Shader compile error: " << compileLog.get();
+ }
glDeleteShader(shader);
shader = 0;
}
@@ -118,6 +139,25 @@ GLuint CreateProgram(GLuint vertexShader, GLuint fragmentShader) {
return program;
}
+// Creates and links a shader program with the given fragment shader source and
+// a plain vertex shader. Returns the program handle or 0 on error.
+GLuint CreateProgramFromFragmentSource(const char fragmentShaderSource[]) {
+ GLuint vertexShader = CreateShader(GL_VERTEX_SHADER, kVertexShaderSource);
+ RTC_CHECK(vertexShader) << "failed to create vertex shader";
+ GLuint fragmentShader =
+ CreateShader(GL_FRAGMENT_SHADER, fragmentShaderSource);
+ RTC_CHECK(fragmentShader) << "failed to create fragment shader";
+ GLuint program = CreateProgram(vertexShader, fragmentShader);
+ // Shaders are created only to generate program.
+ if (vertexShader) {
+ glDeleteShader(vertexShader);
+ }
+ if (fragmentShader) {
+ glDeleteShader(fragmentShader);
+ }
+ return program;
+}
+
// When modelview and projection matrices are identity (default) the world is
// contained in the square around origin with unit size 2. Drawing to these
// coordinates is equivalent to drawing to the entire screen. The texture is
@@ -125,7 +165,7 @@ GLuint CreateProgram(GLuint vertexShader, GLuint fragmentShader) {
// from (0, 0) to (1, 1) inclusive. Texture coordinates are flipped vertically
// here because the incoming frame has origin in upper left hand corner but
// OpenGL expects origin in bottom left corner.
-const GLfloat gVertices[] = {
+static const GLfloat gVertices[] = {
// X, Y, U, V.
-1, -1, 0, 1, // Bottom left.
1, -1, 1, 1, // Bottom right.
@@ -133,6 +173,46 @@ const GLfloat gVertices[] = {
-1, 1, 0, 0, // Top left.
};
+// Set vertex shader variables 'position' and 'texcoord' in |program| to the
+// |gVertices| array above. It will use |vertexBuffer| and |vertexArray| to
+// store the vertex data.
+BOOL setupVerticesForProgram(GLuint program, GLuint* vertexBuffer, GLuint* vertexArray) {
+ GLint position = glGetAttribLocation(program, "position");
+ GLint texcoord = glGetAttribLocation(program, "texcoord");
+ if (position < 0 || texcoord < 0) {
+ return NO;
+ }
+#if !TARGET_OS_IPHONE
+ glGenVertexArrays(1, vertexArray);
+ if (*vertexArray == 0) {
+ return NO;
+ }
+ glBindVertexArray(*vertexArray);
+#endif
+ glGenBuffers(1, vertexBuffer);
+ if (*vertexBuffer == 0) {
+ return NO;
+ }
+ glBindBuffer(GL_ARRAY_BUFFER, *vertexBuffer);
+ glBufferData(GL_ARRAY_BUFFER, sizeof(gVertices), gVertices, GL_DYNAMIC_DRAW);
+
+ // Read position attribute from |gVertices| with size of 2 and stride of 4
+ // beginning at the start of the array. The last argument indicates offset
+ // of data within |gVertices| as supplied to the vertex buffer.
+ glVertexAttribPointer(position, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat),
+ (void*)0);
tkchin_webrtc 2016/08/02 00:17:18 nit: (void *)0 since we're in an ObjC++ file we ca
+ glEnableVertexAttribArray(position);
+
+ // Read texcoord attribute from |gVertices| with size of 2 and stride of 4
+ // beginning at the first texcoord in the array. The last argument indicates
+ // offset of data within |gVertices| as supplied to the vertex buffer.
+ glVertexAttribPointer(texcoord, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat),
+ (void*)(2 * sizeof(GLfloat)));
tkchin_webrtc 2016/08/02 00:17:19 ditto
+ glEnableVertexAttribArray(texcoord);
+
+ return YES;
+}
+
// |kNumTextures| must not exceed 8, which is the limit in OpenGLES2. Two sets
// of 3 textures are used here, one for each of the Y, U and V planes. Having
// two sets alleviates CPU blockage in the event that the GPU is asked to render
@@ -140,155 +220,68 @@ const GLfloat gVertices[] = {
static const GLsizei kNumTextureSets = 2;
static const GLsizei kNumTextures = 3 * kNumTextureSets;
-@implementation RTCOpenGLVideoRenderer {
-#if TARGET_OS_IPHONE
- EAGLContext *_context;
-#else
- NSOpenGLContext *_context;
-#endif
- BOOL _isInitialized;
+// Shader for non-native I420 frames.
+@interface I420Shader : NSObject<Shader>
tkchin_webrtc 2016/08/02 00:17:18 nit: style is NSObject <Shader> together with pref
+- (BOOL)drawFrame:(RTCVideoFrame*)frame;
tkchin_webrtc 2016/08/02 00:17:19 don't need to redeclare methods from protocol
+@end
+
+@implementation I420Shader {
tkchin_webrtc 2016/08/02 00:17:17 Can we place the various shaders in their own file
+ BOOL _hasUnpackRowLength;
GLint _currentTextureSet;
// Handles for OpenGL constructs.
GLuint _textures[kNumTextures];
- GLuint _program;
-#if !TARGET_OS_IPHONE
+ GLuint _i420Program;
GLuint _vertexArray;
-#endif
GLuint _vertexBuffer;
- GLint _position;
- GLint _texcoord;
GLint _ySampler;
GLint _uSampler;
GLint _vSampler;
// Used to create a non-padded plane for GPU upload when we receive padded
// frames.
- std::unique_ptr<uint8_t[]> _planeBuffer;
-}
-
-@synthesize lastDrawnFrame = _lastDrawnFrame;
-
-+ (void)initialize {
- // Disable dithering for performance.
- glDisable(GL_DITHER);
+ std::vector<uint8_t> _planeBuffer;
}
+- (instancetype)initWithContext:(GlContextType *)context {
+ if (self = [super init]) {
#if TARGET_OS_IPHONE
-- (instancetype)initWithContext:(EAGLContext *)context {
+ _hasUnpackRowLength = (context.API == kEAGLRenderingAPIOpenGLES3);
#else
-- (instancetype)initWithContext:(NSOpenGLContext *)context {
+ _hasUnpackRowLength = YES;
#endif
- NSAssert(context != nil, @"context cannot be nil");
- if (self = [super init]) {
- _context = context;
- }
- return self;
-}
-
-- (BOOL)drawFrame:(RTCVideoFrame *)frame {
- if (!_isInitialized) {
- return NO;
- }
- if (_lastDrawnFrame == frame) {
- return NO;
- }
- [self ensureGLContext];
- glClear(GL_COLOR_BUFFER_BIT);
- if (frame) {
- if (![self updateTextureSizesForFrame:frame] ||
- ![self updateTextureDataForFrame:frame]) {
- return NO;
+ glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
+ if (![self setupI420Program] || ![self setupTextures] ||
+ !setupVerticesForProgram(_i420Program, &_vertexBuffer, &_vertexArray)) {
+ self = nil;
}
-#if !TARGET_OS_IPHONE
- glBindVertexArray(_vertexArray);
-#endif
- glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
- glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
}
-#if !TARGET_OS_IPHONE
- [_context flushBuffer];
-#endif
- _lastDrawnFrame = frame;
- return YES;
+ return self;
}
-- (void)setupGL {
- if (_isInitialized) {
- return;
+- (void)dealloc {
+ if (_i420Program != 0) {
+ glDeleteProgram(_i420Program);
}
- [self ensureGLContext];
- if (![self setupProgram]) {
- return;
- }
- if (![self setupTextures]) {
- return;
- }
- if (![self setupVertices]) {
- return;
+ if (_textures[0] != 0) {
+ glDeleteTextures(kNumTextures, _textures);
}
- glUseProgram(_program);
- glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
- _isInitialized = YES;
-}
-
-- (void)teardownGL {
- if (!_isInitialized) {
- return;
+ if (_vertexBuffer != 0) {
+ glDeleteBuffers(1, &_vertexBuffer);
}
- [self ensureGLContext];
- glDeleteProgram(_program);
- _program = 0;
- glDeleteTextures(kNumTextures, _textures);
- glDeleteBuffers(1, &_vertexBuffer);
- _vertexBuffer = 0;
-#if !TARGET_OS_IPHONE
- glDeleteVertexArrays(1, &_vertexArray);
-#endif
- _isInitialized = NO;
-}
-
-#pragma mark - Private
-
-- (void)ensureGLContext {
- NSAssert(_context, @"context shouldn't be nil");
-#if TARGET_OS_IPHONE
- if ([EAGLContext currentContext] != _context) {
- [EAGLContext setCurrentContext:_context];
- }
-#else
- if ([NSOpenGLContext currentContext] != _context) {
- [_context makeCurrentContext];
+ if (_vertexArray != 0) {
+ glDeleteVertexArrays(1, &_vertexArray);
}
-#endif
}
-- (BOOL)setupProgram {
- NSAssert(!_program, @"program already set up");
- GLuint vertexShader = CreateShader(GL_VERTEX_SHADER, kVertexShaderSource);
- NSAssert(vertexShader, @"failed to create vertex shader");
- GLuint fragmentShader =
- CreateShader(GL_FRAGMENT_SHADER, kFragmentShaderSource);
- NSAssert(fragmentShader, @"failed to create fragment shader");
- _program = CreateProgram(vertexShader, fragmentShader);
- // Shaders are created only to generate program.
- if (vertexShader) {
- glDeleteShader(vertexShader);
- }
- if (fragmentShader) {
- glDeleteShader(fragmentShader);
- }
- if (!_program) {
- return NO;
- }
- _position = glGetAttribLocation(_program, "position");
- _texcoord = glGetAttribLocation(_program, "texcoord");
- _ySampler = glGetUniformLocation(_program, "s_textureY");
- _uSampler = glGetUniformLocation(_program, "s_textureU");
- _vSampler = glGetUniformLocation(_program, "s_textureV");
- if (_position < 0 || _texcoord < 0 || _ySampler < 0 || _uSampler < 0 ||
- _vSampler < 0) {
+- (BOOL)setupI420Program {
+ _i420Program = CreateProgramFromFragmentSource(kI420FragmentShaderSource);
+ if (!_i420Program) {
return NO;
}
- return YES;
+ _ySampler = glGetUniformLocation(_i420Program, "s_textureY");
+ _uSampler = glGetUniformLocation(_i420Program, "s_textureU");
+ _vSampler = glGetUniformLocation(_i420Program, "s_textureV");
+
+ return (_ySampler >= 0 && _uSampler >= 0 && _vSampler >= 0);
tkchin_webrtc 2016/08/02 00:17:19 these ivars are init-ed to zero by default after i
}
- (BOOL)setupTextures {
@@ -305,56 +298,17 @@ static const GLsizei kNumTextures = 3 * kNumTextureSets;
return YES;
}
-- (BOOL)updateTextureSizesForFrame:(RTCVideoFrame *)frame {
- if (frame.height == _lastDrawnFrame.height &&
- frame.width == _lastDrawnFrame.width &&
- frame.chromaWidth == _lastDrawnFrame.chromaWidth &&
- frame.chromaHeight == _lastDrawnFrame.chromaHeight) {
- return YES;
- }
- GLsizei lumaWidth = static_cast<GLsizei>(frame.width);
- GLsizei lumaHeight = static_cast<GLsizei>(frame.height);
- GLsizei chromaWidth = static_cast<GLsizei>(frame.chromaWidth);
- GLsizei chromaHeight = static_cast<GLsizei>(frame.chromaHeight);
- for (GLint i = 0; i < kNumTextureSets; i++) {
- glActiveTexture(GL_TEXTURE0 + i * 3);
- glTexImage2D(GL_TEXTURE_2D,
- 0,
- RTC_PIXEL_FORMAT,
- lumaWidth,
- lumaHeight,
- 0,
- RTC_PIXEL_FORMAT,
- GL_UNSIGNED_BYTE,
- 0);
- glActiveTexture(GL_TEXTURE0 + i * 3 + 1);
- glTexImage2D(GL_TEXTURE_2D,
- 0,
- RTC_PIXEL_FORMAT,
- chromaWidth,
- chromaHeight,
- 0,
- RTC_PIXEL_FORMAT,
- GL_UNSIGNED_BYTE,
- 0);
- glActiveTexture(GL_TEXTURE0 + i * 3 + 2);
- glTexImage2D(GL_TEXTURE_2D,
- 0,
- RTC_PIXEL_FORMAT,
- chromaWidth,
- chromaHeight,
- 0,
- RTC_PIXEL_FORMAT,
- GL_UNSIGNED_BYTE,
- 0);
- }
- if ((NSUInteger)frame.yPitch != frame.width ||
- (NSUInteger)frame.uPitch != frame.chromaWidth ||
- (NSUInteger)frame.vPitch != frame.chromaWidth) {
- _planeBuffer.reset(new uint8_t[frame.width * frame.height]);
- } else {
- _planeBuffer.reset();
+- (BOOL)drawFrame:(RTCVideoFrame*)frame {
+ glUseProgram(_i420Program);
+ if (![self updateTextureDataForFrame:frame]) {
+ return NO;
}
+#if !TARGET_OS_IPHONE
+ glBindVertexArray(_vertexArray);
+#endif
+ glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
+ glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
+
return YES;
}
@@ -365,17 +319,14 @@ static const GLsizei kNumTextures = 3 * kNumTextureSets;
height:(size_t)height
stride:(int32_t)stride {
glActiveTexture(static_cast<GLenum>(GL_TEXTURE0 + offset));
+ glBindTexture(GL_TEXTURE_2D, _textures[offset]);
+
// When setting texture sampler uniforms, the texture index is used not
// the texture handle.
glUniform1i(sampler, offset);
-#if TARGET_OS_IPHONE
- BOOL hasUnpackRowLength = _context.API == kEAGLRenderingAPIOpenGLES3;
-#else
- BOOL hasUnpackRowLength = YES;
-#endif
const uint8_t *uploadPlane = plane;
if ((size_t)stride != width) {
- if (hasUnpackRowLength) {
+ if (_hasUnpackRowLength) {
// GLES3 allows us to specify stride.
glPixelStorei(GL_UNPACK_ROW_LENGTH, stride);
glTexImage2D(GL_TEXTURE_2D,
@@ -392,7 +343,7 @@ static const GLsizei kNumTextures = 3 * kNumTextureSets;
} else {
// Make an unpadded copy and upload that instead. Quick profiling showed
// that this is faster than uploading row by row using glTexSubImage2D.
- uint8_t *unpaddedPlane = _planeBuffer.get();
+ uint8_t *unpaddedPlane = _planeBuffer.data();
for (size_t y = 0; y < height; ++y) {
memcpy(unpaddedPlane + y * width, plane + y * stride, width);
}
@@ -414,6 +365,12 @@ static const GLsizei kNumTextures = 3 * kNumTextureSets;
GLint textureOffset = _currentTextureSet * 3;
NSAssert(textureOffset + 3 <= kNumTextures, @"invalid offset");
+ if (frame.yPitch != static_cast<int32_t>(frame.width) ||
+ frame.uPitch != static_cast<int32_t>(frame.chromaWidth) ||
+ frame.vPitch != static_cast<int32_t>(frame.chromaWidth)) {
+ _planeBuffer.resize(frame.width * frame.height);
+ }
+
[self uploadPlane:frame.yPlane
sampler:_ySampler
offset:textureOffset
@@ -439,46 +396,246 @@ static const GLsizei kNumTextures = 3 * kNumTextureSets;
return YES;
}
-- (BOOL)setupVertices {
-#if !TARGET_OS_IPHONE
- NSAssert(!_vertexArray, @"vertex array already set up");
- glGenVertexArrays(1, &_vertexArray);
- if (!_vertexArray) {
+@end
+
+// Native CVPixelBufferRef rendering is only supported on iPhone because it
+// depends on CVOpenGLESTextureCacheCreate.
+#if TARGET_OS_IPHONE
+static const char kNV12FragmentShaderSource[] =
+ SHADER_VERSION
+ "precision mediump float;"
+ FRAGMENT_SHADER_IN " vec2 v_texcoord;\n"
+ "uniform lowp sampler2D s_textureY;\n"
+ "uniform lowp sampler2D s_textureUV;\n"
+ FRAGMENT_SHADER_OUT
+ "void main() {\n"
+ " mediump float y;\n"
+ " mediump vec2 uv;\n"
+ " y = " FRAGMENT_SHADER_TEXTURE "(s_textureY, v_texcoord).r;\n"
+ " uv = " FRAGMENT_SHADER_TEXTURE "(s_textureUV, v_texcoord).ra -\n"
+ " vec2(0.5, 0.5);\n"
+ " " FRAGMENT_SHADER_COLOR " = vec4(y + 1.403 * uv.y,\n"
+ " y - 0.344 * uv.x - 0.714 * uv.y,\n"
+ " y + 1.770 * uv.x,\n"
+ " 1.0);\n"
+ " }\n";
+
+// Shader for native NV12 frames.
+@interface NativeNV12Shader : NSObject<Shader>
+- (BOOL)drawFrame:(RTCVideoFrame*)frame;
+@end
+
+@implementation NativeNV12Shader {
+ GLuint _vertexBuffer;
+ GLuint _nv12Program;
+ GLint _ySampler;
+ GLint _uvSampler;
+ CVOpenGLESTextureCacheRef _textureCache;
+}
+
+- (instancetype)initWithContext:(GlContextType *)context {
+ if (self = [super init]) {
+ if (![self setupNV12Program] || ![self setupTextureCacheWithContext:context] ||
+ !setupVerticesForProgram(_nv12Program, &_vertexBuffer, nullptr)) {
+ self = nil;
+ }
+ }
+ return self;
+}
+
+- (void)dealloc {
+ if (_nv12Program != 0) {
+ glDeleteProgram(_nv12Program);
+ }
+ if (_vertexBuffer != 0) {
+ glDeleteBuffers(1, &_vertexBuffer);
+ }
+ if (_textureCache) {
+ CFRelease(_textureCache);
tkchin_webrtc 2016/08/02 00:17:18 nit: assign nullptr after release
+ }
+}
+
+- (BOOL)setupNV12Program {
+ _nv12Program = CreateProgramFromFragmentSource(kNV12FragmentShaderSource);
+ if (!_nv12Program) {
return NO;
}
- glBindVertexArray(_vertexArray);
-#endif
- NSAssert(!_vertexBuffer, @"vertex buffer already set up");
- glGenBuffers(1, &_vertexBuffer);
- if (!_vertexBuffer) {
-#if !TARGET_OS_IPHONE
- glDeleteVertexArrays(1, &_vertexArray);
- _vertexArray = 0;
+ _ySampler = glGetUniformLocation(_nv12Program, "s_textureY");
+ _uvSampler = glGetUniformLocation(_nv12Program, "s_textureUV");
+
+ return (_ySampler >= 0 && _uvSampler >= 0);
+}
+
+- (BOOL)setupTextureCacheWithContext:(GlContextType *)context {
+ CVReturn err = CVOpenGLESTextureCacheCreate(
+ kCFAllocatorDefault, NULL,
+#if COREVIDEO_USE_EAGLCONTEXT_CLASS_IN_API
+ context,
+#else
+ (__bridge void*)context,
tkchin_webrtc 2016/08/02 00:17:17 nit: void *)
#endif
+ NULL, &_textureCache);
+ return !err;
tkchin_webrtc 2016/08/02 00:17:17 err == noErr or CVReturnTrue ? I remember there wa
+}
+
+- (BOOL)drawFrame:(RTCVideoFrame*)frame {
tkchin_webrtc 2016/08/02 00:17:17 nit: RTCVideoFrame *)
+ CVPixelBufferRef pixelBuffer = frame.nativeHandle;
+ RTC_CHECK(pixelBuffer);
+ glUseProgram(_nv12Program);
+ const OSType pixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer);
+ RTC_CHECK(pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange ||
+ pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)
+ << "Unsupported native pixel format: " << pixelFormat;
+
+ // Y-plane.
+ const int luma_width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0);
tkchin_webrtc 2016/08/02 00:17:18 nit: camel case instead of underscore for ObjC fun
+ const int luma_height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0);
+
+ CVOpenGLESTextureRef lumaTexture;
tkchin_webrtc 2016/08/02 00:17:18 nit: init to nullptr
+ glActiveTexture(GL_TEXTURE0);
+ glUniform1i(_ySampler, 0);
+ CVReturn err = CVOpenGLESTextureCacheCreateTextureFromImage(
+ kCFAllocatorDefault, _textureCache, pixelBuffer, NULL, GL_TEXTURE_2D,
+ RTC_PIXEL_FORMAT, luma_width, luma_height, RTC_PIXEL_FORMAT,
+ GL_UNSIGNED_BYTE, 0, &lumaTexture);
+ if (err) {
+ CFRelease(lumaTexture);
+ return NO;
+ }
+
+ RTC_CHECK_EQ(static_cast<GLenum>(GL_TEXTURE_2D),
+ CVOpenGLESTextureGetTarget(lumaTexture));
+ glBindTexture(GL_TEXTURE_2D, CVOpenGLESTextureGetName(lumaTexture));
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+
+ // UV-plane.
+ const int chroma_width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 1);
tkchin_webrtc 2016/08/02 00:17:19 ditto chromaWidth / chromaHeight
+ const int chroma_height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 1);
+
+ CVOpenGLESTextureRef chromaTexture;
+ glActiveTexture(GL_TEXTURE1);
+ glUniform1i(_uvSampler, 1);
+ err = CVOpenGLESTextureCacheCreateTextureFromImage(
+ kCFAllocatorDefault, _textureCache, pixelBuffer, NULL, GL_TEXTURE_2D,
+ GL_LUMINANCE_ALPHA, chroma_width, chroma_height, GL_LUMINANCE_ALPHA,
+ GL_UNSIGNED_BYTE, 1, &chromaTexture);
+ if (err) {
+ CFRelease(chromaTexture);
+ CFRelease(lumaTexture);
return NO;
}
+
+ RTC_CHECK_EQ(static_cast<GLenum>(GL_TEXTURE_2D),
+ CVOpenGLESTextureGetTarget(chromaTexture));
+ glBindTexture(GL_TEXTURE_2D, CVOpenGLESTextureGetName(chromaTexture));
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+
glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
- glBufferData(GL_ARRAY_BUFFER, sizeof(gVertices), gVertices, GL_DYNAMIC_DRAW);
+ glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
- // Read position attribute from |gVertices| with size of 2 and stride of 4
- // beginning at the start of the array. The last argument indicates offset
- // of data within |gVertices| as supplied to the vertex buffer.
- glVertexAttribPointer(
- _position, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat), (void *)0);
- glEnableVertexAttribArray(_position);
+ CFRelease(chromaTexture);
+ CFRelease(lumaTexture);
- // Read texcoord attribute from |gVertices| with size of 2 and stride of 4
- // beginning at the first texcoord in the array. The last argument indicates
- // offset of data within |gVertices| as supplied to the vertex buffer.
- glVertexAttribPointer(_texcoord,
- 2,
- GL_FLOAT,
- GL_FALSE,
- 4 * sizeof(GLfloat),
- (void *)(2 * sizeof(GLfloat)));
- glEnableVertexAttribArray(_texcoord);
+ return YES;
+}
+
+@end
+#endif // TARGET_OS_IPHONE
+
+@implementation RTCOpenGLVideoRenderer {
+ GlContextType* _context;
tkchin_webrtc 2016/08/02 00:17:19 nit: GLContextType *_context
+ BOOL _isInitialized;
+ NSObject<Shader>* _i420Shader;
tkchin_webrtc 2016/08/02 00:17:17 typically we declare id instead of saying that it
+ NSObject<Shader>* _nv12Shader;
+}
+
+@synthesize lastDrawnFrame = _lastDrawnFrame;
+
++ (void)initialize {
+ // Disable dithering for performance.
+ glDisable(GL_DITHER);
+}
+
+- (instancetype)initWithContext:(GlContextType *)context {
+ NSAssert(context != nil, @"context cannot be nil");
+ if (self = [super init]) {
+ _context = context;
+ }
+ return self;
+}
+
+- (BOOL)drawFrame:(RTCVideoFrame*)frame {
+ if (!_isInitialized || !frame || frame == _lastDrawnFrame) {
+ return NO;
+ }
+ [self ensureGLContext];
+ glClear(GL_COLOR_BUFFER_BIT);
+ NSObject<Shader>* shader;
tkchin_webrtc 2016/08/02 00:17:19 id<RTCShader> shader = nil;
+#if TARGET_OS_IPHONE
+ if (frame.nativeHandle) {
+ if (!_nv12Shader) {
+ _nv12Shader = [[NativeNV12Shader alloc] initWithContext:_context];
+ }
+ shader = _nv12Shader;
+#else
+ // Rendering native CVPixelBuffer is not supported on OS X.
+ if (false) {
+#endif
+ } else {
+ if (!_i420Shader) {
+ _i420Shader = [[I420Shader alloc] initWithContext:_context];
+ }
+ shader = _i420Shader;
+ }
+ if (!shader || ![shader drawFrame:frame]) {
+ return NO;
+ }
+
+#if !TARGET_OS_IPHONE
+ [_context flushBuffer];
+#endif
+ _lastDrawnFrame = frame;
return YES;
}
+- (void)setupGL {
+ if (_isInitialized) {
+ return;
+ }
+ [self ensureGLContext];
+ _isInitialized = YES;
+}
+
+- (void)teardownGL {
+ if (!_isInitialized) {
+ return;
+ }
+ [self ensureGLContext];
+ _i420Shader = nil;
+ _nv12Shader = nil;
+ _isInitialized = NO;
+}
+
+#pragma mark - Private
+
+- (void)ensureGLContext {
+ NSAssert(_context, @"context shouldn't be nil");
+#if TARGET_OS_IPHONE
+ if ([EAGLContext currentContext] != _context) {
+ [EAGLContext setCurrentContext:_context];
+ }
+#else
+ if ([NSOpenGLContext currentContext] != _context) {
+ [_context makeCurrentContext];
+ }
+#endif
+}
+
@end
« no previous file with comments | « webrtc/sdk/objc/Framework/Classes/RTCEAGLVideoView.m ('k') | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698