| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright 2016 The WebRTC project authors. All Rights Reserved. | 2 * Copyright 2016 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| 11 #import "RTCShader.h" | 11 #import "RTCShader.h" |
| 12 | 12 |
| 13 #include <vector> | 13 #include <vector> |
| 14 | 14 |
| 15 #import "RTCShader+Private.h" | 15 #import "RTCShader+Private.h" |
| 16 #import "WebRTC/RTCVideoFrame.h" | 16 #import "WebRTC/RTCVideoFrame.h" |
| 17 | 17 |
| 18 #include "webrtc/api/video/video_rotation.h" | |
| 19 #include "webrtc/base/optional.h" | 18 #include "webrtc/base/optional.h" |
| 20 | 19 |
| 21 // |kNumTextures| must not exceed 8, which is the limit in OpenGLES2. Two sets | 20 // |kNumTextures| must not exceed 8, which is the limit in OpenGLES2. Two sets |
| 22 // of 3 textures are used here, one for each of the Y, U and V planes. Having | 21 // of 3 textures are used here, one for each of the Y, U and V planes. Having |
| 23 // two sets alleviates CPU blockage in the event that the GPU is asked to render | 22 // two sets alleviates CPU blockage in the event that the GPU is asked to render |
| 24 // to a texture that is already in use. | 23 // to a texture that is already in use. |
| 25 static const GLsizei kNumTextureSets = 2; | 24 static const GLsizei kNumTextureSets = 2; |
| 26 static const GLsizei kNumTexturesPerSet = 3; | 25 static const GLsizei kNumTexturesPerSet = 3; |
| 27 static const GLsizei kNumTextures = kNumTexturesPerSet * kNumTextureSets; | 26 static const GLsizei kNumTextures = kNumTexturesPerSet * kNumTextureSets; |
| 28 | 27 |
| (...skipping 26 matching lines...) | |
| 55 // Handles for OpenGL constructs. | 54 // Handles for OpenGL constructs. |
| 56 GLuint _textures[kNumTextures]; | 55 GLuint _textures[kNumTextures]; |
| 57 GLuint _i420Program; | 56 GLuint _i420Program; |
| 58 GLuint _vertexArray; | 57 GLuint _vertexArray; |
| 59 GLuint _vertexBuffer; | 58 GLuint _vertexBuffer; |
| 60 GLint _ySampler; | 59 GLint _ySampler; |
| 61 GLint _uSampler; | 60 GLint _uSampler; |
| 62 GLint _vSampler; | 61 GLint _vSampler; |
| 63 // Store current rotation and only upload new vertex data when rotation | 62 // Store current rotation and only upload new vertex data when rotation |
| 64 // changes. | 63 // changes. |
| 65 rtc::Optional<webrtc::VideoRotation> _currentRotation; | 64 rtc::Optional<RTCVideoRotation> _currentRotation; |
| 66 // Used to create a non-padded plane for GPU upload when we receive padded | 65 // Used to create a non-padded plane for GPU upload when we receive padded |
| 67 // frames. | 66 // frames. |
| 68 std::vector<uint8_t> _planeBuffer; | 67 std::vector<uint8_t> _planeBuffer; |
| 69 } | 68 } |
| 70 | 69 |
| 71 - (instancetype)initWithContext:(GlContextType *)context { | 70 - (instancetype)initWithContext:(GlContextType *)context { |
| 72 if (self = [super init]) { | 71 if (self = [super init]) { |
| 73 #if TARGET_OS_IPHONE | 72 #if TARGET_OS_IPHONE |
| 74 _hasUnpackRowLength = (context.API == kEAGLRenderingAPIOpenGLES3); | 73 _hasUnpackRowLength = (context.API == kEAGLRenderingAPIOpenGLES3); |
| 75 #else | 74 #else |
| (...skipping 43 matching lines...) | |
| 119 - (BOOL)drawFrame:(RTCVideoFrame*)frame { | 118 - (BOOL)drawFrame:(RTCVideoFrame*)frame { |
| 120 glUseProgram(_i420Program); | 119 glUseProgram(_i420Program); |
| 121 if (![self updateTextureDataForFrame:frame]) { | 120 if (![self updateTextureDataForFrame:frame]) { |
| 122 return NO; | 121 return NO; |
| 123 } | 122 } |
| 124 #if !TARGET_OS_IPHONE | 123 #if !TARGET_OS_IPHONE |
| 125 glBindVertexArray(_vertexArray); | 124 glBindVertexArray(_vertexArray); |
| 126 #endif | 125 #endif |
| 127 glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer); | 126 glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer); |
| 128 if (!_currentRotation || frame.rotation != *_currentRotation) { | 127 if (!_currentRotation || frame.rotation != *_currentRotation) { |
| 129 _currentRotation = rtc::Optional<webrtc::VideoRotation>( | 128 _currentRotation = rtc::Optional<RTCVideoRotation>(frame.rotation); |
| 130 static_cast<webrtc::VideoRotation>(frame.rotation)); | |
| 131 RTCSetVertexData(*_currentRotation); | 129 RTCSetVertexData(*_currentRotation); |
| 132 } | 130 } |
| 133 glDrawArrays(GL_TRIANGLE_FAN, 0, 4); | 131 glDrawArrays(GL_TRIANGLE_FAN, 0, 4); |
| 134 | 132 |
| 135 return YES; | 133 return YES; |
| 136 } | 134 } |
| 137 | 135 |
| 138 - (void)uploadPlane:(const uint8_t *)plane | 136 - (void)uploadPlane:(const uint8_t *)plane |
| 139 sampler:(GLint)sampler | 137 sampler:(GLint)sampler |
| 140 offset:(GLint)offset | 138 offset:(GLint)offset |
| (...skipping 40 matching lines...) | |
| 181 0, | 179 0, |
| 182 RTC_PIXEL_FORMAT, | 180 RTC_PIXEL_FORMAT, |
| 183 GL_UNSIGNED_BYTE, | 181 GL_UNSIGNED_BYTE, |
| 184 uploadPlane); | 182 uploadPlane); |
| 185 } | 183 } |
| 186 | 184 |
| 187 - (BOOL)updateTextureDataForFrame:(RTCVideoFrame *)frame { | 185 - (BOOL)updateTextureDataForFrame:(RTCVideoFrame *)frame { |
| 188 GLint textureOffset = _currentTextureSet * 3; | 186 GLint textureOffset = _currentTextureSet * 3; |
| 189 NSAssert(textureOffset + 3 <= kNumTextures, @"invalid offset"); | 187 NSAssert(textureOffset + 3 <= kNumTextures, @"invalid offset"); |
| 190 | 188 |
| 191 if (frame.yPitch != static_cast<int32_t>(frame.width) || | 189 const int chromaWidth = (frame.width + 1) / 2; |
| 192 frame.uPitch != static_cast<int32_t>(frame.chromaWidth) || | 190 const int chromaHeight = (frame.height + 1) / 2; |
| 193 frame.vPitch != static_cast<int32_t>(frame.chromaWidth)) { | 191 if (frame.strideY != frame.width || |
| 192 frame.strideU != chromaWidth || |
| 193 frame.strideV != chromaWidth) { |
| 194 _planeBuffer.resize(frame.width * frame.height); | 194 _planeBuffer.resize(frame.width * frame.height); |
| 195 } | 195 } |
| 196 | 196 |
| 197 [self uploadPlane:frame.yPlane | 197 [self uploadPlane:frame.dataY |
| 198 sampler:_ySampler | 198 sampler:_ySampler |
| 199 offset:textureOffset | 199 offset:textureOffset |
| 200 width:frame.width | 200 width:frame.width |
| 201 height:frame.height | 201 height:frame.height |
| 202 stride:frame.yPitch]; | 202 stride:frame.strideY]; |
| 203 | 203 |
| 204 [self uploadPlane:frame.uPlane | 204 [self uploadPlane:frame.dataU |
| 205 sampler:_uSampler | 205 sampler:_uSampler |
| 206 offset:textureOffset + 1 | 206 offset:textureOffset + 1 |
| 207 width:frame.chromaWidth | 207 width:chromaWidth |
| 208 height:frame.chromaHeight | 208 height:chromaHeight |
| 209 stride:frame.uPitch]; | 209 stride:frame.strideU]; |
| 210 | 210 |
| 211 [self uploadPlane:frame.vPlane | 211 [self uploadPlane:frame.dataV |
| 212 sampler:_vSampler | 212 sampler:_vSampler |
| 213 offset:textureOffset + 2 | 213 offset:textureOffset + 2 |
| 214 width:frame.chromaWidth | 214 width:chromaWidth |
| 215 height:frame.chromaHeight | 215 height:chromaHeight |
| 216 stride:frame.vPitch]; | 216 stride:frame.strideV]; |
| 217 | 217 |
| 218 _currentTextureSet = (_currentTextureSet + 1) % kNumTextureSets; | 218 _currentTextureSet = (_currentTextureSet + 1) % kNumTextureSets; |
| 219 return YES; | 219 return YES; |
| 220 } | 220 } |
| 221 | 221 |
| 222 @end | 222 @end |
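
A note on the plane upload this diff touches: `_planeBuffer` exists because OpenGL ES 2 has no `GL_UNPACK_ROW_LENGTH`, so when a frame's stride is wider than its visible width, each plane has to be repacked into a tightly packed buffer before `glTexImage2D` (the ES 3 case, which `_hasUnpackRowLength` tracks, can instead set the unpack row length and upload the padded plane directly). Below is a minimal C++ sketch of that repacking; the function name and signature are illustrative and are not the helper used in the skipped `uploadPlane:` body.

    #include <cstdint>
    #include <cstring>
    #include <vector>

    // Copy the visible pixels of a padded 8-bit plane into a contiguous
    // buffer so it can be uploaded with glTexImage2D on OpenGL ES 2.
    // On ES 3 / desktop GL this copy can be skipped by calling
    // glPixelStorei(GL_UNPACK_ROW_LENGTH, stride) before the upload.
    // Illustrative sketch only; not the actual WebRTC helper.
    void RepackPlane(const uint8_t* src,          // padded source plane
                     int stride,                  // bytes per source row (>= width)
                     int width,                   // visible pixels per row
                     int height,                  // rows in the plane
                     std::vector<uint8_t>* dst) { // packed output, width * height bytes
      dst->resize(static_cast<size_t>(width) * height);
      uint8_t* out = dst->data();
      for (int row = 0; row < height; ++row) {
        // Keep only the visible pixels; the trailing stride padding is dropped.
        std::memcpy(out + static_cast<size_t>(row) * width,
                    src + static_cast<size_t>(row) * stride, width);
      }
    }

For the chroma planes the same sketch applies with the rounded-up half dimensions the new code computes, e.g. chromaWidth = (frame.width + 1) / 2.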