Chromium Code Reviews

Side by Side Diff: webrtc/sdk/objc/Framework/Classes/Metal/RTCMTLRenderer.mm

Issue 2784243003: iOS/MacOS: Refactor metal rendering by extracting common implementation (Closed)
Patch Set: Created 3 years, 8 months ago
1 /* 1 /*
2 * Copyright 2017 The WebRTC project authors. All Rights Reserved. 2 * Copyright 2017 The WebRTC project authors. All Rights Reserved.
3 * 3 *
4 * Use of this source code is governed by a BSD-style license 4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source 5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found 6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may 7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree. 8 * be found in the AUTHORS file in the root of the source tree.
9 */ 9 */
10 10
11 #import "RTCMTLNV12Renderer.h" 11 #import "RTCMTLRenderer+Private.h"
12 12
13 #import <Metal/Metal.h> 13 #import <Metal/Metal.h>
14 #import <MetalKit/MetalKit.h> 14 #import <MetalKit/MetalKit.h>
15 15
16 #import "WebRTC/RTCLogging.h" 16 #import "WebRTC/RTCLogging.h"
17 #import "WebRTC/RTCVideoFrame.h" 17 #import "WebRTC/RTCVideoFrame.h"
18 18
19 #include "webrtc/api/video/video_rotation.h" 19 #include "webrtc/api/video/video_rotation.h"
20 20 #include "webrtc/base/checks.h"
21 #define MTL_STRINGIFY(s) @ #s
22 21
23 // As defined in shaderSource. 22 // As defined in shaderSource.
24 static NSString *const vertexFunctionName = @"vertexPassthrough"; 23 static NSString *const vertexFunctionName = @"vertexPassthrough";
25 static NSString *const fragmentFunctionName = @"fragmentColorConversion"; 24 static NSString *const fragmentFunctionName = @"fragmentColorConversion";
26 25
27 static NSString *const pipelineDescriptorLabel = @"RTCPipeline"; 26 static NSString *const pipelineDescriptorLabel = @"RTCPipeline";
28 static NSString *const commandBufferLabel = @"RTCCommandBuffer"; 27 static NSString *const commandBufferLabel = @"RTCCommandBuffer";
29 static NSString *const renderEncoderLabel = @"RTCEncoder"; 28 static NSString *const renderEncoderLabel = @"RTCEncoder";
30 static NSString *const renderEncoderDebugGroup = @"RTCDrawFrame"; 29 static NSString *const renderEncoderDebugGroup = @"RTCDrawFrame";
31 30
(...skipping 17 matching lines...)
49 case webrtc::kVideoRotation_90: 48 case webrtc::kVideoRotation_90:
50 return 16; 49 return 16;
51 case webrtc::kVideoRotation_180: 50 case webrtc::kVideoRotation_180:
52 return 32; 51 return 32;
53 case webrtc::kVideoRotation_270: 52 case webrtc::kVideoRotation_270:
54 return 48; 53 return 48;
55 } 54 }
56 return 0; 55 return 0;
57 } 56 }
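The 0/16/32/48 values returned above index into cubeVertexData (defined in the skipped lines): assuming each rotation variant occupies 16 floats, i.e. 4 vertices of packed {position.xy, texcoord.uv}, selecting a rotation is just a matter of binding the same vertex buffer at a different offset. A minimal sketch of that assumed layout, with placeholder coordinates only:

// Hypothetical illustration of the layout offsetForRotation() indexes into.
// The real cubeVertexData lives in the skipped lines above; the coordinates
// below are placeholders, not the patch's actual table.
static const float exampleVertexData[64] = {
    // kVideoRotation_0: 4 vertices of {position.x, position.y, u, v} = 16 floats.
    -1.0,  1.0, 0.0, 0.0,
     1.0,  1.0, 1.0, 0.0,
    -1.0, -1.0, 0.0, 1.0,
     1.0, -1.0, 1.0, 1.0,
    // kVideoRotation_90 would start at float index 16, _180 at 32, _270 at 48,
    // matching offsetForRotation(); the remaining entries are left
    // zero-initialized here for brevity.
};

Note that -render below binds the buffer with offset:_offset * sizeof(float), so the returned value is a float index, not a byte offset.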
58 57
59 static NSString *const shaderSource = MTL_STRINGIFY(
60 using namespace metal; typedef struct {
61 packed_float2 position;
62 packed_float2 texcoord;
63 } Vertex;
64
65 typedef struct {
66 float4 position[[position]];
67 float2 texcoord;
68 } Varyings;
69
70 vertex Varyings vertexPassthrough(device Vertex * verticies[[buffer(0)]],
71 unsigned int vid[[vertex_id]]) {
72 Varyings out;
73 device Vertex &v = verticies[vid];
74 out.position = float4(float2(v.position), 0.0, 1.0);
75 out.texcoord = v.texcoord;
76
77 return out;
78 }
79
80 // Receiving YCrCb textures.
81 fragment half4 fragmentColorConversion(
 82     Varyings in[[stage_in]], texture2d<float, access::sample> textureY[[texture(0)]],
83 texture2d<float, access::sample> textureCbCr[[texture(1)]]) {
84 constexpr sampler s(address::clamp_to_edge, filter::linear);
85 float y;
86 float2 uv;
87 y = textureY.sample(s, in.texcoord).r;
88 uv = textureCbCr.sample(s, in.texcoord).rg - float2(0.5, 0.5);
89
90 // Conversion for YUV to rgb from http://www.fourcc.org/fccyvrgb.php
91 float4 out = float4(y + 1.403 * uv.y, y - 0.344 * uv.x - 0.714 * uv.y, y + 1.770 * uv.x, 1.0);
92
93 return half4(out);
94 });
95
96 // The max number of command buffers in flight (submitted to GPU). 58 // The max number of command buffers in flight (submitted to GPU).
97 // For now setting it up to 1. 59 // For now setting it up to 1.
98 // In future we might use triple buffering method if it improves performance. 60 // In future we might use triple buffering method if it improves performance.
99 static const NSInteger kMaxInflightBuffers = 1; 61 static const NSInteger kMaxInflightBuffers = 1;
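The constant above, together with the semaphore created from it in -init and waited on in -render, is the only throttle on CPU/GPU overlap, so the triple-buffering idea the comment mentions would presumably amount to raising this count. A hedged sketch of that possible follow-up, not part of this patch:

// Hypothetical future tuning, not in this CL: allow up to three command
// buffers in flight so the CPU can encode upcoming frames while the GPU is
// still working on the current one. Only the semaphore's initial count needs
// to change; the wait/signal pairing in -render stays the same.
static const NSInteger kMaxInflightBuffers = 3;

Whether that actually improves performance is left open, which is why the patch keeps the value at 1.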
100 62
101 @implementation RTCMTLNV12Renderer { 63 @implementation RTCMTLRenderer {
102 __kindof MTKView *_view; 64 __kindof MTKView *_view;
103 65
104 // Controller. 66 // Controller.
105 dispatch_semaphore_t _inflight_semaphore; 67 dispatch_semaphore_t _inflight_semaphore;
106 68
107 // Renderer. 69 // Renderer.
108 id<MTLDevice> _device; 70 id<MTLDevice> _device;
109 id<MTLCommandQueue> _commandQueue; 71 id<MTLCommandQueue> _commandQueue;
110 id<MTLLibrary> _defaultLibrary; 72 id<MTLLibrary> _defaultLibrary;
111 id<MTLRenderPipelineState> _pipelineState; 73 id<MTLRenderPipelineState> _pipelineState;
112 74
113 // Textures.
114 CVMetalTextureCacheRef _textureCache;
115 id<MTLTexture> _yTexture;
116 id<MTLTexture> _CrCbTexture;
117
118 // Buffers. 75 // Buffers.
119 id<MTLBuffer> _vertexBuffer; 76 id<MTLBuffer> _vertexBuffer;
120 77
121 // RTC Frame parameters. 78 // RTC Frame parameters.
122 int _offset; 79 int _offset;
123 } 80 }
124 81
125 - (instancetype)init { 82 - (instancetype)init {
126 if (self = [super init]) { 83 if (self = [super init]) {
127 // _offset of 0 is equal to rotation of 0. 84 // _offset of 0 is equal to rotation of 0.
128 _offset = 0; 85 _offset = 0;
129 _inflight_semaphore = dispatch_semaphore_create(kMaxInflightBuffers); 86 _inflight_semaphore = dispatch_semaphore_create(kMaxInflightBuffers);
130 } 87 }
131 88
132 return self; 89 return self;
133 } 90 }
134 91
135 - (BOOL)addRenderingDestination:(__kindof MTKView *)view { 92 - (BOOL)addRenderingDestination:(__kindof MTKView *)view {
136 return [self setupWithView:view]; 93 return [self setupWithView:view];
137 } 94 }
138 95
139 #pragma mark - Private 96 #pragma mark - Private
140 97
141 - (BOOL)setupWithView:(__kindof MTKView *)view { 98 - (BOOL)setupWithView:(__kindof MTKView *)view {
142 BOOL success = NO; 99 BOOL success = NO;
143 if ([self setupMetal]) { 100 if ([self setupMetal]) {
144 [self setupView:view]; 101 [self setupView:view];
145 [self loadAssets]; 102 [self loadAssets];
146 [self setupBuffers]; 103 [self setupBuffers];
147 [self initializeTextureCache];
148 success = YES; 104 success = YES;
149 } 105 }
150 return success; 106 return success;
151 } 107 }
108 #pragma mark - Inheritance
109
110 - (id<MTLDevice>)currentMetalDevice {
111 return _device;
112 }
113
114 - (NSString *)shaderSource {
115 RTC_CHECK(0) << "Virtual method not implemented in subclass.";
magjed_webrtc 2017/04/04 08:07:36 nit: Use RTC_NOTREACHED instead.
daniela-webrtc 2017/04/05 11:49:31 Done.
116 return nil;
117 }
118
119 - (void)uploadTexturesToRenderEncoder:(id<MTLRenderCommandEncoder>)renderEncoder {
120 RTC_CHECK(0) << "Virtual method not implemented in subclass.";
121 }
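Following the review thread above, a minimal sketch of these base-class stubs with RTC_NOTREACHED (from webrtc/base/checks.h) in place of RTC_CHECK(0); the next patch set may differ:

// Sketch of the reviewer's suggestion: RTC_NOTREACHED() documents that these
// are "pure virtual" hooks subclasses must override, and fails a debug-mode
// check if they are ever reached.
- (NSString *)shaderSource {
  RTC_NOTREACHED() << "Virtual method not implemented in subclass.";
  return nil;
}

- (void)uploadTexturesToRenderEncoder:(id<MTLRenderCommandEncoder>)renderEncoder {
  RTC_NOTREACHED() << "Virtual method not implemented in subclass.";
}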
122
123 - (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame {
124 _offset = offsetForRotation((webrtc::VideoRotation)frame.rotation);
magjed_webrtc 2017/04/04 08:07:36 Use static_cast<webrtc::VideoRotation> instead. Or
daniela-webrtc 2017/04/05 11:49:31 Actually I'll use RTCVideoRotation directly instead
125 return YES;
126 }
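The second thread settles on taking RTCVideoRotation directly rather than casting to webrtc::VideoRotation. A hedged sketch of how offsetForRotation and this call site would then look, assuming the usual case names of the ObjC SDK enum; the landed revision may differ:

// Sketch only: switch on the SDK-level RTCVideoRotation rather than the C++
// webrtc::VideoRotation, which removes the cast at the call site.
static int offsetForRotation(RTCVideoRotation rotation) {
  switch (rotation) {
    case RTCVideoRotation_0:
      return 0;
    case RTCVideoRotation_90:
      return 16;
    case RTCVideoRotation_180:
      return 32;
    case RTCVideoRotation_270:
      return 48;
  }
  return 0;
}

// Call site then becomes: _offset = offsetForRotation(frame.rotation);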
152 127
153 #pragma mark - GPU methods 128 #pragma mark - GPU methods
154 129
155 - (BOOL)setupMetal { 130 - (BOOL)setupMetal {
156 // Set the view to use the default device. 131 // Set the view to use the default device.
157 _device = MTLCreateSystemDefaultDevice(); 132 _device = MTLCreateSystemDefaultDevice();
158 if (!_device) { 133 if (!_device) {
159 return NO; 134 return NO;
160 } 135 }
161 136
162 // Create a new command queue. 137 // Create a new command queue.
163 _commandQueue = [_device newCommandQueue]; 138 _commandQueue = [_device newCommandQueue];
164 139
165 // Load metal library from source. 140 // Load metal library from source.
166 NSError *libraryError = nil; 141 NSError *libraryError = nil;
167 142
168 id<MTLLibrary> sourceLibrary = 143 id<MTLLibrary> sourceLibrary =
 169       [_device newLibraryWithSource:shaderSource options:NULL error:&libraryError]; 144       [_device newLibraryWithSource:[self shaderSource] options:NULL error:&libraryError];
170 145
171 if (libraryError) { 146 if (libraryError) {
172 RTCLogError(@"Metal: Library with source failed\n%@", libraryError); 147 RTCLogError(@"Metal: Library with source failed\n%@", libraryError);
173 return NO; 148 return NO;
174 } 149 }
175 150
176 if (!sourceLibrary) { 151 if (!sourceLibrary) {
177 RTCLogError(@"Metal: Failed to load library. %@", libraryError); 152 RTCLogError(@"Metal: Failed to load library. %@", libraryError);
178 return NO; 153 return NO;
179 } 154 }
180 _defaultLibrary = sourceLibrary; 155 _defaultLibrary = sourceLibrary;
181 156
182 return YES; 157 return YES;
183 } 158 }
184 159
185 - (void)setupView:(__kindof MTKView *)view { 160 - (void)setupView:(__kindof MTKView *)view {
186 view.device = _device; 161 view.device = _device;
187 162
188 view.preferredFramesPerSecond = 30; 163 view.preferredFramesPerSecond = 30;
189 view.autoResizeDrawable = NO; 164 view.autoResizeDrawable = NO;
190 165
 191   // We need to keep reference to the view as it's needed down the rendering pipeline. 166   // We need to keep reference to the view as it's needed down the rendering pipeline.
192 _view = view; 167 _view = view;
193 } 168 }
194 169
195 - (void)loadAssets { 170 - (void)loadAssets {
 196   id<MTLFunction> vertexFunction = [_defaultLibrary newFunctionWithName:vertexFunctionName]; 171   id<MTLFunction> vertexFunction = [_defaultLibrary newFunctionWithName:vertexFunctionName];
 197   id<MTLFunction> fragmentFunction = 172   id<MTLFunction> fragmentFunction = [_defaultLibrary newFunctionWithName:fragmentFunctionName];
198 [_defaultLibrary newFunctionWithName:fragmentFunctionName];
199 173
 200   MTLRenderPipelineDescriptor *pipelineDescriptor = [[MTLRenderPipelineDescriptor alloc] init]; 174   MTLRenderPipelineDescriptor *pipelineDescriptor = [[MTLRenderPipelineDescriptor alloc] init];
201 pipelineDescriptor.label = pipelineDescriptorLabel; 175 pipelineDescriptor.label = pipelineDescriptorLabel;
202 pipelineDescriptor.vertexFunction = vertexFunction; 176 pipelineDescriptor.vertexFunction = vertexFunction;
203 pipelineDescriptor.fragmentFunction = fragmentFunction; 177 pipelineDescriptor.fragmentFunction = fragmentFunction;
204 pipelineDescriptor.colorAttachments[0].pixelFormat = _view.colorPixelFormat; 178 pipelineDescriptor.colorAttachments[0].pixelFormat = _view.colorPixelFormat;
205 pipelineDescriptor.depthAttachmentPixelFormat = MTLPixelFormatInvalid; 179 pipelineDescriptor.depthAttachmentPixelFormat = MTLPixelFormatInvalid;
206 NSError *error = nil; 180 NSError *error = nil;
 207   _pipelineState = [_device newRenderPipelineStateWithDescriptor:pipelineDescriptor error:&error]; 181   _pipelineState = [_device newRenderPipelineStateWithDescriptor:pipelineDescriptor error:&error];
208 182
209 if (!_pipelineState) { 183 if (!_pipelineState) {
210 RTCLogError(@"Metal: Failed to create pipeline state. %@", error); 184 RTCLogError(@"Metal: Failed to create pipeline state. %@", error);
211 } 185 }
212 } 186 }
213 187
214 - (void)setupBuffers { 188 - (void)setupBuffers {
215 _vertexBuffer = [_device newBufferWithBytes:cubeVertexData 189 _vertexBuffer = [_device newBufferWithBytes:cubeVertexData
216 length:sizeof(cubeVertexData) 190 length:sizeof(cubeVertexData)
 217                                      options:MTLResourceOptionCPUCacheModeDefault]; 191                                      options:MTLResourceOptionCPUCacheModeDefault];
218 } 192 }
219 193
220 - (void)initializeTextureCache {
221 CVReturn status =
 222       CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, _device, nil, &_textureCache);
223 if (status != kCVReturnSuccess) {
224 RTCLogError(@"Metal: Failed to initialize metal texture cache. Return status is %d", status);
225 }
226 }
227
228 - (void)render { 194 - (void)render {
 229   // Wait until the inflight (currently sent to GPU) command buffer 195   // Wait until the inflight (currently sent to GPU) command buffer
230 // has completed the GPU work. 196 // has completed the GPU work.
231 dispatch_semaphore_wait(_inflight_semaphore, DISPATCH_TIME_FOREVER); 197 dispatch_semaphore_wait(_inflight_semaphore, DISPATCH_TIME_FOREVER);
232 198
233 id<MTLCommandBuffer> commandBuffer = [_commandQueue commandBuffer]; 199 id<MTLCommandBuffer> commandBuffer = [_commandQueue commandBuffer];
234 commandBuffer.label = commandBufferLabel; 200 commandBuffer.label = commandBufferLabel;
235 201
236 __block dispatch_semaphore_t block_semaphore = _inflight_semaphore; 202 __block dispatch_semaphore_t block_semaphore = _inflight_semaphore;
237 [commandBuffer addCompletedHandler:^(id<MTLCommandBuffer> _Nonnull) { 203 [commandBuffer addCompletedHandler:^(id<MTLCommandBuffer> _Nonnull) {
238 // GPU work completed. 204 // GPU work completed.
239 dispatch_semaphore_signal(block_semaphore); 205 dispatch_semaphore_signal(block_semaphore);
240 }]; 206 }];
241 207
 242   MTLRenderPassDescriptor *renderPassDescriptor = _view.currentRenderPassDescriptor; 208   MTLRenderPassDescriptor *renderPassDescriptor = _view.currentRenderPassDescriptor;
243 if (renderPassDescriptor) { // Valid drawable. 209 if (renderPassDescriptor) { // Valid drawable.
244 id<MTLRenderCommandEncoder> renderEncoder = 210 id<MTLRenderCommandEncoder> renderEncoder =
245 [commandBuffer renderCommandEncoderWithDescriptor:renderPassDescriptor]; 211 [commandBuffer renderCommandEncoderWithDescriptor:renderPassDescriptor];
246 renderEncoder.label = renderEncoderLabel; 212 renderEncoder.label = renderEncoderLabel;
247 213
248 // Set context state. 214 // Set context state.
249 [renderEncoder pushDebugGroup:renderEncoderDebugGroup]; 215 [renderEncoder pushDebugGroup:renderEncoderDebugGroup];
250 [renderEncoder setRenderPipelineState:_pipelineState]; 216 [renderEncoder setRenderPipelineState:_pipelineState];
251 [renderEncoder setVertexBuffer:_vertexBuffer offset:_offset * sizeof(float) atIndex:0]; 217 [renderEncoder setVertexBuffer:_vertexBuffer offset:_offset * sizeof(float) atIndex:0];
252 [renderEncoder setFragmentTexture:_yTexture atIndex:0]; 218 [self uploadTexturesToRenderEncoder:renderEncoder];
253 [renderEncoder setFragmentTexture:_CrCbTexture atIndex:1];
254 219
255 [renderEncoder drawPrimitives:MTLPrimitiveTypeTriangleStrip 220 [renderEncoder drawPrimitives:MTLPrimitiveTypeTriangleStrip
256 vertexStart:0 221 vertexStart:0
257 vertexCount:4 222 vertexCount:4
258 instanceCount:1]; 223 instanceCount:1];
259 [renderEncoder popDebugGroup]; 224 [renderEncoder popDebugGroup];
260 [renderEncoder endEncoding]; 225 [renderEncoder endEncoding];
261 226
262 [commandBuffer presentDrawable:_view.currentDrawable]; 227 [commandBuffer presentDrawable:_view.currentDrawable];
263 } 228 }
264 229
265 // CPU work is completed, GPU work can be started. 230 // CPU work is completed, GPU work can be started.
266 [commandBuffer commit]; 231 [commandBuffer commit];
267 } 232 }
268 233
269 #pragma mark - RTCMTLRenderer 234 #pragma mark - RTCMTLRenderer
270 235
271 - (void)drawFrame:(RTCVideoFrame *)frame { 236 - (void)drawFrame:(RTCVideoFrame *)frame {
272 @autoreleasepool { 237 @autoreleasepool {
273 if ([self setupTexturesForFrame:frame]) 238 if ([self setupTexturesForFrame:frame]) {
274 [self render]; 239 [self render];
240 }
275 } 241 }
276 } 242 }
277 243
278 - (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame {
279 CVPixelBufferRef pixelBuffer = frame.nativeHandle;
280
281 id<MTLTexture> lumaTexture = nil;
282 id<MTLTexture> chromaTexture = nil;
283 CVMetalTextureRef outTexture = nullptr;
284
285 // Luma (y) texture.
286 int lumaWidth = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0);
287 int lumaHeight = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0);
288
289 int indexPlane = 0;
290 CVReturn result = CVMetalTextureCacheCreateTextureFromImage(
 291       kCFAllocatorDefault, _textureCache, pixelBuffer, nil, MTLPixelFormatR8Unorm, lumaWidth,
292 lumaHeight, indexPlane, &outTexture);
293
294 if (result == kCVReturnSuccess) {
295 lumaTexture = CVMetalTextureGetTexture(outTexture);
296 }
297
298 // Same as CFRelease except it can be passed NULL without crashing.
299 CVBufferRelease(outTexture);
300 outTexture = nullptr;
301
302 // Chroma (CrCb) texture.
303 indexPlane = 1;
304 result = CVMetalTextureCacheCreateTextureFromImage(
 305       kCFAllocatorDefault, _textureCache, pixelBuffer, nil, MTLPixelFormatRG8Unorm, lumaWidth / 2,
306 lumaHeight / 2, indexPlane, &outTexture);
307 if (result == kCVReturnSuccess) {
308 chromaTexture = CVMetalTextureGetTexture(outTexture);
309 }
310 CVBufferRelease(outTexture);
311
312 if (lumaTexture != nil && chromaTexture != nil) {
313 _yTexture = lumaTexture;
314 _CrCbTexture = chromaTexture;
315 _offset = offsetForRotation((webrtc::VideoRotation)frame.rotation);
316 return YES;
317 }
318 return NO;
319 }
320
321 @end 244 @end