1 /* | |
2 * Copyright 2017 The WebRTC project authors. All Rights Reserved. | |
3 * | |
4 * Use of this source code is governed by a BSD-style license | |
5 * that can be found in the LICENSE file in the root of the source | |
6 * tree. An additional intellectual property rights grant can be found | |
7 * in the file PATENTS. All contributing project authors may | |
8 * be found in the AUTHORS file in the root of the source tree. | |
9 */ | |
10 | |
11 #import "RTCMTLNV12Renderer.h" | |
12 | |
13 #import <Metal/Metal.h> | |
14 #import <MetalKit/MetalKit.h> | |
15 | |
16 #import <WebRTC/RTCMTLVideoView.h> | |
17 #import "WebRTC/RTCLogging.h" | |
18 #import "WebRTC/RTCVideoFrame.h" | |
19 | |
20 #include "webrtc/api/video/video_rotation.h" | |
21 | |
22 #define MTL_STRINGIFY(s) @ #s | |
23 | |
24 // As defined in shaderSource. | |
25 static NSString *const vertexFunctionName = @"vertexPassthrough"; | |
26 static NSString *const fragmentFunctionName = @"fragmentColorConversion"; | |
27 | |
28 static NSString *const pipelineDescriptorLabel = @"RTCPipeline"; | |
29 static NSString *const commandBufferLabel = @"RTCCommandBuffer"; | |
30 static NSString *const renderEncoderLabel = @"RTCEncoder"; | |
31 static NSString *const renderEncoderDebugGroup = @"RTCDrawFrame"; | |
32 | |
33 static const float cubeVertexData[64] = { | |
34 -1.0, -1.0, 0.0, 1.0, 1.0, -1.0, 1.0, 1.0, -1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, | |
35 | |
36 // rotation = 90, offset = 16. | |
37 -1.0, -1.0, 1.0, 1.0, 1.0, -1.0, 1.0, 0.0, -1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, | |
38 | |
39 // rotation = 180, offset = 32. | |
40 -1.0, -1.0, 1.0, 0.0, 1.0, -1.0, 0.0, 0.0, -1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, | |
41 | |
42 // rotation = 270, offset = 48. | |
43 -1.0, -1.0, 0.0, 0.0, 1.0, -1.0, 0.0, 1.0, -1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, | |
44 }; | |
45 | |
46 static inline int offsetForRotation(webrtc::VideoRotation rotation) { | |
47 switch (rotation) { | |
48 case webrtc::kVideoRotation_0: | |
49 return 0; | |
50 case webrtc::kVideoRotation_90: | |
51 return 16; | |
52 case webrtc::kVideoRotation_180: | |
53 return 32; | |
54 case webrtc::kVideoRotation_270: | |
55 return 48; | |
56 } | |
57 return 0; | |
58 } | |
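A note on the layout that offsetForRotation() indexes into (an illustrative sketch, not part of this CL): every vertex in cubeVertexData is four floats, an {x, y} clip-space position followed by a {u, v} texture coordinate, and each rotation variant is a four-vertex triangle strip, so consecutive rotation blocks start 16 floats (64 bytes) apart. A host-side struct view of the same data could look like the following; the RTCQuadVertex and quadForRotation names are hypothetical.

// Illustrative only -- mirrors the Metal-side `Vertex` struct so the float
// table above can be read as whole vertices. Not part of this CL.
typedef struct {
  float position[2];  // Clip-space x, y.
  float texcoord[2];  // Texture u, v.
} RTCQuadVertex;

static inline const RTCQuadVertex *quadForRotation(webrtc::VideoRotation rotation) {
  // Each rotation block is 4 vertices * 4 floats = 16 floats (64 bytes).
  return (const RTCQuadVertex *)(cubeVertexData + offsetForRotation(rotation));
}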
59 | |
60 static NSString *const shaderSource = MTL_STRINGIFY( | |
61 using namespace metal; typedef struct { | |
62 packed_float2 position; | |
63 packed_float2 texcoord; | |
64 } Vertex; | |
65 | |
66 typedef struct { | |
67 float4 position[[position]]; | |
68 float2 texcoord; | |
69 } Varyings; | |
70 | |
71 vertex Varyings vertexPassthrough(device Vertex * verticies[[buffer(0)]], | |
72 unsigned int vid[[vertex_id]]) { | |
73 Varyings out; | |
74 device Vertex &v = verticies[vid]; | |
75 out.position = float4(float2(v.position), 0.0, 1.0); | |
76 out.texcoord = v.texcoord; | |
77 | |
78 return out; | |
79 } | |
80 | |
81 fragment half4 fragmentColorConversion( | |
82 Varyings in[[stage_in]], texture2d<float, access::sample> textureY[[texture(0)]], | |
83 texture2d<float, access::sample> textureCbCr[[texture(1)]]) { | |
84 constexpr sampler s(address::clamp_to_edge, filter::linear); | |
85 float y; | |
86 float2 uv; | |
87 y = textureY.sample(s, in.texcoord).r; | |
88 uv = textureCbCr.sample(s, in.texcoord).rg - float2(0.5, 0.5); | |
89 | |
90 float4 out = float4(y + 1.403 * uv.y, y - 0.344 * uv.x - 0.714 * uv.y, y + 1.770 * uv.x, 1.0); | |
tkchin_webrtc 2017/02/22 00:22:27: nit: I'd doco source of these constants and which
daniela-webrtc 2017/02/22 14:50:22: Done.
| |
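For reference, and hedged as an observation rather than something stated in this CL: the constants in fragmentColorConversion appear to be the usual (rounded) ITU-R BT.601 full-range YCbCr-to-RGB coefficients. Written out as shader comments they would read:

// Assuming full-range BT.601 NV12 input, with uv = (Cb, Cr) - 0.5:
//   R = Y + 1.403 * (Cr - 0.5)
//   G = Y - 0.344 * (Cb - 0.5) - 0.714 * (Cr - 0.5)
//   B = Y + 1.770 * (Cb - 0.5)
// Textbook BT.601 values are 1.402 / 0.344 / 0.714 / 1.772, so these look
// like rounded variants of that matrix.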
91 | |
92 return half4(out); | |
93 }); | |
94 | |
95 // The max number of command buffers in flight. | |
96 // For now we set it to 1. | |
97 // In the future we might use a triple-buffering method if it improves performance. | |
98 | |
99 static const NSInteger kMaxInflightBuffers = 1; | |
100 | |
101 @interface RTCNV12Renderer () | |
tkchin_webrtc 2017/02/22 00:22:27: doesn't seem to be a need for this decl
daniela-webrtc 2017/02/22 14:50:22: Done.
| |
102 @end | |
103 | |
104 @implementation RTCNV12Renderer { | |
105 __kindof MTKView *_view; | |
106 | |
107 // Controller. | |
108 dispatch_semaphore_t _inflight_semaphore; | |
109 | |
110 // Renderer. | |
111 id<MTLDevice> _device; | |
112 id<MTLCommandQueue> _commandQueue; | |
113 id<MTLLibrary> _defaultLibrary; | |
114 id<MTLRenderPipelineState> _pipelineState; | |
115 | |
116 // Textures. | |
117 CVMetalTextureCacheRef _textureCache; | |
118 id<MTLTexture> _yTexture; | |
119 id<MTLTexture> _CrCbTexture; | |
120 | |
121 // Buffers. | |
122 id<MTLBuffer> _vertexBuffer; | |
123 | |
124 // RTC Frame parameters. | |
125 int offset; | |
tkchin_webrtc 2017/02/22 00:22:27: _offset
| |
126 } | |
127 | |
128 - (instancetype)init { | |
129 if (self = [super init]) { | |
130 // Offset of 0 is equal to rotation of 0. | |
131 offset = 0; | |
132 _inflight_semaphore = dispatch_semaphore_create(kMaxInflightBuffers); | |
133 } | |
134 | |
135 return self; | |
136 } | |
137 | |
138 - (BOOL)addRenderingDestination:(__kindof MTKView *)view { | |
139 return [self setupWithView:view]; | |
140 } | |
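A hedged call-site sketch of how the NO return path is meant to be used (the mtkView variable and the fallback comment are illustrative assumptions, not part of this CL): setupMetal below returns NO when MTLCreateSystemDefaultDevice() returns nil, i.e. where Metal is unavailable, so addRenderingDestination: can fail and the caller is expected to fall back to a non-Metal renderer.

// Illustrative usage, not part of this CL.
RTCNV12Renderer *renderer = [[RTCNV12Renderer alloc] init];
if (![renderer addRenderingDestination:mtkView]) {
  // Metal device or pipeline setup failed; fall back to a GL-based view here.
  RTCLogError(@"Failed to set up Metal renderer.");
}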
141 | |
142 #pragma mark - Private | |
143 | |
144 - (BOOL)setupWithView:(__kindof MTKView *)view { | |
145 BOOL success = NO; | |
146 if ([self setupMetal]) { | |
147 [self setupView:view]; | |
148 [self loadAssets]; | |
149 [self setupBuffers]; | |
150 [self initializeTextureCache]; | |
151 success = YES; | |
152 } | |
153 return success; | |
154 } | |
155 | |
156 #pragma mark - GPU methods | |
157 | |
158 - (BOOL)setupMetal { | |
159 // Set the view to use the default device. | |
160 _device = MTLCreateSystemDefaultDevice(); | |
161 if (!_device) { | |
162 return NO; | |
163 } | |
164 | |
165 // Create a new command queue. | |
166 _commandQueue = [_device newCommandQueue]; | |
167 | |
168 // Load metal library from source. | |
169 NSError *libraryError = nil; | |
170 | |
171 id<MTLLibrary> sourceLibrary = | |
172 [_device newLibraryWithSource:shaderSource options:NULL error:&libraryError]; | |
173 | |
174 if (libraryError) { | |
175 RTCLogError(@"Metal: Library with source failed\n%@", libraryError); | |
176 return NO; | |
177 } | |
178 | |
179 if (!sourceLibrary) { | |
180 RTCLogError(@"Metal: Failed to load library. %@", libraryError); | |
181 return NO; | |
182 } | |
183 _defaultLibrary = sourceLibrary; | |
184 | |
185 return YES; | |
186 } | |
187 | |
188 - (void)setupView:(__kindof MTKView *)view { | |
189 view.device = _device; | |
190 | |
191 view.preferredFramesPerSecond = 30; | |
192 view.autoResizeDrawable = NO; | |
193 | |
194 // We need to keep a reference to the view as it's needed down the rendering pipeline. | |
195 _view = view; | |
196 } | |
197 | |
198 - (void)loadAssets { | |
199 id<MTLFunction> vertexFunction = [_defaultLibrary newFunctionWithName:vertexFunctionName]; | |
200 id<MTLFunction> fragmentFunction = | |
201 [_defaultLibrary newFunctionWithName:fragmentFunctionName]; | |
202 | |
203 MTLRenderPipelineDescriptor *pipelineDescriptor = [[MTLRenderPipelineDescriptor alloc] init]; | |
204 pipelineDescriptor.label = pipelineDescriptorLabel; | |
205 pipelineDescriptor.vertexFunction = vertexFunction; | |
206 pipelineDescriptor.fragmentFunction = fragmentFunction; | |
207 pipelineDescriptor.colorAttachments[0].pixelFormat = _view.colorPixelFormat; | |
208 pipelineDescriptor.depthAttachmentPixelFormat = MTLPixelFormatInvalid; | |
209 NSError *error = nil; | |
210 _pipelineState = [_device newRenderPipelineStateWithDescriptor:pipelineDescriptor error:&error]; | |
211 | |
212 if (!_pipelineState) { | |
213 RTCLogError(@"Metal: Failed to create pipeline state. %@", error); | |
214 } | |
215 } | |
216 | |
217 - (void)setupBuffers { | |
218 _vertexBuffer = [_device newBufferWithBytes:cubeVertexData | |
219 length:sizeof(cubeVertexData) | |
220 options:MTLResourceOptionCPUCacheModeDefault]; | |
221 } | |
222 | |
223 - (void)initializeTextureCache { | |
224 CVReturn status = | |
225 CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, _device, nil, &_textureCache); | |
226 if (status != kCVReturnSuccess) { | |
227 RTCLogError(@"Metal: Failed to initialize metal texture cache. Return status is %d", status); | |
228 } | |
229 } | |
230 | |
231 - (void)render { | |
232 dispatch_semaphore_wait(_inflight_semaphore, DISPATCH_TIME_FOREVER); | |
233 | |
234 id<MTLCommandBuffer> commandBuffer = [_commandQueue commandBuffer]; | |
235 commandBuffer.label = commandBufferLabel; | |
236 | |
237 __block dispatch_semaphore_t block_semaphore = _inflight_semaphore; | |
238 [commandBuffer addCompletedHandler:^(id<MTLCommandBuffer> _Nonnull) { | |
239 dispatch_semaphore_signal(block_semaphore); | |
tkchin_webrtc 2017/02/22 00:22:27: can you doco what the semaphores accomplish? from
daniela-webrtc 2017/02/22 14:50:22: Indeed. We need to wait for the inflight (submitte
| |
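A sketch of what the semaphore accomplishes here, expanding on the (truncated) reply above and hedged as a description of this code's pattern only:

// The semaphore caps the number of command buffers in flight at
// kMaxInflightBuffers: render blocks in dispatch_semaphore_wait until the GPU
// has finished one of the previously committed buffers, and this completed
// handler signals the semaphore to release that slot. With a value of 1 the
// CPU encodes at most one frame ahead of the GPU; raising it (e.g. to 3 for
// triple buffering, as the comment on kMaxInflightBuffers suggests) would let
// encoding and execution overlap at the cost of extra buffers and latency.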
240 }]; | |
241 | |
242 MTLRenderPassDescriptor *_renderPassDescriptor = _view.currentRenderPassDescriptor; | |
243 if (_renderPassDescriptor) { // Valid drawable. | |
244 id<MTLRenderCommandEncoder> renderEncoder = | |
245 [commandBuffer renderCommandEncoderWithDescriptor:_renderPassDescriptor]; | |
246 renderEncoder.label = renderEncoderLabel; | |
247 | |
248 // Set context state. | |
249 [renderEncoder pushDebugGroup:renderEncoderDebugGroup]; | |
250 [renderEncoder setRenderPipelineState:_pipelineState]; | |
251 [renderEncoder setVertexBuffer:_vertexBuffer offset:offset * sizeof(float) atIndex:0]; | |
252 [renderEncoder setFragmentTexture:_yTexture atIndex:0]; | |
253 [renderEncoder setFragmentTexture:_CrCbTexture atIndex:1]; | |
254 | |
255 [renderEncoder drawPrimitives:MTLPrimitiveTypeTriangleStrip | |
256 vertexStart:0 | |
257 vertexCount:4 | |
258 instanceCount:1]; | |
259 [renderEncoder popDebugGroup]; | |
260 [renderEncoder endEncoding]; | |
261 | |
262 [commandBuffer presentDrawable:_view.currentDrawable]; | |
263 } | |
264 | |
265 [commandBuffer commit]; | |
266 } | |
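One subtle point about the control flow above, hedged as an observation rather than something stated in the CL: the commit happens outside the drawable check, which keeps the semaphore accounting balanced.

// Note: even when currentRenderPassDescriptor is nil (no drawable available)
// and nothing is encoded, the empty command buffer is still committed, so its
// completed handler still runs and re-signals _inflight_semaphore. Skipping
// the commit instead would leak one of the kMaxInflightBuffers slots and,
// with a value of 1, deadlock the next call to render.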
267 | |
268 #pragma mark - RTCMTLRenderer | |
269 | |
270 - (void)drawFrame:(RTCVideoFrame *)frame { | |
271 [self setupTexturesForFrame:frame]; | |
272 @autoreleasepool { | |
tkchin_webrtc 2017/02/22 00:22:27: why is autoreleasepool needed here? to drain the c
daniela-webrtc 2017/02/22 14:50:22: Command buffers and most importantly drawables. "I
| |
273 [self render]; | |
274 } | |
275 } | |
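On the @autoreleasepool question above (the recorded reply is truncated): the following is a hedged summary of the usual Metal drawable pattern this matches, not a claim about anything beyond this code.

// The per-frame objects used in -render -- the command buffer and, most
// importantly, _view.currentDrawable -- are autoreleased. Draining a pool on
// every frame hands drawables back to the layer's small reuse pool promptly,
// instead of letting them accumulate until an outer pool drains, which could
// otherwise stall rendering once no free drawable is available.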
276 | |
277 - (void)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame { | |
278 CVPixelBufferRef pixelBuffer = frame.nativeHandle; | |
279 | |
280 id<MTLTexture> lumaTexture = nil; | |
281 id<MTLTexture> chromaTexture = nil; | |
282 CVMetalTextureRef outTexture = nullptr; | |
283 | |
284 // Luma (y) texture. | |
285 int lumaWidth = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0); | |
286 int lumaHeight = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0); | |
287 | |
288 int indexPlane = 0; | |
289 CVReturn result = CVMetalTextureCacheCreateTextureFromImage( | |
290 kCFAllocatorDefault, _textureCache, pixelBuffer, nil, MTLPixelFormatR8Unorm, lumaWidth, | |
291 lumaHeight, indexPlane, &outTexture); | |
292 | |
293 if (result == kCVReturnSuccess) { | |
294 lumaTexture = CVMetalTextureGetTexture(outTexture); | |
295 } | |
296 | |
297 // Same as CFRelease except it can be passed NULL without crashing. | |
298 CVBufferRelease(outTexture); | |
299 outTexture = nullptr; | |
300 | |
301 // Chroma (CrCb) texture. | |
302 indexPlane = 1; | |
303 result = CVMetalTextureCacheCreateTextureFromImage( | |
304 kCFAllocatorDefault, _textureCache, pixelBuffer, nil, MTLPixelFormatRG8Unorm, lumaWidth / 2, | |
305 lumaHeight / 2, indexPlane, &outTexture); | |
306 if (result == kCVReturnSuccess) { | |
307 chromaTexture = CVMetalTextureGetTexture(outTexture); | |
308 } | |
309 CVBufferRelease(outTexture); | |
310 | |
311 if (lumaTexture != nil && chromaTexture != nil) { | |
312 _yTexture = lumaTexture; | |
313 _CrCbTexture = chromaTexture; | |
314 offset = offsetForRotation((webrtc::VideoRotation)frame.rotation); | |
315 } | |
316 } | |
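A hedged sketch of the assumption this method encodes (the explicit pixel-format check below is illustrative and not part of this CL): the frame's native buffer is expected to be bi-planar NV12, i.e. plane 0 is full-resolution 8-bit luma (mapped to MTLPixelFormatR8Unorm) and plane 1 is interleaved half-resolution CbCr (mapped to MTLPixelFormatRG8Unorm at half the luma dimensions).

// Illustrative guard, not part of this CL: a check like this at the top of
// setupTexturesForFrame: would verify the buffer really is NV12 before
// mapping its planes to R8/RG8 textures.
OSType pixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer);
if (pixelFormat != kCVPixelFormatType_420YpCbCr8BiPlanarFullRange &&
    pixelFormat != kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange) {
  RTCLogError(@"Metal: Unexpected pixel format %u", (unsigned)pixelFormat);
  return;
}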
317 | |
318 @end | |