Index: webrtc/sdk/objc/Framework/Classes/Metal/RTCMTLI420Renderer.mm
diff --git a/webrtc/sdk/objc/Framework/Classes/Metal/RTCMTLI420Renderer.mm b/webrtc/sdk/objc/Framework/Classes/Metal/RTCMTLI420Renderer.mm
index 57e2dd64df83c82692cbcdf1132502035e789fbe..7ca1d4e7669625cc9447a9c24dfb16531c6029a5 100644
--- a/webrtc/sdk/objc/Framework/Classes/Metal/RTCMTLI420Renderer.mm
+++ b/webrtc/sdk/objc/Framework/Classes/Metal/RTCMTLI420Renderer.mm
@@ -16,46 +16,10 @@
 #import "WebRTC/RTCLogging.h"
 #import "WebRTC/RTCVideoFrame.h"

-#include "webrtc/api/video/video_rotation.h"
+#import "RTCMTLRenderer+Private.h"

 #define MTL_STRINGIFY(s) @ #s

-// As defined in shaderSource.
-static NSString *const vertexFunctionName = @"vertexPassthrough";
-static NSString *const fragmentFunctionName = @"fragmentColorConversion";
-
-static NSString *const pipelineDescriptorLabel = @"RTCPipeline";
-static NSString *const commandBufferLabel = @"RTCCommandBuffer";
-static NSString *const renderEncoderLabel = @"RTCEncoder";
-static NSString *const renderEncoderDebugGroup = @"RTCDrawFrame";
-
-static const float cubeVertexData[64] = {
-    -1.0, -1.0, 0.0, 1.0, 1.0, -1.0, 1.0, 1.0, -1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0,
-
-    // rotation = 90, offset = 16.
-    -1.0, -1.0, 1.0, 1.0, 1.0, -1.0, 1.0, 0.0, -1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0,
-
-    // rotation = 180, offset = 32.
-    -1.0, -1.0, 1.0, 0.0, 1.0, -1.0, 0.0, 0.0, -1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0,
-
-    // rotation = 270, offset = 48.
-    -1.0, -1.0, 0.0, 0.0, 1.0, -1.0, 0.0, 1.0, -1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0,
-};
-
-static inline int offsetForRotation(webrtc::VideoRotation rotation) {
-  switch (rotation) {
-    case webrtc::kVideoRotation_0:
-      return 0;
-    case webrtc::kVideoRotation_90:
-      return 16;
-    case webrtc::kVideoRotation_180:
-      return 32;
-    case webrtc::kVideoRotation_270:
-      return 48;
-  }
-  return 0;
-}
-
 static NSString *const shaderSource = MTL_STRINGIFY(
     using namespace metal; typedef struct {
       packed_float2 position;
@@ -103,24 +67,7 @@ static NSString *const shaderSource = MTL_STRINGIFY(
       return half4(out);
     });

-// The max number of command buffers in flight.
-// For now setting it up to 1.
-// In future we might use triple buffering method if it improves performance.
-
-static const NSInteger kMaxInflightBuffers = 1;
-
 @implementation RTCMTLI420Renderer {
-  __kindof MTKView *_view;
-
-  // Controller.
-  dispatch_semaphore_t _inflight_semaphore;
-
-  // Renderer.
-  id<MTLDevice> _device;
-  id<MTLCommandQueue> _commandQueue;
-  id<MTLLibrary> _defaultLibrary;
-  id<MTLRenderPipelineState> _pipelineState;
-
   // Textures.
   id<MTLTexture> _yTexture;
   id<MTLTexture> _uTexture;
@@ -133,161 +80,23 @@ static const NSInteger kMaxInflightBuffers = 1;
   int _height;
   int _chromaWidth;
   int _chromaHeight;
-
-  // Buffers.
-  id<MTLBuffer> _vertexBuffer;
-
-  // RTC Frame parameters.
-  int _offset;
-}
-
-- (instancetype)init {
-  if (self = [super init]) {
-    // Offset of 0 is equal to rotation of 0.
-    _offset = 0;
-    _inflight_semaphore = dispatch_semaphore_create(kMaxInflightBuffers);
-  }
-
-  return self;
-}
-
-- (BOOL)addRenderingDestination:(__kindof MTKView *)view {
-  return [self setupWithView:view];
 }

-#pragma mark - Private
+#pragma mark - Virtual

-- (BOOL)setupWithView:(__kindof MTKView *)view {
-  BOOL success = NO;
-  if ([self setupMetal]) {
-    [self setupView:view];
-    [self loadAssets];
-    [self setupBuffers];
-    success = YES;
-  }
-  return success;
+- (NSString *)shaderSource {
+  return shaderSource;
 }

-#pragma mark - GPU methods
-
-- (BOOL)setupMetal {
-  // Set the view to use the default device.
-  _device = MTLCreateSystemDefaultDevice();
-  if (!_device) {
-    return NO;
-  }
-
-  // Create a new command queue.
-  _commandQueue = [_device newCommandQueue];
-
-  // Load metal library from source.
-  NSError *libraryError = nil;
-
-  id<MTLLibrary> sourceLibrary =
-      [_device newLibraryWithSource:shaderSource options:NULL error:&libraryError];
-
-  if (libraryError) {
-    RTCLogError(@"Metal: Library with source failed\n%@", libraryError);
-    return NO;
-  }
+- (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame {
+  [super setupTexturesForFrame:frame];

-  if (!sourceLibrary) {
-    RTCLogError(@"Metal: Failed to load library. %@", libraryError);
+  id<MTLDevice> device = [self currentMetalDevice];
+  if (!device) {
     return NO;
   }
-  _defaultLibrary = sourceLibrary;

-  return YES;
-}
-
-- (void)setupView:(__kindof MTKView *)view {
-  view.device = _device;
-
-  view.preferredFramesPerSecond = 30;
-  view.autoResizeDrawable = NO;
-
-  // We need to keep reference to the view as it's needed down the rendering pipeline.
-  _view = view;
-}
-
-- (void)loadAssets {
-  id<MTLFunction> vertexFunction = [_defaultLibrary newFunctionWithName:vertexFunctionName];
-  id<MTLFunction> fragmentFunction = [_defaultLibrary newFunctionWithName:fragmentFunctionName];
-
-  MTLRenderPipelineDescriptor *pipelineDescriptor = [[MTLRenderPipelineDescriptor alloc] init];
-  pipelineDescriptor.label = pipelineDescriptorLabel;
-  pipelineDescriptor.vertexFunction = vertexFunction;
-  pipelineDescriptor.fragmentFunction = fragmentFunction;
-  pipelineDescriptor.colorAttachments[0].pixelFormat = _view.colorPixelFormat;
-  pipelineDescriptor.depthAttachmentPixelFormat = MTLPixelFormatInvalid;
-  NSError *error = nil;
-  _pipelineState = [_device newRenderPipelineStateWithDescriptor:pipelineDescriptor error:&error];
-
-  if (!_pipelineState) {
-    RTCLogError(@"Metal: Failed to create pipeline state. %@", error);
-  }
-}
-
-- (void)setupBuffers {
-  _vertexBuffer = [_device newBufferWithBytes:cubeVertexData
-                                       length:sizeof(cubeVertexData)
-                                      options:MTLStorageModeShared];
-}
-
-- (void)render {
-  dispatch_semaphore_wait(_inflight_semaphore, DISPATCH_TIME_FOREVER);
-
-  id<MTLCommandBuffer> commandBuffer = [_commandQueue commandBuffer];
-  commandBuffer.label = commandBufferLabel;
-
-  __block dispatch_semaphore_t block_semaphore = _inflight_semaphore;
-  [commandBuffer addCompletedHandler:^(id<MTLCommandBuffer> _Nonnull) {
-    dispatch_semaphore_signal(block_semaphore);
-  }];
-
-  MTLRenderPassDescriptor *_renderPassDescriptor = _view.currentRenderPassDescriptor;
-  if (_renderPassDescriptor) {  // Valid drawable.
-    id<MTLRenderCommandEncoder> renderEncoder =
-        [commandBuffer renderCommandEncoderWithDescriptor:_renderPassDescriptor];
-    renderEncoder.label = renderEncoderLabel;
-
-    // Set context state.
-    [renderEncoder pushDebugGroup:renderEncoderDebugGroup];
-    [renderEncoder setRenderPipelineState:_pipelineState];
-    [renderEncoder setVertexBuffer:_vertexBuffer offset:_offset * sizeof(float) atIndex:0];
-    [renderEncoder setFragmentTexture:_yTexture atIndex:0];
-    [renderEncoder setFragmentTexture:_uTexture atIndex:1];
-    [renderEncoder setFragmentTexture:_vTexture atIndex:2];
-
-    [renderEncoder drawPrimitives:MTLPrimitiveTypeTriangleStrip
-                      vertexStart:0
-                      vertexCount:4
-                    instanceCount:1];
-    [renderEncoder popDebugGroup];
-    [renderEncoder endEncoding];
-
-    [commandBuffer presentDrawable:_view.currentDrawable];
-  }
-
-  [commandBuffer commit];
-}
-
-#pragma mark - RTCMTLRenderer
-
-- (void)drawFrame:(RTCVideoFrame *)frame {
-  if (!frame) {
-    return;
-  }
-  if ([self setupTexturesForFrame:frame]) {
-    @autoreleasepool {
-      [self render];
-    }
-  }
-}
-
-- (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame {
   // Luma (y) texture.
-
   if (!_descriptor || (_width != frame.width && _height != frame.height)) {
     _width = frame.width;
     _height = frame.height;
@@ -296,7 +105,7 @@ static const NSInteger kMaxInflightBuffers = 1;
                                                         height:_height
                                                      mipmapped:NO];
     _descriptor.usage = MTLTextureUsageShaderRead;
-    _yTexture = [_device newTextureWithDescriptor:_descriptor];
+    _yTexture = [device newTextureWithDescriptor:_descriptor];
   }

   // Chroma (u,v) textures
@@ -315,8 +124,8 @@ static const NSInteger kMaxInflightBuffers = 1;
                                                         height:_chromaHeight
                                                      mipmapped:NO];
     _chromaDescriptor.usage = MTLTextureUsageShaderRead;
-    _uTexture = [_device newTextureWithDescriptor:_chromaDescriptor];
-    _vTexture = [_device newTextureWithDescriptor:_chromaDescriptor];
+    _uTexture = [device newTextureWithDescriptor:_chromaDescriptor];
+    _vTexture = [device newTextureWithDescriptor:_chromaDescriptor];
   }

   [_uTexture replaceRegion:MTLRegionMake2D(0, 0, _chromaWidth, _chromaHeight)
@@ -328,9 +137,13 @@ static const NSInteger kMaxInflightBuffers = 1;
                  withBytes:frame.dataV
               bytesPerRow:frame.strideV];

-  _offset = offsetForRotation((webrtc::VideoRotation)frame.rotation);
-
   return (_uTexture != nil) && (_yTexture != nil) && (_vTexture != nil);
 }

+- (void)uploadTexturesToRenderEncoder:(id<MTLRenderCommandEncoder>)renderEncoder {
+  [renderEncoder setFragmentTexture:_yTexture atIndex:0];
+  [renderEncoder setFragmentTexture:_uTexture atIndex:1];
+  [renderEncoder setFragmentTexture:_vTexture atIndex:2];
+}
+
 @end
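
For context: this change moves the shared Metal machinery (device and command-queue creation, shader compilation, pipeline state, the rotation-aware vertex buffer, and the per-frame render pass) out of RTCMTLI420Renderer and into the RTCMTLRenderer base class imported via RTCMTLRenderer+Private.h. The I420 renderer now only supplies its shader source, creates and fills the Y/U/V textures, and binds them on the render encoder. Below is a minimal, hypothetical sketch of the overridable surface this file appears to rely on, inferred purely from the calls above (currentMetalDevice, shaderSource, setupTexturesForFrame:, uploadTexturesToRenderEncoder:); the actual RTCMTLRenderer+Private.h in this change may declare more or differ in detail.

// Hypothetical sketch of RTCMTLRenderer+Private.h -- not part of this diff.
// Only the members exercised by RTCMTLI420Renderer.mm above are listed.
#import <Metal/Metal.h>

#import "RTCMTLRenderer.h"
#import "WebRTC/RTCVideoFrame.h"

NS_ASSUME_NONNULL_BEGIN

@interface RTCMTLRenderer (Private)

// Device created by the base class when a rendering destination is added;
// subclasses use it to allocate their textures.
- (nullable id<MTLDevice>)currentMetalDevice;

// Metal shading-language source the base class compiles into its render
// pipeline (vertexPassthrough / fragmentColorConversion).
- (NSString *)shaderSource;

// Per-frame hooks: prepare GPU textures for the incoming frame, then bind
// them on the encoder before the base class issues the draw call.
- (BOOL)setupTexturesForFrame:(RTCVideoFrame *)frame;
- (void)uploadTexturesToRenderEncoder:(id<MTLRenderCommandEncoder>)renderEncoder;

@end

NS_ASSUME_NONNULL_END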