| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright 2017 The WebRTC project authors. All Rights Reserved. | 2 * Copyright 2017 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| 11 #import <Foundation/Foundation.h> | 11 #import <Foundation/Foundation.h> |
| 12 | 12 |
| 13 #import "WebRTC/RTCCameraVideoCapturer.h" | 13 #import "WebRTC/RTCCameraVideoCapturer.h" |
| 14 #import "WebRTC/RTCLogging.h" | 14 #import "WebRTC/RTCLogging.h" |
| 15 | 15 |
| 16 #if TARGET_OS_IPHONE | 16 #if TARGET_OS_IPHONE |
| 17 #import "WebRTC/UIDevice+RTCDevice.h" | 17 #import "WebRTC/UIDevice+RTCDevice.h" |
| 18 #endif | 18 #endif |
| 19 | 19 |
| 20 #import "RTCDispatcher+Private.h" | 20 #import "RTCDispatcher+Private.h" |
| 21 | 21 |
| 22 const int64_t kNanosecondsPerSecond = 1000000000; | 22 const int64_t kNanosecondsPerSecond = 1000000000; |
| 23 | 23 |
| 24 static inline BOOL IsMediaSubTypeSupported(FourCharCode mediaSubType) { | 24 static inline BOOL IsMediaSubTypeSupported(FourCharCode mediaSubType) { |
| 25 return (mediaSubType == kCVPixelFormatType_420YpCbCr8PlanarFullRange || | 25 return (mediaSubType == kCVPixelFormatType_420YpCbCr8PlanarFullRange || |
| 26 mediaSubType == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange); | 26 mediaSubType == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange); |
| 27 } | 27 } |
| 28 | 28 |
| 29 @interface RTCCameraVideoCapturer ()<AVCaptureVideoDataOutputSampleBufferDelegate> | 29 @interface RTCCameraVideoCapturer ()<AVCaptureVideoDataOutputSampleBufferDelegate> |
| 30 @property(nonatomic, readonly) dispatch_queue_t frameQueue; | 30 @property(nonatomic, strong) AVCaptureVideoDataOutput *videoDataOutput; |
| 31 @property(nonatomic, strong) AVCaptureDevice *currentDevice; |
| 32 @property(nonatomic, strong) dispatch_queue_t frameQueue; |
| 33 @property(nonatomic, assign) RTCVideoRotation rotation; |
| 34 // Will the session be running once all asynchronous operations have been completed? |
| 35 @property(nonatomic, assign) BOOL willBeRunning; |
| 36 |
| 31 @end | 37 @end |
| 32 | 38 |
| 33 @implementation RTCCameraVideoCapturer { | 39 @implementation RTCCameraVideoCapturer { |
| 34 AVCaptureVideoDataOutput *_videoDataOutput; | |
| 35 AVCaptureSession *_captureSession; | |
| 36 AVCaptureDevice *_currentDevice; | |
| 37 RTCVideoRotation _rotation; | |
| 38 BOOL _hasRetriedOnFatalError; | 40 BOOL _hasRetriedOnFatalError; |
| 39 BOOL _isRunning; | 41 BOOL _isRunning; |
| 40 // Will the session be running once all asynchronous operations have been completed? | |
| 41 BOOL _willBeRunning; | |
| 42 } | 42 } |
| 43 | 43 |
| 44 @synthesize captureSession = _captureSession; |
| 45 @synthesize currentDevice = _currentDevice; |
| 44 @synthesize frameQueue = _frameQueue; | 46 @synthesize frameQueue = _frameQueue; |
| 45 @synthesize captureSession = _captureSession; | 47 @synthesize rotation = _rotation; |
| 48 @synthesize videoDataOutput = _videoDataOutput; |
| 49 @synthesize willBeRunning = _willBeRunning; |
| 46 | 50 |
| 47 - (instancetype)initWithDelegate:(__weak id<RTCVideoCapturerDelegate>)delegate { | 51 - (instancetype)initWithDelegate:(__weak id<RTCVideoCapturerDelegate>)delegate { |
| 48 if (self = [super initWithDelegate:delegate]) { | 52 if (self = [super initWithDelegate:delegate]) { |
| 49 // Create the capture session and all relevant inputs and outputs. We need | 53 // Create the capture session and all relevant inputs and outputs. We need |
| 50 // to do this in init because the application may want the capture session | 54 // to do this in init because the application may want the capture session |
| 51 // before we start the capturer for e.g. AVCapturePreviewLayer. All objects | 55 // before we start the capturer for e.g. AVCapturePreviewLayer. All objects |
| 52 // created here are retained until dealloc and never recreated. | 56 // created here are retained until dealloc and never recreated. |
| 53 if (![self setupCaptureSession]) { | 57 if (![self setupCaptureSession]) { |
| 54 return nil; | 58 return nil; |
| 55 } | 59 } |
| (...skipping 58 matching lines...) | |
| 114 } | 118 } |
| 115 | 119 |
| 116 - (void)startCaptureWithDevice:(AVCaptureDevice *)device | 120 - (void)startCaptureWithDevice:(AVCaptureDevice *)device |
| 117 format:(AVCaptureDeviceFormat *)format | 121 format:(AVCaptureDeviceFormat *)format |
| 118 fps:(int)fps { | 122 fps:(int)fps { |
| 119 _willBeRunning = true; | 123 _willBeRunning = true; |
| 120 [RTCDispatcher | 124 [RTCDispatcher |
| 121 dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | 125 dispatchAsyncOnType:RTCDispatcherTypeCaptureSession |
| 122 block:^{ | 126 block:^{ |
| 123 RTCLogInfo("startCaptureWithDevice %@ @ %d fps", format, fps); | 127 RTCLogInfo("startCaptureWithDevice %@ @ %d fps", format, fps); |
| 124 | |
| 125 #if TARGET_OS_IPHONE | 128 #if TARGET_OS_IPHONE |
| 126 [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications]; | 129 [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications]; |
| 127 #endif | 130 #endif |
| 128 | 131 _isRunning = [self tryStartCaptureWithDevice:device format:format fps:fps]; |
| 129 _currentDevice = device; | |
| 130 | |
| 131 NSError *error = nil; | |
| 132 if ([_currentDevice lockForConfiguration:&error]) { | |
| 133 [self updateDeviceCaptureFormat:format fps:fps]; | |
| 134 } else { | |
| 135 RTCLogError(@"Failed to lock device %@. Error: %@", _cur
rentDevice, | |
| 136 error.userInfo); | |
| 137 return; | |
| 138 } | |
| 139 | |
| 140 [self reconfigureCaptureSessionInput]; | |
| 141 [self updateOrientation]; | |
| 142 [_captureSession startRunning]; | |
| 143 | |
| 144 [_currentDevice unlockForConfiguration]; | |
| 145 _isRunning = true; | |
| 146 }]; | 132 }]; |
| 147 } | 133 } |
| 148 | 134 |
| 149 - (void)stopCapture { | 135 - (void)stopCapture { |
| 150 _willBeRunning = false; | 136 _willBeRunning = false; |
| 151 [RTCDispatcher | 137 [RTCDispatcher |
| 152 dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | 138 dispatchAsyncOnType:RTCDispatcherTypeCaptureSession |
| 153 block:^{ | 139 block:^{ |
| 154 RTCLogInfo("Stop"); | 140 RTCLogInfo("Stop"); |
| 155 _currentDevice = nil; | 141 _currentDevice = nil; |
| (...skipping 197 matching lines...) | |
| 353 // TODO(denicija): Remove this color conversion and use the original capture format directly. | 339 // TODO(denicija): Remove this color conversion and use the original capture format directly. |
| 354 kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) | 340 kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) |
| 355 }; | 341 }; |
| 356 videoDataOutput.alwaysDiscardsLateVideoFrames = NO; | 342 videoDataOutput.alwaysDiscardsLateVideoFrames = NO; |
| 357 [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue]; | 343 [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue]; |
| 358 _videoDataOutput = videoDataOutput; | 344 _videoDataOutput = videoDataOutput; |
| 359 } | 345 } |
| 360 | 346 |
| 361 #pragma mark - Private, called inside capture queue | 347 #pragma mark - Private, called inside capture queue |
| 362 | 348 |
| 349 - (BOOL)tryStartCaptureWithDevice:(AVCaptureDevice *)device |
| 350 format:(AVCaptureDeviceFormat *)format |
| 351 fps:(int)fps { |
| 352 _currentDevice = device; |
| 353 |
| 354 NSError *error = nil; |
| 355 if ([_currentDevice lockForConfiguration:&error]) { |
| 356 [self updateDeviceCaptureFormat:format fps:fps]; |
| 357 } else { |
| 358 RTCLogError(@"Failed to lock device %@. Error: %@", _currentDevice, error.us
erInfo); |
| 359 return NO; |
| 360 } |
| 361 |
| 362 [self reconfigureCaptureSessionInput]; |
| 363 [self updateOrientation]; |
| 364 [_captureSession startRunning]; |
| 365 |
| 366 [_currentDevice unlockForConfiguration]; |
| 367 return YES; |
| 368 } |
| 369 |
| 363 - (void)updateDeviceCaptureFormat:(AVCaptureDeviceFormat *)format fps:(int)fps { | 370 - (void)updateDeviceCaptureFormat:(AVCaptureDeviceFormat *)format fps:(int)fps { |
| 364 @try { | 371 @try { |
| 365 _currentDevice.activeFormat = format; | 372 _currentDevice.activeFormat = format; |
| 366 _currentDevice.activeVideoMinFrameDuration = CMTimeMake(1, fps); | 373 _currentDevice.activeVideoMinFrameDuration = CMTimeMake(1, fps); |
| 367 } @catch (NSException *exception) { | 374 } @catch (NSException *exception) { |
| 368 RTCLogError(@"Failed to set active format!\n User info:%@", exception.userIn
fo); | 375 RTCLogError(@"Failed to set active format!\n User info:%@", exception.userIn
fo); |
| 369 return; | 376 return; |
| 370 } | 377 } |
| 371 } | 378 } |
| 372 | 379 |
| (...skipping 12 matching lines...) | |
| 385 if ([_captureSession canAddInput:input]) { | 392 if ([_captureSession canAddInput:input]) { |
| 386 [_captureSession addInput:input]; | 393 [_captureSession addInput:input]; |
| 387 } else { | 394 } else { |
| 388 RTCLogError(@"Cannot add camera as an input to the session."); | 395 RTCLogError(@"Cannot add camera as an input to the session."); |
| 389 } | 396 } |
| 390 [_captureSession commitConfiguration]; | 397 [_captureSession commitConfiguration]; |
| 391 } | 398 } |
| 392 | 399 |
| 393 - (void)updateOrientation { | 400 - (void)updateOrientation { |
| 394 #if TARGET_OS_IPHONE | 401 #if TARGET_OS_IPHONE |
| 402 [self updateOrientation:[UIDevice currentDevice].orientation]; |
| 403 #endif |
| 404 } |
| 405 |
| 406 #if TARGET_OS_IPHONE |
| 407 - (void)updateOrientation:(UIDeviceOrientation)orientation { |
| 395 BOOL usingFrontCamera = _currentDevice.position == AVCaptureDevicePositionFront; | 408 BOOL usingFrontCamera = _currentDevice.position == AVCaptureDevicePositionFront; |
| 396 switch ([UIDevice currentDevice].orientation) { | 409 switch (orientation) { |
| 397 case UIDeviceOrientationPortrait: | 410 case UIDeviceOrientationPortrait: |
| 398 _rotation = RTCVideoRotation_90; | 411 _rotation = RTCVideoRotation_90; |
| 399 break; | 412 break; |
| 400 case UIDeviceOrientationPortraitUpsideDown: | 413 case UIDeviceOrientationPortraitUpsideDown: |
| 401 _rotation = RTCVideoRotation_270; | 414 _rotation = RTCVideoRotation_270; |
| 402 break; | 415 break; |
| 403 case UIDeviceOrientationLandscapeLeft: | 416 case UIDeviceOrientationLandscapeLeft: |
| 404 _rotation = usingFrontCamera ? RTCVideoRotation_180 : RTCVideoRotation_0; | 417 _rotation = usingFrontCamera ? RTCVideoRotation_180 : RTCVideoRotation_0; |
| 405 break; | 418 break; |
| 406 case UIDeviceOrientationLandscapeRight: | 419 case UIDeviceOrientationLandscapeRight: |
| 407 _rotation = usingFrontCamera ? RTCVideoRotation_0 : RTCVideoRotation_180; | 420 _rotation = usingFrontCamera ? RTCVideoRotation_0 : RTCVideoRotation_180; |
| 408 break; | 421 break; |
| 409 case UIDeviceOrientationFaceUp: | 422 case UIDeviceOrientationFaceUp: |
| 410 case UIDeviceOrientationFaceDown: | 423 case UIDeviceOrientationFaceDown: |
| 411 case UIDeviceOrientationUnknown: | 424 case UIDeviceOrientationUnknown: |
| 412 // Ignore. | 425 // Ignore. |
| 413 break; | 426 break; |
| 414 } | 427 } |
| 428 } |
| 415 #endif | 429 #endif |
| 416 } | |
| 417 | 430 |
| 418 @end | 431 @end |
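
Usage note (reviewer sketch, not part of the change above): the snippet below shows how a client might drive RTCCameraVideoCapturer through the public API declared in WebRTC/RTCCameraVideoCapturer.h (+captureDevices, +supportedFormatsForDevice:, -startCaptureWithDevice:format:fps:), and why the capture session must already exist after -initWithDelegate: so a preview layer can be attached before capture starts, as the init comment explains. The device/format selection logic and the helper name StartFrontCameraCapture are assumptions for illustration only.

#import <AVFoundation/AVFoundation.h>
#import "WebRTC/RTCCameraVideoCapturer.h"

// Hypothetical helper: picks the front camera, takes the first supported
// format and starts capture at 30 fps.
static void StartFrontCameraCapture(RTCCameraVideoCapturer *capturer) {
  // Pick the front-facing camera from the devices the capturer exposes.
  AVCaptureDevice *frontCamera = nil;
  for (AVCaptureDevice *device in [RTCCameraVideoCapturer captureDevices]) {
    if (device.position == AVCaptureDevicePositionFront) {
      frontCamera = device;
      break;
    }
  }
  if (!frontCamera) {
    return;
  }

  // Take the first supported format; a real client would match resolution and fps.
  AVCaptureDeviceFormat *format =
      [RTCCameraVideoCapturer supportedFormatsForDevice:frontCamera].firstObject;
  if (!format) {
    return;
  }

  // The capture session is created in init, so a preview layer can be attached
  // before capture starts.
  AVCaptureVideoPreviewLayer *previewLayer =
      [AVCaptureVideoPreviewLayer layerWithSession:capturer.captureSession];
  (void)previewLayer;  // Would normally be added to a view's layer hierarchy.

  [capturer startCaptureWithDevice:frontCamera format:format fps:30];
}

Stopping mirrors this with -stopCapture, which the change above also dispatches onto the capture-session queue.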