Chromium Code Reviews| OLD | NEW |
|---|---|
| (Empty) | |
| 1 /* | |
| 2 * Copyright 2017 The WebRTC project authors. All Rights Reserved. | |
| 3 * | |
| 4 * Use of this source code is governed by a BSD-style license | |
| 5 * that can be found in the LICENSE file in the root of the source | |
| 6 * tree. An additional intellectual property rights grant can be found | |
| 7 * in the file PATENTS. All contributing project authors may | |
| 8 * be found in the AUTHORS file in the root of the source tree. | |
| 9 */ | |
| 10 | |
| 11 #import "WebRTC/RTCCameraVideoCapturer.h" | |
|
daniela-webrtc
2017/03/26 18:08:58
Systems import first. Then empty line, then local
sakal
2017/03/29 11:45:44
Done.
| |
| 12 | |
| 13 #import <Foundation/Foundation.h> | |
| 14 | |
| 15 #if TARGET_OS_IPHONE | |
| 16 #import "WebRTC/UIDevice+RTCDevice.h" | |
| 17 #endif | |
| 18 | |
| 19 #import "RTCDispatcher+Private.h" | |
| 20 #import "WebRTC/RTCLogging.h" | |
| 21 | |
| 22 const int64_t kNanosecondsPerSecond = 1000000000; | |
| 23 | |
| 24 @interface RTCCameraVideoCapturer () | |
| 25 @property(nonatomic, readonly) dispatch_queue_t frameQueue; | |
| 26 @end | |
| 27 | |
| 28 @implementation RTCCameraVideoCapturer { | |
| 29 AVCaptureVideoDataOutput *_videoDataOutput; | |
| 30 AVCaptureSession *_captureSession; | |
| 31 AVCaptureDevice *_currentDevice; | |
| 32 RTCVideoRotation _rotation; | |
| 33 BOOL _hasRetriedOnFatalError; | |
| 34 } | |
| 35 | |
| 36 @synthesize frameQueue = _frameQueue; | |
| 37 @synthesize captureSession = _captureSession; | |
| 38 | |
| 39 - (instancetype)initWithDelegate:(__weak id<RTCVideoCapturerDelegate>)delegate { | |
| 40 if (self = [super initWithDelegate:delegate]) { | |
| 41 // Create the capture session and all relevant inputs and outputs. We need | |
| 42 // to do this in init because the application may want the capture session | |
| 43 // before we start the capturer for e.g. AVCapturePreviewLayer. All objects | |
| 44 // created here are retained until dealloc and never recreated. | |
| 45 if (![self setupCaptureSession]) { | |
| 46 return nil; | |
| 47 } | |
| 48 NSNotificationCenter *center = [NSNotificationCenter defaultCenter]; | |
| 49 #if TARGET_OS_IPHONE | |
| 50 [center addObserver:self | |
| 51 selector:@selector(deviceOrientationDidChange:) | |
| 52 name:UIDeviceOrientationDidChangeNotification | |
| 53 object:nil]; | |
| 54 [center addObserver:self | |
| 55 selector:@selector(handleCaptureSessionInterruption:) | |
| 56 name:AVCaptureSessionWasInterruptedNotification | |
| 57 object:_captureSession]; | |
| 58 [center addObserver:self | |
| 59 selector:@selector(handleCaptureSessionInterruptionEnded:) | |
| 60 name:AVCaptureSessionInterruptionEndedNotification | |
| 61 object:_captureSession]; | |
| 62 [center addObserver:self | |
| 63 selector:@selector(handleApplicationDidBecomeActive:) | |
| 64 name:UIApplicationDidBecomeActiveNotification | |
| 65 object:[UIApplication sharedApplication]]; | |
| 66 #endif | |
| 67 [center addObserver:self | |
| 68 selector:@selector(handleCaptureSessionRuntimeError:) | |
| 69 name:AVCaptureSessionRuntimeErrorNotification | |
| 70 object:_captureSession]; | |
| 71 [center addObserver:self | |
| 72 selector:@selector(handleCaptureSessionDidStartRunning:) | |
| 73 name:AVCaptureSessionDidStartRunningNotification | |
| 74 object:_captureSession]; | |
| 75 [center addObserver:self | |
| 76 selector:@selector(handleCaptureSessionDidStopRunning:) | |
| 77 name:AVCaptureSessionDidStopRunningNotification | |
| 78 object:_captureSession]; | |
| 79 } | |
| 80 return self; | |
| 81 } | |
| 82 | |
| 83 + (NSArray<AVCaptureDevice *> *)captureDevices { | |
| 84 return [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; | |
| 85 } | |
| 86 | |
| 87 static inline BOOL IsMediaSubTypeSupported(FourCharCode mediaSubType) { | |
|
daniela-webrtc
2017/03/26 18:08:58
Usually static function go at the top of the file
sakal
2017/03/29 11:45:44
Done.
| |
| 88 return (mediaSubType == kCVPixelFormatType_420YpCbCr8PlanarFullRange || | |
| 89 mediaSubType == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange); | |
| 90 } | |
| 91 | |
| 92 + (NSArray<AVCaptureDeviceFormat *> *)supportedFormatsForDevice:(AVCaptureDevice *)device { | |
| 93 NSMutableArray<AVCaptureDeviceFormat *> *eligibleDeviceFormats = [NSMutableArray array]; | |
| 94 | |
| 95 for (AVCaptureDeviceFormat *format in device.formats) { | |
| 96 // Filter out subTypes that we currently don't support in the stack | |
| 97 FourCharCode mediaSubType = CMFormatDescriptionGetMediaSubType(format.formatDescription); | |
| 98 if (IsMediaSubTypeSupported(mediaSubType)) { | |
| 99 [eligibleDeviceFormats addObject:format]; | |
| 100 } | |
| 101 } | |
| 102 | |
| 103 return eligibleDeviceFormats; | |
| 104 } | |
| 105 | |
| 106 - (void)dealloc { | |
|
daniela-webrtc
2017/03/26 18:08:58
Idea: How about we make sure that the session is n
daniela-webrtc
2017/03/26 18:08:58
We should stop the capture session somewhere as we
sakal
2017/03/29 11:45:44
Capture session is now stopped in stopCapture.
| |
| 107 RTCLogInfo("dealloc"); | |
|
daniela-webrtc
2017/03/26 18:08:57
I don't think this logging is useful.
sakal
2017/03/29 11:45:44
Done.
| |
| 108 [[NSNotificationCenter defaultCenter] removeObserver:self]; | |
| 109 } | |
|
daniela-webrtc
2017/03/26 18:08:57
[super dealloc];
at the end of the method.
sakal
2017/03/29 11:45:44
Done.
| |
| 110 | |
| 111 - (dispatch_queue_t)frameQueue { | |
| 112 if (!_frameQueue) { | |
| 113 _frameQueue = | |
| 114 dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video", DISPATCH_QUEUE_SERIAL); | |
| 115 dispatch_set_target_queue(_frameQueue, | |
| 116 dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0)); | |
| 117 } | |
| 118 return _frameQueue; | |
| 119 } | |
| 120 | |
| 121 - (void)startCaptureWithDevice:(AVCaptureDevice *)device | |
|
daniela-webrtc
2017/03/26 18:08:57
In implementation file, keep public methods together
sakal
2017/03/29 11:45:44
Done.
| |
| 122 format:(AVCaptureDeviceFormat *)format | |
| 123 fps:(int)fps { | |
| 124 [RTCDispatcher | |
| 125 dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | |
|
daniela-webrtc
2017/03/26 18:08:57
Suggestion: extract some of the functionality from
sakal
2017/03/29 11:45:44
Done.
| |
| 126 block:^{ | |
| 127 RTCLogInfo("startCaptureWithDevice %@ @ %d fps", format, fps); | |
| 128 NSError *error = nil; | |
| 129 | |
| 130 AVCaptureDeviceInput *input = | |
| 131 [AVCaptureDeviceInput deviceInputWithDevice:device error:&error]; | |
| 132 if (!input) { | |
| 133 RTCLogError(@"Failed to create front camera input: %@", | |
| 134 error.localizedDescription); | |
| 135 // TODO(magjed): Error callback? | |
|
magjed_webrtc
2017/03/27 13:11:48
Remove TODOs if you are not going to add the error
sakal
2017/03/29 11:45:44
Done.
| |
| 136 return; | |
| 137 } | |
| 138 | |
| 139 [_captureSession beginConfiguration]; | |
| 140 _currentDevice = device; | |
| 141 for (AVCaptureDeviceInput *oldInput in _captureSession.inputs) { | |
| 142 [_captureSession removeInput:oldInput]; | |
| 143 } | |
| 144 if ([_captureSession canAddInput:input]) { | |
| 145 [_captureSession addInput:input]; | |
| 146 } else { | |
| 147 RTCLogError(@"Cannot add camera as an input to the session."); | |
| 148 // TODO(sakal): Error callback? | |
| 149 return; | |
| 150 } | |
| 151 [self updateOrientation]; | |
| 152 if ([device lockForConfiguration:&error]) { | |
| 153 @try { | |
| 154 device.activeFormat = format; | |
| 155 device.activeVideoMinFrameDuration = CMTimeMake(1, fps); | |
| 156 } @catch (NSException *exception) { | |
| 157 RTCLogError(@"Failed to set active format!\n User info:%@", | |
| 158 exception.userInfo); | |
| 159 // TODO(sakal): Error callback? | |
| 160 return; | |
| 161 } | |
| 162 [device unlockForConfiguration]; | |
| 163 } else { | |
| 164 RTCLogError(@"Failed to lock device %@. Error: %@", device, error.userInfo); | |
| 165 // TODO(sakal): Error callback? | |
| 166 return; | |
| 167 } | |
| 168 [_captureSession commitConfiguration]; | |
| 169 }]; | |
| 170 } | |
| 171 | |
| 172 - (void)stop { | |
|
daniela-webrtc
2017/03/26 18:08:58
I don't see a reason with this type of workload in
magjed_webrtc
2017/03/27 13:11:48
Maybe it's necessary for thread safety?
We need t
| |
| 173 [RTCDispatcher | |
| 174 dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | |
| 175 block:^{ | |
| 176 RTCLogInfo("Stop"); | |
| 177 [_captureSession beginConfiguration]; | |
|
daniela-webrtc
2017/03/26 18:08:57
No need to wrap the input removal in `beginConfiguration`
sakal
2017/03/29 11:45:44
Done.
| |
| 178 for (AVCaptureDeviceInput *oldInput in _captureSession.inputs) { | |
| 179 [_captureSession removeInput:oldInput]; | |
| 180 } | |
| 181 [_captureSession commitConfiguration]; | |
| 182 #if TARGET_OS_IPHONE | |
| 183 [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications]; | |
| 184 #endif | |
| 185 }]; | |
| 186 } | |
| 187 | |
| 188 #pragma mark iOS notifications | |
| 189 | |
| 190 #if TARGET_OS_IPHONE | |
| 191 - (void)deviceOrientationDidChange:(NSNotification *)notification { | |
| 192 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | |
| 193 block:^{ | |
| 194 [self updateOrientation]; | |
| 195 }]; | |
| 196 } | |
| 197 #endif | |
| 198 | |
| 199 #pragma mark AVCaptureVideoDataOutputSampleBufferDelegate | |
| 200 | |
| 201 - (void)captureOutput:(AVCaptureOutput *)captureOutput | |
| 202 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer | |
| 203 fromConnection:(AVCaptureConnection *)connection { | |
| 204 NSParameterAssert(captureOutput == _videoDataOutput); | |
| 205 | |
| 206 if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) || | |
| 207 !CMSampleBufferDataIsReady(sampleBuffer)) { | |
| 208 return; | |
| 209 } | |
| 210 | |
| 211 CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); | |
| 212 if (pixelBuffer == nullptr) { | |
| 213 return; | |
| 214 } | |
| 215 | |
| 216 int64_t timeStampNs = CACurrentMediaTime() * kNanosecondsPerSecond; | |
| 217 RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithPixelBuffer:pixelBuffer | |
| 218 rotation:_rotation | |
| 219 timeStampNs:timeStampNs]; | |
| 220 [self.delegate capturer:self didCaptureVideoFrame:videoFrame]; | |
| 221 } | |
| 222 | |
| 223 - (void)captureOutput:(AVCaptureOutput *)captureOutput | |
| 224 didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer | |
| 225 fromConnection:(AVCaptureConnection *)connection { | |
| 226 RTCLogError(@"Dropped sample buffer."); | |
| 227 } | |
| 228 | |
| 229 #pragma mark - AVCaptureSession notifications | |
| 230 | |
| 231 - (void)handleCaptureSessionInterruption:(NSNotification *)notification { | |
| 232 NSString *reasonString = nil; | |
| 233 #if defined(__IPHONE_9_0) && defined(__IPHONE_OS_VERSION_MAX_ALLOWED) && \ | |
| 234 __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_9_0 | |
| 235 if ([UIDevice isIOS9OrLater]) { | |
| 236 NSNumber *reason = notification.userInfo[AVCaptureSessionInterruptionReasonKey]; | |
| 237 if (reason) { | |
| 238 switch (reason.intValue) { | |
| 239 case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground: | |
| 240 reasonString = @"VideoDeviceNotAvailableInBackground"; | |
| 241 break; | |
| 242 case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient: | |
| 243 reasonString = @"AudioDeviceInUseByAnotherClient"; | |
| 244 break; | |
| 245 case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient: | |
| 246 reasonString = @"VideoDeviceInUseByAnotherClient"; | |
| 247 break; | |
| 248 case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps: | |
| 249 reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps"; | |
| 250 break; | |
| 251 } | |
| 252 } | |
| 253 } | |
| 254 #endif | |
| 255 RTCLog(@"Capture session interrupted: %@", reasonString); | |
| 256 // TODO(tkchin): Handle this case. | |
| 257 } | |
| 258 | |
| 259 - (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification { | |
| 260 RTCLog(@"Capture session interruption ended."); | |
| 261 // TODO(tkchin): Handle this case. | |
| 262 } | |
| 263 | |
| 264 - (void)handleCaptureSessionRuntimeError:(NSNotification *)notification { | |
| 265 NSError *error = [notification.userInfo objectForKey:AVCaptureSessionErrorKey]; | |
| 266 RTCLogError(@"Capture session runtime error: %@", error); | |
| 267 | |
| 268 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | |
| 269 block:^{ | |
| 270 #if TARGET_OS_IPHONE | |
| 271 if (error.code == AVErrorMediaServicesWereReset) { | |
| 272 [self handleNonFatalError]; | |
| 273 } else { | |
| 274 [self handleFatalError]; | |
| 275 } | |
| 276 #else | |
| 277 [self handleFatalError]; | |
| 278 #endif | |
| 279 }]; | |
| 280 } | |
| 281 | |
| 282 - (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification { | |
| 283 RTCLog(@"Capture session started."); | |
| 284 | |
| 285 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | |
| 286 block:^{ | |
| 287 // If we successfully restarted after an unknown error, | |
| 288 // allow future retries on fatal errors. | |
| 289 _hasRetriedOnFatalError = NO; | |
| 290 }]; | |
| 291 } | |
| 292 | |
| 293 - (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification { | |
| 294 RTCLog(@"Capture session stopped."); | |
| 295 } | |
| 296 | |
| 297 - (void)handleFatalError { | |
| 298 [RTCDispatcher | |
| 299 dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | |
| 300 block:^{ | |
| 301 if (!_hasRetriedOnFatalError) { | |
| 302 RTCLogWarning(@"Attempting to recover from fatal capture error."); | |
| 303 [self handleNonFatalError]; | |
| 304 _hasRetriedOnFatalError = YES; | |
| 305 } else { | |
| 306 RTCLogError(@"Previous fatal error recovery failed."); | |
| 307 } | |
| 308 }]; | |
| 309 } | |
| 310 | |
| 311 - (void)handleNonFatalError { | |
| 312 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | |
| 313 block:^{ | |
| 314 RTCLog(@"Restarting capture session after error."); | |
| 315 [_captureSession startRunning]; | |
| 316 }]; | |
| 317 } | |
| 318 | |
| 319 #if TARGET_OS_IPHONE | |
| 320 | |
| 321 #pragma mark - UIApplication notifications | |
| 322 | |
| 323 - (void)handleApplicationDidBecomeActive:(NSNotification *)notification { | |
| 324 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | |
| 325 block:^{ | |
| 326 if (!_captureSession.isRunning) { | |
| 327 RTCLog(@"Restarting capture session on active."); | |
| 328 [_captureSession startRunning]; | |
|
daniela-webrtc
2017/03/26 18:08:57
Not sure if this is still proper way of handling t
sakal
2017/03/29 11:45:44
This is what we used to do and seems to work fine
| |
| 329 } | |
| 330 }]; | |
| 331 } | |
| 332 | |
| 333 #endif // TARGET_OS_IPHONE | |
| 334 | |
| 335 #pragma mark - Private | |
|
daniela-webrtc
2017/03/26 18:08:58
This pragma mark should be way above, there are pl
sakal
2017/03/29 11:45:44
Done.
| |
| 336 | |
| 337 - (BOOL)setupCaptureSession { | |
| 338 _captureSession = [[AVCaptureSession alloc] init]; | |
|
daniela-webrtc
2017/03/26 18:08:58
Add asserts (for instance if the _captureSession a
| |
| 339 #if defined(WEBRTC_IOS) | |
| 340 _captureSession.usesApplicationAudioSession = NO; | |
| 341 #endif | |
| 342 // Add the output. | |
| 343 AVCaptureVideoDataOutput *videoDataOutput = [self videoDataOutput]; | |
| 344 if (![_captureSession canAddOutput:videoDataOutput]) { | |
| 345 RTCLogError(@"Video data output unsupported."); | |
| 346 return NO; | |
| 347 } | |
| 348 [_captureSession addOutput:videoDataOutput]; | |
| 349 | |
| 350 [RTCDispatcher | |
| 351 dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | |
| 352 block:^{ | |
| 353 [self updateOrientation]; | |
|
daniela-webrtc
2017/03/26 18:08:58
No need to call this here yet. We'll update rotati
sakal
2017/03/29 11:45:44
Done.
| |
| 354 #if TARGET_OS_IPHONE | |
| 355 [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications]; | |
|
daniela-webrtc
2017/03/26 18:08:58
Let's remove this from the dispatch block and from
sakal
2017/03/29 11:45:44
Done.
| |
| 356 #endif | |
| 357 [_captureSession startRunning]; | |
| 358 }]; | |
| 359 return YES; | |
| 360 } | |
| 361 | |
| 362 - (AVCaptureVideoDataOutput *)videoDataOutput { | |
| 363 if (!_videoDataOutput) { | |
| 364 // Make the capturer output NV12. Ideally we want I420 but that's not | |
| 365 // currently supported on iPhone / iPad. | |
| 366 AVCaptureVideoDataOutput *videoDataOutput = [[AVCaptureVideoDataOutput alloc] init]; | |
| 367 videoDataOutput.videoSettings = @{ | |
| 368 (NSString *) | |
| 369 // TODO(denicija): Remove this color conversion and use the original capture format directly. | |
| 370 kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) | |
| 371 }; | |
| 372 videoDataOutput.alwaysDiscardsLateVideoFrames = NO; | |
| 373 [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue]; | |
| 374 _videoDataOutput = videoDataOutput; | |
| 375 } | |
| 376 return _videoDataOutput; | |
| 377 } | |
| 378 | |
| 379 // Called from capture session queue. | |
| 380 - (void)updateOrientation { | |
| 381 #if TARGET_OS_IPHONE | |
| 382 bool usingFrontCamera = _currentDevice.position == AVCaptureDevicePositionFront; | |
|
daniela-webrtc
2017/03/26 18:08:57
Preferably use BOOL in ObjC code.
sakal
2017/03/29 11:45:44
Done.
| |
| 383 switch ([UIDevice currentDevice].orientation) { | |
| 384 case UIDeviceOrientationPortrait: | |
| 385 _rotation = RTCVideoRotation_90; | |
| 386 break; | |
| 387 case UIDeviceOrientationPortraitUpsideDown: | |
| 388 _rotation = RTCVideoRotation_270; | |
| 389 break; | |
| 390 case UIDeviceOrientationLandscapeLeft: | |
| 391 _rotation = usingFrontCamera ? RTCVideoRotation_180 : RTCVideoRotation_0; | |
| 392 break; | |
| 393 case UIDeviceOrientationLandscapeRight: | |
| 394 _rotation = usingFrontCamera ? RTCVideoRotation_0 : RTCVideoRotation_180; | |
| 395 break; | |
| 396 case UIDeviceOrientationFaceUp: | |
| 397 case UIDeviceOrientationFaceDown: | |
| 398 case UIDeviceOrientationUnknown: | |
| 399 // Ignore. | |
| 400 break; | |
| 401 } | |
| 402 #endif | |
| 403 } | |
| 404 | |
| 405 @end | |
| OLD | NEW |