Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 /* | 1 /* |
| 2 * Copyright 2016 The WebRTC project authors. All Rights Reserved. | 2 * Copyright 2016 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| (...skipping 10 matching lines...) | |
| 21 | 21 |
| 22 #include "avfoundationformatmapper.h" | 22 #include "avfoundationformatmapper.h" |
| 23 | 23 |
| 24 @implementation RTCAVFoundationVideoCapturerInternal { | 24 @implementation RTCAVFoundationVideoCapturerInternal { |
| 25 // Keep pointers to inputs for convenience. | 25 // Keep pointers to inputs for convenience. |
| 26 AVCaptureDeviceInput *_frontCameraInput; | 26 AVCaptureDeviceInput *_frontCameraInput; |
| 27 AVCaptureDeviceInput *_backCameraInput; | 27 AVCaptureDeviceInput *_backCameraInput; |
| 28 AVCaptureVideoDataOutput *_videoDataOutput; | 28 AVCaptureVideoDataOutput *_videoDataOutput; |
| 29 // The cricket::VideoCapturer that owns this class. Should never be NULL. | 29 // The cricket::VideoCapturer that owns this class. Should never be NULL. |
| 30 webrtc::AVFoundationVideoCapturer *_capturer; | 30 webrtc::AVFoundationVideoCapturer *_capturer; |
| 31 webrtc::VideoRotation _rotation; | |
| 32 BOOL _hasRetriedOnFatalError; | 31 BOOL _hasRetriedOnFatalError; |
| 33 BOOL _isRunning; | 32 BOOL _isRunning; |
| 34 BOOL _hasStarted; | 33 BOOL _hasStarted; |
| 35 rtc::CriticalSection _crit; | 34 rtc::CriticalSection _crit; |
| 35 BOOL _switchingCameras; | |
| 36 #if TARGET_OS_IPHONE | |
| 37 UIDeviceOrientation _orientation; | |
| 38 #endif | |
| 36 } | 39 } |
| 37 | 40 |
| 38 @synthesize captureSession = _captureSession; | 41 @synthesize captureSession = _captureSession; |
| 39 @synthesize frameQueue = _frameQueue; | 42 @synthesize frameQueue = _frameQueue; |
| 40 @synthesize useBackCamera = _useBackCamera; | 43 @synthesize useBackCamera = _useBackCamera; |
| 41 | 44 |
| 42 @synthesize isRunning = _isRunning; | 45 @synthesize isRunning = _isRunning; |
| 43 @synthesize hasStarted = _hasStarted; | 46 @synthesize hasStarted = _hasStarted; |
| 44 | 47 |
| 45 // This is called from the thread that creates the video source, which is likely | 48 // This is called from the thread that creates the video source, which is likely |
| 46 // the main thread. | 49 // the main thread. |
| 47 - (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer { | 50 - (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer { |
| 48 RTC_DCHECK(capturer); | 51 RTC_DCHECK(capturer); |
| 49 if (self = [super init]) { | 52 if (self = [super init]) { |
| 50 _capturer = capturer; | 53 _capturer = capturer; |
| 51 // Create the capture session and all relevant inputs and outputs. We need | 54 // Create the capture session and all relevant inputs and outputs. We need |
| 52 // to do this in init because the application may want the capture session | 55 // to do this in init because the application may want the capture session |
| 53 // before we start the capturer for e.g. AVCapturePreviewLayer. All objects | 56 // before we start the capturer for e.g. AVCapturePreviewLayer. All objects |
| 54 // created here are retained until dealloc and never recreated. | 57 // created here are retained until dealloc and never recreated. |
| 55 if (![self setupCaptureSession]) { | 58 if (![self setupCaptureSession]) { |
| 56 return nil; | 59 return nil; |
| 57 } | 60 } |
| 58 NSNotificationCenter *center = [NSNotificationCenter defaultCenter]; | 61 NSNotificationCenter *center = [NSNotificationCenter defaultCenter]; |
| 62 _switchingCameras = NO; | |
| 59 #if TARGET_OS_IPHONE | 63 #if TARGET_OS_IPHONE |
| 64 _orientation = UIDeviceOrientationPortrait; | |
| 60 [center addObserver:self | 65 [center addObserver:self |
| 61 selector:@selector(deviceOrientationDidChange:) | 66 selector:@selector(deviceOrientationDidChange:) |
| 62 name:UIDeviceOrientationDidChangeNotification | 67 name:UIDeviceOrientationDidChangeNotification |
| 63 object:nil]; | 68 object:nil]; |
| 64 [center addObserver:self | 69 [center addObserver:self |
| 65 selector:@selector(handleCaptureSessionInterruption:) | 70 selector:@selector(handleCaptureSessionInterruption:) |
| 66 name:AVCaptureSessionWasInterruptedNotification | 71 name:AVCaptureSessionWasInterruptedNotification |
| 67 object:_captureSession]; | 72 object:_captureSession]; |
| 68 [center addObserver:self | 73 [center addObserver:self |
| 69 selector:@selector(handleCaptureSessionInterruptionEnded:) | 74 selector:@selector(handleCaptureSessionInterruptionEnded:) |
| (...skipping 80 matching lines...) | |
| 150 _useBackCamera = useBackCamera; | 155 _useBackCamera = useBackCamera; |
| 151 [self updateSessionInputForUseBackCamera:useBackCamera]; | 156 [self updateSessionInputForUseBackCamera:useBackCamera]; |
| 152 } | 157 } |
| 153 } | 158 } |
| 154 | 159 |
| 155 // Called from WebRTC thread. | 160 // Called from WebRTC thread. |
| 156 - (void)start { | 161 - (void)start { |
| 157 if (self.hasStarted) { | 162 if (self.hasStarted) { |
| 158 return; | 163 return; |
| 159 } | 164 } |
| 160 self.hasStarted = YES; | |
| 161 [RTCDispatcher | 165 [RTCDispatcher |
| 162 dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | 166 dispatchAsyncOnType:RTCDispatcherTypeCaptureSession |
| 163 block:^{ | 167 block:^{ |
| 164 #if TARGET_OS_IPHONE | |
| 165 // Default to portrait orientation on iPhone. This will be reset in | |
| 166 // updateOrientation unless orientation is unknown/faceup/facedown. | |
| 167 _rotation = webrtc::kVideoRotation_90; | |
| 168 #else | |
| 169 // No rotation on Mac. | |
| 170 _rotation = webrtc::kVideoRotation_0; | |
| 171 #endif | |
| 172 [self updateOrientation]; | 168 [self updateOrientation]; |
| 173 #if TARGET_OS_IPHONE | 169 #if TARGET_OS_IPHONE |
| 174 [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications]; | 170 [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications]; |
| 175 #endif | 171 #endif |
| 176 AVCaptureSession *captureSession = self.captureSession; | 172 AVCaptureSession *captureSession = self.captureSession; |
| 177 [captureSession startRunning]; | 173 [captureSession startRunning]; |
| 174 self.hasStarted = YES; | |
|
kthelgason
2017/07/20 08:07:54
This change seems unrelated.
jtt_webrtc
2017/07/20 16:26:28
We don't really want to mark the session as started
| |
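For readers following the thread above: the patch moves the `self.hasStarted = YES;` assignment into the capture-session block, after `-startRunning`. A condensed sketch of the resulting ordering (taken from the new code in this diff, not a complete listing):

```objc
// Sketch of the ordering discussed above. Frames delivered before the session
// has finished starting are dropped by the hasStarted guard in
// -captureOutput:didOutputSampleBuffer:fromConnection:.
- (void)start {
  if (self.hasStarted) {
    return;
  }
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
                                 [self updateOrientation];
#if TARGET_OS_IPHONE
                                 [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
#endif
                                 [self.captureSession startRunning];
                                 self.hasStarted = YES;  // previously set before dispatching
                               }];
}
```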
| 178 }]; | 175 }]; |
| 179 } | 176 } |
| 180 | 177 |
| 181 // Called from same thread as start. | 178 // Called from same thread as start. |
| 182 - (void)stop { | 179 - (void)stop { |
| 183 if (!self.hasStarted) { | 180 if (!self.hasStarted) { |
| 184 return; | 181 return; |
| 185 } | 182 } |
| 186 self.hasStarted = NO; | 183 self.hasStarted = NO; |
| 187 // Due to this async block, it's possible that the ObjC object outlives the | 184 // Due to this async block, it's possible that the ObjC object outlives the |
| (...skipping 23 matching lines...) | |
| 211 | 208 |
| 212 #pragma mark AVCaptureVideoDataOutputSampleBufferDelegate | 209 #pragma mark AVCaptureVideoDataOutputSampleBufferDelegate |
| 213 | 210 |
| 214 - (void)captureOutput:(AVCaptureOutput *)captureOutput | 211 - (void)captureOutput:(AVCaptureOutput *)captureOutput |
| 215 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer | 212 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer |
| 216 fromConnection:(AVCaptureConnection *)connection { | 213 fromConnection:(AVCaptureConnection *)connection { |
| 217 NSParameterAssert(captureOutput == _videoDataOutput); | 214 NSParameterAssert(captureOutput == _videoDataOutput); |
| 218 if (!self.hasStarted) { | 215 if (!self.hasStarted) { |
| 219 return; | 216 return; |
| 220 } | 217 } |
| 221 _capturer->CaptureSampleBuffer(sampleBuffer, _rotation); | 218 |
| 219 #if TARGET_OS_IPHONE | |
| 220 // Default to portrait orientation on iPhone. | |
| 221 webrtc::VideoRotation rotation = webrtc::kVideoRotation_90; | |
| 222 AVCaptureDeviceInput *deviceInput = | |
| 223 (AVCaptureDeviceInput *)((AVCaptureInputPort *)connection.inputPorts.firstObject).input; | |
| 224 BOOL usingFrontCamera = deviceInput.device.position == AVCaptureDevicePositionFront; | |
| 225 if (_switchingCameras) { | |
| 226 // Check the image's EXIF for the actual camera the image came from only when switching | |
| 227 // cameras as the image may still be from the old camera. | |
| 228 CFDictionaryRef attachments = CMCopyDictionaryOfAttachments( | |
| 229 kCFAllocatorDefault, sampleBuffer, kCMAttachmentMode_ShouldPropagate); | |
| 230 if (attachments) { | |
| 231 int size = CFDictionaryGetCount(attachments); | |
| 232 if (size > 0) { | |
| 233 CFDictionaryRef cfExifDictVal = NULL; | |
| 234 if (CFDictionaryGetValueIfPresent( | |
| 235 attachments, (const void *)CFSTR("{Exif}"), (const void **)&cfExifDictVal)) { | |
| 236 CFStringRef cfLensModelStrVal; | |
| 237 if (CFDictionaryGetValueIfPresent(cfExifDictVal, | |
| 238 (const void *)CFSTR("LensModel"), | |
| 239 (const void **)&cfLensModelStrVal)) { | |
| 240 if ([(__bridge NSString *)cfLensModelStrVal containsString:@"front"]) { | |
| 241 usingFrontCamera = YES; | |
| 242 } else if ([(__bridge NSString *)cfLensModelStrVal containsString:@"back"]) { | |
| 243 usingFrontCamera = NO; | |
| 244 } | |
| 245 } | |
| 246 } | |
| 247 } | |
| 248 CFRelease(attachments); | |
| 249 } | |
| 250 } | |
|
kthelgason
2017/07/20 08:07:54
Can we put this logic into its own class instead
jtt_webrtc
2017/07/20 16:26:28
I moved the EXIF check to its own class RTCImageH
| |
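The EXIF lookup the reviewer asks about could, if pulled out of the delegate callback, look roughly like the sketch below. The helper name is illustrative only; the follow-up patch set mentioned above moves this into its own class whose exact name and interface are not shown here.

```objc
#import <CoreMedia/CoreMedia.h>
#import <Foundation/Foundation.h>

// Illustrative sketch only, not the helper class from the follow-up patch set.
// Reads the EXIF "LensModel" attachment of a sample buffer and reports whether
// it names the front camera; returns |fallback| when no EXIF data is attached.
static BOOL IsSampleBufferFromFrontCamera(CMSampleBufferRef sampleBuffer, BOOL fallback) {
  BOOL usingFrontCamera = fallback;
  CFDictionaryRef attachments = CMCopyDictionaryOfAttachments(
      kCFAllocatorDefault, sampleBuffer, kCMAttachmentMode_ShouldPropagate);
  if (attachments) {
    CFDictionaryRef exif = NULL;
    if (CFDictionaryGetValueIfPresent(
            attachments, CFSTR("{Exif}"), (const void **)&exif)) {
      CFStringRef lensModel = NULL;
      if (CFDictionaryGetValueIfPresent(
              exif, CFSTR("LensModel"), (const void **)&lensModel)) {
        NSString *model = (__bridge NSString *)lensModel;
        if ([model containsString:@"front"]) {
          usingFrontCamera = YES;
        } else if ([model containsString:@"back"]) {
          usingFrontCamera = NO;
        }
      }
    }
    CFRelease(attachments);
  }
  return usingFrontCamera;
}
```

In the diff above, the result only overrides the device-position guess while _switchingCameras is YES, since a buffered frame may still originate from the previous camera.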
| 251 switch (_orientation) { | |
| 252 case UIDeviceOrientationPortrait: | |
| 253 rotation = webrtc::kVideoRotation_90; | |
| 254 break; | |
| 255 case UIDeviceOrientationPortraitUpsideDown: | |
| 256 rotation = webrtc::kVideoRotation_270; | |
| 257 break; | |
| 258 case UIDeviceOrientationLandscapeLeft: | |
| 259 rotation = usingFrontCamera ? webrtc::kVideoRotation_180 : webrtc::kVideoRotation_0; | |
| 260 break; | |
| 261 case UIDeviceOrientationLandscapeRight: | |
| 262 rotation = usingFrontCamera ? webrtc::kVideoRotation_0 : webrtc::kVideoRotation_180; | |
| 263 break; | |
| 264 case UIDeviceOrientationFaceUp: | |
| 265 case UIDeviceOrientationFaceDown: | |
| 266 case UIDeviceOrientationUnknown: | |
| 267 // Ignore. | |
| 268 break; | |
| 269 } | |
| 270 #else | |
| 271 // No rotation on Mac. | |
| 272 webrtc::VideoRotation rotation = webrtc::kVideoRotation_0; | |
| 273 #endif | |
| 274 | |
| 275 _capturer->CaptureSampleBuffer(sampleBuffer, rotation); | |
| 222 } | 276 } |
| 223 | 277 |
| 224 - (void)captureOutput:(AVCaptureOutput *)captureOutput | 278 - (void)captureOutput:(AVCaptureOutput *)captureOutput |
| 225 didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer | 279 didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer |
| 226 fromConnection:(AVCaptureConnection *)connection { | 280 fromConnection:(AVCaptureConnection *)connection { |
| 227 RTCLogError(@"Dropped sample buffer."); | 281 RTCLogError(@"Dropped sample buffer."); |
| 228 } | 282 } |
| 229 | 283 |
| 230 #pragma mark - AVCaptureSession notifications | 284 #pragma mark - AVCaptureSession notifications |
| 231 | 285 |
| (...skipping 209 matching lines...) | |
| 441 return nil; | 495 return nil; |
| 442 } | 496 } |
| 443 _backCameraInput = backCameraInput; | 497 _backCameraInput = backCameraInput; |
| 444 } | 498 } |
| 445 return _backCameraInput; | 499 return _backCameraInput; |
| 446 } | 500 } |
| 447 | 501 |
| 448 // Called from capture session queue. | 502 // Called from capture session queue. |
| 449 - (void)updateOrientation { | 503 - (void)updateOrientation { |
| 450 #if TARGET_OS_IPHONE | 504 #if TARGET_OS_IPHONE |
| 451 switch ([UIDevice currentDevice].orientation) { | 505 _orientation = [UIDevice currentDevice].orientation; |
| 452 case UIDeviceOrientationPortrait: | |
| 453 _rotation = webrtc::kVideoRotation_90; | |
| 454 break; | |
| 455 case UIDeviceOrientationPortraitUpsideDown: | |
| 456 _rotation = webrtc::kVideoRotation_270; | |
| 457 break; | |
| 458 case UIDeviceOrientationLandscapeLeft: | |
| 459 _rotation = | |
| 460 _capturer->GetUseBackCamera() ? webrtc::kVideoRotation_0 : webrtc::kVideoRotation_180; | |
| 461 break; | |
| 462 case UIDeviceOrientationLandscapeRight: | |
| 463 _rotation = | |
| 464 _capturer->GetUseBackCamera() ? webrtc::kVideoRotation_180 : webrtc::kVideoRotation_0; | |
| 465 break; | |
| 466 case UIDeviceOrientationFaceUp: | |
| 467 case UIDeviceOrientationFaceDown: | |
| 468 case UIDeviceOrientationUnknown: | |
| 469 // Ignore. | |
| 470 break; | |
| 471 } | |
| 472 #endif | 506 #endif |
| 473 } | 507 } |
| 474 | 508 |
| 475 // Update the current session input to match what's stored in _useBackCamera. | 509 // Update the current session input to match what's stored in _useBackCamera. |
| 476 - (void)updateSessionInputForUseBackCamera:(BOOL)useBackCamera { | 510 - (void)updateSessionInputForUseBackCamera:(BOOL)useBackCamera { |
| 477 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | 511 [RTCDispatcher |
| 478 block:^{ | 512 dispatchAsyncOnType:RTCDispatcherTypeCaptureSession |
| 479 [_captureSession beginConfiguration]; | 513 block:^{ |
| 480 AVCaptureDeviceInput *oldInput = _backCameraInput; | 514 _switchingCameras = YES; |
| 481 AVCaptureDeviceInput *newInput = _frontCameraInput; | 515 [_captureSession beginConfiguration]; |
| 482 if (useBackCamera) { | 516 AVCaptureDeviceInput *oldInput = _backCameraInput; |
| 483 oldInput = _frontCameraInput; | 517 AVCaptureDeviceInput *newInput = _frontCameraInput; |
| 484 newInput = _backCameraInput; | 518 if (useBackCamera) { |
| 485 } | 519 oldInput = _frontCameraInput; |
| 486 if (oldInput) { | 520 newInput = _backCameraInput; |
| 487 // Ok to remove this even if it's not attached. Will be no-op. | 521 } |
| 488 [_captureSession removeInput:oldInput]; | 522 if (oldInput) { |
| 489 } | 523 // Ok to remove this even if it's not attached. Will be no-op. |
| 490 if (newInput) { | 524 [_captureSession removeInput:oldInput]; |
| 491 [_captureSession addInput:newInput]; | 525 } |
| 492 } | 526 if (newInput) { |
| 493 [self updateOrientation]; | 527 [_captureSession addInput:newInput]; |
| 494 AVCaptureDevice *newDevice = newInput.device; | 528 } |
| 495 const cricket::VideoFormat *format = | 529 [self updateOrientation]; |
| 496 _capturer->GetCaptureFormat(); | 530 AVCaptureDevice *newDevice = newInput.device; |
| 497 webrtc::SetFormatForCaptureDevice( | 531 const cricket::VideoFormat *format = _capturer->GetCaptureFormat(); |
| 498 newDevice, _captureSession, *format); | 532 webrtc::SetFormatForCaptureDevice(newDevice, _captureSession, *format); |
| 499 [_captureSession commitConfiguration]; | 533 [_captureSession commitConfiguration]; |
| 500 }]; | 534 _switchingCameras = NO; |
| 535 }]; | |
| 501 } | 536 } |
| 502 | 537 |
| 503 @end | 538 @end |
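As a summary of the new per-frame rotation logic in this patch, the mapping from device orientation and camera position to a webrtc::VideoRotation can be read as the sketch below. The helper function is hypothetical (the patch inlines this switch in the sample-buffer callback), and it assumes the UIKit and WebRTC rotation types already visible in this file.

```objc
#import <UIKit/UIKit.h>

// Hypothetical helper restating the switch added to
// -captureOutput:didOutputSampleBuffer:fromConnection: in this patch.
static webrtc::VideoRotation RotationForFrame(UIDeviceOrientation orientation,
                                              BOOL usingFrontCamera) {
  switch (orientation) {
    case UIDeviceOrientationPortrait:
      return webrtc::kVideoRotation_90;
    case UIDeviceOrientationPortraitUpsideDown:
      return webrtc::kVideoRotation_270;
    case UIDeviceOrientationLandscapeLeft:
      return usingFrontCamera ? webrtc::kVideoRotation_180 : webrtc::kVideoRotation_0;
    case UIDeviceOrientationLandscapeRight:
      return usingFrontCamera ? webrtc::kVideoRotation_0 : webrtc::kVideoRotation_180;
    default:
      // FaceUp, FaceDown and Unknown keep the portrait default, as in the patch.
      return webrtc::kVideoRotation_90;
  }
}
```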