OLD | NEW |
1 /* | 1 /* |
2 * Copyright 2017 The WebRTC project authors. All Rights Reserved. | 2 * Copyright 2017 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
(...skipping 11 matching lines...)
22 | 22 |
23 const int64_t kNanosecondsPerSecond = 1000000000; | 23 const int64_t kNanosecondsPerSecond = 1000000000; |
24 | 24 |
25 static inline BOOL IsMediaSubTypeSupported(FourCharCode mediaSubType) { | 25 static inline BOOL IsMediaSubTypeSupported(FourCharCode mediaSubType) { |
26 return (mediaSubType == kCVPixelFormatType_420YpCbCr8PlanarFullRange || | 26 return (mediaSubType == kCVPixelFormatType_420YpCbCr8PlanarFullRange || |
27 mediaSubType == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange); | 27 mediaSubType == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange); |
28 } | 28 } |
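
For context, IsMediaSubTypeSupported() above is the predicate the capturer uses to decide which pixel formats it will accept. A minimal sketch of applying it to an AVCaptureDevice follows; the `device` variable and the loop are illustrative, not part of this CL, and simply mirror the eligibleDeviceFormats filtering visible after the skipped lines below.

// Keep only the device formats whose pixel format the capturer accepts.
NSMutableArray<AVCaptureDeviceFormat *> *supportedFormats = [NSMutableArray array];
for (AVCaptureDeviceFormat *format in device.formats) {
  // formatDescription carries the FourCharCode pixel format of this capture mode.
  FourCharCode mediaSubType = CMFormatDescriptionGetMediaSubType(format.formatDescription);
  if (IsMediaSubTypeSupported(mediaSubType)) {
    [supportedFormats addObject:format];
  }
}
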
29 | 29 |
30 @interface RTCCameraVideoCapturer ()<AVCaptureVideoDataOutputSampleBufferDelegate> | 30 @interface RTCCameraVideoCapturer ()<AVCaptureVideoDataOutputSampleBufferDelegate> |
31 @property(nonatomic, readonly) dispatch_queue_t frameQueue; | 31 @property(nonatomic, readonly) dispatch_queue_t frameQueue; |
| 32 @property(assign) BOOL switchingCameras; |
32 @end | 33 @end |
33 | 34 |
34 @implementation RTCCameraVideoCapturer { | 35 @implementation RTCCameraVideoCapturer { |
35 AVCaptureVideoDataOutput *_videoDataOutput; | 36 AVCaptureVideoDataOutput *_videoDataOutput; |
36 AVCaptureSession *_captureSession; | 37 AVCaptureSession *_captureSession; |
37 AVCaptureDevice *_currentDevice; | 38 AVCaptureDevice *_currentDevice; |
38 RTCVideoRotation _rotation; | |
39 BOOL _hasRetriedOnFatalError; | 39 BOOL _hasRetriedOnFatalError; |
40 BOOL _isRunning; | 40 BOOL _isRunning; |
41 // Will the session be running once all asynchronous operations have been completed? | 41 // Will the session be running once all asynchronous operations have been completed? |
42 BOOL _willBeRunning; | 42 BOOL _willBeRunning; |
| 43 BOOL _switchingCameras; |
| 44 #if TARGET_OS_IPHONE |
| 45 UIDeviceOrientation _orientation; |
| 46 #endif |
43 } | 47 } |
44 | 48 |
45 @synthesize frameQueue = _frameQueue; | 49 @synthesize frameQueue = _frameQueue; |
46 @synthesize captureSession = _captureSession; | 50 @synthesize captureSession = _captureSession; |
| 51 @synthesize switchingCameras = _switchingCameras; |
47 | 52 |
48 - (instancetype)initWithDelegate:(__weak id<RTCVideoCapturerDelegate>)delegate { | 53 - (instancetype)initWithDelegate:(__weak id<RTCVideoCapturerDelegate>)delegate { |
49 if (self = [super initWithDelegate:delegate]) { | 54 if (self = [super initWithDelegate:delegate]) { |
50 // Create the capture session and all relevant inputs and outputs. We need | 55 // Create the capture session and all relevant inputs and outputs. We need |
51 // to do this in init because the application may want the capture session | 56 // to do this in init because the application may want the capture session |
52 // before we start the capturer for e.g. AVCaptureVideoPreviewLayer. All objects | 57 // before we start the capturer for e.g. AVCaptureVideoPreviewLayer. All objects |
53 // created here are retained until dealloc and never recreated. | 58 // created here are retained until dealloc and never recreated. |
54 if (![self setupCaptureSession]) { | 59 if (![self setupCaptureSession]) { |
55 return nil; | 60 return nil; |
56 } | 61 } |
57 NSNotificationCenter *center = [NSNotificationCenter defaultCenter]; | 62 NSNotificationCenter *center = [NSNotificationCenter defaultCenter]; |
| 63 self.switchingCameras = NO; |
58 #if TARGET_OS_IPHONE | 64 #if TARGET_OS_IPHONE |
| 65 _orientation = UIDeviceOrientationPortrait; |
59 [center addObserver:self | 66 [center addObserver:self |
60 selector:@selector(deviceOrientationDidChange:) | 67 selector:@selector(deviceOrientationDidChange:) |
61 name:UIDeviceOrientationDidChangeNotification | 68 name:UIDeviceOrientationDidChangeNotification |
62 object:nil]; | 69 object:nil]; |
63 [center addObserver:self | 70 [center addObserver:self |
64 selector:@selector(handleCaptureSessionInterruption:) | 71 selector:@selector(handleCaptureSessionInterruption:) |
65 name:AVCaptureSessionWasInterruptedNotification | 72 name:AVCaptureSessionWasInterruptedNotification |
66 object:_captureSession]; | 73 object:_captureSession]; |
67 [center addObserver:self | 74 [center addObserver:self |
68 selector:@selector(handleCaptureSessionInterruptionEnded:) | 75 selector:@selector(handleCaptureSessionInterruptionEnded:) |
(...skipping 41 matching lines...)
110 [eligibleDeviceFormats addObject:format]; | 117 [eligibleDeviceFormats addObject:format]; |
111 } | 118 } |
112 } | 119 } |
113 | 120 |
114 return eligibleDeviceFormats; | 121 return eligibleDeviceFormats; |
115 } | 122 } |
116 | 123 |
117 - (void)startCaptureWithDevice:(AVCaptureDevice *)device | 124 - (void)startCaptureWithDevice:(AVCaptureDevice *)device |
118 format:(AVCaptureDeviceFormat *)format | 125 format:(AVCaptureDeviceFormat *)format |
119 fps:(NSInteger)fps { | 126 fps:(NSInteger)fps { |
120 _willBeRunning = true; | 127 _willBeRunning = YES; |
121 [RTCDispatcher | 128 [RTCDispatcher |
122 dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | 129 dispatchAsyncOnType:RTCDispatcherTypeCaptureSession |
123 block:^{ | 130 block:^{ |
124 RTCLogInfo("startCaptureWithDevice %@ @ %zd fps", format, fps); | 131 RTCLogInfo("startCaptureWithDevice %@ @ %zd fps", format, fps); |
125 | 132 |
126 #if TARGET_OS_IPHONE | 133 #if TARGET_OS_IPHONE |
127 [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications]; | 134 [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications]; |
128 #endif | 135 #endif |
129 | 136 |
130 _currentDevice = device; | 137 _currentDevice = device; |
131 | 138 |
132 NSError *error = nil; | 139 NSError *error = nil; |
133 if (![_currentDevice lockForConfiguration:&error]) { | 140 if (![_currentDevice lockForConfiguration:&error]) { |
134 RTCLogError( | 141 RTCLogError( |
135 @"Failed to lock device %@. Error: %@", _currentDevi
ce, error.userInfo); | 142 @"Failed to lock device %@. Error: %@", _currentDevi
ce, error.userInfo); |
136 return; | 143 return; |
137 } | 144 } |
138 | 145 |
| 146 self.switchingCameras = YES; |
139 [self reconfigureCaptureSessionInput]; | 147 [self reconfigureCaptureSessionInput]; |
140 [self updateOrientation]; | 148 [self updateOrientation]; |
141 [_captureSession startRunning]; | 149 [_captureSession startRunning]; |
142 [self updateDeviceCaptureFormat:format fps:fps]; | 150 [self updateDeviceCaptureFormat:format fps:fps]; |
143 [_currentDevice unlockForConfiguration]; | 151 [_currentDevice unlockForConfiguration]; |
144 _isRunning = true; | 152 self.switchingCameras = NO; |
| 153 _isRunning = YES; |
145 }]; | 154 }]; |
146 } | 155 } |
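
For context, startCaptureWithDevice:format:fps: and stopCapture both dispatch their work onto the RTCDispatcherTypeCaptureSession queue, so they return before the session has actually started or stopped. A minimal usage sketch, assuming a delegate object conforming to RTCVideoCapturerDelegate; the `videoSource` variable and the device/format choices are assumptions, not part of this CL.

// Create the capturer; the delegate receives RTCVideoFrames from captureOutput.
RTCCameraVideoCapturer *capturer =
    [[RTCCameraVideoCapturer alloc] initWithDelegate:videoSource];
// Pick a camera and one of its formats (illustrative choices).
AVCaptureDevice *device =
    [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
AVCaptureDeviceFormat *format = device.activeFormat;
// Start asynchronously on the capture-session queue.
[capturer startCaptureWithDevice:device format:format fps:30];
// ... later ...
[capturer stopCapture];
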
147 | 156 |
148 - (void)stopCapture { | 157 - (void)stopCapture { |
149 _willBeRunning = false; | 158 _willBeRunning = NO; |
150 [RTCDispatcher | 159 [RTCDispatcher |
151 dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | 160 dispatchAsyncOnType:RTCDispatcherTypeCaptureSession |
152 block:^{ | 161 block:^{ |
153 RTCLogInfo("Stop"); | 162 RTCLogInfo("Stop"); |
154 _currentDevice = nil; | 163 _currentDevice = nil; |
155 for (AVCaptureDeviceInput *oldInput in [_captureSession.inputs copy]) { | 164 for (AVCaptureDeviceInput *oldInput in [_captureSession.inputs copy]) { |
156 [_captureSession removeInput:oldInput]; | 165 [_captureSession removeInput:oldInput]; |
157 } | 166 } |
158 [_captureSession stopRunning]; | 167 [_captureSession stopRunning]; |
159 | 168 |
160 #if TARGET_OS_IPHONE | 169 #if TARGET_OS_IPHONE |
161 [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications]; | 170 [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications]; |
162 #endif | 171 #endif |
163 _isRunning = false; | 172 _isRunning = NO; |
164 }]; | 173 }]; |
165 } | 174 } |
166 | 175 |
167 #pragma mark iOS notifications | 176 #pragma mark iOS notifications |
168 | 177 |
169 #if TARGET_OS_IPHONE | 178 #if TARGET_OS_IPHONE |
170 - (void)deviceOrientationDidChange:(NSNotification *)notification { | 179 - (void)deviceOrientationDidChange:(NSNotification *)notification { |
171 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | 180 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession |
172 block:^{ | 181 block:^{ |
173 [self updateOrientation]; | 182 [self updateOrientation]; |
(...skipping 11 matching lines...)
185 if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) || | 194 if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) || |
186 !CMSampleBufferDataIsReady(sampleBuffer)) { | 195 !CMSampleBufferDataIsReady(sampleBuffer)) { |
187 return; | 196 return; |
188 } | 197 } |
189 | 198 |
190 CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); | 199 CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); |
191 if (pixelBuffer == nil) { | 200 if (pixelBuffer == nil) { |
192 return; | 201 return; |
193 } | 202 } |
194 | 203 |
| 204 #if TARGET_OS_IPHONE |
| 205 // Default to portrait orientation on iPhone. |
| 206 RTCVideoRotation rotation = RTCVideoRotation_90; |
| 207 // Check here, which camera this frame is from, to avoid any race conditions. |
| 208 AVCaptureDeviceInput *deviceInput = |
 | 209 (AVCaptureDeviceInput *)((AVCaptureInputPort *)connection.inputPorts.firstObject).input; |
 | 210 BOOL usingFrontCamera = deviceInput.device.position == AVCaptureDevicePositionFront; |
| 211 if (self.switchingCameras) { |
 | 212 // Check the image's EXIF for the actual camera the image came from only when switching |
| 213 // cameras as the image may still be from the old camera. |
| 214 CFDictionaryRef attachments = CMCopyDictionaryOfAttachments( |
| 215 kCFAllocatorDefault, sampleBuffer, kCMAttachmentMode_ShouldPropagate); |
| 216 if (attachments) { |
| 217 int size = CFDictionaryGetCount(attachments); |
| 218 if (size > 0) { |
| 219 CFDictionaryRef cfExifDictVal = NULL; |
| 220 if (CFDictionaryGetValueIfPresent( |
 | 221 attachments, (const void *)CFSTR("{Exif}"), (const void **)&cfExifDictVal)) { |
| 222 CFStringRef cfLensModelStrVal; |
| 223 if (CFDictionaryGetValueIfPresent(cfExifDictVal, |
| 224 (const void *)CFSTR("LensModel"), |
 | 225 (const void **)&cfLensModelStrVal)) { |
 | 226 if ([(__bridge NSString *)cfLensModelStrVal containsString:@"front"]) { |
| 227 usingFrontCamera = YES; |
 | 228 } else if ([(__bridge NSString *)cfLensModelStrVal containsString:@"back"]) { |
| 229 usingFrontCamera = NO; |
| 230 } |
| 231 } |
| 232 } |
| 233 } |
| 234 CFRelease(attachments); |
| 235 } |
| 236 } |
| 237 switch (_orientation) { |
| 238 case UIDeviceOrientationPortrait: |
| 239 rotation = RTCVideoRotation_90; |
| 240 break; |
| 241 case UIDeviceOrientationPortraitUpsideDown: |
| 242 rotation = RTCVideoRotation_270; |
| 243 break; |
| 244 case UIDeviceOrientationLandscapeLeft: |
| 245 rotation = usingFrontCamera ? RTCVideoRotation_180 : RTCVideoRotation_0; |
| 246 break; |
| 247 case UIDeviceOrientationLandscapeRight: |
| 248 rotation = usingFrontCamera ? RTCVideoRotation_0 : RTCVideoRotation_180; |
| 249 break; |
| 250 case UIDeviceOrientationFaceUp: |
| 251 case UIDeviceOrientationFaceDown: |
| 252 case UIDeviceOrientationUnknown: |
| 253 // Ignore. |
| 254 break; |
| 255 } |
| 256 #else |
| 257 // No rotation on Mac. |
| 258 RTCVideoRotation rotation = RTCVideoRotation_0; |
| 259 #endif |
| 260 |
195 RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer]; | 261 RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer]; |
196 int64_t timeStampNs = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * | 262 int64_t timeStampNs = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * |
197 kNanosecondsPerSecond; | 263 kNanosecondsPerSecond; |
198 RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer | 264 RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer |
199 rotation:_rotation | 265 rotation:rotation |
200 timeStampNs:timeStampNs]; | 266 timeStampNs:timeStampNs]; |
201 [self.delegate capturer:self didCaptureVideoFrame:videoFrame]; | 267 [self.delegate capturer:self didCaptureVideoFrame:videoFrame]; |
202 } | 268 } |
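
The EXIF check added above exists because, per the CL's own comment, a frame delivered right after a camera switch may still come from the old camera; the LensModel string is the per-frame hint of which physical camera produced the image. A standalone sketch of that lookup, factored into a helper, is shown here purely for illustration; the CL keeps the logic inline and uses the CoreFoundation dictionary API directly.

// Returns whether the sample buffer's EXIF LensModel names the front camera,
// falling back to the caller's guess when no EXIF attachment is present.
static BOOL LensModelIndicatesFrontCamera(CMSampleBufferRef sampleBuffer, BOOL fallback) {
  BOOL usingFrontCamera = fallback;
  CFDictionaryRef attachments = CMCopyDictionaryOfAttachments(
      kCFAllocatorDefault, sampleBuffer, kCMAttachmentMode_ShouldPropagate);
  if (attachments) {
    // Toll-free bridging lets us read the CFDictionary attachments as NSDictionary.
    NSDictionary *exif = ((__bridge NSDictionary *)attachments)[@"{Exif}"];
    NSString *lensModel = exif[@"LensModel"];
    if ([lensModel containsString:@"front"]) {
      usingFrontCamera = YES;
    } else if ([lensModel containsString:@"back"]) {
      usingFrontCamera = NO;
    }
    CFRelease(attachments);
  }
  return usingFrontCamera;
}
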
203 | 269 |
204 - (void)captureOutput:(AVCaptureOutput *)captureOutput | 270 - (void)captureOutput:(AVCaptureOutput *)captureOutput |
205 didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer | 271 didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer |
206 fromConnection:(AVCaptureConnection *)connection { | 272 fromConnection:(AVCaptureConnection *)connection { |
207 RTCLogError(@"Dropped sample buffer."); | 273 RTCLogError(@"Dropped sample buffer."); |
208 } | 274 } |
209 | 275 |
(...skipping 182 matching lines...)
392 } else { | 458 } else { |
393 RTCLogError(@"Cannot add camera as an input to the session."); | 459 RTCLogError(@"Cannot add camera as an input to the session."); |
394 } | 460 } |
395 [_captureSession commitConfiguration]; | 461 [_captureSession commitConfiguration]; |
396 } | 462 } |
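
The visible tail of this method shows only the failure branch and commitConfiguration; the rest sits in the skipped lines above. As a rough, self-contained sketch of the standard AVCaptureSession input-swap pattern it follows (the variable names and the deviceInputWithDevice: call are assumptions, not a quote of the elided code):

// Swap the session's input to _currentDevice inside a configuration transaction.
[_captureSession beginConfiguration];
for (AVCaptureDeviceInput *oldInput in [_captureSession.inputs copy]) {
  [_captureSession removeInput:oldInput];
}
NSError *error = nil;
AVCaptureDeviceInput *input =
    [AVCaptureDeviceInput deviceInputWithDevice:_currentDevice error:&error];
if (input && [_captureSession canAddInput:input]) {
  [_captureSession addInput:input];
} else {
  RTCLogError(@"Cannot add camera as an input to the session.");
}
[_captureSession commitConfiguration];
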
397 | 463 |
398 - (void)updateOrientation { | 464 - (void)updateOrientation { |
399 NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession], | 465 NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession], |
400 @"updateOrientation must be called on the capture queue."); | 466 @"updateOrientation must be called on the capture queue."); |
401 #if TARGET_OS_IPHONE | 467 #if TARGET_OS_IPHONE |
402 BOOL usingFrontCamera = _currentDevice.position == AVCaptureDevicePositionFront; | 468 _orientation = [UIDevice currentDevice].orientation; |
403 switch ([UIDevice currentDevice].orientation) { | |
404 case UIDeviceOrientationPortrait: | |
405 _rotation = RTCVideoRotation_90; | |
406 break; | |
407 case UIDeviceOrientationPortraitUpsideDown: | |
408 _rotation = RTCVideoRotation_270; | |
409 break; | |
410 case UIDeviceOrientationLandscapeLeft: | |
411 _rotation = usingFrontCamera ? RTCVideoRotation_180 : RTCVideoRotation_0; | |
412 break; | |
413 case UIDeviceOrientationLandscapeRight: | |
414 _rotation = usingFrontCamera ? RTCVideoRotation_0 : RTCVideoRotation_180; | |
415 break; | |
416 case UIDeviceOrientationFaceUp: | |
417 case UIDeviceOrientationFaceDown: | |
418 case UIDeviceOrientationUnknown: | |
419 // Ignore. | |
420 break; | |
421 } | |
422 #endif | 469 #endif |
423 } | 470 } |
424 | 471 |
425 @end | 472 @end |