OLD | NEW |
---|---|
1 /* | 1 /* |
2 * Copyright 2017 The WebRTC project authors. All Rights Reserved. | 2 * Copyright 2017 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
(...skipping 17 matching lines...) Expand all Loading... | |
28 } | 28 } |
29 | 29 |
30 @interface RTCCameraVideoCapturer ()<AVCaptureVideoDataOutputSampleBufferDelegate> | 30 @interface RTCCameraVideoCapturer ()<AVCaptureVideoDataOutputSampleBufferDelegate> |
31 @property(nonatomic, readonly) dispatch_queue_t frameQueue; | 31 @property(nonatomic, readonly) dispatch_queue_t frameQueue; |
32 @end | 32 @end |
33 | 33 |
34 @implementation RTCCameraVideoCapturer { | 34 @implementation RTCCameraVideoCapturer { |
35 AVCaptureVideoDataOutput *_videoDataOutput; | 35 AVCaptureVideoDataOutput *_videoDataOutput; |
36 AVCaptureSession *_captureSession; | 36 AVCaptureSession *_captureSession; |
37 AVCaptureDevice *_currentDevice; | 37 AVCaptureDevice *_currentDevice; |
38 RTCVideoRotation _rotation; | |
39 BOOL _hasRetriedOnFatalError; | 38 BOOL _hasRetriedOnFatalError; |
40 BOOL _isRunning; | 39 BOOL _isRunning; |
41 // Will the session be running once all asynchronous operations have been completed? | 40 // Will the session be running once all asynchronous operations have been completed? |
42 BOOL _willBeRunning; | 41 BOOL _willBeRunning; |
42 BOOL _changingCamera; | |
43 #if TARGET_OS_IPHONE | |
44 UIDeviceOrientation _orientation; | |
45 #endif | |
43 } | 46 } |
44 | 47 |
45 @synthesize frameQueue = _frameQueue; | 48 @synthesize frameQueue = _frameQueue; |
46 @synthesize captureSession = _captureSession; | 49 @synthesize captureSession = _captureSession; |
47 | 50 |
48 - (instancetype)initWithDelegate:(__weak id<RTCVideoCapturerDelegate>)delegate { | 51 - (instancetype)initWithDelegate:(__weak id<RTCVideoCapturerDelegate>)delegate { |
49 if (self = [super initWithDelegate:delegate]) { | 52 if (self = [super initWithDelegate:delegate]) { |
50 // Create the capture session and all relevant inputs and outputs. We need | 53 // Create the capture session and all relevant inputs and outputs. We need |
51 // to do this in init because the application may want the capture session | 54 // to do this in init because the application may want the capture session |
52 // before we start the capturer for e.g. AVCapturePreviewLayer. All objects | 55 // before we start the capturer for e.g. AVCapturePreviewLayer. All objects |
53 // created here are retained until dealloc and never recreated. | 56 // created here are retained until dealloc and never recreated. |
54 if (![self setupCaptureSession]) { | 57 if (![self setupCaptureSession]) { |
55 return nil; | 58 return nil; |
56 } | 59 } |
57 NSNotificationCenter *center = [NSNotificationCenter defaultCenter]; | 60 NSNotificationCenter *center = [NSNotificationCenter defaultCenter]; |
61 _changingCamera = false; | |
58 #if TARGET_OS_IPHONE | 62 #if TARGET_OS_IPHONE |
63 _orientation = UIDeviceOrientationPortrait; | |
59 [center addObserver:self | 64 [center addObserver:self |
60 selector:@selector(deviceOrientationDidChange:) | 65 selector:@selector(deviceOrientationDidChange:) |
61 name:UIDeviceOrientationDidChangeNotification | 66 name:UIDeviceOrientationDidChangeNotification |
62 object:nil]; | 67 object:nil]; |
63 [center addObserver:self | 68 [center addObserver:self |
64 selector:@selector(handleCaptureSessionInterruption:) | 69 selector:@selector(handleCaptureSessionInterruption:) |
65 name:AVCaptureSessionWasInterruptedNotification | 70 name:AVCaptureSessionWasInterruptedNotification |
66 object:_captureSession]; | 71 object:_captureSession]; |
67 [center addObserver:self | 72 [center addObserver:self |
68 selector:@selector(handleCaptureSessionInterruptionEnded:) | 73 selector:@selector(handleCaptureSessionInterruptionEnded:) |
(...skipping 60 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
129 | 134 |
130 _currentDevice = device; | 135 _currentDevice = device; |
131 | 136 |
132 NSError *error = nil; | 137 NSError *error = nil; |
133 if (![_currentDevice lockForConfiguration:&error]) { | 138 if (![_currentDevice lockForConfiguration:&error]) { |
134 RTCLogError( | 139 RTCLogError( |
135 @"Failed to lock device %@. Error: %@", _currentDevice, error.userInfo); | 140 @"Failed to lock device %@. Error: %@", _currentDevice, error.userInfo); |
136 return; | 141 return; |
137 } | 142 } |
138 | 143 |
144 _changingCamera = true; | |
tkchin_webrtc
2017/07/19 21:08:01
YES / NO not true / false
| |
139 [self reconfigureCaptureSessionInput]; | 145 [self reconfigureCaptureSessionInput]; |
140 [self updateOrientation]; | 146 [self updateOrientation]; |
141 [_captureSession startRunning]; | 147 [_captureSession startRunning]; |
142 [self updateDeviceCaptureFormat:format fps:fps]; | 148 [self updateDeviceCaptureFormat:format fps:fps]; |
143 [_currentDevice unlockForConfiguration]; | 149 [_currentDevice unlockForConfiguration]; |
144 _isRunning = true; | 150 _isRunning = true; |
151 _changingCamera = false; | |
145 }]; | 152 }]; |
146 } | 153 } |
147 | 154 |
148 - (void)stopCapture { | 155 - (void)stopCapture { |
149 _willBeRunning = false; | 156 _willBeRunning = false; |
150 [RTCDispatcher | 157 [RTCDispatcher |
151 dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | 158 dispatchAsyncOnType:RTCDispatcherTypeCaptureSession |
152 block:^{ | 159 block:^{ |
153 RTCLogInfo("Stop"); | 160 RTCLogInfo("Stop"); |
154 _currentDevice = nil; | 161 _currentDevice = nil; |
(...skipping 19 matching lines...) Expand all Loading... | |
174 }]; | 181 }]; |
175 } | 182 } |
176 #endif | 183 #endif |
177 | 184 |
178 #pragma mark AVCaptureVideoDataOutputSampleBufferDelegate | 185 #pragma mark AVCaptureVideoDataOutputSampleBufferDelegate |
179 | 186 |
180 - (void)captureOutput:(AVCaptureOutput *)captureOutput | 187 - (void)captureOutput:(AVCaptureOutput *)captureOutput |
181 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer | 188 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer |
182 fromConnection:(AVCaptureConnection *)connection { | 189 fromConnection:(AVCaptureConnection *)connection { |
183 NSParameterAssert(captureOutput == _videoDataOutput); | 190 NSParameterAssert(captureOutput == _videoDataOutput); |
191 NSParameterAssert(1 == connection.inputPorts.count); | |
184 | 192 |
185 if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) || | 193 if (_changingCamera || CMSampleBufferGetNumSamples(sampleBuffer) != 1 || |
186 !CMSampleBufferDataIsReady(sampleBuffer)) { | 194 !CMSampleBufferIsValid(sampleBuffer) || !CMSampleBufferDataIsReady(sampleBuffer)) { |
187 return; | 195 return; |
188 } | 196 } |
189 | 197 |
190 CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); | 198 CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); |
191 if (pixelBuffer == nil) { | 199 if (pixelBuffer == nil) { |
192 return; | 200 return; |
193 } | 201 } |
194 | 202 |
203 #if TARGET_OS_IPHONE | |
204 // Default to portrait orientation on iPhone. | |
205 RTCVideoRotation rotation = RTCVideoRotation_90; | |
206 // Check here, which camera this frame is from, to avoid any race conditions. | |
207 AVCaptureDeviceInput *deviceInput = | |
208 (AVCaptureDeviceInput *)((AVCaptureInputPort *)connection.inputPorts.firstObject).input; | |
209 BOOL usingFrontCamera = deviceInput.device.position == AVCaptureDevicePositionFront; | |
210 switch (_orientation) { | |
211 case UIDeviceOrientationPortrait: | |
212 rotation = RTCVideoRotation_90; | |
213 break; | |
214 case UIDeviceOrientationPortraitUpsideDown: | |
215 rotation = RTCVideoRotation_270; | |
216 break; | |
217 case UIDeviceOrientationLandscapeLeft: | |
218 rotation = usingFrontCamera ? RTCVideoRotation_180 : RTCVideoRotation_0; | |
219 break; | |
220 case UIDeviceOrientationLandscapeRight: | |
221 rotation = usingFrontCamera ? RTCVideoRotation_0 : RTCVideoRotation_180; | |
222 break; | |
223 case UIDeviceOrientationFaceUp: | |
224 case UIDeviceOrientationFaceDown: | |
225 case UIDeviceOrientationUnknown: | |
226 // Ignore. | |
227 break; | |
228 } | |
229 #else | |
230 // No rotation on Mac. | |
231 RTCVideoRotation rotation = RTCVideoRotation_0; | |
232 #endif | |
233 | |
195 RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer]; | 234 RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer]; |
196 int64_t timeStampNs = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * | 235 int64_t timeStampNs = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * |
197 kNanosecondsPerSecond; | 236 kNanosecondsPerSecond; |
198 RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer | 237 RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer |
199 rotation:_rotation | 238 rotation:rotation |
200 timeStampNs:timeStampNs]; | 239 timeStampNs:timeStampNs]; |
201 [self.delegate capturer:self didCaptureVideoFrame:videoFrame]; | 240 [self.delegate capturer:self didCaptureVideoFrame:videoFrame]; |
202 } | 241 } |
203 | 242 |
204 - (void)captureOutput:(AVCaptureOutput *)captureOutput | 243 - (void)captureOutput:(AVCaptureOutput *)captureOutput |
205 didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer | 244 didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer |
206 fromConnection:(AVCaptureConnection *)connection { | 245 fromConnection:(AVCaptureConnection *)connection { |
207 RTCLogError(@"Dropped sample buffer."); | 246 RTCLogError(@"Dropped sample buffer."); |
208 } | 247 } |
209 | 248 |
(...skipping 182 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
392 } else { | 431 } else { |
393 RTCLogError(@"Cannot add camera as an input to the session."); | 432 RTCLogError(@"Cannot add camera as an input to the session."); |
394 } | 433 } |
395 [_captureSession commitConfiguration]; | 434 [_captureSession commitConfiguration]; |
396 } | 435 } |
397 | 436 |
398 - (void)updateOrientation { | 437 - (void)updateOrientation { |
399 NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession], | 438 NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession], |
400 @"updateOrientation must be called on the capture queue."); | 439 @"updateOrientation must be called on the capture queue."); |
401 #if TARGET_OS_IPHONE | 440 #if TARGET_OS_IPHONE |
402 BOOL usingFrontCamera = _currentDevice.position == AVCaptureDevicePositionFront; | 441 _orientation = [UIDevice currentDevice].orientation; |
403 switch ([UIDevice currentDevice].orientation) { | |
404 case UIDeviceOrientationPortrait: | |
405 _rotation = RTCVideoRotation_90; | |
406 break; | |
407 case UIDeviceOrientationPortraitUpsideDown: | |
408 _rotation = RTCVideoRotation_270; | |
409 break; | |
410 case UIDeviceOrientationLandscapeLeft: | |
411 _rotation = usingFrontCamera ? RTCVideoRotation_180 : RTCVideoRotation_0; | |
412 break; | |
413 case UIDeviceOrientationLandscapeRight: | |
414 _rotation = usingFrontCamera ? RTCVideoRotation_0 : RTCVideoRotation_180; | |
415 break; | |
416 case UIDeviceOrientationFaceUp: | |
417 case UIDeviceOrientationFaceDown: | |
418 case UIDeviceOrientationUnknown: | |
419 // Ignore. | |
420 break; | |
421 } | |
422 #endif | 442 #endif |
423 } | 443 } |
424 | 444 |
425 @end | 445 @end |
OLD | NEW |