/*
 * Copyright 2016 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#import "RTCAVFoundationVideoCapturerInternal.h"

#import <Foundation/Foundation.h>
#if TARGET_OS_IPHONE
#import <UIKit/UIKit.h>
#import "WebRTC/UIDevice+RTCDevice.h"
#endif

#import "RTCDispatcher+Private.h"
#import "WebRTC/RTCLogging.h"

#include "avfoundationformatmapper.h"

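// Usage sketch (illustrative only; `cppCapturer` is a placeholder for the
// owning webrtc::AVFoundationVideoCapturer, which normally drives this class):
//
//   RTCAVFoundationVideoCapturerInternal *capturer =
//       [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:cppCapturer];
//   [capturer start];              // Starts the session on the capture session queue.
//   capturer.useBackCamera = YES;  // Optional; logs a warning if no back camera exists.
//   [capturer stop];               // Must be called before dealloc, which DCHECKs !hasStarted.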
@implementation RTCAVFoundationVideoCapturerInternal {
  // Keep pointers to inputs for convenience.
  AVCaptureDeviceInput *_frontCameraInput;
  AVCaptureDeviceInput *_backCameraInput;
  AVCaptureVideoDataOutput *_videoDataOutput;
  // The cricket::VideoCapturer that owns this class. Should never be NULL.
  webrtc::AVFoundationVideoCapturer *_capturer;
  webrtc::VideoRotation _rotation;
  BOOL _hasRetriedOnFatalError;
  BOOL _isRunning;
  BOOL _hasStarted;
  rtc::CriticalSection _crit;
}

@synthesize captureSession = _captureSession;
@synthesize frameQueue = _frameQueue;
@synthesize useBackCamera = _useBackCamera;

@synthesize isRunning = _isRunning;
@synthesize hasStarted = _hasStarted;

// This is called from the thread that creates the video source, which is
// likely the main thread.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer {
  RTC_DCHECK(capturer);
  if (self = [super init]) {
    _capturer = capturer;
    // Create the capture session and all relevant inputs and outputs. We need
    // to do this in init because the application may want the capture session
    // before we start the capturer, e.g. for an AVCaptureVideoPreviewLayer.
    // All objects created here are retained until dealloc and never recreated.
    if (![self setupCaptureSession]) {
      return nil;
    }
    NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
#if TARGET_OS_IPHONE
    [center addObserver:self
               selector:@selector(deviceOrientationDidChange:)
                   name:UIDeviceOrientationDidChangeNotification
                 object:nil];
    [center addObserver:self
               selector:@selector(handleCaptureSessionInterruption:)
                   name:AVCaptureSessionWasInterruptedNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionInterruptionEnded:)
                   name:AVCaptureSessionInterruptionEndedNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleApplicationDidBecomeActive:)
                   name:UIApplicationDidBecomeActiveNotification
                 object:[UIApplication sharedApplication]];
#endif
    [center addObserver:self
               selector:@selector(handleCaptureSessionRuntimeError:)
                   name:AVCaptureSessionRuntimeErrorNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionDidStartRunning:)
                   name:AVCaptureSessionDidStartRunningNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionDidStopRunning:)
                   name:AVCaptureSessionDidStopRunningNotification
                 object:_captureSession];
  }
  return self;
}

- (void)dealloc {
  RTC_DCHECK(!self.hasStarted);
  [[NSNotificationCenter defaultCenter] removeObserver:self];
  _capturer = nullptr;
}

- (AVCaptureSession *)captureSession {
  return _captureSession;
}

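// Returns the device backing the currently selected camera input.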
- (AVCaptureDevice *)getActiveCaptureDevice {
  return self.useBackCamera ? _backCameraInput.device : _frontCameraInput.device;
}

- (nullable AVCaptureDevice *)frontCaptureDevice {
  return _frontCameraInput.device;
}

- (nullable AVCaptureDevice *)backCaptureDevice {
  return _backCameraInput.device;
}

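// Lazily creates the serial queue used for sample buffer delegate callbacks.
// The queue targets the high-priority global queue so that frame processing
// is scheduled at high priority.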
- (dispatch_queue_t)frameQueue {
  if (!_frameQueue) {
    _frameQueue =
        dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video", DISPATCH_QUEUE_SERIAL);
    dispatch_set_target_queue(_frameQueue,
                              dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
  }
  return _frameQueue;
}

// Called from any thread (likely main thread).
- (BOOL)canUseBackCamera {
  return _backCameraInput != nil;
}

// Called from any thread (likely main thread).
- (BOOL)useBackCamera {
  @synchronized(self) {
    return _useBackCamera;
  }
}

// Called from any thread (likely main thread).
- (void)setUseBackCamera:(BOOL)useBackCamera {
  if (!self.canUseBackCamera) {
    if (useBackCamera) {
      RTCLogWarning(@"No rear-facing camera exists or it cannot be used;"
                     " not switching.");
    }
    return;
  }
  @synchronized(self) {
    if (_useBackCamera == useBackCamera) {
      return;
    }
    _useBackCamera = useBackCamera;
    [self updateSessionInputForUseBackCamera:useBackCamera];
  }
}

// Called from WebRTC thread.
- (void)start {
  if (self.hasStarted) {
    return;
  }
  self.hasStarted = YES;
  [RTCDispatcher
      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                    block:^{
#if TARGET_OS_IPHONE
                      // Default to portrait orientation on iPhone. This will be
                      // reset in updateOrientation unless orientation is
                      // unknown/faceup/facedown.
                      _rotation = webrtc::kVideoRotation_90;
#else
                      // No rotation on Mac.
                      _rotation = webrtc::kVideoRotation_0;
#endif
                      [self updateOrientation];
#if TARGET_OS_IPHONE
                      [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
#endif
                      AVCaptureSession *captureSession = self.captureSession;
                      [captureSession startRunning];
                    }];
}

// Called from same thread as start.
- (void)stop {
  if (!self.hasStarted) {
    return;
  }
  self.hasStarted = NO;
  // Due to this async block, it's possible that the ObjC object outlives the
  // C++ one. To avoid invoking functions on the C++ object after it has been
  // destroyed, we set hasStarted immediately here instead of inside the async
  // block.
  [RTCDispatcher
      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                    block:^{
                      [_videoDataOutput setSampleBufferDelegate:nil queue:nullptr];
                      [_captureSession stopRunning];
#if TARGET_OS_IPHONE
                      [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
#endif
                    }];
}

#pragma mark iOS notifications

#if TARGET_OS_IPHONE
- (void)deviceOrientationDidChange:(NSNotification *)notification {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
                                 [self updateOrientation];
                               }];
}
#endif

#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate

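// Called on self.frameQueue whenever a frame is captured. Forwards the sample
// buffer to the owning C++ capturer together with the current rotation.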
- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection {
  NSParameterAssert(captureOutput == _videoDataOutput);
  if (!self.hasStarted) {
    return;
  }
  _capturer->CaptureSampleBuffer(sampleBuffer, _rotation);
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
         fromConnection:(AVCaptureConnection *)connection {
  RTCLogError(@"Dropped sample buffer.");
}

#pragma mark - AVCaptureSession notifications

- (void)handleCaptureSessionInterruption:(NSNotification *)notification {
  NSString *reasonString = nil;
#if defined(__IPHONE_9_0) && defined(__IPHONE_OS_VERSION_MAX_ALLOWED) && \
    __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_9_0
  if ([UIDevice isIOS9OrLater]) {
    NSNumber *reason = notification.userInfo[AVCaptureSessionInterruptionReasonKey];
    if (reason) {
      switch (reason.intValue) {
        case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground:
          reasonString = @"VideoDeviceNotAvailableInBackground";
          break;
        case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient:
          reasonString = @"AudioDeviceInUseByAnotherClient";
          break;
        case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient:
          reasonString = @"VideoDeviceInUseByAnotherClient";
          break;
        case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps:
          reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps";
          break;
      }
    }
  }
#endif
  RTCLog(@"Capture session interrupted: %@", reasonString);
  // TODO(tkchin): Handle this case.
}

- (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification {
  RTCLog(@"Capture session interruption ended.");
  // TODO(tkchin): Handle this case.
}

- (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
  NSError *error = [notification.userInfo objectForKey:AVCaptureSessionErrorKey];
  RTCLogError(@"Capture session runtime error: %@", error);

  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
#if TARGET_OS_IPHONE
                                 if (error.code == AVErrorMediaServicesWereReset) {
                                   [self handleNonFatalError];
                                 } else {
                                   [self handleFatalError];
                                 }
#else
                                 [self handleFatalError];
#endif
                               }];
}

- (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification {
  RTCLog(@"Capture session started.");

  self.isRunning = YES;
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
                                 // If we successfully restarted after an unknown
                                 // error, allow future retries on fatal errors.
                                 _hasRetriedOnFatalError = NO;
                               }];
}

- (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
  RTCLog(@"Capture session stopped.");
  self.isRunning = NO;
}

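// Attempts a single automatic restart after a fatal capture error. Subsequent
// fatal errors are only logged until a successful restart clears the retry flag.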
- (void)handleFatalError {
  [RTCDispatcher
      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                    block:^{
                      if (!_hasRetriedOnFatalError) {
                        RTCLogWarning(@"Attempting to recover from fatal capture error.");
                        [self handleNonFatalError];
                        _hasRetriedOnFatalError = YES;
                      } else {
                        RTCLogError(@"Previous fatal error recovery failed.");
                      }
                    }];
}

- (void)handleNonFatalError {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
                                 if (self.hasStarted) {
                                   RTCLog(@"Restarting capture session after error.");
                                   [self.captureSession startRunning];
                                 }
                               }];
}

#if TARGET_OS_IPHONE

#pragma mark - UIApplication notifications

- (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
                                 if (self.hasStarted && !self.captureSession.isRunning) {
                                   RTCLog(@"Restarting capture session on active.");
                                   [self.captureSession startRunning];
                                 }
                               }];
}

#endif  // TARGET_OS_IPHONE

#pragma mark - Private

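// Builds the capture session: one NV12 video data output plus the input for
// the currently selected camera. Fails (returns NO) if the output cannot be
// added or if no front camera is available.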
- (BOOL)setupCaptureSession {
  AVCaptureSession *captureSession = [[AVCaptureSession alloc] init];
#if defined(WEBRTC_IOS)
  captureSession.usesApplicationAudioSession = NO;
#endif
  // Add the output.
  AVCaptureVideoDataOutput *videoDataOutput = [self videoDataOutput];
  if (![captureSession canAddOutput:videoDataOutput]) {
    RTCLogError(@"Video data output unsupported.");
    return NO;
  }
  [captureSession addOutput:videoDataOutput];

  // Get the front and back cameras. If there isn't a front camera, give up.
  AVCaptureDeviceInput *frontCameraInput = [self frontCameraInput];
  AVCaptureDeviceInput *backCameraInput = [self backCameraInput];
  if (!frontCameraInput) {
    RTCLogError(@"No front camera for capture session.");
    return NO;
  }

  // Add the inputs.
  if (![captureSession canAddInput:frontCameraInput] ||
      (backCameraInput && ![captureSession canAddInput:backCameraInput])) {
    RTCLogError(@"Session does not support capture inputs.");
    return NO;
  }
  AVCaptureDeviceInput *input = self.useBackCamera ? backCameraInput : frontCameraInput;
  [captureSession addInput:input];

  _captureSession = captureSession;
  return YES;
}

- (AVCaptureVideoDataOutput *)videoDataOutput {
  if (!_videoDataOutput) {
    // Make the capturer output NV12. Ideally we want I420 but that's not
    // currently supported on iPhone / iPad.
    AVCaptureVideoDataOutput *videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
    videoDataOutput.videoSettings = @{
      (NSString *)
      // TODO(denicija): Remove this color conversion and use the original capture format directly.
      kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
    };
    videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
    [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue];
    _videoDataOutput = videoDataOutput;
  }
  return _videoDataOutput;
}

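// Returns the first video capture device found at |position|, or nil if none
// exists.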
- (AVCaptureDevice *)videoCaptureDeviceForPosition:(AVCaptureDevicePosition)position {
  for (AVCaptureDevice *captureDevice in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
    if (captureDevice.position == position) {
      return captureDevice;
    }
  }
  return nil;
}

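// Lazily creates and caches the front camera input. On macOS, where capture
// devices have no position, the system default video device is used instead.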
- (AVCaptureDeviceInput *)frontCameraInput {
  if (!_frontCameraInput) {
#if TARGET_OS_IPHONE
    AVCaptureDevice *frontCameraDevice =
        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionFront];
#else
    AVCaptureDevice *frontCameraDevice =
        [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
#endif
    if (!frontCameraDevice) {
      RTCLogWarning(@"Failed to find front capture device.");
      return nil;
    }
    NSError *error = nil;
    AVCaptureDeviceInput *frontCameraInput =
        [AVCaptureDeviceInput deviceInputWithDevice:frontCameraDevice error:&error];
    if (!frontCameraInput) {
      RTCLogError(@"Failed to create front camera input: %@", error.localizedDescription);
      return nil;
    }
    _frontCameraInput = frontCameraInput;
  }
  return _frontCameraInput;
}

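// Lazily creates and caches the back camera input. Returns nil if the device
// has no back camera.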
- (AVCaptureDeviceInput *)backCameraInput {
  if (!_backCameraInput) {
    AVCaptureDevice *backCameraDevice =
        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionBack];
    if (!backCameraDevice) {
      RTCLogWarning(@"Failed to find back capture device.");
      return nil;
    }
    NSError *error = nil;
    AVCaptureDeviceInput *backCameraInput =
        [AVCaptureDeviceInput deviceInputWithDevice:backCameraDevice error:&error];
    if (!backCameraInput) {
      RTCLogError(@"Failed to create back camera input: %@", error.localizedDescription);
      return nil;
    }
    _backCameraInput = backCameraInput;
  }
  return _backCameraInput;
}

// Called from capture session queue.
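// Note: the landscape mappings below differ between the front and back
// cameras, presumably because the two sensors are mounted in opposite
// orientations.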
- (void)updateOrientation {
#if TARGET_OS_IPHONE
  switch ([UIDevice currentDevice].orientation) {
    case UIDeviceOrientationPortrait:
      _rotation = webrtc::kVideoRotation_90;
      break;
    case UIDeviceOrientationPortraitUpsideDown:
      _rotation = webrtc::kVideoRotation_270;
      break;
    case UIDeviceOrientationLandscapeLeft:
      _rotation =
          _capturer->GetUseBackCamera() ? webrtc::kVideoRotation_0 : webrtc::kVideoRotation_180;
      break;
    case UIDeviceOrientationLandscapeRight:
      _rotation =
          _capturer->GetUseBackCamera() ? webrtc::kVideoRotation_180 : webrtc::kVideoRotation_0;
      break;
    case UIDeviceOrientationFaceUp:
    case UIDeviceOrientationFaceDown:
    case UIDeviceOrientationUnknown:
      // Ignore.
      break;
  }
#endif
}

// Update the current session input to match what's stored in _useBackCamera.
- (void)updateSessionInputForUseBackCamera:(BOOL)useBackCamera {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
                                 [_captureSession beginConfiguration];
                                 AVCaptureDeviceInput *oldInput = _backCameraInput;
                                 AVCaptureDeviceInput *newInput = _frontCameraInput;
                                 if (useBackCamera) {
                                   oldInput = _frontCameraInput;
                                   newInput = _backCameraInput;
                                 }
                                 if (oldInput) {
                                   // Ok to remove this even if it's not attached. Will be no-op.
                                   [_captureSession removeInput:oldInput];
                                 }
                                 if (newInput) {
                                   [_captureSession addInput:newInput];
                                 }
                                 [self updateOrientation];
                                 AVCaptureDevice *newDevice = newInput.device;
                                 const cricket::VideoFormat *format = _capturer->GetCaptureFormat();
                                 webrtc::SetFormatForCaptureDevice(newDevice, _captureSession, *format);
                                 [_captureSession commitConfiguration];
                               }];
}

@end