/*
 * Copyright 2016 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#import "RTCAVFoundationVideoCapturerInternal.h"

#import <Foundation/Foundation.h>
#if TARGET_OS_IPHONE
#import <UIKit/UIKit.h>
#import "WebRTC/UIDevice+RTCDevice.h"
#endif

#import "AVCaptureSession+DevicePosition.h"
#import "RTCDispatcher+Private.h"
#import "WebRTC/RTCLogging.h"

#include "avfoundationformatmapper.h"

@implementation RTCAVFoundationVideoCapturerInternal {
  // Keep pointers to inputs for convenience.
  AVCaptureDeviceInput *_frontCameraInput;
  AVCaptureDeviceInput *_backCameraInput;
  AVCaptureVideoDataOutput *_videoDataOutput;
  // The cricket::VideoCapturer that owns this class. Should never be NULL.
  webrtc::AVFoundationVideoCapturer *_capturer;
  BOOL _hasRetriedOnFatalError;
  BOOL _isRunning;
  BOOL _hasStarted;
  rtc::CriticalSection _crit;
#if TARGET_OS_IPHONE
  UIDeviceOrientation _orientation;
#endif
}

@synthesize captureSession = _captureSession;
@synthesize frameQueue = _frameQueue;
@synthesize useBackCamera = _useBackCamera;

@synthesize isRunning = _isRunning;
@synthesize hasStarted = _hasStarted;

// This is called from the thread that creates the video source, which is
// likely the main thread.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer {
  RTC_DCHECK(capturer);
  if (self = [super init]) {
    _capturer = capturer;
    // Create the capture session and all relevant inputs and outputs. We need
    // to do this in init because the application may want the capture session
    // before we start the capturer for e.g. AVCaptureVideoPreviewLayer. All
    // objects created here are retained until dealloc and never recreated.
    if (![self setupCaptureSession]) {
      return nil;
    }
    NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
#if TARGET_OS_IPHONE
    _orientation = UIDeviceOrientationPortrait;
    [center addObserver:self
               selector:@selector(deviceOrientationDidChange:)
                   name:UIDeviceOrientationDidChangeNotification
                 object:nil];
    [center addObserver:self
               selector:@selector(handleCaptureSessionInterruption:)
                   name:AVCaptureSessionWasInterruptedNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionInterruptionEnded:)
                   name:AVCaptureSessionInterruptionEndedNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleApplicationDidBecomeActive:)
                   name:UIApplicationDidBecomeActiveNotification
                 object:[UIApplication sharedApplication]];
#endif
    [center addObserver:self
               selector:@selector(handleCaptureSessionRuntimeError:)
                   name:AVCaptureSessionRuntimeErrorNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionDidStartRunning:)
                   name:AVCaptureSessionDidStartRunningNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionDidStopRunning:)
                   name:AVCaptureSessionDidStopRunningNotification
                 object:_captureSession];
  }
  return self;
}

- (void)dealloc {
  RTC_DCHECK(!self.hasStarted);
  [[NSNotificationCenter defaultCenter] removeObserver:self];
  _capturer = nullptr;
}

- (AVCaptureSession *)captureSession {
  return _captureSession;
}

- (AVCaptureDevice *)getActiveCaptureDevice {
  return self.useBackCamera ? _backCameraInput.device : _frontCameraInput.device;
}

- (nullable AVCaptureDevice *)frontCaptureDevice {
  return _frontCameraInput.device;
}

- (nullable AVCaptureDevice *)backCaptureDevice {
  return _backCameraInput.device;
}

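// Lazily creates the serial queue on which sample buffers are delivered. The
// queue targets the high-priority global queue so frame callbacks are
// serviced promptly.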
- (dispatch_queue_t)frameQueue {
  if (!_frameQueue) {
    _frameQueue =
        dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video", DISPATCH_QUEUE_SERIAL);
    dispatch_set_target_queue(_frameQueue,
                              dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
  }
  return _frameQueue;
}

// Called from any thread (likely main thread).
- (BOOL)canUseBackCamera {
  return _backCameraInput != nil;
}

// Called from any thread (likely main thread).
- (BOOL)useBackCamera {
  @synchronized(self) {
    return _useBackCamera;
  }
}

// Called from any thread (likely main thread).
- (void)setUseBackCamera:(BOOL)useBackCamera {
  if (!self.canUseBackCamera) {
    if (useBackCamera) {
      RTCLogWarning(@"No rear-facing camera exists or it cannot be used; "
                     "not switching.");
    }
    return;
  }
  @synchronized(self) {
    if (_useBackCamera == useBackCamera) {
      return;
    }
    _useBackCamera = useBackCamera;
    [self updateSessionInputForUseBackCamera:useBackCamera];
  }
}

// Called from WebRTC thread.
- (void)start {
  if (self.hasStarted) {
    return;
  }
  self.hasStarted = YES;
  [RTCDispatcher
      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                    block:^{
                      [self updateOrientation];
#if TARGET_OS_IPHONE
                      [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
#endif
                      AVCaptureSession *captureSession = self.captureSession;
                      [captureSession startRunning];
                    }];
}

// Called from same thread as start.
- (void)stop {
  if (!self.hasStarted) {
    return;
  }
  self.hasStarted = NO;
  // Due to this async block, it's possible that the ObjC object outlives the
  // C++ one. In order to not invoke functions on the C++ object, we set
  // hasStarted immediately instead of dispatching it async.
  [RTCDispatcher
      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                    block:^{
                      [_videoDataOutput setSampleBufferDelegate:nil queue:nullptr];
                      [_captureSession stopRunning];
#if TARGET_OS_IPHONE
                      [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
#endif
                    }];
}

#pragma mark iOS notifications

#if TARGET_OS_IPHONE
- (void)deviceOrientationDidChange:(NSNotification *)notification {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
                                 [self updateOrientation];
                               }];
}
#endif

#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate

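// Called on the frame queue for each captured frame. Computes the rotation
// for the buffer and forwards it to the C++ capturer.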
- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection {
  NSParameterAssert(captureOutput == _videoDataOutput);
  if (!self.hasStarted) {
    return;
  }

#if TARGET_OS_IPHONE
  // Default to portrait orientation on iPhone.
  webrtc::VideoRotation rotation = webrtc::kVideoRotation_90;
  BOOL usingFrontCamera = NO;
  // Check the image's EXIF for the camera the image came from as the image
  // could have been delayed as we set alwaysDiscardsLateVideoFrames to NO.
  AVCaptureDevicePosition cameraPosition =
      [AVCaptureSession devicePositionForSampleBuffer:sampleBuffer];
  if (cameraPosition != AVCaptureDevicePositionUnspecified) {
    usingFrontCamera = AVCaptureDevicePositionFront == cameraPosition;
  } else {
    AVCaptureDeviceInput *deviceInput =
        (AVCaptureDeviceInput *)((AVCaptureInputPort *)connection.inputPorts.firstObject).input;
    usingFrontCamera = AVCaptureDevicePositionFront == deviceInput.device.position;
  }
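  // Map the device orientation to the rotation the frame needs before
  // rendering. The landscape rotations are swapped for the front camera
  // because its sensor faces the opposite direction from the back camera.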
  switch (_orientation) {
    case UIDeviceOrientationPortrait:
      rotation = webrtc::kVideoRotation_90;
      break;
    case UIDeviceOrientationPortraitUpsideDown:
      rotation = webrtc::kVideoRotation_270;
      break;
    case UIDeviceOrientationLandscapeLeft:
      rotation = usingFrontCamera ? webrtc::kVideoRotation_180 : webrtc::kVideoRotation_0;
      break;
    case UIDeviceOrientationLandscapeRight:
      rotation = usingFrontCamera ? webrtc::kVideoRotation_0 : webrtc::kVideoRotation_180;
      break;
    case UIDeviceOrientationFaceUp:
    case UIDeviceOrientationFaceDown:
    case UIDeviceOrientationUnknown:
      // Ignore.
      break;
  }
#else
  // No rotation on Mac.
  webrtc::VideoRotation rotation = webrtc::kVideoRotation_0;
#endif

  _capturer->CaptureSampleBuffer(sampleBuffer, rotation);
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
         fromConnection:(AVCaptureConnection *)connection {
  RTCLogError(@"Dropped sample buffer.");
}

#pragma mark - AVCaptureSession notifications

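// AVCaptureSessionInterruptionReasonKey is only available on iOS 9 and later,
// hence both the compile-time and runtime guards below.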
- (void)handleCaptureSessionInterruption:(NSNotification *)notification {
  NSString *reasonString = nil;
#if defined(__IPHONE_9_0) && defined(__IPHONE_OS_VERSION_MAX_ALLOWED) && \
    __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_9_0
  if ([UIDevice isIOS9OrLater]) {
    NSNumber *reason = notification.userInfo[AVCaptureSessionInterruptionReasonKey];
    if (reason) {
      switch (reason.intValue) {
        case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground:
          reasonString = @"VideoDeviceNotAvailableInBackground";
          break;
        case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient:
          reasonString = @"AudioDeviceInUseByAnotherClient";
          break;
        case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient:
          reasonString = @"VideoDeviceInUseByAnotherClient";
          break;
        case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps:
          reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps";
          break;
      }
    }
  }
#endif
  RTCLog(@"Capture session interrupted: %@", reasonString);
  // TODO(tkchin): Handle this case.
}

- (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification {
  RTCLog(@"Capture session interruption ended.");
  // TODO(tkchin): Handle this case.
}

- (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
  NSError *error = [notification.userInfo objectForKey:AVCaptureSessionErrorKey];
  RTCLogError(@"Capture session runtime error: %@", error);

  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
#if TARGET_OS_IPHONE
                                 if (error.code == AVErrorMediaServicesWereReset) {
                                   [self handleNonFatalError];
                                 } else {
                                   [self handleFatalError];
                                 }
#else
                                 [self handleFatalError];
#endif
                               }];
}

- (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification {
  RTCLog(@"Capture session started.");

  self.isRunning = YES;
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
                                 // If we successfully restarted after an unknown
                                 // error, allow future retries on fatal errors.
                                 _hasRetriedOnFatalError = NO;
                               }];
}

- (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
  RTCLog(@"Capture session stopped.");
  self.isRunning = NO;
}

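// Attempts a single restart after a fatal runtime error; if the restart also
// fails fatally, the error is only logged.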
- (void)handleFatalError {
  [RTCDispatcher
      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                    block:^{
                      if (!_hasRetriedOnFatalError) {
                        RTCLogWarning(@"Attempting to recover from fatal capture error.");
                        [self handleNonFatalError];
                        _hasRetriedOnFatalError = YES;
                      } else {
                        RTCLogError(@"Previous fatal error recovery failed.");
                      }
                    }];
}

- (void)handleNonFatalError {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
                                 if (self.hasStarted) {
                                   RTCLog(@"Restarting capture session after error.");
                                   [self.captureSession startRunning];
                                 }
                               }];
}

#if TARGET_OS_IPHONE

#pragma mark - UIApplication notifications

- (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
                                 if (self.hasStarted && !self.captureSession.isRunning) {
                                   RTCLog(@"Restarting capture session on active.");
                                   [self.captureSession startRunning];
                                 }
                               }];
}

#endif  // TARGET_OS_IPHONE

#pragma mark - Private

- (BOOL)setupCaptureSession {
  AVCaptureSession *captureSession = [[AVCaptureSession alloc] init];
#if defined(WEBRTC_IOS)
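  // Use a private AVAudioSession for capture rather than the app's shared
  // audio session.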
  captureSession.usesApplicationAudioSession = NO;
#endif
  // Add the output.
  AVCaptureVideoDataOutput *videoDataOutput = [self videoDataOutput];
  if (![captureSession canAddOutput:videoDataOutput]) {
    RTCLogError(@"Video data output unsupported.");
    return NO;
  }
  [captureSession addOutput:videoDataOutput];

  // Get the front and back cameras. If there isn't a front camera
  // give up.
  AVCaptureDeviceInput *frontCameraInput = [self frontCameraInput];
  AVCaptureDeviceInput *backCameraInput = [self backCameraInput];
  if (!frontCameraInput) {
    RTCLogError(@"No front camera for capture session.");
    return NO;
  }

  // Add the inputs.
  if (![captureSession canAddInput:frontCameraInput] ||
      (backCameraInput && ![captureSession canAddInput:backCameraInput])) {
    RTCLogError(@"Session does not support capture inputs.");
    return NO;
  }
  AVCaptureDeviceInput *input = self.useBackCamera ? backCameraInput : frontCameraInput;
  [captureSession addInput:input];

  _captureSession = captureSession;
  return YES;
}

- (AVCaptureVideoDataOutput *)videoDataOutput {
  if (!_videoDataOutput) {
    // Make the capturer output NV12. Ideally we want I420 but that's not
    // currently supported on iPhone / iPad.
    // TODO(denicija): Remove this color conversion and use the original
    // capture format directly.
    AVCaptureVideoDataOutput *videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
    videoDataOutput.videoSettings = @{
      (NSString *)kCVPixelBufferPixelFormatTypeKey :
          @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
    };
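    // Deliver late frames instead of dropping them; delayed buffers are
    // handled in the capture callback by checking their EXIF data.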
    videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
    [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue];
    _videoDataOutput = videoDataOutput;
  }
  return _videoDataOutput;
}

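// Returns the first capture device at |position|, or nil if none matches.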
- (AVCaptureDevice *)videoCaptureDeviceForPosition:(AVCaptureDevicePosition)position {
  for (AVCaptureDevice *captureDevice in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
    if (captureDevice.position == position) {
      return captureDevice;
    }
  }
  return nil;
}

- (AVCaptureDeviceInput *)frontCameraInput {
  if (!_frontCameraInput) {
#if TARGET_OS_IPHONE
    AVCaptureDevice *frontCameraDevice =
        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionFront];
#else
    AVCaptureDevice *frontCameraDevice =
        [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
#endif
    if (!frontCameraDevice) {
      RTCLogWarning(@"Failed to find front capture device.");
      return nil;
    }
    NSError *error = nil;
    AVCaptureDeviceInput *frontCameraInput =
        [AVCaptureDeviceInput deviceInputWithDevice:frontCameraDevice error:&error];
    if (!frontCameraInput) {
      RTCLogError(@"Failed to create front camera input: %@", error.localizedDescription);
      return nil;
    }
    _frontCameraInput = frontCameraInput;
  }
  return _frontCameraInput;
}

- (AVCaptureDeviceInput *)backCameraInput {
  if (!_backCameraInput) {
    AVCaptureDevice *backCameraDevice =
        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionBack];
    if (!backCameraDevice) {
      RTCLogWarning(@"Failed to find back capture device.");
      return nil;
    }
    NSError *error = nil;
    AVCaptureDeviceInput *backCameraInput =
        [AVCaptureDeviceInput deviceInputWithDevice:backCameraDevice error:&error];
    if (!backCameraInput) {
      RTCLogError(@"Failed to create back camera input: %@", error.localizedDescription);
      return nil;
    }
    _backCameraInput = backCameraInput;
  }
  return _backCameraInput;
}

// Called from capture session queue.
- (void)updateOrientation {
#if TARGET_OS_IPHONE
  _orientation = [UIDevice currentDevice].orientation;
#endif
}

// Update the current session input to match what's stored in _useBackCamera.
- (void)updateSessionInputForUseBackCamera:(BOOL)useBackCamera {
  [RTCDispatcher
      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                    block:^{
                      [_captureSession beginConfiguration];
                      AVCaptureDeviceInput *oldInput = _backCameraInput;
                      AVCaptureDeviceInput *newInput = _frontCameraInput;
                      if (useBackCamera) {
                        oldInput = _frontCameraInput;
                        newInput = _backCameraInput;
                      }
                      if (oldInput) {
                        // Ok to remove this even if it's not attached. Will be no-op.
                        [_captureSession removeInput:oldInput];
                      }
                      if (newInput) {
                        [_captureSession addInput:newInput];
                      }
                      [self updateOrientation];
                      AVCaptureDevice *newDevice = newInput.device;
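                      // Re-apply the capturer's active format to the newly
                      // attached device.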
                      const cricket::VideoFormat *format = _capturer->GetCaptureFormat();
                      webrtc::SetFormatForCaptureDevice(newDevice, _captureSession, *format);
                      [_captureSession commitConfiguration];
                    }];
}

@end