OLD | NEW |
---|---|
(Empty) | |
1 /* | |
2 * Copyright 2017 The WebRTC project authors. All Rights Reserved. | |
3 * | |
4 * Use of this source code is governed by a BSD-style license | |
5 * that can be found in the LICENSE file in the root of the source | |
6 * tree. An additional intellectual property rights grant can be found | |
7 * in the file PATENTS. All contributing project authors may | |
8 * be found in the AUTHORS file in the root of the source tree. | |
9 */ | |
10 | |
11 #import <Foundation/Foundation.h> | |
12 | |
13 #import "WebRTC/RTCCameraVideoCapturer.h" | |
14 #import "WebRTC/RTCLogging.h" | |
15 | |
16 #if TARGET_OS_IPHONE | |
17 #import "WebRTC/UIDevice+RTCDevice.h" | |
18 #endif | |
19 | |
20 #import "RTCDispatcher+Private.h" | |
21 | |
// Conversion factor from seconds (CACurrentMediaTime) to nanosecond timestamps.
// `static` keeps this translation-unit-local instead of exporting an
// external-linkage symbol named kNanosecondsPerSecond from this .m file.
static const int64_t kNanosecondsPerSecond = 1000000000;

// Returns YES when the WebRTC capture stack can consume frames whose pixel
// format (CMFormatDescription media subtype) is `mediaSubType`.
static inline BOOL IsMediaSubTypeSupported(FourCharCode mediaSubType) {
  switch (mediaSubType) {
    case kCVPixelFormatType_420YpCbCr8PlanarFullRange:
    case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange:
      return YES;
    default:
      return NO;
  }
}

@interface RTCCameraVideoCapturer () <AVCaptureVideoDataOutputSampleBufferDelegate>
// Serial queue on which sample buffers are delivered and forwarded as frames.
@property(nonatomic, readonly) dispatch_queue_t frameQueue;
@end

@implementation RTCCameraVideoCapturer {
  AVCaptureVideoDataOutput *_videoDataOutput;
  AVCaptureSession *_captureSession;
  AVCaptureDevice *_currentDevice;
  // Rotation applied to delivered frames; updated from device orientation.
  RTCVideoRotation _rotation;
  BOOL _hasRetriedOnFatalError;
  BOOL _isRunning;
  // Will the session be running once all asynchronous operations have been
  // completed? (Set synchronously on the caller's thread; _isRunning is set on
  // the capture-session queue once the operation actually happens.)
  BOOL _willBeRunning;
}

@synthesize frameQueue = _frameQueue;
@synthesize captureSession = _captureSession;

- (instancetype)initWithDelegate:(__weak id<RTCVideoCapturerDelegate>)delegate {
  if (self = [super initWithDelegate:delegate]) {
    // The capture session and all relevant inputs/outputs are built here
    // rather than in startCapture, because clients may want the session before
    // capture starts (e.g. to attach an AVCaptureVideoPreviewLayer). Everything
    // created here is retained until dealloc and never recreated.
    if (![self setupCaptureSession]) {
      return nil;
    }
    NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter];
#if TARGET_OS_IPHONE
    [notificationCenter addObserver:self
                           selector:@selector(deviceOrientationDidChange:)
                               name:UIDeviceOrientationDidChangeNotification
                             object:nil];
    [notificationCenter addObserver:self
                           selector:@selector(handleCaptureSessionInterruption:)
                               name:AVCaptureSessionWasInterruptedNotification
                             object:_captureSession];
    [notificationCenter addObserver:self
                           selector:@selector(handleCaptureSessionInterruptionEnded:)
                               name:AVCaptureSessionInterruptionEndedNotification
                             object:_captureSession];
    [notificationCenter addObserver:self
                           selector:@selector(handleApplicationDidBecomeActive:)
                               name:UIApplicationDidBecomeActiveNotification
                             object:[UIApplication sharedApplication]];
#endif
    [notificationCenter addObserver:self
                           selector:@selector(handleCaptureSessionRuntimeError:)
                               name:AVCaptureSessionRuntimeErrorNotification
                             object:_captureSession];
    [notificationCenter addObserver:self
                           selector:@selector(handleCaptureSessionDidStartRunning:)
                               name:AVCaptureSessionDidStartRunningNotification
                             object:_captureSession];
    [notificationCenter addObserver:self
                           selector:@selector(handleCaptureSessionDidStopRunning:)
                               name:AVCaptureSessionDidStopRunningNotification
                             object:_captureSession];
  }
  return self;
}

- (void)dealloc {
  // Capture must be stopped explicitly; tearing down while a start/stop is
  // still in flight on the session queue would leave the session running.
  NSAssert(
      !_willBeRunning,
      @"Session was still running in RTCCameraVideoCapturer dealloc. Forgot to call stopCapture?");
  [[NSNotificationCenter defaultCenter] removeObserver:self];
}

// All video capture devices available on this machine.
// NOTE(review): devicesWithMediaType: is deprecated since iOS 10 in favor of
// AVCaptureDeviceDiscoverySession — consider migrating when the deployment
// target allows.
+ (NSArray<AVCaptureDevice *> *)captureDevices {
  return [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
}

// Returns the subset of `device`'s formats whose pixel format the WebRTC
// stack currently supports (see IsMediaSubTypeSupported).
+ (NSArray<AVCaptureDeviceFormat *> *)supportedFormatsForDevice:(AVCaptureDevice *)device {
  NSMutableArray<AVCaptureDeviceFormat *> *supportedFormats = [NSMutableArray array];
  for (AVCaptureDeviceFormat *format in device.formats) {
    FourCharCode mediaSubType = CMFormatDescriptionGetMediaSubType(format.formatDescription);
    if (!IsMediaSubTypeSupported(mediaSubType)) {
      // Skip subTypes the stack can't consume.
      continue;
    }
    [supportedFormats addObject:format];
  }
  return supportedFormats;
}

// Starts capturing from `device` using `format` at `fps` frames per second.
// The actual work happens asynchronously on the capture-session dispatcher
// queue; _willBeRunning is flipped synchronously so dealloc can assert on it.
- (void)startCaptureWithDevice:(AVCaptureDevice *)device
                        format:(AVCaptureDeviceFormat *)format
                           fps:(int)fps {
  _willBeRunning = YES;  // BOOL ivar: use YES/NO, not C++ true/false.
  [RTCDispatcher
      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                    block:^{
                      RTCLogInfo("startCaptureWithDevice %@ @ %d fps", format, fps);

#if TARGET_OS_IPHONE
                      [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
#endif

                      _currentDevice = device;
                      // updateDeviceCaptureFormat:fps: acquires the device
                      // configuration lock; it is released below only after
                      // the session has started running, so the chosen format
                      // is not clobbered — TODO confirm against Apple's
                      // lockForConfiguration guidance.
                      [self updateDeviceCaptureFormat:format fps:fps];
                      [self reconfigureCaptureSessionInput];
                      [self updateOrientation];
                      [_captureSession startRunning];
                      [_currentDevice unlockForConfiguration];
                      _isRunning = YES;
                    }];
}

// Stops capture asynchronously on the capture-session dispatcher queue:
// removes all session inputs, stops the session, and (on iOS) stops device
// orientation notifications.
- (void)stopCapture {
  _willBeRunning = NO;  // BOOL ivar: use YES/NO, not C++ true/false.
  [RTCDispatcher
      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                    block:^{
                      RTCLogInfo("Stop");
                      _currentDevice = nil;
                      for (AVCaptureDeviceInput *oldInput in _captureSession.inputs) {
                        [_captureSession removeInput:oldInput];
                      }
                      [_captureSession stopRunning];

#if TARGET_OS_IPHONE
                      [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
#endif
                      _isRunning = NO;
                    }];
}

#pragma mark iOS notifications

#if TARGET_OS_IPHONE
// UIDeviceOrientationDidChangeNotification handler: recompute the frame
// rotation on the capture-session queue.
- (void)deviceOrientationDidChange:(NSNotification *)notification {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
                                 [self updateOrientation];
                               }];
}
#endif

#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate

// Delivered on self.frameQueue. Wraps each valid pixel buffer in an
// RTCVideoFrame (with the current rotation and a CACurrentMediaTime-based
// nanosecond timestamp) and forwards it to the delegate.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection {
  NSParameterAssert(captureOutput == _videoDataOutput);

  if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) ||
      !CMSampleBufferDataIsReady(sampleBuffer)) {
    return;
  }

  CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
  // Plain NULL check instead of `nullptr`, which is C++-only and does not
  // compile in a pure Objective-C (.m) translation unit.
  if (pixelBuffer == NULL) {
    return;
  }

  int64_t timeStampNs = CACurrentMediaTime() * kNanosecondsPerSecond;
  RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithPixelBuffer:pixelBuffer
                                                                rotation:_rotation
                                                             timeStampNs:timeStampNs];
  [self.delegate capturer:self didCaptureVideoFrame:videoFrame];
}

// Called when the output discards a frame (e.g. the queue was busy).
- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
         fromConnection:(AVCaptureConnection *)connection {
  RTCLogError(@"Dropped sample buffer.");
}

#pragma mark - AVCaptureSession notifications

// Logs why the capture session was interrupted (iOS 9+ provides a reason).
- (void)handleCaptureSessionInterruption:(NSNotification *)notification {
  NSString *reasonString = nil;
#if defined(__IPHONE_9_0) && defined(__IPHONE_OS_VERSION_MAX_ALLOWED) && \
    __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_9_0
  if ([UIDevice isIOS9OrLater]) {
    NSNumber *reason = notification.userInfo[AVCaptureSessionInterruptionReasonKey];
    if (reason != nil) {
      switch (reason.intValue) {
        case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground:
          reasonString = @"VideoDeviceNotAvailableInBackground";
          break;
        case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient:
          reasonString = @"AudioDeviceInUseByAnotherClient";
          break;
        case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient:
          reasonString = @"VideoDeviceInUseByAnotherClient";
          break;
        case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps:
          reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps";
          break;
      }
    }
  }
#endif
  RTCLog(@"Capture session interrupted: %@", reasonString);
  // TODO(tkchin): Handle this case.
}

// The interruption (e.g. phone call) ended; capture may resume.
- (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification {
  RTCLog(@"Capture session interruption ended.");
  // TODO(tkchin): Handle this case.
}

// Routes session runtime errors to the fatal / non-fatal recovery paths.
// AVErrorMediaServicesWereReset is recoverable by simply restarting.
- (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
  NSError *error = [notification.userInfo objectForKey:AVCaptureSessionErrorKey];
  RTCLogError(@"Capture session runtime error: %@", error);

  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
#if TARGET_OS_IPHONE
                                 if (error.code == AVErrorMediaServicesWereReset) {
                                   [self handleNonFatalError];
                                 } else {
                                   [self handleFatalError];
                                 }
#else
                                 [self handleFatalError];
#endif
                               }];
}

- (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification {
  RTCLog(@"Capture session started.");

  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
                                 // If we successfully restarted after an unknown error,
                                 // allow future retries on fatal errors.
                                 _hasRetriedOnFatalError = NO;
                               }];
}

- (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
  RTCLog(@"Capture session stopped.");
}

// Attempts a single restart after a fatal error; subsequent fatal errors are
// only logged until a successful restart clears _hasRetriedOnFatalError.
- (void)handleFatalError {
  [RTCDispatcher
      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                    block:^{
                      if (_hasRetriedOnFatalError) {
                        RTCLogError(@"Previous fatal error recovery failed.");
                      } else {
                        RTCLogWarning(@"Attempting to recover from fatal capture error.");
                        [self handleNonFatalError];
                        _hasRetriedOnFatalError = YES;
                      }
                    }];
}

// Restarts the session after a recoverable error, but only if capture is
// still supposed to be running.
- (void)handleNonFatalError {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
                                 RTCLog(@"Restarting capture session after error.");
                                 if (_isRunning) {
                                   [_captureSession startRunning];
                                 }
                               }];
}

#if TARGET_OS_IPHONE

#pragma mark - UIApplication notifications

// Restarts the session when returning to the foreground, if capture should
// be running but the session stopped (e.g. killed while backgrounded).
- (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
                                 if (_isRunning && !_captureSession.isRunning) {
                                   RTCLog(@"Restarting capture session on active.");
                                   [_captureSession startRunning];
                                 }
                               }];
}

#endif  // TARGET_OS_IPHONE

#pragma mark - Private

// Lazily created serial queue for sample-buffer delivery, targeted at the
// high-priority global queue.
- (dispatch_queue_t)frameQueue {
  if (_frameQueue == nil) {
    _frameQueue =
        dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video", DISPATCH_QUEUE_SERIAL);
    dispatch_set_target_queue(_frameQueue,
                              dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
  }
  return _frameQueue;
}

// Creates the capture session and attaches the video data output.
// Returns NO when the output cannot be added, in which case the capturer
// is unusable and init fails.
- (BOOL)setupCaptureSession {
  NSAssert(_captureSession == nil, @"Setup capture session called twice.");
  _captureSession = [[AVCaptureSession alloc] init];
  _captureSession.sessionPreset = AVCaptureSessionPresetInputPriority;
#if defined(WEBRTC_IOS)
  _captureSession.usesApplicationAudioSession = NO;
#endif
  [self setupVideoDataOutput];
  // Add the output.
  if (![_captureSession canAddOutput:_videoDataOutput]) {
    RTCLogError(@"Video data output unsupported.");
    return NO;
  }
  [_captureSession addOutput:_videoDataOutput];
  return YES;
}

// Creates and configures the video data output, delegating to self on
// frameQueue. Make the capturer output NV12; ideally we want I420 but that's
// not currently supported on iPhone / iPad.
// TODO(denicija): Remove this color conversion and use the original capture format directly.
- (void)setupVideoDataOutput {
  NSAssert(_videoDataOutput == nil, @"Setup video data output called twice.");
  AVCaptureVideoDataOutput *videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
  videoDataOutput.videoSettings = @{
    (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
  };
  videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
  [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue];
  _videoDataOutput = videoDataOutput;
}

#pragma mark - Private, called inside capture queue

// Locks the current device for configuration and applies `format` and the
// minimum frame duration for `fps`. On success the device remains locked;
// the matching unlockForConfiguration happens in startCaptureWithDevice
// after the session starts running.
- (void)updateDeviceCaptureFormat:(AVCaptureDeviceFormat *)format fps:(int)fps {
  NSError *error = nil;
  if (![_currentDevice lockForConfiguration:&error]) {
    RTCLogError(@"Failed to lock device %@. Error: %@", _currentDevice, error.userInfo);
    return;
  }
  @try {
    // Setting an unsupported format raises rather than returning an error.
    _currentDevice.activeFormat = format;
    _currentDevice.activeVideoMinFrameDuration = CMTimeMake(1, fps);
  } @catch (NSException *exception) {
    RTCLogError(@"Failed to set active format!\n User info:%@", exception.userInfo);
    return;
  }
}

// Replaces all existing session inputs with an input for _currentDevice.
- (void)reconfigureCaptureSessionInput {
  NSError *error = nil;
  AVCaptureDeviceInput *input =
      [AVCaptureDeviceInput deviceInputWithDevice:_currentDevice error:&error];
  if (!input) {
    RTCLogError(@"Failed to create front camera input: %@", error.localizedDescription);
    return;
  }
  [_captureSession beginConfiguration];
  for (AVCaptureDeviceInput *oldInput in _captureSession.inputs) {
    [_captureSession removeInput:oldInput];
  }
  if ([_captureSession canAddInput:input]) {
    [_captureSession addInput:input];
  } else {
    RTCLogError(@"Cannot add camera as an input to the session.");
  }
  // BUG FIX: the original returned early on the canAddInput failure above,
  // leaving beginConfiguration without its matching commitConfiguration and
  // the session stuck in a configuration-change block. Always commit.
  [_captureSession commitConfiguration];
}

// Maps the current UIDevice orientation to the RTCVideoRotation applied to
// outgoing frames. Front-camera frames are mirrored, so landscape rotations
// are swapped for the front camera. Face-up/down/unknown keep the previous
// rotation. No-op on non-iOS platforms.
- (void)updateOrientation {
#if TARGET_OS_IPHONE
  BOOL usingFrontCamera = _currentDevice.position == AVCaptureDevicePositionFront;
  switch ([UIDevice currentDevice].orientation) {
    case UIDeviceOrientationPortrait:
      _rotation = RTCVideoRotation_90;
      break;
    case UIDeviceOrientationPortraitUpsideDown:
      _rotation = RTCVideoRotation_270;
      break;
    case UIDeviceOrientationLandscapeLeft:
      _rotation = usingFrontCamera ? RTCVideoRotation_180 : RTCVideoRotation_0;
      break;
    case UIDeviceOrientationLandscapeRight:
      _rotation = usingFrontCamera ? RTCVideoRotation_0 : RTCVideoRotation_180;
      break;
    case UIDeviceOrientationFaceUp:
    case UIDeviceOrientationFaceDown:
    case UIDeviceOrientationUnknown:
      // Ignore.
      break;
  }
#endif
}

@end
OLD | NEW |