OLD | NEW |
1 /* | 1 /* |
2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. | 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
11 #include "avfoundationvideocapturer.h" | 11 #include "avfoundationvideocapturer.h" |
12 | 12 |
13 #import <AVFoundation/AVFoundation.h> | 13 #import <AVFoundation/AVFoundation.h> |
14 #import <Foundation/Foundation.h> | |
15 #if TARGET_OS_IPHONE | |
16 #import <UIKit/UIKit.h> | |
17 #endif | |
18 | 14 |
| 15 #import "RTCAVFoundationFormatMapper.h" |
| 16 #import "RTCAVFoundationVideoCapturerInternal.h" |
19 #import "RTCDispatcher+Private.h" | 17 #import "RTCDispatcher+Private.h" |
20 #import "WebRTC/RTCLogging.h" | 18 #import "WebRTC/RTCLogging.h" |
21 #if TARGET_OS_IPHONE | |
22 #import "WebRTC/UIDevice+RTCDevice.h" | |
23 #endif | |
24 | 19 |
25 #include "libyuv/rotate.h" | 20 #include "libyuv/rotate.h" |
26 | 21 |
27 #include "webrtc/base/bind.h" | 22 #include "webrtc/base/bind.h" |
28 #include "webrtc/base/checks.h" | 23 #include "webrtc/base/checks.h" |
29 #include "webrtc/base/logging.h" | 24 #include "webrtc/base/logging.h" |
30 #include "webrtc/base/thread.h" | 25 #include "webrtc/base/thread.h" |
31 #include "webrtc/common_video/include/corevideo_frame_buffer.h" | 26 #include "webrtc/common_video/include/corevideo_frame_buffer.h" |
32 #include "webrtc/common_video/rotation.h" | 27 #include "webrtc/common_video/rotation.h" |
33 | 28 |
34 // TODO(denicija): add support for higher frame rates. | |
35 // See http://crbug/webrtc/6355 for more info. | |
36 static const int kFramesPerSecond = 30; | |
37 | |
38 static inline BOOL IsMediaSubTypeSupported(FourCharCode mediaSubType) { | |
39 return (mediaSubType == kCVPixelFormatType_420YpCbCr8PlanarFullRange || | |
40 mediaSubType == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange); | |
41 } | |
42 | |
43 static inline BOOL IsFrameRateWithinRange(int fps, AVFrameRateRange *range) { | |
44 return range.minFrameRate <= fps && range.maxFrameRate >= fps; | |
45 } | |
46 | |
47 // Returns filtered array of device formats based on predefined constraints our | |
48 // stack imposes. | |
49 static NSArray<AVCaptureDeviceFormat *> *GetEligibleDeviceFormats( | |
50 const AVCaptureDevice *device, | |
51 int supportedFps) { | |
52 NSMutableArray<AVCaptureDeviceFormat *> *eligibleDeviceFormats = | |
53 [NSMutableArray array]; | |
54 | |
55 for (AVCaptureDeviceFormat *format in device.formats) { | |
56 // Filter out subTypes that we currently don't support in the stack | |
57 FourCharCode mediaSubType = | |
58 CMFormatDescriptionGetMediaSubType(format.formatDescription); | |
59 if (!IsMediaSubTypeSupported(mediaSubType)) { | |
60 continue; | |
61 } | |
62 | |
63 // Filter out frame rate ranges that we currently don't support in the stack | |
64 for (AVFrameRateRange *frameRateRange in format.videoSupportedFrameRateRanges) { | |
65 if (IsFrameRateWithinRange(supportedFps, frameRateRange)) { | |
66 [eligibleDeviceFormats addObject:format]; | |
67 break; | |
68 } | |
69 } | |
70 } | |
71 | |
72 return [eligibleDeviceFormats copy]; | |
73 } | |
74 | |
75 // Mapping from cricket::VideoFormat to AVCaptureDeviceFormat. | |
76 static AVCaptureDeviceFormat *GetDeviceFormatForVideoFormat( | |
77 const AVCaptureDevice *device, | |
78 const cricket::VideoFormat &videoFormat) { | |
79 AVCaptureDeviceFormat *desiredDeviceFormat = nil; | |
80 NSArray<AVCaptureDeviceFormat *> *eligibleFormats = | |
81 GetEligibleDeviceFormats(device, videoFormat.framerate()); | |
82 | |
83 for (AVCaptureDeviceFormat *deviceFormat in eligibleFormats) { | |
84 CMVideoDimensions dimension = | |
85 CMVideoFormatDescriptionGetDimensions(deviceFormat.formatDescription); | |
86 FourCharCode mediaSubType = | |
87 CMFormatDescriptionGetMediaSubType(deviceFormat.formatDescription); | |
88 | |
89 if (videoFormat.width == dimension.width && | |
90 videoFormat.height == dimension.height) { | |
91 if (mediaSubType == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) { | |
92 // This is the preferred format so no need to wait for better option. | |
93 return deviceFormat; | |
94 } else { | |
95 // This is a good candidate, but let's wait for something better. | |
96 desiredDeviceFormat = deviceFormat; | |
97 } | |
98 } | |
99 } | |
100 | |
101 return desiredDeviceFormat; | |
102 } | |
103 | |
104 // Mapping from AVCaptureDeviceFormat to cricket::VideoFormat for given input | |
105 // device. | |
106 static std::set<cricket::VideoFormat> GetSupportedVideoFormatsForDevice( | |
107 AVCaptureDevice *device) { | |
108 std::set<cricket::VideoFormat> supportedFormats; | |
109 | |
110 NSArray<AVCaptureDeviceFormat *> *eligibleFormats = | |
111 GetEligibleDeviceFormats(device, kFramesPerSecond); | |
112 | |
113 for (AVCaptureDeviceFormat *deviceFormat in eligibleFormats) { | |
114 CMVideoDimensions dimension = | |
115 CMVideoFormatDescriptionGetDimensions(deviceFormat.formatDescription); | |
116 cricket::VideoFormat format = cricket::VideoFormat( | |
117 dimension.width, dimension.height, | |
118 cricket::VideoFormat::FpsToInterval(kFramesPerSecond), | |
119 cricket::FOURCC_NV12); | |
120 supportedFormats.insert(format); | |
121 } | |
122 | |
123 return supportedFormats; | |
124 } | |
125 | |
126 // Sets device format for the provided capture device. Returns YES/NO depending on success. | |
127 // TODO(denicija): When this file is split this static method should be reconsidered. | |
128 // Perhaps adding a category on AVCaptureDevice would be better. | |
129 static BOOL SetFormatForCaptureDevice(AVCaptureDevice *device, | |
130 AVCaptureSession *session, | |
131 const cricket::VideoFormat &format) { | |
132 AVCaptureDeviceFormat *deviceFormat = | |
133 GetDeviceFormatForVideoFormat(device, format); | |
134 const int fps = cricket::VideoFormat::IntervalToFps(format.interval); | |
135 | |
136 NSError *error = nil; | |
137 BOOL success = YES; | |
138 [session beginConfiguration]; | |
139 if ([device lockForConfiguration:&error]) { | |
140 @try { | |
141 device.activeFormat = deviceFormat; | |
142 device.activeVideoMinFrameDuration = CMTimeMake(1, fps); | |
143 } @catch (NSException *exception) { | |
144 RTCLogError( | |
145 @"Failed to set active format!\n User info:%@", | |
146 exception.userInfo); | |
147 success = NO; | |
148 } | |
149 | |
150 [device unlockForConfiguration]; | |
151 } else { | |
152 RTCLogError( | |
153 @"Failed to lock device %@. Error: %@", | |
154 device, error.userInfo); | |
155 success = NO; | |
156 } | |
157 [session commitConfiguration]; | |
158 | |
159 return success; | |
160 } | |
161 | |
162 // This class is used to capture frames using AVFoundation APIs on iOS. It is | |
163 // meant to be owned by an instance of AVFoundationVideoCapturer. The reason is | |
164 // that other webrtc objects own cricket::VideoCapturer, which is not | |
165 // ref counted. To prevent bad behavior we do not expose this class directly. | |
166 @interface RTCAVFoundationVideoCapturerInternal : NSObject | |
167 <AVCaptureVideoDataOutputSampleBufferDelegate> | |
168 | |
169 @property(nonatomic, readonly) AVCaptureSession *captureSession; | |
170 @property(nonatomic, readonly) dispatch_queue_t frameQueue; | |
171 @property(nonatomic, readonly) BOOL canUseBackCamera; | |
172 @property(nonatomic, assign) BOOL useBackCamera; // Defaults to NO. | |
173 @property(atomic, assign) BOOL isRunning; // Whether the capture session is running. | |
174 @property(atomic, assign) BOOL hasStarted; // Whether we have an unmatched start. | |
175 | |
176 // We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it | |
177 // when we receive frames. This is safe because this object should be owned by | |
178 // it. | |
179 - (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer; | |
180 - (AVCaptureDevice *)getActiveCaptureDevice; | |
181 | |
182 - (nullable AVCaptureDevice *)frontCaptureDevice; | |
183 - (nullable AVCaptureDevice *)backCaptureDevice; | |
184 | |
185 // Starts and stops the capture session asynchronously. We cannot do this | |
186 // synchronously without blocking a WebRTC thread. | |
187 - (void)start; | |
188 - (void)stop; | |
189 | |
190 @end | |
191 | |
192 @implementation RTCAVFoundationVideoCapturerInternal { | |
193 // Keep pointers to inputs for convenience. | |
194 AVCaptureDeviceInput *_frontCameraInput; | |
195 AVCaptureDeviceInput *_backCameraInput; | |
196 AVCaptureVideoDataOutput *_videoDataOutput; | |
197 // The cricket::VideoCapturer that owns this class. Should never be NULL. | |
198 webrtc::AVFoundationVideoCapturer *_capturer; | |
199 webrtc::VideoRotation _rotation; | |
200 BOOL _hasRetriedOnFatalError; | |
201 BOOL _isRunning; | |
202 BOOL _hasStarted; | |
203 rtc::CriticalSection _crit; | |
204 } | |
205 | |
206 @synthesize captureSession = _captureSession; | |
207 @synthesize frameQueue = _frameQueue; | |
208 @synthesize useBackCamera = _useBackCamera; | |
209 | |
210 @synthesize isRunning = _isRunning; | |
211 @synthesize hasStarted = _hasStarted; | |
212 | |
213 // This is called from the thread that creates the video source, which is likely | |
214 // the main thread. | |
215 - (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer { | |
216 RTC_DCHECK(capturer); | |
217 if (self = [super init]) { | |
218 _capturer = capturer; | |
219 // Create the capture session and all relevant inputs and outputs. We need | |
220 // to do this in init because the application may want the capture session | |
221 // before we start the capturer for e.g. AVCaptureVideoPreviewLayer. All objects | |
222 // created here are retained until dealloc and never recreated. | |
223 if (![self setupCaptureSession]) { | |
224 return nil; | |
225 } | |
226 NSNotificationCenter *center = [NSNotificationCenter defaultCenter]; | |
227 #if TARGET_OS_IPHONE | |
228 [center addObserver:self | |
229 selector:@selector(deviceOrientationDidChange:) | |
230 name:UIDeviceOrientationDidChangeNotification | |
231 object:nil]; | |
232 [center addObserver:self | |
233 selector:@selector(handleCaptureSessionInterruption:) | |
234 name:AVCaptureSessionWasInterruptedNotification | |
235 object:_captureSession]; | |
236 [center addObserver:self | |
237 selector:@selector(handleCaptureSessionInterruptionEnded:) | |
238 name:AVCaptureSessionInterruptionEndedNotification | |
239 object:_captureSession]; | |
240 [center addObserver:self | |
241 selector:@selector(handleApplicationDidBecomeActive:) | |
242 name:UIApplicationDidBecomeActiveNotification | |
243 object:[UIApplication sharedApplication]]; | |
244 #endif | |
245 [center addObserver:self | |
246 selector:@selector(handleCaptureSessionRuntimeError:) | |
247 name:AVCaptureSessionRuntimeErrorNotification | |
248 object:_captureSession]; | |
249 [center addObserver:self | |
250 selector:@selector(handleCaptureSessionDidStartRunning:) | |
251 name:AVCaptureSessionDidStartRunningNotification | |
252 object:_captureSession]; | |
253 [center addObserver:self | |
254 selector:@selector(handleCaptureSessionDidStopRunning:) | |
255 name:AVCaptureSessionDidStopRunningNotification | |
256 object:_captureSession]; | |
257 } | |
258 return self; | |
259 } | |
260 | |
261 - (void)dealloc { | |
262 RTC_DCHECK(!self.hasStarted); | |
263 [[NSNotificationCenter defaultCenter] removeObserver:self]; | |
264 _capturer = nullptr; | |
265 } | |
266 | |
267 - (AVCaptureSession *)captureSession { | |
268 return _captureSession; | |
269 } | |
270 | |
271 - (AVCaptureDevice *)getActiveCaptureDevice { | |
272 return self.useBackCamera ? _backCameraInput.device : _frontCameraInput.device; | |
273 } | |
274 | |
275 - (AVCaptureDevice *)frontCaptureDevice { | |
276 return _frontCameraInput.device; | |
277 } | |
278 | |
279 - (AVCaptureDevice *)backCaptureDevice { | |
280 return _backCameraInput.device; | |
281 } | |
282 | |
283 - (dispatch_queue_t)frameQueue { | |
284 if (!_frameQueue) { | |
285 _frameQueue = | |
286 dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video", | |
287 DISPATCH_QUEUE_SERIAL); | |
288 dispatch_set_target_queue( | |
289 _frameQueue, | |
290 dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0)); | |
291 } | |
292 return _frameQueue; | |
293 } | |
294 | |
295 // Called from any thread (likely main thread). | |
296 - (BOOL)canUseBackCamera { | |
297 return _backCameraInput != nil; | |
298 } | |
299 | |
300 // Called from any thread (likely main thread). | |
301 - (BOOL)useBackCamera { | |
302 @synchronized(self) { | |
303 return _useBackCamera; | |
304 } | |
305 } | |
306 | |
307 // Called from any thread (likely main thread). | |
308 - (void)setUseBackCamera:(BOOL)useBackCamera { | |
309 if (!self.canUseBackCamera) { | |
310 if (useBackCamera) { | |
311 RTCLogWarning(@"No rear-facing camera exists or it cannot be used;" | |
312 "not switching."); | |
313 } | |
314 return; | |
315 } | |
316 @synchronized(self) { | |
317 if (_useBackCamera == useBackCamera) { | |
318 return; | |
319 } | |
320 _useBackCamera = useBackCamera; | |
321 [self updateSessionInputForUseBackCamera:useBackCamera]; | |
322 } | |
323 } | |
324 | |
325 // Called from WebRTC thread. | |
326 - (void)start { | |
327 if (self.hasStarted) { | |
328 return; | |
329 } | |
330 self.hasStarted = YES; | |
331 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | |
332 block:^{ | |
333 #if TARGET_OS_IPHONE | |
334 // Default to portrait orientation on iPhone. This will be reset in | |
335 // updateOrientation unless orientation is unknown/faceup/facedown. | |
336 _rotation = webrtc::kVideoRotation_90; | |
337 #else | |
338 // No rotation on Mac. | |
339 _rotation = webrtc::kVideoRotation_0; | |
340 #endif | |
341 [self updateOrientation]; | |
342 #if TARGET_OS_IPHONE | |
343 [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications]; | |
344 #endif | |
345 AVCaptureSession *captureSession = self.captureSession; | |
346 [captureSession startRunning]; | |
347 }]; | |
348 } | |
349 | |
350 // Called from same thread as start. | |
351 - (void)stop { | |
352 if (!self.hasStarted) { | |
353 return; | |
354 } | |
355 self.hasStarted = NO; | |
356 // Due to this async block, it's possible that the ObjC object outlives the | |
357 // C++ one. In order to not invoke functions on the C++ object, we set | |
358 // hasStarted immediately instead of dispatching it async. | |
359 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | |
360 block:^{ | |
361 [_videoDataOutput setSampleBufferDelegate:nil queue:nullptr]; | |
362 [_captureSession stopRunning]; | |
363 #if TARGET_OS_IPHONE | |
364 [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications]; | |
365 #endif | |
366 }]; | |
367 } | |
368 | |
369 #pragma mark iOS notifications | |
370 | |
371 #if TARGET_OS_IPHONE | |
372 - (void)deviceOrientationDidChange:(NSNotification *)notification { | |
373 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | |
374 block:^{ | |
375 [self updateOrientation]; | |
376 }]; | |
377 } | |
378 #endif | |
379 | |
380 #pragma mark AVCaptureVideoDataOutputSampleBufferDelegate | |
381 | |
382 - (void)captureOutput:(AVCaptureOutput *)captureOutput | |
383 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer | |
384 fromConnection:(AVCaptureConnection *)connection { | |
385 NSParameterAssert(captureOutput == _videoDataOutput); | |
386 if (!self.hasStarted) { | |
387 return; | |
388 } | |
389 _capturer->CaptureSampleBuffer(sampleBuffer, _rotation); | |
390 } | |
391 | |
392 - (void)captureOutput:(AVCaptureOutput *)captureOutput | |
393 didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer | |
394 fromConnection:(AVCaptureConnection *)connection { | |
395 RTCLogError(@"Dropped sample buffer."); | |
396 } | |
397 | |
398 #pragma mark - AVCaptureSession notifications | |
399 | |
400 - (void)handleCaptureSessionInterruption:(NSNotification *)notification { | |
401 NSString *reasonString = nil; | |
402 #if defined(__IPHONE_9_0) && defined(__IPHONE_OS_VERSION_MAX_ALLOWED) \ | |
403 && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_9_0 | |
404 NSNumber *reason = | |
405 notification.userInfo[AVCaptureSessionInterruptionReasonKey]; | |
406 if (reason) { | |
407 switch (reason.intValue) { | |
408 case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground: | |
409 reasonString = @"VideoDeviceNotAvailableInBackground"; | |
410 break; | |
411 case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient: | |
412 reasonString = @"AudioDeviceInUseByAnotherClient"; | |
413 break; | |
414 case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient: | |
415 reasonString = @"VideoDeviceInUseByAnotherClient"; | |
416 break; | |
417 case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps: | |
418 reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps"; | |
419 break; | |
420 } | |
421 } | |
422 #endif | |
423 RTCLog(@"Capture session interrupted: %@", reasonString); | |
424 // TODO(tkchin): Handle this case. | |
425 } | |
426 | |
427 - (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification { | |
428 RTCLog(@"Capture session interruption ended."); | |
429 // TODO(tkchin): Handle this case. | |
430 } | |
431 | |
432 - (void)handleCaptureSessionRuntimeError:(NSNotification *)notification { | |
433 NSError *error = | |
434 [notification.userInfo objectForKey:AVCaptureSessionErrorKey]; | |
435 RTCLogError(@"Capture session runtime error: %@", error); | |
436 | |
437 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | |
438 block:^{ | |
439 #if TARGET_OS_IPHONE | |
440 if (error.code == AVErrorMediaServicesWereReset) { | |
441 [self handleNonFatalError]; | |
442 } else { | |
443 [self handleFatalError]; | |
444 } | |
445 #else | |
446 [self handleFatalError]; | |
447 #endif | |
448 }]; | |
449 } | |
450 | |
451 - (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification { | |
452 RTCLog(@"Capture session started."); | |
453 | |
454 self.isRunning = YES; | |
455 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | |
456 block:^{ | |
457 // If we successfully restarted after an unknown error, allow future | |
458 // retries on fatal errors. | |
459 _hasRetriedOnFatalError = NO; | |
460 }]; | |
461 } | |
462 | |
463 - (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification { | |
464 RTCLog(@"Capture session stopped."); | |
465 self.isRunning = NO; | |
466 } | |
467 | |
468 - (void)handleFatalError { | |
469 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | |
470 block:^{ | |
471 if (!_hasRetriedOnFatalError) { | |
472 RTCLogWarning(@"Attempting to recover from fatal capture error."); | |
473 [self handleNonFatalError]; | |
474 _hasRetriedOnFatalError = YES; | |
475 } else { | |
476 RTCLogError(@"Previous fatal error recovery failed."); | |
477 } | |
478 }]; | |
479 } | |
480 | |
481 - (void)handleNonFatalError { | |
482 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | |
483 block:^{ | |
484 if (self.hasStarted) { | |
485 RTCLog(@"Restarting capture session after error."); | |
486 [self.captureSession startRunning]; | |
487 } | |
488 }]; | |
489 } | |
490 | |
491 #if TARGET_OS_IPHONE | |
492 | |
493 #pragma mark - UIApplication notifications | |
494 | |
495 - (void)handleApplicationDidBecomeActive:(NSNotification *)notification { | |
496 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | |
497 block:^{ | |
498 if (self.hasStarted && !self.captureSession.isRunning) { | |
499 RTCLog(@"Restarting capture session on active."); | |
500 [self.captureSession startRunning]; | |
501 } | |
502 }]; | |
503 } | |
504 | |
505 #endif // TARGET_OS_IPHONE | |
506 | |
507 #pragma mark - Private | |
508 | |
509 - (BOOL)setupCaptureSession { | |
510 AVCaptureSession *captureSession = [[AVCaptureSession alloc] init]; | |
511 #if defined(WEBRTC_IOS) | |
512 captureSession.usesApplicationAudioSession = NO; | |
513 #endif | |
514 // Add the output. | |
515 AVCaptureVideoDataOutput *videoDataOutput = [self videoDataOutput]; | |
516 if (![captureSession canAddOutput:videoDataOutput]) { | |
517 RTCLogError(@"Video data output unsupported."); | |
518 return NO; | |
519 } | |
520 [captureSession addOutput:videoDataOutput]; | |
521 | |
522 // Get the front and back cameras. If there isn't a front camera | |
523 // give up. | |
524 AVCaptureDeviceInput *frontCameraInput = [self frontCameraInput]; | |
525 AVCaptureDeviceInput *backCameraInput = [self backCameraInput]; | |
526 if (!frontCameraInput) { | |
527 RTCLogError(@"No front camera for capture session."); | |
528 return NO; | |
529 } | |
530 | |
531 // Add the inputs. | |
532 if (![captureSession canAddInput:frontCameraInput] || | |
533 (backCameraInput && ![captureSession canAddInput:backCameraInput])) { | |
534 RTCLogError(@"Session does not support capture inputs."); | |
535 return NO; | |
536 } | |
537 AVCaptureDeviceInput *input = self.useBackCamera ? | |
538 backCameraInput : frontCameraInput; | |
539 [captureSession addInput:input]; | |
540 | |
541 _captureSession = captureSession; | |
542 return YES; | |
543 } | |
544 | |
545 - (AVCaptureVideoDataOutput *)videoDataOutput { | |
546 if (!_videoDataOutput) { | |
547 // Make the capturer output NV12. Ideally we want I420 but that's not | |
548 // currently supported on iPhone / iPad. | |
549 AVCaptureVideoDataOutput *videoDataOutput = | |
550 [[AVCaptureVideoDataOutput alloc] init]; | |
551 videoDataOutput.videoSettings = @{ | |
552 (NSString *)kCVPixelBufferPixelFormatTypeKey : | |
553 @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) | |
554 }; | |
555 videoDataOutput.alwaysDiscardsLateVideoFrames = NO; | |
556 [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue]; | |
557 _videoDataOutput = videoDataOutput; | |
558 } | |
559 return _videoDataOutput; | |
560 } | |
561 | |
562 - (AVCaptureDevice *)videoCaptureDeviceForPosition: | |
563 (AVCaptureDevicePosition)position { | |
564 for (AVCaptureDevice *captureDevice in | |
565 [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) { | |
566 if (captureDevice.position == position) { | |
567 return captureDevice; | |
568 } | |
569 } | |
570 return nil; | |
571 } | |
572 | |
573 - (AVCaptureDeviceInput *)frontCameraInput { | |
574 if (!_frontCameraInput) { | |
575 #if TARGET_OS_IPHONE | |
576 AVCaptureDevice *frontCameraDevice = | |
577 [self videoCaptureDeviceForPosition:AVCaptureDevicePositionFront]; | |
578 #else | |
579 AVCaptureDevice *frontCameraDevice = | |
580 [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; | |
581 #endif | |
582 if (!frontCameraDevice) { | |
583 RTCLogWarning(@"Failed to find front capture device."); | |
584 return nil; | |
585 } | |
586 NSError *error = nil; | |
587 AVCaptureDeviceInput *frontCameraInput = | |
588 [AVCaptureDeviceInput deviceInputWithDevice:frontCameraDevice | |
589 error:&error]; | |
590 if (!frontCameraInput) { | |
591 RTCLogError(@"Failed to create front camera input: %@", | |
592 error.localizedDescription); | |
593 return nil; | |
594 } | |
595 _frontCameraInput = frontCameraInput; | |
596 } | |
597 return _frontCameraInput; | |
598 } | |
599 | |
600 - (AVCaptureDeviceInput *)backCameraInput { | |
601 if (!_backCameraInput) { | |
602 AVCaptureDevice *backCameraDevice = | |
603 [self videoCaptureDeviceForPosition:AVCaptureDevicePositionBack]; | |
604 if (!backCameraDevice) { | |
605 RTCLogWarning(@"Failed to find back capture device."); | |
606 return nil; | |
607 } | |
608 NSError *error = nil; | |
609 AVCaptureDeviceInput *backCameraInput = | |
610 [AVCaptureDeviceInput deviceInputWithDevice:backCameraDevice | |
611 error:&error]; | |
612 if (!backCameraInput) { | |
613 RTCLogError(@"Failed to create back camera input: %@", | |
614 error.localizedDescription); | |
615 return nil; | |
616 } | |
617 _backCameraInput = backCameraInput; | |
618 } | |
619 return _backCameraInput; | |
620 } | |
621 | |
622 // Called from capture session queue. | |
623 - (void)updateOrientation { | |
624 #if TARGET_OS_IPHONE | |
625 switch ([UIDevice currentDevice].orientation) { | |
626 case UIDeviceOrientationPortrait: | |
627 _rotation = webrtc::kVideoRotation_90; | |
628 break; | |
629 case UIDeviceOrientationPortraitUpsideDown: | |
630 _rotation = webrtc::kVideoRotation_270; | |
631 break; | |
632 case UIDeviceOrientationLandscapeLeft: | |
633 _rotation = _capturer->GetUseBackCamera() ? webrtc::kVideoRotation_0 | |
634 : webrtc::kVideoRotation_180; | |
635 break; | |
636 case UIDeviceOrientationLandscapeRight: | |
637 _rotation = _capturer->GetUseBackCamera() ? webrtc::kVideoRotation_180 | |
638 : webrtc::kVideoRotation_0; | |
639 break; | |
640 case UIDeviceOrientationFaceUp: | |
641 case UIDeviceOrientationFaceDown: | |
642 case UIDeviceOrientationUnknown: | |
643 // Ignore. | |
644 break; | |
645 } | |
646 #endif | |
647 } | |
648 | |
649 // Update the current session input to match what's stored in _useBackCamera. | |
650 - (void)updateSessionInputForUseBackCamera:(BOOL)useBackCamera { | |
651 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | |
652 block:^{ | |
653 [_captureSession beginConfiguration]; | |
654 AVCaptureDeviceInput *oldInput = _backCameraInput; | |
655 AVCaptureDeviceInput *newInput = _frontCameraInput; | |
656 if (useBackCamera) { | |
657 oldInput = _frontCameraInput; | |
658 newInput = _backCameraInput; | |
659 } | |
660 if (oldInput) { | |
661 // Ok to remove this even if it's not attached. Will be no-op. | |
662 [_captureSession removeInput:oldInput]; | |
663 } | |
664 if (newInput) { | |
665 [_captureSession addInput:newInput]; | |
666 } | |
667 [self updateOrientation]; | |
668 AVCaptureDevice *newDevice = newInput.device; | |
669 const cricket::VideoFormat *format = _capturer->GetCaptureFormat(); | |
670 SetFormatForCaptureDevice(newDevice, _captureSession, *format); | |
671 [_captureSession commitConfiguration]; | |
672 }]; | |
673 } | |
674 | |
675 @end | |
676 | |
677 namespace webrtc { | 29 namespace webrtc { |
678 | 30 |
679 enum AVFoundationVideoCapturerMessageType : uint32_t { | 31 enum AVFoundationVideoCapturerMessageType : uint32_t { |
680 kMessageTypeFrame, | 32 kMessageTypeFrame, |
681 }; | 33 }; |
682 | 34 |
683 AVFoundationVideoCapturer::AVFoundationVideoCapturer() : _capturer(nil) { | 35 AVFoundationVideoCapturer::AVFoundationVideoCapturer() : _capturer(nil) { |
684 _capturer = | 36 _capturer = |
685 [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this]; | 37 [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this]; |
686 | 38 |
687 std::set<cricket::VideoFormat> front_camera_video_formats = | 39 std::set<cricket::VideoFormat> front_camera_video_formats = |
688 GetSupportedVideoFormatsForDevice([_capturer frontCaptureDevice]); | 40 [RTCAVFoundationFormatMapper supportedVideoFormatsForDevice:[_capturer frontCaptureDevice]]; |
689 | |
690 std::set<cricket::VideoFormat> back_camera_video_formats = | 41 std::set<cricket::VideoFormat> back_camera_video_formats = |
691 GetSupportedVideoFormatsForDevice([_capturer backCaptureDevice]); | 42 [RTCAVFoundationFormatMapper supportedVideoFormatsForDevice:[_capturer backCaptureDevice]]; |
692 | 43 |
693 std::vector<cricket::VideoFormat> intersection_video_formats; | 44 std::vector<cricket::VideoFormat> intersection_video_formats; |
694 if (back_camera_video_formats.empty()) { | 45 if (back_camera_video_formats.empty()) { |
695 intersection_video_formats.assign(front_camera_video_formats.begin(), | 46 intersection_video_formats.assign(front_camera_video_formats.begin(), |
696 front_camera_video_formats.end()); | 47 front_camera_video_formats.end()); |
697 | 48 |
698 } else if (front_camera_video_formats.empty()) { | 49 } else if (front_camera_video_formats.empty()) { |
699 intersection_video_formats.assign(back_camera_video_formats.begin(), | 50 intersection_video_formats.assign(back_camera_video_formats.begin(), |
700 back_camera_video_formats.end()); | 51 back_camera_video_formats.end()); |
701 } else { | 52 } else { |
(...skipping 16 matching lines...) |
718 return cricket::CaptureState::CS_FAILED; | 69 return cricket::CaptureState::CS_FAILED; |
719 } | 70 } |
720 if (_capturer.isRunning) { | 71 if (_capturer.isRunning) { |
721 LOG(LS_ERROR) << "The capturer is already running."; | 72 LOG(LS_ERROR) << "The capturer is already running."; |
722 return cricket::CaptureState::CS_FAILED; | 73 return cricket::CaptureState::CS_FAILED; |
723 } | 74 } |
724 | 75 |
725 AVCaptureDevice* device = [_capturer getActiveCaptureDevice]; | 76 AVCaptureDevice* device = [_capturer getActiveCaptureDevice]; |
726 AVCaptureSession* session = _capturer.captureSession; | 77 AVCaptureSession* session = _capturer.captureSession; |
727 | 78 |
728 if (!SetFormatForCaptureDevice(device, session, format)) { | 79 if (![RTCAVFoundationFormatMapper setFormat:format |
| 80 forCaptureDevice:device |
| 81 captureSession:session]) { |
729 return cricket::CaptureState::CS_FAILED; | 82 return cricket::CaptureState::CS_FAILED; |
730 } | 83 } |
731 | 84 |
732 SetCaptureFormat(&format); | 85 SetCaptureFormat(&format); |
733 // This isn't super accurate because it takes a while for the AVCaptureSession | 86 // This isn't super accurate because it takes a while for the AVCaptureSession |
734 // to spin up, and this call returns async. | 87 // to spin up, and this call returns async. |
735 // TODO(tkchin): make this better. | 88 // TODO(tkchin): make this better. |
736 [_capturer start]; | 89 [_capturer start]; |
737 SetCaptureState(cricket::CaptureState::CS_RUNNING); | 90 SetCaptureState(cricket::CaptureState::CS_RUNNING); |
738 | 91 |
(...skipping 82 matching lines...) |
821 buffer->width(), buffer->height(), | 174 buffer->width(), buffer->height(), |
822 static_cast<libyuv::RotationMode>(rotation)); | 175 static_cast<libyuv::RotationMode>(rotation)); |
823 buffer = rotated_buffer; | 176 buffer = rotated_buffer; |
824 } | 177 } |
825 | 178 |
826 OnFrame(webrtc::VideoFrame(buffer, rotation, translated_camera_time_us), | 179 OnFrame(webrtc::VideoFrame(buffer, rotation, translated_camera_time_us), |
827 captured_width, captured_height); | 180 captured_width, captured_height); |
828 } | 181 } |
829 | 182 |
830 } // namespace webrtc | 183 } // namespace webrtc |
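
For context, a minimal usage sketch (not part of the change; the 640x480 / 30 fps values and the helper function name are assumed for illustration): the capturer is driven through the generic cricket::VideoCapturer interface, and Start() now maps the requested format onto the device via RTCAVFoundationFormatMapper before spinning up the AVCaptureSession.

#include "avfoundationvideocapturer.h"

// Hypothetical helper, for illustration only.
void StartLocalCapture() {
  webrtc::AVFoundationVideoCapturer capturer;
  // Assumed example format; callers typically pick one returned by
  // GetSupportedFormats() instead of hard-coding values.
  cricket::VideoFormat format(640, 480,
                              cricket::VideoFormat::FpsToInterval(30),
                              cricket::FOURCC_NV12);
  // Start() sets the device format and starts the session asynchronously;
  // captured frames are then forwarded to the registered sink via OnFrame().
  capturer.Start(format);
  // ... later ...
  capturer.Stop();
}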