OLD | NEW |
1 /* | 1 /* |
2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. | 2 * Copyright 2016 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
11 #include "avfoundationvideocapturer.h" | 11 #import "RTCAVFoundationFormatMapper.h" |
12 | 12 |
13 #import <AVFoundation/AVFoundation.h> | 13 //#import "RTCDispatcher+Private.h" |
14 #import <Foundation/Foundation.h> | |
15 #if TARGET_OS_IPHONE | |
16 #import <UIKit/UIKit.h> | |
17 #endif | |
18 | |
19 #import "RTCDispatcher+Private.h" | |
20 #import "WebRTC/RTCLogging.h" | 14 #import "WebRTC/RTCLogging.h" |
21 #if TARGET_OS_IPHONE | |
22 #import "WebRTC/UIDevice+RTCDevice.h" | |
23 #endif | |
24 | |
25 #include "libyuv/rotate.h" | |
26 | |
27 #include "webrtc/base/bind.h" | |
28 #include "webrtc/base/checks.h" | |
29 #include "webrtc/base/logging.h" | |
30 #include "webrtc/base/thread.h" | |
31 #include "webrtc/common_video/include/corevideo_frame_buffer.h" | |
32 #include "webrtc/common_video/rotation.h" | |
33 | 15 |
34 // TODO(denicija): add support for higher frame rates. | 16 // TODO(denicija): add support for higher frame rates. |
35 // See http://crbug/webrtc/6355 for more info. | 17 // See http://crbug/webrtc/6355 for more info. |
36 static const int kFramesPerSecond = 30; | 18 static const int kFramesPerSecond = 30; |
37 | 19 |
38 static inline BOOL IsMediaSubTypeSupported(FourCharCode mediaSubType) { | 20 static inline BOOL IsMediaSubTypeSupported(FourCharCode mediaSubType) { |
39 return (mediaSubType == kCVPixelFormatType_420YpCbCr8PlanarFullRange || | 21 return (mediaSubType == kCVPixelFormatType_420YpCbCr8PlanarFullRange || |
40 mediaSubType == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange); | 22 mediaSubType == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange); |
41 } | 23 } |
42 | 24 |
43 static inline BOOL IsFrameRateWithinRange(int fps, AVFrameRateRange *range) { | 25 static inline BOOL IsFrameRateWithinRange(int fps, AVFrameRateRange *range) { |
44 return range.minFrameRate <= fps && range.maxFrameRate >= fps; | 26 return range.minFrameRate <= fps && range.maxFrameRate >= fps; |
45 } | 27 } |
46 | 28 |
47 // Returns filtered array of device formats based on predefined constraints our | 29 @implementation RTCAVFoundationFormatMapper |
48 // stack imposes. | 30 |
49 static NSArray<AVCaptureDeviceFormat *> *GetEligibleDeviceFormats( | 31 + (NSArray<AVCaptureDeviceFormat *> *)eligibleFormatsForDevice:(const AVCaptureDevice *)device |
50 const AVCaptureDevice *device, | 32 supportedFps:(int)fps { |
51 int supportedFps) { | 33 NSMutableArray<AVCaptureDeviceFormat *> *eligibleDeviceFormats = [NSMutableArray array]; |
52 NSMutableArray<AVCaptureDeviceFormat *> *eligibleDeviceFormats = | |
53 [NSMutableArray array]; | |
54 | 34 |
55 for (AVCaptureDeviceFormat *format in device.formats) { | 35 for (AVCaptureDeviceFormat *format in device.formats) { |
56 // Filter out subTypes that we currently don't support in the stack | 36 // Filter out subTypes that we currently don't support in the stack |
57 FourCharCode mediaSubType = | 37 FourCharCode mediaSubType = CMFormatDescriptionGetMediaSubType(format.formatDescription); |
58 CMFormatDescriptionGetMediaSubType(format.formatDescription); | |
59 if (!IsMediaSubTypeSupported(mediaSubType)) { | 38 if (!IsMediaSubTypeSupported(mediaSubType)) { |
60 continue; | 39 continue; |
61 } | 40 } |
62 | 41 |
63 // Filter out frame rate ranges that we currently don't support in the stack | 42 // Filter out frame rate ranges that we currently don't support in the stack |
64 for (AVFrameRateRange *frameRateRange in format.videoSupportedFrameRateRanges) { | 43 for (AVFrameRateRange *frameRateRange in format.videoSupportedFrameRateRanges) { |
65 if (IsFrameRateWithinRange(supportedFps, frameRateRange)) { | 44 if (IsFrameRateWithinRange(fps, frameRateRange)) { |
66 [eligibleDeviceFormats addObject:format]; | 45 [eligibleDeviceFormats addObject:format]; |
67 break; | 46 break; |
68 } | 47 } |
69 } | 48 } |
70 } | 49 } |
71 | 50 |
72 return [eligibleDeviceFormats copy]; | 51 return [eligibleDeviceFormats copy]; |
73 } | 52 } |
74 | 53 |
75 // Mapping from cricket::VideoFormat to AVCaptureDeviceFormat. | 54 + (AVCaptureDeviceFormat *)formatForDevice:(AVCaptureDevice *)device |
76 static AVCaptureDeviceFormat *GetDeviceFormatForVideoFormat( | 55 videoFormat:(const cricket::VideoFormat &)videoFormat { |
77 const AVCaptureDevice *device, | |
78 const cricket::VideoFormat &videoFormat) { | |
79 AVCaptureDeviceFormat *desiredDeviceFormat = nil; | 56 AVCaptureDeviceFormat *desiredDeviceFormat = nil; |
80 NSArray<AVCaptureDeviceFormat *> *eligibleFormats = | 57 NSArray<AVCaptureDeviceFormat *> *eligibleFormats = |
81 GetEligibleDeviceFormats(device, videoFormat.framerate()); | 58 [RTCAVFoundationFormatMapper eligibleFormatsForDevice:device |
| 59 supportedFps:videoFormat.framerate()]; |
82 | 60 |
83 for (AVCaptureDeviceFormat *deviceFormat in eligibleFormats) { | 61 for (AVCaptureDeviceFormat *deviceFormat in eligibleFormats) { |
84 CMVideoDimensions dimension = | 62 CMVideoDimensions dimension = |
85 CMVideoFormatDescriptionGetDimensions(deviceFormat.formatDescription); | 63 CMVideoFormatDescriptionGetDimensions(deviceFormat.formatDescription); |
86 FourCharCode mediaSubType = | 64 FourCharCode mediaSubType = CMFormatDescriptionGetMediaSubType(deviceFormat.formatDescription); |
87 CMFormatDescriptionGetMediaSubType(deviceFormat.formatDescription); | |
88 | 65 |
89 if (videoFormat.width == dimension.width && | 66 if (videoFormat.width == dimension.width && videoFormat.height == dimension.height) { |
90 videoFormat.height == dimension.height) { | |
91 if (mediaSubType == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) { | 67 if (mediaSubType == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) { |
92 // This is the preferred format so no need to wait for better option. | 68 // This is the preferred format so no need to wait for better option. |
93 return deviceFormat; | 69 return deviceFormat; |
94 } else { | 70 } else { |
95 // This is a good candidate, but let's wait for something better. | 71 // This is a good candidate, but let's wait for something better. |
96 desiredDeviceFormat = deviceFormat; | 72 desiredDeviceFormat = deviceFormat; |
97 } | 73 } |
98 } | 74 } |
99 } | 75 } |
100 | 76 |
101 return desiredDeviceFormat; | 77 return desiredDeviceFormat; |
102 } | 78 } |
103 | 79 |
104 // Mapping from AVCaptureDeviceFormat to cricket::VideoFormat for given input | 80 + (std::set<cricket::VideoFormat>)supportedVideoFormatsForDevice:(AVCaptureDevice *)device { |
105 // device. | |
106 static std::set<cricket::VideoFormat> GetSupportedVideoFormatsForDevice( | |
107 AVCaptureDevice *device) { | |
108 std::set<cricket::VideoFormat> supportedFormats; | 81 std::set<cricket::VideoFormat> supportedFormats; |
109 | 82 |
110 NSArray<AVCaptureDeviceFormat *> *eligibleFormats = | 83 NSArray<AVCaptureDeviceFormat *> *eligibleFormats = |
111 GetEligibleDeviceFormats(device, kFramesPerSecond); | 84 [RTCAVFoundationFormatMapper eligibleFormatsForDevice:device supportedFps:kFramesPerSecond]; |
112 | 85 |
113 for (AVCaptureDeviceFormat *deviceFormat in eligibleFormats) { | 86 for (AVCaptureDeviceFormat *deviceFormat in eligibleFormats) { |
114 CMVideoDimensions dimension = | 87 CMVideoDimensions dimension = |
115 CMVideoFormatDescriptionGetDimensions(deviceFormat.formatDescription); | 88 CMVideoFormatDescriptionGetDimensions(deviceFormat.formatDescription); |
116 cricket::VideoFormat format = cricket::VideoFormat( | 89 cricket::VideoFormat format = cricket::VideoFormat( |
117 dimension.width, dimension.height, | 90 dimension.width, dimension.height, cricket::VideoFormat::FpsToInterval(kFramesPerSecond), |
118 cricket::VideoFormat::FpsToInterval(kFramesPerSecond), | |
119 cricket::FOURCC_NV12); | 91 cricket::FOURCC_NV12); |
120 supportedFormats.insert(format); | 92 supportedFormats.insert(format); |
121 } | 93 } |
122 | 94 |
123 return supportedFormats; | 95 return supportedFormats; |
124 } | 96 } |
125 | 97 |
126 // Sets device format for the provided capture device. Returns YES/NO depending on success. | 98 + (BOOL)setFormat:(const cricket::VideoFormat)format |
127 // TODO(denicija): When this file is split this static method should be reconsidered. | 99 forCaptureDevice:(AVCaptureDevice *)device |
128 // Perhaps adding a category on AVCaptureDevice would be better. | 100 captureSession:(AVCaptureSession *)session { |
129 static BOOL SetFormatForCaptureDevice(AVCaptureDevice *device, | |
130 AVCaptureSession *session, | |
131 const cricket::VideoFormat &format) { | |
132 AVCaptureDeviceFormat *deviceFormat = | 101 AVCaptureDeviceFormat *deviceFormat = |
133 GetDeviceFormatForVideoFormat(device, format); | 102 [RTCAVFoundationFormatMapper formatForDevice:device videoFormat:format]; |
134 const int fps = cricket::VideoFormat::IntervalToFps(format.interval); | 103 const int fps = cricket::VideoFormat::IntervalToFps(format.interval); |
135 | 104 |
136 NSError *error = nil; | 105 NSError *error = nil; |
137 BOOL success = YES; | 106 BOOL success = YES; |
138 [session beginConfiguration]; | 107 [session beginConfiguration]; |
139 if ([device lockForConfiguration:&error]) { | 108 if ([device lockForConfiguration:&error]) { |
140 @try { | 109 @try { |
141 device.activeFormat = deviceFormat; | 110 device.activeFormat = deviceFormat; |
142 device.activeVideoMinFrameDuration = CMTimeMake(1, fps); | 111 device.activeVideoMinFrameDuration = CMTimeMake(1, fps); |
143 } @catch (NSException *exception) { | 112 } @catch (NSException *exception) { |
144 RTCLogError( | 113 RTCLogError(@"Failed to set active format!\n User info:%@", exception.userInfo); |
145 @"Failed to set active format!\n User info:%@", | |
146 exception.userInfo); | |
147 success = NO; | 114 success = NO; |
148 } | 115 } |
149 | 116 |
150 [device unlockForConfiguration]; | 117 [device unlockForConfiguration]; |
151 } else { | 118 } else { |
152 RTCLogError( | 119 RTCLogError(@"Failed to lock device %@. Error: %@", device, error.userInfo); |
153 @"Failed to lock device %@. Error: %@", | |
154 device, error.userInfo); | |
155 success = NO; | 120 success = NO; |
156 } | 121 } |
157 [session commitConfiguration]; | 122 [session commitConfiguration]; |
158 | 123 |
159 return success; | 124 return success; |
160 } | 125 } |
161 | 126 |
162 // This class is used to capture frames using AVFoundation APIs on iOS. It is | |
163 // meant to be owned by an instance of AVFoundationVideoCapturer. The reason is | |
164 // that other webrtc objects own cricket::VideoCapturer, which is not | |
165 // ref counted. To prevent bad behavior we do not expose this class directly. | |
166 @interface RTCAVFoundationVideoCapturerInternal : NSObject | |
167 <AVCaptureVideoDataOutputSampleBufferDelegate> | |
168 | |
169 @property(nonatomic, readonly) AVCaptureSession *captureSession; | |
170 @property(nonatomic, readonly) dispatch_queue_t frameQueue; | |
171 @property(nonatomic, readonly) BOOL canUseBackCamera; | |
172 @property(nonatomic, assign) BOOL useBackCamera; // Defaults to NO. | |
173 @property(atomic, assign) BOOL isRunning; // Whether the capture session is running. | |
174 @property(atomic, assign) BOOL hasStarted; // Whether we have an unmatched start. | |
175 | |
176 // We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it | |
177 // when we receive frames. This is safe because this object should be owned by | |
178 // it. | |
179 - (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer; | |
180 - (AVCaptureDevice *)getActiveCaptureDevice; | |
181 | |
182 - (nullable AVCaptureDevice *)frontCaptureDevice; | |
183 - (nullable AVCaptureDevice *)backCaptureDevice; | |
184 | |
185 // Starts and stops the capture session asynchronously. We cannot do this | |
186 // synchronously without blocking a WebRTC thread. | |
187 - (void)start; | |
188 - (void)stop; | |
189 | |
190 @end | 127 @end |
191 | |
192 @implementation RTCAVFoundationVideoCapturerInternal { | |
193 // Keep pointers to inputs for convenience. | |
194 AVCaptureDeviceInput *_frontCameraInput; | |
195 AVCaptureDeviceInput *_backCameraInput; | |
196 AVCaptureVideoDataOutput *_videoDataOutput; | |
197 // The cricket::VideoCapturer that owns this class. Should never be NULL. | |
198 webrtc::AVFoundationVideoCapturer *_capturer; | |
199 webrtc::VideoRotation _rotation; | |
200 BOOL _hasRetriedOnFatalError; | |
201 BOOL _isRunning; | |
202 BOOL _hasStarted; | |
203 rtc::CriticalSection _crit; | |
204 } | |
205 | |
206 @synthesize captureSession = _captureSession; | |
207 @synthesize frameQueue = _frameQueue; | |
208 @synthesize useBackCamera = _useBackCamera; | |
209 | |
210 @synthesize isRunning = _isRunning; | |
211 @synthesize hasStarted = _hasStarted; | |
212 | |
213 // This is called from the thread that creates the video source, which is likely | |
214 // the main thread. | |
215 - (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer { | |
216 RTC_DCHECK(capturer); | |
217 if (self = [super init]) { | |
218 _capturer = capturer; | |
219 // Create the capture session and all relevant inputs and outputs. We need | |
220 // to do this in init because the application may want the capture session | |
221 // before we start the capturer for e.g. AVCapturePreviewLayer. All objects | |
222 // created here are retained until dealloc and never recreated. | |
223 if (![self setupCaptureSession]) { | |
224 return nil; | |
225 } | |
226 NSNotificationCenter *center = [NSNotificationCenter defaultCenter]; | |
227 #if TARGET_OS_IPHONE | |
228 [center addObserver:self | |
229 selector:@selector(deviceOrientationDidChange:) | |
230 name:UIDeviceOrientationDidChangeNotification | |
231 object:nil]; | |
232 [center addObserver:self | |
233 selector:@selector(handleCaptureSessionInterruption:) | |
234 name:AVCaptureSessionWasInterruptedNotification | |
235 object:_captureSession]; | |
236 [center addObserver:self | |
237 selector:@selector(handleCaptureSessionInterruptionEnded:) | |
238 name:AVCaptureSessionInterruptionEndedNotification | |
239 object:_captureSession]; | |
240 [center addObserver:self | |
241 selector:@selector(handleApplicationDidBecomeActive:) | |
242 name:UIApplicationDidBecomeActiveNotification | |
243 object:[UIApplication sharedApplication]]; | |
244 #endif | |
245 [center addObserver:self | |
246 selector:@selector(handleCaptureSessionRuntimeError:) | |
247 name:AVCaptureSessionRuntimeErrorNotification | |
248 object:_captureSession]; | |
249 [center addObserver:self | |
250 selector:@selector(handleCaptureSessionDidStartRunning:) | |
251 name:AVCaptureSessionDidStartRunningNotification | |
252 object:_captureSession]; | |
253 [center addObserver:self | |
254 selector:@selector(handleCaptureSessionDidStopRunning:) | |
255 name:AVCaptureSessionDidStopRunningNotification | |
256 object:_captureSession]; | |
257 } | |
258 return self; | |
259 } | |
260 | |
261 - (void)dealloc { | |
262 RTC_DCHECK(!self.hasStarted); | |
263 [[NSNotificationCenter defaultCenter] removeObserver:self]; | |
264 _capturer = nullptr; | |
265 } | |
266 | |
267 - (AVCaptureSession *)captureSession { | |
268 return _captureSession; | |
269 } | |
270 | |
271 - (AVCaptureDevice *)getActiveCaptureDevice { | |
272 return self.useBackCamera ? _backCameraInput.device : _frontCameraInput.device; | |
273 } | |
274 | |
275 - (AVCaptureDevice *)frontCaptureDevice { | |
276 return _frontCameraInput.device; | |
277 } | |
278 | |
279 - (AVCaptureDevice *)backCaptureDevice { | |
280 return _backCameraInput.device; | |
281 } | |
282 | |
283 - (dispatch_queue_t)frameQueue { | |
284 if (!_frameQueue) { | |
285 _frameQueue = | |
286 dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video", | |
287 DISPATCH_QUEUE_SERIAL); | |
288 dispatch_set_target_queue( | |
289 _frameQueue, | |
290 dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0)); | |
291 } | |
292 return _frameQueue; | |
293 } | |
294 | |
295 // Called from any thread (likely main thread). | |
296 - (BOOL)canUseBackCamera { | |
297 return _backCameraInput != nil; | |
298 } | |
299 | |
300 // Called from any thread (likely main thread). | |
301 - (BOOL)useBackCamera { | |
302 @synchronized(self) { | |
303 return _useBackCamera; | |
304 } | |
305 } | |
306 | |
307 // Called from any thread (likely main thread). | |
308 - (void)setUseBackCamera:(BOOL)useBackCamera { | |
309 if (!self.canUseBackCamera) { | |
310 if (useBackCamera) { | |
311 RTCLogWarning(@"No rear-facing camera exists or it cannot be used;" | |
312 "not switching."); | |
313 } | |
314 return; | |
315 } | |
316 @synchronized(self) { | |
317 if (_useBackCamera == useBackCamera) { | |
318 return; | |
319 } | |
320 _useBackCamera = useBackCamera; | |
321 [self updateSessionInputForUseBackCamera:useBackCamera]; | |
322 } | |
323 } | |
324 | |
325 // Called from WebRTC thread. | |
326 - (void)start { | |
327 if (self.hasStarted) { | |
328 return; | |
329 } | |
330 self.hasStarted = YES; | |
331 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | |
332 block:^{ | |
333 #if TARGET_OS_IPHONE | |
334 // Default to portrait orientation on iPhone. This will be reset in | |
335 // updateOrientation unless orientation is unknown/faceup/facedown. | |
336 _rotation = webrtc::kVideoRotation_90; | |
337 #else | |
338 // No rotation on Mac. | |
339 _rotation = webrtc::kVideoRotation_0; | |
340 #endif | |
341 [self updateOrientation]; | |
342 #if TARGET_OS_IPHONE | |
343 [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications]; | |
344 #endif | |
345 AVCaptureSession *captureSession = self.captureSession; | |
346 [captureSession startRunning]; | |
347 }]; | |
348 } | |
349 | |
350 // Called from same thread as start. | |
351 - (void)stop { | |
352 if (!self.hasStarted) { | |
353 return; | |
354 } | |
355 self.hasStarted = NO; | |
356 // Due to this async block, it's possible that the ObjC object outlives the | |
357 // C++ one. In order to not invoke functions on the C++ object, we set | |
358 // hasStarted immediately instead of dispatching it async. | |
359 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | |
360 block:^{ | |
361 [_videoDataOutput setSampleBufferDelegate:nil queue:nullptr]; | |
362 [_captureSession stopRunning]; | |
363 #if TARGET_OS_IPHONE | |
364 [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications]; | |
365 #endif | |
366 }]; | |
367 } | |
368 | |
369 #pragma mark iOS notifications | |
370 | |
371 #if TARGET_OS_IPHONE | |
372 - (void)deviceOrientationDidChange:(NSNotification *)notification { | |
373 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | |
374 block:^{ | |
375 [self updateOrientation]; | |
376 }]; | |
377 } | |
378 #endif | |
379 | |
380 #pragma mark AVCaptureVideoDataOutputSampleBufferDelegate | |
381 | |
382 - (void)captureOutput:(AVCaptureOutput *)captureOutput | |
383 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer | |
384 fromConnection:(AVCaptureConnection *)connection { | |
385 NSParameterAssert(captureOutput == _videoDataOutput); | |
386 if (!self.hasStarted) { | |
387 return; | |
388 } | |
389 _capturer->CaptureSampleBuffer(sampleBuffer, _rotation); | |
390 } | |
391 | |
392 - (void)captureOutput:(AVCaptureOutput *)captureOutput | |
393 didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer | |
394 fromConnection:(AVCaptureConnection *)connection { | |
395 RTCLogError(@"Dropped sample buffer."); | |
396 } | |
397 | |
398 #pragma mark - AVCaptureSession notifications | |
399 | |
400 - (void)handleCaptureSessionInterruption:(NSNotification *)notification { | |
401 NSString *reasonString = nil; | |
402 #if defined(__IPHONE_9_0) && defined(__IPHONE_OS_VERSION_MAX_ALLOWED) \ | |
403 && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_9_0 | |
404 NSNumber *reason = | |
405 notification.userInfo[AVCaptureSessionInterruptionReasonKey]; | |
406 if (reason) { | |
407 switch (reason.intValue) { | |
408 case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground: | |
409 reasonString = @"VideoDeviceNotAvailableInBackground"; | |
410 break; | |
411 case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient: | |
412 reasonString = @"AudioDeviceInUseByAnotherClient"; | |
413 break; | |
414 case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient: | |
415 reasonString = @"VideoDeviceInUseByAnotherClient"; | |
416 break; | |
417 case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps: | |
418 reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps"; | |
419 break; | |
420 } | |
421 } | |
422 #endif | |
423 RTCLog(@"Capture session interrupted: %@", reasonString); | |
424 // TODO(tkchin): Handle this case. | |
425 } | |
426 | |
427 - (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification { | |
428 RTCLog(@"Capture session interruption ended."); | |
429 // TODO(tkchin): Handle this case. | |
430 } | |
431 | |
432 - (void)handleCaptureSessionRuntimeError:(NSNotification *)notification { | |
433 NSError *error = | |
434 [notification.userInfo objectForKey:AVCaptureSessionErrorKey]; | |
435 RTCLogError(@"Capture session runtime error: %@", error); | |
436 | |
437 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | |
438 block:^{ | |
439 #if TARGET_OS_IPHONE | |
440 if (error.code == AVErrorMediaServicesWereReset) { | |
441 [self handleNonFatalError]; | |
442 } else { | |
443 [self handleFatalError]; | |
444 } | |
445 #else | |
446 [self handleFatalError]; | |
447 #endif | |
448 }]; | |
449 } | |
450 | |
451 - (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification { | |
452 RTCLog(@"Capture session started."); | |
453 | |
454 self.isRunning = YES; | |
455 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | |
456 block:^{ | |
457 // If we successfully restarted after an unknown error, allow future | |
458 // retries on fatal errors. | |
459 _hasRetriedOnFatalError = NO; | |
460 }]; | |
461 } | |
462 | |
463 - (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification { | |
464 RTCLog(@"Capture session stopped."); | |
465 self.isRunning = NO; | |
466 } | |
467 | |
468 - (void)handleFatalError { | |
469 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | |
470 block:^{ | |
471 if (!_hasRetriedOnFatalError) { | |
472 RTCLogWarning(@"Attempting to recover from fatal capture error."); | |
473 [self handleNonFatalError]; | |
474 _hasRetriedOnFatalError = YES; | |
475 } else { | |
476 RTCLogError(@"Previous fatal error recovery failed."); | |
477 } | |
478 }]; | |
479 } | |
480 | |
481 - (void)handleNonFatalError { | |
482 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | |
483 block:^{ | |
484 if (self.hasStarted) { | |
485 RTCLog(@"Restarting capture session after error."); | |
486 [self.captureSession startRunning]; | |
487 } | |
488 }]; | |
489 } | |
490 | |
491 #if TARGET_OS_IPHONE | |
492 | |
493 #pragma mark - UIApplication notifications | |
494 | |
495 - (void)handleApplicationDidBecomeActive:(NSNotification *)notification { | |
496 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | |
497 block:^{ | |
498 if (self.hasStarted && !self.captureSession.isRunning) { | |
499 RTCLog(@"Restarting capture session on active."); | |
500 [self.captureSession startRunning]; | |
501 } | |
502 }]; | |
503 } | |
504 | |
505 #endif // TARGET_OS_IPHONE | |
506 | |
507 #pragma mark - Private | |
508 | |
509 - (BOOL)setupCaptureSession { | |
510 AVCaptureSession *captureSession = [[AVCaptureSession alloc] init]; | |
511 #if defined(WEBRTC_IOS) | |
512 captureSession.usesApplicationAudioSession = NO; | |
513 #endif | |
514 // Add the output. | |
515 AVCaptureVideoDataOutput *videoDataOutput = [self videoDataOutput]; | |
516 if (![captureSession canAddOutput:videoDataOutput]) { | |
517 RTCLogError(@"Video data output unsupported."); | |
518 return NO; | |
519 } | |
520 [captureSession addOutput:videoDataOutput]; | |
521 | |
522 // Get the front and back cameras. If there isn't a front camera | |
523 // give up. | |
524 AVCaptureDeviceInput *frontCameraInput = [self frontCameraInput]; | |
525 AVCaptureDeviceInput *backCameraInput = [self backCameraInput]; | |
526 if (!frontCameraInput) { | |
527 RTCLogError(@"No front camera for capture session."); | |
528 return NO; | |
529 } | |
530 | |
531 // Add the inputs. | |
532 if (![captureSession canAddInput:frontCameraInput] || | |
533 (backCameraInput && ![captureSession canAddInput:backCameraInput])) { | |
534 RTCLogError(@"Session does not support capture inputs."); | |
535 return NO; | |
536 } | |
537 AVCaptureDeviceInput *input = self.useBackCamera ? | |
538 backCameraInput : frontCameraInput; | |
539 [captureSession addInput:input]; | |
540 | |
541 _captureSession = captureSession; | |
542 return YES; | |
543 } | |
544 | |
545 - (AVCaptureVideoDataOutput *)videoDataOutput { | |
546 if (!_videoDataOutput) { | |
547 // Make the capturer output NV12. Ideally we want I420 but that's not | |
548 // currently supported on iPhone / iPad. | |
549 AVCaptureVideoDataOutput *videoDataOutput = | |
550 [[AVCaptureVideoDataOutput alloc] init]; | |
551 videoDataOutput.videoSettings = @{ | |
552 (NSString *)kCVPixelBufferPixelFormatTypeKey : | |
553 @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) | |
554 }; | |
555 videoDataOutput.alwaysDiscardsLateVideoFrames = NO; | |
556 [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue]; | |
557 _videoDataOutput = videoDataOutput; | |
558 } | |
559 return _videoDataOutput; | |
560 } | |
561 | |
562 - (AVCaptureDevice *)videoCaptureDeviceForPosition: | |
563 (AVCaptureDevicePosition)position { | |
564 for (AVCaptureDevice *captureDevice in | |
565 [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) { | |
566 if (captureDevice.position == position) { | |
567 return captureDevice; | |
568 } | |
569 } | |
570 return nil; | |
571 } | |
572 | |
573 - (AVCaptureDeviceInput *)frontCameraInput { | |
574 if (!_frontCameraInput) { | |
575 #if TARGET_OS_IPHONE | |
576 AVCaptureDevice *frontCameraDevice = | |
577 [self videoCaptureDeviceForPosition:AVCaptureDevicePositionFront]; | |
578 #else | |
579 AVCaptureDevice *frontCameraDevice = | |
580 [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; | |
581 #endif | |
582 if (!frontCameraDevice) { | |
583 RTCLogWarning(@"Failed to find front capture device."); | |
584 return nil; | |
585 } | |
586 NSError *error = nil; | |
587 AVCaptureDeviceInput *frontCameraInput = | |
588 [AVCaptureDeviceInput deviceInputWithDevice:frontCameraDevice | |
589 error:&error]; | |
590 if (!frontCameraInput) { | |
591 RTCLogError(@"Failed to create front camera input: %@", | |
592 error.localizedDescription); | |
593 return nil; | |
594 } | |
595 _frontCameraInput = frontCameraInput; | |
596 } | |
597 return _frontCameraInput; | |
598 } | |
599 | |
600 - (AVCaptureDeviceInput *)backCameraInput { | |
601 if (!_backCameraInput) { | |
602 AVCaptureDevice *backCameraDevice = | |
603 [self videoCaptureDeviceForPosition:AVCaptureDevicePositionBack]; | |
604 if (!backCameraDevice) { | |
605 RTCLogWarning(@"Failed to find front capture device."); | |
606 return nil; | |
607 } | |
608 NSError *error = nil; | |
609 AVCaptureDeviceInput *backCameraInput = | |
610 [AVCaptureDeviceInput deviceInputWithDevice:backCameraDevice | |
611 error:&error]; | |
612 if (!backCameraInput) { | |
613 RTCLogError(@"Failed to create front camera input: %@", | |
614 error.localizedDescription); | |
615 return nil; | |
616 } | |
617 _backCameraInput = backCameraInput; | |
618 } | |
619 return _backCameraInput; | |
620 } | |
621 | |
622 // Called from capture session queue. | |
623 - (void)updateOrientation { | |
624 #if TARGET_OS_IPHONE | |
625 switch ([UIDevice currentDevice].orientation) { | |
626 case UIDeviceOrientationPortrait: | |
627 _rotation = webrtc::kVideoRotation_90; | |
628 break; | |
629 case UIDeviceOrientationPortraitUpsideDown: | |
630 _rotation = webrtc::kVideoRotation_270; | |
631 break; | |
632 case UIDeviceOrientationLandscapeLeft: | |
633 _rotation = _capturer->GetUseBackCamera() ? webrtc::kVideoRotation_0 | |
634 : webrtc::kVideoRotation_180; | |
635 break; | |
636 case UIDeviceOrientationLandscapeRight: | |
637 _rotation = _capturer->GetUseBackCamera() ? webrtc::kVideoRotation_180 | |
638 : webrtc::kVideoRotation_0; | |
639 break; | |
640 case UIDeviceOrientationFaceUp: | |
641 case UIDeviceOrientationFaceDown: | |
642 case UIDeviceOrientationUnknown: | |
643 // Ignore. | |
644 break; | |
645 } | |
646 #endif | |
647 } | |
648 | |
649 // Update the current session input to match what's stored in _useBackCamera. | |
650 - (void)updateSessionInputForUseBackCamera:(BOOL)useBackCamera { | |
651 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | |
652 block:^{ | |
653 [_captureSession beginConfiguration]; | |
654 AVCaptureDeviceInput *oldInput = _backCameraInput; | |
655 AVCaptureDeviceInput *newInput = _frontCameraInput; | |
656 if (useBackCamera) { | |
657 oldInput = _frontCameraInput; | |
658 newInput = _backCameraInput; | |
659 } | |
660 if (oldInput) { | |
661 // Ok to remove this even if it's not attached. Will be no-op. | |
662 [_captureSession removeInput:oldInput]; | |
663 } | |
664 if (newInput) { | |
665 [_captureSession addInput:newInput]; | |
666 } | |
667 [self updateOrientation]; | |
668 AVCaptureDevice *newDevice = newInput.device; | |
669 const cricket::VideoFormat *format = _capturer->GetCaptureFormat(); | |
670 SetFormatForCaptureDevice(newDevice, _captureSession, *format); | |
671 [_captureSession commitConfiguration]; | |
672 }]; | |
673 } | |
674 | |
675 @end | |
676 | |
677 namespace webrtc { | |
678 | |
679 enum AVFoundationVideoCapturerMessageType : uint32_t { | |
680 kMessageTypeFrame, | |
681 }; | |
682 | |
683 AVFoundationVideoCapturer::AVFoundationVideoCapturer() : _capturer(nil) { | |
684 _capturer = | |
685 [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this]; | |
686 | |
687 std::set<cricket::VideoFormat> front_camera_video_formats = | |
688 GetSupportedVideoFormatsForDevice([_capturer frontCaptureDevice]); | |
689 | |
690 std::set<cricket::VideoFormat> back_camera_video_formats = | |
691 GetSupportedVideoFormatsForDevice([_capturer backCaptureDevice]); | |
692 | |
693 std::vector<cricket::VideoFormat> intersection_video_formats; | |
694 if (back_camera_video_formats.empty()) { | |
695 intersection_video_formats.assign(front_camera_video_formats.begin(), | |
696 front_camera_video_formats.end()); | |
697 | |
698 } else if (front_camera_video_formats.empty()) { | |
699 intersection_video_formats.assign(back_camera_video_formats.begin(), | |
700 back_camera_video_formats.end()); | |
701 } else { | |
702 std::set_intersection( | |
703 front_camera_video_formats.begin(), front_camera_video_formats.end(), | |
704 back_camera_video_formats.begin(), back_camera_video_formats.end(), | |
705 std::back_inserter(intersection_video_formats)); | |
706 } | |
707 SetSupportedFormats(intersection_video_formats); | |
708 } | |
709 | |
710 AVFoundationVideoCapturer::~AVFoundationVideoCapturer() { | |
711 _capturer = nil; | |
712 } | |
713 | |
714 cricket::CaptureState AVFoundationVideoCapturer::Start( | |
715 const cricket::VideoFormat& format) { | |
716 if (!_capturer) { | |
717 LOG(LS_ERROR) << "Failed to create AVFoundation capturer."; | |
718 return cricket::CaptureState::CS_FAILED; | |
719 } | |
720 if (_capturer.isRunning) { | |
721 LOG(LS_ERROR) << "The capturer is already running."; | |
722 return cricket::CaptureState::CS_FAILED; | |
723 } | |
724 | |
725 AVCaptureDevice* device = [_capturer getActiveCaptureDevice]; | |
726 AVCaptureSession* session = _capturer.captureSession; | |
727 | |
728 if (!SetFormatForCaptureDevice(device, session, format)) { | |
729 return cricket::CaptureState::CS_FAILED; | |
730 } | |
731 | |
732 SetCaptureFormat(&format); | |
733 // This isn't super accurate because it takes a while for the AVCaptureSession | |
734 // to spin up, and this call returns async. | |
735 // TODO(tkchin): make this better. | |
736 [_capturer start]; | |
737 SetCaptureState(cricket::CaptureState::CS_RUNNING); | |
738 | |
739 return cricket::CaptureState::CS_STARTING; | |
740 } | |
741 | |
742 void AVFoundationVideoCapturer::Stop() { | |
743 [_capturer stop]; | |
744 SetCaptureFormat(NULL); | |
745 } | |
746 | |
747 bool AVFoundationVideoCapturer::IsRunning() { | |
748 return _capturer.isRunning; | |
749 } | |
750 | |
751 AVCaptureSession* AVFoundationVideoCapturer::GetCaptureSession() { | |
752 return _capturer.captureSession; | |
753 } | |
754 | |
755 bool AVFoundationVideoCapturer::CanUseBackCamera() const { | |
756 return _capturer.canUseBackCamera; | |
757 } | |
758 | |
759 void AVFoundationVideoCapturer::SetUseBackCamera(bool useBackCamera) { | |
760 _capturer.useBackCamera = useBackCamera; | |
761 } | |
762 | |
763 bool AVFoundationVideoCapturer::GetUseBackCamera() const { | |
764 return _capturer.useBackCamera; | |
765 } | |
766 | |
767 void AVFoundationVideoCapturer::CaptureSampleBuffer( | |
768 CMSampleBufferRef sample_buffer, VideoRotation rotation) { | |
769 if (CMSampleBufferGetNumSamples(sample_buffer) != 1 || | |
770 !CMSampleBufferIsValid(sample_buffer) || | |
771 !CMSampleBufferDataIsReady(sample_buffer)) { | |
772 return; | |
773 } | |
774 | |
775 CVImageBufferRef image_buffer = CMSampleBufferGetImageBuffer(sample_buffer); | |
776 if (image_buffer == NULL) { | |
777 return; | |
778 } | |
779 | |
780 const int captured_width = CVPixelBufferGetWidth(image_buffer); | |
781 const int captured_height = CVPixelBufferGetHeight(image_buffer); | |
782 | |
783 int adapted_width; | |
784 int adapted_height; | |
785 int crop_width; | |
786 int crop_height; | |
787 int crop_x; | |
788 int crop_y; | |
789 int64_t translated_camera_time_us; | |
790 | |
791 if (!AdaptFrame(captured_width, captured_height, | |
792 rtc::TimeNanos() / rtc::kNumNanosecsPerMicrosec, | |
793 rtc::TimeMicros(), &adapted_width, &adapted_height, | |
794 &crop_width, &crop_height, &crop_x, &crop_y, | |
795 &translated_camera_time_us)) { | |
796 return; | |
797 } | |
798 | |
799 rtc::scoped_refptr<VideoFrameBuffer> buffer = | |
800 new rtc::RefCountedObject<CoreVideoFrameBuffer>( | |
801 image_buffer, | |
802 adapted_width, adapted_height, | |
803 crop_width, crop_height, | |
804 crop_x, crop_y); | |
805 | |
806 // Applying rotation is only supported for legacy reasons and performance is | |
807 // not critical here. | |
808 if (apply_rotation() && rotation != kVideoRotation_0) { | |
809 buffer = buffer->NativeToI420Buffer(); | |
810 rtc::scoped_refptr<I420Buffer> rotated_buffer = | |
811 (rotation == kVideoRotation_180) | |
812 ? I420Buffer::Create(adapted_width, adapted_height) | |
813 : I420Buffer::Create(adapted_height, adapted_width); | |
814 libyuv::I420Rotate( | |
815 buffer->DataY(), buffer->StrideY(), | |
816 buffer->DataU(), buffer->StrideU(), | |
817 buffer->DataV(), buffer->StrideV(), | |
818 rotated_buffer->MutableDataY(), rotated_buffer->StrideY(), | |
819 rotated_buffer->MutableDataU(), rotated_buffer->StrideU(), | |
820 rotated_buffer->MutableDataV(), rotated_buffer->StrideV(), | |
821 buffer->width(), buffer->height(), | |
822 static_cast<libyuv::RotationMode>(rotation)); | |
823 buffer = rotated_buffer; | |
824 } | |
825 | |
826 OnFrame(webrtc::VideoFrame(buffer, rotation, translated_camera_time_us), | |
827 captured_width, captured_height); | |
828 } | |
829 | |
830 } // namespace webrtc | |
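For context, a minimal sketch of how a caller might invoke the RTCAVFoundationFormatMapper class methods introduced in the NEW column above; the helper name ApplyCaptureFormat and its surrounding setup are illustrative assumptions, not part of this change:

// Hypothetical caller sketch (Objective-C++), assuming an AVCaptureDevice,
// AVCaptureSession and cricket::VideoFormat are already available, e.g.
// inside the capturer's Start() path shown in the old file.
#import "RTCAVFoundationFormatMapper.h"

static bool ApplyCaptureFormat(AVCaptureDevice *device,
                               AVCaptureSession *session,
                               const cricket::VideoFormat &format) {
  // Stands in for the old static SetFormatForCaptureDevice() helper: the
  // mapper picks a matching AVCaptureDeviceFormat and applies it to the
  // device inside a begin/commitConfiguration pair on the session.
  return [RTCAVFoundationFormatMapper setFormat:format
                                forCaptureDevice:device
                                  captureSession:session];
}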