Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(302)

Side by Side Diff: webrtc/sdk/objc/Framework/Classes/RTCCameraVideoCapturer.m

Issue 2862543002: Split iOS sdk in to separate targets (Closed)
Patch Set: rebase and minor fixes Created 3 years, 7 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
(Empty)
1 /*
2 * Copyright 2017 The WebRTC project authors. All Rights Reserved.
3 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
11 #import <Foundation/Foundation.h>
12
13 #import "WebRTC/RTCCameraVideoCapturer.h"
14 #import "WebRTC/RTCLogging.h"
15
16 #if TARGET_OS_IPHONE
17 #import "WebRTC/UIDevice+RTCDevice.h"
18 #endif
19
20 #import "RTCDispatcher+Private.h"
21
// Conversion factor from CMTime seconds to the nanosecond timestamps carried
// by RTCVideoFrame. `static` gives the constant internal linkage so this
// translation unit cannot collide with an identically named symbol elsewhere.
static const int64_t kNanosecondsPerSecond = 1000000000;
23
// Returns YES if the given capture format pixel layout is one the WebRTC
// stack can consume. Only the NV12 (bi-planar 4:2:0) variants are accepted,
// matching the kCVPixelFormatType_420YpCbCr8BiPlanarFullRange output format
// requested in setupVideoDataOutput.
static inline BOOL IsMediaSubTypeSupported(FourCharCode mediaSubType) {
  // NOTE(review): the previous check used
  // kCVPixelFormatType_420YpCbCr8PlanarFullRange (tri-planar), which camera
  // hardware does not emit; bi-planar full range is the intended constant.
  return (mediaSubType == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange ||
          mediaSubType == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange);
}
28
// Private class extension: adopts the sample-buffer delegate protocol and
// declares the serial queue on which camera frames are delivered.
@interface RTCCameraVideoCapturer ()<AVCaptureVideoDataOutputSampleBufferDelegate>
@property(nonatomic, readonly) dispatch_queue_t frameQueue;
@end
32
@implementation RTCCameraVideoCapturer {
  // Output that delivers sample buffers to this class on |frameQueue|.
  AVCaptureVideoDataOutput *_videoDataOutput;
  // Created once in init and reused for the lifetime of the capturer.
  AVCaptureSession *_captureSession;
  // Camera currently being captured from; nil after stopCapture.
  AVCaptureDevice *_currentDevice;
  // Rotation applied to outgoing frames; updated in updateOrientation.
  RTCVideoRotation _rotation;
  // Latch that prevents endless restart loops after repeated fatal errors.
  BOOL _hasRetriedOnFatalError;
  BOOL _isRunning;
  // Will the session be running once all asynchronous operations have been completed?
  BOOL _willBeRunning;
}

@synthesize frameQueue = _frameQueue;
@synthesize captureSession = _captureSession;
46
// Designated initializer. Eagerly creates the capture session (see the
// comment below for why) and registers for the session/device notifications
// this class reacts to. Returns nil if the video data output cannot be added
// to the session.
- (instancetype)initWithDelegate:(__weak id<RTCVideoCapturerDelegate>)delegate {
  if (self = [super initWithDelegate:delegate]) {
    // Create the capture session and all relevant inputs and outputs. We need
    // to do this in init because the application may want the capture session
    // before we start the capturer for e.g. AVCapturePreviewLayer. All objects
    // created here are retained until dealloc and never recreated.
    if (![self setupCaptureSession]) {
      return nil;
    }
    NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
#if TARGET_OS_IPHONE
    // iOS-only: device rotation, session interruptions, and the app
    // returning to the foreground all affect the running session.
    [center addObserver:self
               selector:@selector(deviceOrientationDidChange:)
                   name:UIDeviceOrientationDidChangeNotification
                 object:nil];
    [center addObserver:self
               selector:@selector(handleCaptureSessionInterruption:)
                   name:AVCaptureSessionWasInterruptedNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionInterruptionEnded:)
                   name:AVCaptureSessionInterruptionEndedNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleApplicationDidBecomeActive:)
                   name:UIApplicationDidBecomeActiveNotification
                 object:[UIApplication sharedApplication]];
#endif
    // Session lifecycle notifications are observed on all platforms.
    [center addObserver:self
               selector:@selector(handleCaptureSessionRuntimeError:)
                   name:AVCaptureSessionRuntimeErrorNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionDidStartRunning:)
                   name:AVCaptureSessionDidStartRunningNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionDidStopRunning:)
                   name:AVCaptureSessionDidStopRunningNotification
                 object:_captureSession];
  }
  return self;
}
90
// Asserts that stopCapture was requested before the last strong reference
// went away, then unregisters all notification observers added in init.
- (void)dealloc {
  NSAssert(
      !_willBeRunning,
      @"Session was still running in RTCCameraVideoCapturer dealloc. Forgot to call stopCapture?");
  [[NSNotificationCenter defaultCenter] removeObserver:self];
}
97
// All video capture devices currently available on this system.
+ (NSArray<AVCaptureDevice *> *)captureDevices {
  NSArray<AVCaptureDevice *> *devices =
      [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
  return devices;
}
101
// The subset of |device|'s formats whose pixel layout the WebRTC stack can
// ingest (see IsMediaSubTypeSupported).
+ (NSArray<AVCaptureDeviceFormat *> *)supportedFormatsForDevice:(AVCaptureDevice *)device {
  NSMutableArray<AVCaptureDeviceFormat *> *supported = [NSMutableArray array];

  for (AVCaptureDeviceFormat *format in device.formats) {
    // Skip subTypes that we currently don't support in the stack.
    FourCharCode subType = CMFormatDescriptionGetMediaSubType(format.formatDescription);
    if (!IsMediaSubTypeSupported(subType)) {
      continue;
    }
    [supported addObject:format];
  }

  return supported;
}
115
// Starts capturing from |device| using |format| at |fps| frames per second.
// All session work happens asynchronously on the capture-session dispatcher
// queue; _isRunning only becomes YES once that work has completed.
- (void)startCaptureWithDevice:(AVCaptureDevice *)device
                        format:(AVCaptureDeviceFormat *)format
                           fps:(NSInteger)fps {
  _willBeRunning = YES;
  [RTCDispatcher
      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                    block:^{
                      RTCLogInfo("startCaptureWithDevice %@ @ %zd fps", format, fps);

#if TARGET_OS_IPHONE
                      [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
#endif

                      _currentDevice = device;

                      NSError *error = nil;
                      if (![_currentDevice lockForConfiguration:&error]) {
                        RTCLogError(@"Failed to lock device %@. Error: %@", _currentDevice,
                                    error.userInfo);
                        // The session will never start, so clear the
                        // pending-start flag; otherwise the dealloc assertion
                        // fires even though stopCapture has nothing to stop.
                        _willBeRunning = NO;
                        return;
                      }
                      [self updateDeviceCaptureFormat:format fps:fps];

                      [self reconfigureCaptureSessionInput];
                      [self updateOrientation];
                      [_captureSession startRunning];

                      [_currentDevice unlockForConfiguration];
                      _isRunning = YES;
                    }];
}
148
// Stops the capture session asynchronously: detaches every input, stops the
// session, and (on iOS) ends orientation notifications.
- (void)stopCapture {
  _willBeRunning = NO;
  [RTCDispatcher
      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                    block:^{
                      RTCLogInfo("Stop");
                      _currentDevice = nil;
                      // Copy so the session's input list is not mutated while
                      // being enumerated.
                      NSArray *inputs = [_captureSession.inputs copy];
                      for (AVCaptureDeviceInput *input in inputs) {
                        [_captureSession removeInput:input];
                      }
                      [_captureSession stopRunning];

#if TARGET_OS_IPHONE
                      [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
#endif
                      _isRunning = NO;
                    }];
}
167
168 #pragma mark iOS notifications
169
#if TARGET_OS_IPHONE
// Re-derives the frame rotation on the capture queue whenever the device
// reports a physical orientation change.
- (void)deviceOrientationDidChange:(NSNotification *)notification {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
                                 [self updateOrientation];
                               }];
}
#endif
178
179 #pragma mark AVCaptureVideoDataOutputSampleBufferDelegate
180
// Delegate callback for each captured frame. Wraps the pixel buffer in an
// RTCVideoFrame (with the current rotation and a nanosecond timestamp) and
// forwards it to the capturer delegate.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection {
  NSParameterAssert(captureOutput == _videoDataOutput);

  // Discard buffers that are malformed, not yet ready, or that carry more
  // than a single video sample.
  BOOL usable = CMSampleBufferGetNumSamples(sampleBuffer) == 1 &&
                CMSampleBufferIsValid(sampleBuffer) &&
                CMSampleBufferDataIsReady(sampleBuffer);
  if (!usable) {
    return;
  }

  CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
  if (pixelBuffer == nil) {
    return;
  }

  // Convert the presentation time to the nanosecond clock WebRTC expects.
  Float64 seconds = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer));
  int64_t timeStampNs = seconds * kNanosecondsPerSecond;
  RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithPixelBuffer:pixelBuffer
                                                                rotation:_rotation
                                                             timeStampNs:timeStampNs];
  [self.delegate capturer:self didCaptureVideoFrame:videoFrame];
}
203
// Delegate callback for frames the output discarded; they are only logged.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
         fromConnection:(AVCaptureConnection *)connection {
  RTCLogError(@"Dropped sample buffer.");
}
209
210 #pragma mark - AVCaptureSession notifications
211
// Logs capture session interruptions. On SDKs/devices with iOS 9+ the
// notification's userInfo carries a machine-readable reason, which is
// translated to a human-readable string; otherwise only a generic message
// (with a nil reason) is logged.
- (void)handleCaptureSessionInterruption:(NSNotification *)notification {
  NSString *reasonString = nil;
#if defined(__IPHONE_9_0) && defined(__IPHONE_OS_VERSION_MAX_ALLOWED) && \
    __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_9_0
  if ([UIDevice isIOS9OrLater]) {
    NSNumber *reason = notification.userInfo[AVCaptureSessionInterruptionReasonKey];
    if (reason) {
      switch (reason.intValue) {
        case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground:
          reasonString = @"VideoDeviceNotAvailableInBackground";
          break;
        case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient:
          reasonString = @"AudioDeviceInUseByAnotherClient";
          break;
        case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient:
          reasonString = @"VideoDeviceInUseByAnotherClient";
          break;
        case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps:
          reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps";
          break;
      }
    }
  }
#endif
  RTCLog(@"Capture session interrupted: %@", reasonString);
}
238
// The interruption is over; AVFoundation resumes the session on its own, so
// this is log-only.
- (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification {
  RTCLog(@"Capture session interruption ended.");
}
242
// Routes session runtime errors to the recovery paths: a media-services
// reset is treated as recoverable on iOS, anything else as fatal.
- (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
  NSError *error = notification.userInfo[AVCaptureSessionErrorKey];
  RTCLogError(@"Capture session runtime error: %@", error);

  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
#if TARGET_OS_IPHONE
                                 if (error.code == AVErrorMediaServicesWereReset) {
                                   [self handleNonFatalError];
                                 } else {
                                   [self handleFatalError];
                                 }
#else
                                 [self handleFatalError];
#endif
                               }];
}
260
// The session reached the running state (initial start or after recovery);
// reset the fatal-error retry latch on the capture queue.
- (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification {
  RTCLog(@"Capture session started.");
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
                                 // If we successfully restarted after an unknown
                                 // error, allow future retries on fatal errors.
                                 _hasRetriedOnFatalError = NO;
                               }];
}
271
// Log-only observer for the session leaving the running state.
- (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
  RTCLog(@"Capture session stopped.");
}
275
// Attempts one automatic recovery from a fatal capture error. The
// _hasRetriedOnFatalError latch (cleared when the session starts running
// again) ensures we do not loop on a persistent failure.
- (void)handleFatalError {
  [RTCDispatcher
      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                    block:^{
                      if (_hasRetriedOnFatalError) {
                        RTCLogError(@"Previous fatal error recovery failed.");
                        return;
                      }
                      RTCLogWarning(@"Attempting to recover from fatal capture error.");
                      [self handleNonFatalError];
                      _hasRetriedOnFatalError = YES;
                    }];
}
289
// Restarts the session after a recoverable error, but only if capture is
// still supposed to be running.
- (void)handleNonFatalError {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
                                 RTCLog(@"Restarting capture session after error.");
                                 if (_isRunning) {
                                   [_captureSession startRunning];
                                 }
                               }];
}
299
#if TARGET_OS_IPHONE

#pragma mark - UIApplication notifications

// When the app returns to the foreground, restart the session if capture was
// requested but the session is no longer running.
- (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
                                 BOOL shouldRestart = _isRunning && !_captureSession.isRunning;
                                 if (shouldRestart) {
                                   RTCLog(@"Restarting capture session on active.");
                                   [_captureSession startRunning];
                                 }
                               }];
}

#endif  // TARGET_OS_IPHONE
315
316 #pragma mark - Private
317
// Lazily created serial queue on which sample buffers are delivered.
// Targets the high-priority global queue so frame delivery is not starved.
- (dispatch_queue_t)frameQueue {
  if (_frameQueue == nil) {
    dispatch_queue_t queue =
        dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video", DISPATCH_QUEUE_SERIAL);
    dispatch_set_target_queue(queue, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
    _frameQueue = queue;
  }
  return _frameQueue;
}
327
// Creates the capture session and attaches the video data output. Returns NO
// (after logging) when the output cannot be added, in which case init fails.
- (BOOL)setupCaptureSession {
  NSAssert(_captureSession == nil, @"Setup capture session called twice.");
  _captureSession = [[AVCaptureSession alloc] init];
#if defined(WEBRTC_IOS)
  // Let the chosen device format, not a session preset, drive configuration,
  // and do not attach the session to the application's audio session.
  _captureSession.sessionPreset = AVCaptureSessionPresetInputPriority;
  _captureSession.usesApplicationAudioSession = NO;
#endif
  [self setupVideoDataOutput];
  if ([_captureSession canAddOutput:_videoDataOutput]) {
    [_captureSession addOutput:_videoDataOutput];
    return YES;
  }
  RTCLogError(@"Video data output unsupported.");
  return NO;
}
345
// Creates and configures the AVCaptureVideoDataOutput that feeds frames to
// this class on |frameQueue|.
- (void)setupVideoDataOutput {
  NSAssert(_videoDataOutput == nil, @"Setup video data output called twice.");
  // Make the capturer output NV12. Ideally we want I420 but that's not
  // currently supported on iPhone / iPad.
  // TODO(denicija): Remove this color conversion and use the original capture
  // format directly.
  AVCaptureVideoDataOutput *videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
  NSDictionary *settings = @{
    (NSString *)kCVPixelBufferPixelFormatTypeKey :
        @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
  };
  videoDataOutput.videoSettings = settings;
  videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
  [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue];
  _videoDataOutput = videoDataOutput;
}
360
361 #pragma mark - Private, called inside capture queue
362
// Applies |format| and a minimum frame duration of 1/|fps| to the current
// device. The caller must already hold the device configuration lock.
// Setting activeFormat can throw if the format does not belong to the
// device; that programmer error is caught and logged rather than crashing.
- (void)updateDeviceCaptureFormat:(AVCaptureDeviceFormat *)format fps:(NSInteger)fps {
  NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession],
           @"updateDeviceCaptureFormat must be called on the capture queue.");
  @try {
    _currentDevice.activeFormat = format;
    // CMTimeMake takes an int32_t timescale; cast explicitly to avoid an
    // implicit 64->32 bit truncation on LP64 platforms. Realistic fps values
    // are far below INT32_MAX.
    _currentDevice.activeVideoMinFrameDuration = CMTimeMake(1, (int32_t)fps);
  } @catch (NSException *exception) {
    RTCLogError(@"Failed to set active format!\n User info:%@", exception.userInfo);
    return;
  }
}
374
// Replaces the session's current input(s) with a fresh input wrapping
// _currentDevice, inside a begin/commitConfiguration transaction.
- (void)reconfigureCaptureSessionInput {
  NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession],
           @"reconfigureCaptureSessionInput must be called on the capture queue.");
  NSError *error = nil;
  AVCaptureDeviceInput *input =
      [AVCaptureDeviceInput deviceInputWithDevice:_currentDevice error:&error];
  if (input == nil) {
    RTCLogError(@"Failed to create front camera input: %@", error.localizedDescription);
    return;
  }
  [_captureSession beginConfiguration];
  // Copy so the input list is not mutated while being enumerated.
  NSArray *oldInputs = [_captureSession.inputs copy];
  for (AVCaptureDeviceInput *oldInput in oldInputs) {
    [_captureSession removeInput:oldInput];
  }
  if ([_captureSession canAddInput:input]) {
    [_captureSession addInput:input];
  } else {
    RTCLogError(@"Cannot add camera as an input to the session.");
  }
  [_captureSession commitConfiguration];
}
396
// Maps the current UIDevice orientation to the RTCVideoRotation applied to
// outgoing frames. Runs on the capture queue because it writes _rotation,
// which the sample-buffer callback reads. No-op on non-iPhone targets; for
// face-up/face-down/unknown orientations the previous rotation is kept.
- (void)updateOrientation {
  NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession],
           @"updateOrientation must be called on the capture queue.");
#if TARGET_OS_IPHONE
  BOOL usingFrontCamera = _currentDevice.position == AVCaptureDevicePositionFront;
  switch ([UIDevice currentDevice].orientation) {
    case UIDeviceOrientationPortrait:
      _rotation = RTCVideoRotation_90;
      break;
    case UIDeviceOrientationPortraitUpsideDown:
      _rotation = RTCVideoRotation_270;
      break;
    case UIDeviceOrientationLandscapeLeft:
      // Landscape rotations are swapped for the front camera — presumably
      // because its image is mirrored; NOTE(review): confirm against the
      // rendering path.
      _rotation = usingFrontCamera ? RTCVideoRotation_180 : RTCVideoRotation_0;
      break;
    case UIDeviceOrientationLandscapeRight:
      _rotation = usingFrontCamera ? RTCVideoRotation_0 : RTCVideoRotation_180;
      break;
    case UIDeviceOrientationFaceUp:
    case UIDeviceOrientationFaceDown:
    case UIDeviceOrientationUnknown:
      // Ignore.
      break;
  }
#endif
}
423
424 @end
OLDNEW
« no previous file with comments | « webrtc/sdk/objc/Framework/Classes/RTCCameraPreviewView.m ('k') | webrtc/sdk/objc/Framework/Classes/RTCConfiguration.mm » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698