Chromium Code Reviews

Side by Side Diff: webrtc/sdk/objc/Framework/Classes/RTCAVFoundationVideoCapturerInternal.mm

Issue 2488973002: Split avfoundationcapturer classes in separate files. (Closed)
Patch Set: Created 4 years, 1 month ago
1 /*
2 * Copyright 2016 The WebRTC project authors. All Rights Reserved.
magjed_webrtc 2016/11/10 10:32:52 Try 'git cl upload --similarity=10' to get the diff computed against the file this code was moved from.
3 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
11 #import "RTCAVFoundationVideoCapturerInternal.h"
12
13 #import <Foundation/Foundation.h>
14 #if TARGET_OS_IPHONE
15 #import <UIKit/UIKit.h>
16 #endif
17
18 #import "RTCDispatcher+Private.h"
19 #import "WebRTC/RTCLogging.h"
20
21
22 NS_ASSUME_NONNULL_BEGIN
23 @implementation RTCAVFoundationVideoCapturerInternal {
24 // Keep pointers to inputs for convenience.
25 AVCaptureDeviceInput *_frontCameraInput;
26 AVCaptureDeviceInput *_backCameraInput;
27 AVCaptureVideoDataOutput *_videoDataOutput;
28 // The cricket::VideoCapturer that owns this class. Should never be NULL.
29 webrtc::AVFoundationVideoCapturer *_capturer;
30 webrtc::VideoRotation _rotation;
31 BOOL _hasRetriedOnFatalError;
32 BOOL _isRunning;
33 BOOL _hasStarted;
34 rtc::CriticalSection _crit;
35 }
36
37 @synthesize captureSession = _captureSession;
38 @synthesize frameQueue = _frameQueue;
39 @synthesize useBackCamera = _useBackCamera;
40
41 @synthesize isRunning = _isRunning;
42 @synthesize hasStarted = _hasStarted;
43
44 // This is called from the thread that creates the video source, which is likely
45 // the main thread.
46 - (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer {
47 RTC_DCHECK(capturer);
48 if (self = [super init]) {
49 _capturer = capturer;
50 // Create the capture session and all relevant inputs and outputs. We need
51 // to do this in init because the application may want the capture session
52 * before we start the capturer for e.g. AVCaptureVideoPreviewLayer. All objects
53 // created here are retained until dealloc and never recreated.
54 if (![self setupCaptureSession]) {
55 return nil;
56 }
57 NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
58 #if TARGET_OS_IPHONE
59 [center addObserver:self
60 selector:@selector(deviceOrientationDidChange:)
61 name:UIDeviceOrientationDidChangeNotification
62 object:nil];
63 [center addObserver:self
64 selector:@selector(handleCaptureSessionInterruption:)
65 name:AVCaptureSessionWasInterruptedNotification
66 object:_captureSession];
67 [center addObserver:self
68 selector:@selector(handleCaptureSessionInterruptionEnded:)
69 name:AVCaptureSessionInterruptionEndedNotification
70 object:_captureSession];
71 [center addObserver:self
72 selector:@selector(handleApplicationDidBecomeActive:)
73 name:UIApplicationDidBecomeActiveNotification
74 object:[UIApplication sharedApplication]];
75 #endif
76 [center addObserver:self
77 selector:@selector(handleCaptureSessionRuntimeError:)
78 name:AVCaptureSessionRuntimeErrorNotification
79 object:_captureSession];
80 [center addObserver:self
81 selector:@selector(handleCaptureSessionDidStartRunning:)
82 name:AVCaptureSessionDidStartRunningNotification
83 object:_captureSession];
84 [center addObserver:self
85 selector:@selector(handleCaptureSessionDidStopRunning:)
86 name:AVCaptureSessionDidStopRunningNotification
87 object:_captureSession];
88 }
89 return self;
90 }
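
The session and its inputs/outputs are deliberately built in init rather than in -start, so application code can attach a preview before capture begins. A minimal sketch of that usage, assuming a hypothetical app-side reference named capturerInternal and a view named previewView (neither is part of this CL):

    // Hypothetical caller: wire a preview layer to the session that
    // -initWithCapturer: just built, before -start is ever invoked.
    AVCaptureVideoPreviewLayer *previewLayer =
        [AVCaptureVideoPreviewLayer layerWithSession:capturerInternal.captureSession];
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    previewLayer.frame = previewView.bounds;
    [previewView.layer addSublayer:previewLayer];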
91
92 - (void)dealloc {
93 RTC_DCHECK(!self.hasStarted);
94 [[NSNotificationCenter defaultCenter] removeObserver:self];
95 _capturer = nullptr;
96 }
97
98 - (AVCaptureSession *)captureSession {
99 return _captureSession;
100 }
101
102 - (AVCaptureDevice *)getActiveCaptureDevice {
103 return self.useBackCamera ? _backCameraInput.device
104 : _frontCameraInput.device;
105 }
106
107 - (nullable AVCaptureDevice *)frontCaptureDevice {
108 return _frontCameraInput.device;
109 }
110
111 - (nullable AVCaptureDevice *)backCaptureDevice {
112 return _backCameraInput.device;
113 }
114
115 - (dispatch_queue_t)frameQueue {
116 if (!_frameQueue) {
117 _frameQueue = dispatch_queue_create(
118 "org.webrtc.avfoundationvideocapturer.video", DISPATCH_QUEUE_SERIAL);
119 dispatch_set_target_queue(
120 _frameQueue,
121 dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
122 }
123 return _frameQueue;
124 }
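
The dispatch_set_target_queue call keeps the frame queue serial while borrowing the scheduling priority of the high-priority global queue. A roughly equivalent construction using QoS attributes (available since iOS 8) would look like this sketch:

    // Sketch: same effect via a QoS attribute instead of a target queue.
    // QOS_CLASS_USER_INITIATED approximates DISPATCH_QUEUE_PRIORITY_HIGH.
    dispatch_queue_attr_t attr = dispatch_queue_attr_make_with_qos_class(
        DISPATCH_QUEUE_SERIAL, QOS_CLASS_USER_INITIATED, 0);
    dispatch_queue_t frameQueue = dispatch_queue_create(
        "org.webrtc.avfoundationvideocapturer.video", attr);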
125
126 // Called from any thread (likely main thread).
127 - (BOOL)canUseBackCamera {
128 return _backCameraInput != nil;
kthelgason 2016/11/10 09:28:10 doesn't this have to be @synchronized as well?
daniela-webrtc 2016/11/11 13:08:58 Not entirely sure :/ seems like it does not need t
129 }
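
On the question above: _backCameraInput is assigned once, in -setupCaptureSession during init, and never reassigned, so the unguarded read is defensible. If it were ever mutated after init, a guarded variant would look like this sketch (illustrative only, not part of the CL):

    // Hypothetical guarded variant, only needed if _backCameraInput
    // could change after init:
    - (BOOL)canUseBackCamera {
      @synchronized(self) {
        return _backCameraInput != nil;
      }
    }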
130
131 // Called from any thread (likely main thread).
132 - (BOOL)useBackCamera {
133 @synchronized(self) {
134 return _useBackCamera;
135 }
136 }
137
138 // Called from any thread (likely main thread).
139 - (void)setUseBackCamera:(BOOL)useBackCamera {
140 if (!self.canUseBackCamera) {
141 if (useBackCamera) {
142 RTCLogWarning(@"No rear-facing camera exists or it cannot be used;"
143 "not switching.");
144 }
145 return;
146 }
147 @synchronized(self) {
148 if (_useBackCamera == useBackCamera) {
149 return;
150 }
151 _useBackCamera = useBackCamera;
152 [self updateSessionInputForUseBackCamera:useBackCamera];
153 }
154 }
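
From the caller's side the whole camera switch is a single property write; the setter serializes the actual input swap onto the capture-session thread via -updateSessionInputForUseBackCamera:. A usage sketch, with capturerInternal as an assumed app-side reference:

    // Hypothetical caller: switch to the back camera when available.
    // The AVCaptureSession reconfiguration happens asynchronously.
    if (capturerInternal.canUseBackCamera) {
      capturerInternal.useBackCamera = YES;
    }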
155
156 // Called from WebRTC thread.
157 - (void)start {
158 if (self.hasStarted) {
159 return;
160 }
161 self.hasStarted = YES;
162 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
163 block:^{
164 #if TARGET_OS_IPHONE
165 // Default to portrait orientation on iPhone. This will be reset in
166 // updateOrientation unless orientation is unknown/faceup/facedown.
167 _rotation = webrtc::kVideoRotation_90;
168 #else
169 // No rotation on Mac.
170 _rotation = webrtc::kVideoRotation_0;
171 #endif
172 [self updateOrientation];
173 #if TARGET_OS_IPHONE
174 [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
175 #endif
176 AVCaptureSession *captureSession = self.captureSession;
177 [captureSession startRunning];
178 }];
kthelgason 2016/11/10 09:28:10 This indentation is off. Should be 4 spaces according to the style guide.
daniela-webrtc 2016/11/11 13:08:58 Done.
179 }
180
181 // Called from same thread as start.
182 - (void)stop {
183 if (!self.hasStarted) {
184 return;
185 }
186 self.hasStarted = NO;
187 // Due to this async block, it's possible that the ObjC object outlives the
188 // C++ one. In order to not invoke functions on the C++ object, we set
189 // hasStarted immediately instead of dispatching it async.
190 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
191 block:^{
192 [_videoDataOutput setSampleBufferDelegate:nil queue:nullptr];
193 [_captureSession stopRunning];
194 #if TARGET_OS_IPHONE
195 [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
196 #endif
197 }];
kthelgason 2016/11/10 09:28:10 Ditto indentation.
daniela-webrtc 2016/11/11 13:08:58 Done.
198 }
199
200 #pragma mark iOS notifications
201
202 #if TARGET_OS_IPHONE
203 - (void)deviceOrientationDidChange:(NSNotification *)notification {
204 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
205 block:^{
206 [self updateOrientation];
207 }];
208 }
209 #endif
210
211 #pragma mark AVCaptureVideoDataOutputSampleBufferDelegate
212
213 - (void)captureOutput:(AVCaptureOutput *)captureOutput
214 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
215 fromConnection:(AVCaptureConnection *)connection {
216 NSParameterAssert(captureOutput == _videoDataOutput);
217 if (!self.hasStarted) {
218 return;
219 }
220 _capturer->CaptureSampleBuffer(sampleBuffer, _rotation);
221 }
222
223 - (void)captureOutput:(AVCaptureOutput *)captureOutput
224 didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
225 fromConnection:(AVCaptureConnection *)connection {
226 RTCLogError(@"Dropped sample buffer.");
227 }
228
229 #pragma mark - AVCaptureSession notifications
230
231 - (void)handleCaptureSessionInterruption:(NSNotification *)notification {
232 NSString *reasonString = nil;
233 #if defined(__IPHONE_9_0) && defined(__IPHONE_OS_VERSION_MAX_ALLOWED) \
234 && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_9_0
235 NSNumber *reason =
236 notification.userInfo[AVCaptureSessionInterruptionReasonKey];
237 if (reason) {
238 switch (reason.intValue) {
239 case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground:
240 reasonString = @"VideoDeviceNotAvailableInBackground";
241 break;
242 case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient:
243 reasonString = @"AudioDeviceInUseByAnotherClient";
244 break;
245 case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient:
246 reasonString = @"VideoDeviceInUseByAnotherClient";
247 break;
248 case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps:
249 reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps";
250 break;
251 }
252 }
253 #endif
254 RTCLog(@"Capture session interrupted: %@", reasonString);
255 // TODO(tkchin): Handle this case.
256 }
257
258 - (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification {
259 RTCLog(@"Capture session interruption ended.");
260 // TODO(tkchin): Handle this case.
261 }
262
263 - (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
264 NSError *error =
265 [notification.userInfo objectForKey:AVCaptureSessionErrorKey];
266 RTCLogError(@"Capture session runtime error: %@", error);
267
268 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
269 block:^{
270 #if TARGET_OS_IPHONE
271 if (error.code == AVErrorMediaServicesWereReset) {
272 [self handleNonFatalError];
273 } else {
274 [self handleFatalError];
275 }
276 #else
277 [self handleFatalError];
278 #endif
279 }];
kthelgason 2016/11/10 09:28:10 ditto indentation.
daniela-webrtc 2016/11/11 13:08:58 Done.
280 }
281
282 - (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification {
283 RTCLog(@"Capture session started.");
284
285 self.isRunning = YES;
286 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
287 block:^{
288 // If we successfully restarted after an unknown error,
289 // allow future retries on fatal errors.
290 _hasRetriedOnFatalError = NO;
291 }];
292 }
293
294 - (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
295 RTCLog(@"Capture session stopped.");
296 self.isRunning = NO;
297 }
298
299 - (void)handleFatalError {
300 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
301 block:^{
302 if (!_hasRetriedOnFatalError) {
303 RTCLogWarning(@"Attempting to recover from fatal capture error.");
304 [self handleNonFatalError];
305 _hasRetriedOnFatalError = YES;
306 } else {
307 RTCLogError(@"Previous fatal error recovery failed.");
308 }
309 }];
310 }
311
312 - (void)handleNonFatalError {
313 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
314 block:^{
315 if (self.hasStarted) {
316 RTCLog(@"Restarting capture session after error.");
317 [self.captureSession startRunning];
318 }
319 }];
320 }
321
322 #if TARGET_OS_IPHONE
323
324 #pragma mark - UIApplication notifications
325
326 - (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
327 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
328 block:^{
329 if (self.hasStarted && !self.captureSession.isRunning) {
330 RTCLog(@"Restarting capture session on active.");
331 [self.captureSession startRunning];
332 }
333 }];
334 }
335
336 #endif // TARGET_OS_IPHONE
337
338 #pragma mark - Private
339
340 - (BOOL)setupCaptureSession {
341 AVCaptureSession *captureSession = [[AVCaptureSession alloc] init];
342 #if defined(WEBRTC_IOS)
343 captureSession.usesApplicationAudioSession = NO;
344 #endif
345 // Add the output.
346 AVCaptureVideoDataOutput *videoDataOutput = [self videoDataOutput];
347 if (![captureSession canAddOutput:videoDataOutput]) {
348 RTCLogError(@"Video data output unsupported.");
349 return NO;
350 }
351 [captureSession addOutput:videoDataOutput];
352
353 // Get the front and back cameras. If there isn't a front camera
354 // give up.
355 AVCaptureDeviceInput *frontCameraInput = [self frontCameraInput];
356 AVCaptureDeviceInput *backCameraInput = [self backCameraInput];
357 if (!frontCameraInput) {
358 RTCLogError(@"No front camera for capture session.");
359 return NO;
360 }
361
362 // Add the inputs.
363 if (![captureSession canAddInput:frontCameraInput] ||
364 (backCameraInput && ![captureSession canAddInput:backCameraInput])) {
365 RTCLogError(@"Session does not support capture inputs.");
366 return NO;
367 }
368 AVCaptureDeviceInput *input = self.useBackCamera ?
369 backCameraInput : frontCameraInput;
370 [captureSession addInput:input];
371
372 _captureSession = captureSession;
373 return YES;
374 }
375
376 - (AVCaptureVideoDataOutput *)videoDataOutput {
377 if (!_videoDataOutput) {
378 // Make the capturer output NV12. Ideally we want I420 but that's not
379 // currently supported on iPhone / iPad.
380 AVCaptureVideoDataOutput *videoDataOutput =
381 [[AVCaptureVideoDataOutput alloc] init];
382 videoDataOutput.videoSettings = @{
383 (NSString *)kCVPixelBufferPixelFormatTypeKey :
384 @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
385 };
386 videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
387 [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue];
388 _videoDataOutput = videoDataOutput;
389 }
390 return _videoDataOutput;
391 }
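
The NV12 format above is hard-coded because, per the comment, it is what the supported devices actually offer. If you wanted to verify a pixel format before applying it, AVCaptureVideoDataOutput publishes the supported list; a sketch:

    // Sketch: confirm full-range NV12 is offered before selecting it.
    NSNumber *nv12 = @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange);
    if ([videoDataOutput.availableVideoCVPixelFormatTypes containsObject:nv12]) {
      videoDataOutput.videoSettings =
          @{(NSString *)kCVPixelBufferPixelFormatTypeKey : nv12};
    }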
392
393 - (AVCaptureDevice *)videoCaptureDeviceForPosition:
394 (AVCaptureDevicePosition)position {
395 for (AVCaptureDevice *captureDevice in
396 [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
397 if (captureDevice.position == position) {
398 return captureDevice;
399 }
400 }
401 return nil;
402 }
403
404 - (AVCaptureDeviceInput *)frontCameraInput {
405 if (!_frontCameraInput) {
406 #if TARGET_OS_IPHONE
407 AVCaptureDevice *frontCameraDevice =
408 [self videoCaptureDeviceForPosition:AVCaptureDevicePositionFront];
409 #else
410 AVCaptureDevice *frontCameraDevice =
411 [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
412 #endif
413 if (!frontCameraDevice) {
414 RTCLogWarning(@"Failed to find front capture device.");
415 return nil;
416 }
417 NSError *error = nil;
418 AVCaptureDeviceInput *frontCameraInput =
419 [AVCaptureDeviceInput deviceInputWithDevice:frontCameraDevice
420 error:&error];
421 if (!frontCameraInput) {
422 RTCLogError(@"Failed to create front camera input: %@",
423 error.localizedDescription);
424 return nil;
425 }
426 _frontCameraInput = frontCameraInput;
427 }
428 return _frontCameraInput;
429 }
430
431 - (AVCaptureDeviceInput *)backCameraInput {
432 if (!_backCameraInput) {
433 AVCaptureDevice *backCameraDevice =
434 [self videoCaptureDeviceForPosition:AVCaptureDevicePositionBack];
435 if (!backCameraDevice) {
436 RTCLogWarning(@"Failed to find back capture device.");
437 return nil;
438 }
439 NSError *error = nil;
440 AVCaptureDeviceInput *backCameraInput =
441 [AVCaptureDeviceInput deviceInputWithDevice:backCameraDevice
442 error:&error];
443 if (!backCameraInput) {
444 RTCLogError(@"Failed to create back camera input: %@",
445 error.localizedDescription);
446 return nil;
447 }
448 _backCameraInput = backCameraInput;
449 }
450 return _backCameraInput;
451 }
452
453 // Called from capture session queue.
454 - (void)updateOrientation {
455 #if TARGET_OS_IPHONE
456 switch ([UIDevice currentDevice].orientation) {
457 case UIDeviceOrientationPortrait:
458 _rotation = webrtc::kVideoRotation_90;
459 break;
460 case UIDeviceOrientationPortraitUpsideDown:
461 _rotation = webrtc::kVideoRotation_270;
462 break;
463 case UIDeviceOrientationLandscapeLeft:
464 _rotation = _capturer->GetUseBackCamera() ? webrtc::kVideoRotation_0
465 : webrtc::kVideoRotation_180;
466 break;
467 case UIDeviceOrientationLandscapeRight:
468 _rotation = _capturer->GetUseBackCamera() ? webrtc::kVideoRotation_180
469 : webrtc::kVideoRotation_0;
470 break;
471 case UIDeviceOrientationFaceUp:
472 case UIDeviceOrientationFaceDown:
473 case UIDeviceOrientationUnknown:
474 // Ignore.
475 break;
476 }
477 #endif
478 }
479
480 // Update the current session input to match what's stored in _useBackCamera.
481 - (void)updateSessionInputForUseBackCamera:(BOOL)useBackCamera {
482 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
483 block:^{
484 [_captureSession beginConfiguration];
485 AVCaptureDeviceInput *oldInput = _backCameraInput;
486 AVCaptureDeviceInput *newInput = _frontCameraInput;
487 if (useBackCamera) {
488 oldInput = _frontCameraInput;
489 newInput = _backCameraInput;
490 }
491 if (oldInput) {
492 // Ok to remove this even if it's not attached. Will be no-op.
493 [_captureSession removeInput:oldInput];
494 }
495 if (newInput) {
496 [_captureSession addInput:newInput];
497 }
498 [self updateOrientation];
499 AVCaptureDevice *newDevice = newInput.device;
500 const cricket::VideoFormat *format = _capturer->GetCaptureFormat();
501 SetFormatForCaptureDevice(newDevice, _captureSession, *format);
502 [_captureSession commitConfiguration];
503 }];
504 }
505
506 @end
507
508 NS_ASSUME_NONNULL_END