Chromium Code Reviews

Side by Side Diff: webrtc/sdk/objc/Framework/Classes/RTCAVFoundationVideoCapturerInternal.mm

Issue 2488973002: Split avfoundationcapturer classes in separate files. (Closed)
Patch Set: Decrease the similarity to 10% Created 4 years, 1 month ago
1 /* 1 /*
2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. 2 * Copyright 2016 The WebRTC project authors. All Rights Reserved.
3 * 3 *
4 * Use of this source code is governed by a BSD-style license 4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source 5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found 6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may 7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree. 8 * be found in the AUTHORS file in the root of the source tree.
9 */ 9 */
10 10
11 #include "avfoundationvideocapturer.h" 11 #import "RTCAVFoundationVideoCapturerInternal.h"
12 12
13 #import <AVFoundation/AVFoundation.h>
14 #import <Foundation/Foundation.h> 13 #import <Foundation/Foundation.h>
15 #if TARGET_OS_IPHONE 14 #if TARGET_OS_IPHONE
16 #import <UIKit/UIKit.h> 15 #import <UIKit/UIKit.h>
17 #endif 16 #endif
18 17
19 #import "RTCDispatcher+Private.h" 18 #import "RTCDispatcher+Private.h"
20 #import "WebRTC/RTCLogging.h" 19 #import "WebRTC/RTCLogging.h"
21 #if TARGET_OS_IPHONE
22 #import "WebRTC/UIDevice+RTCDevice.h"
23 #endif
24 20
25 #include "libyuv/rotate.h"
26 21
27 #include "webrtc/base/bind.h" 22 NS_ASSUME_NONNULL_BEGIN
28 #include "webrtc/base/checks.h"
29 #include "webrtc/base/logging.h"
30 #include "webrtc/base/thread.h"
31 #include "webrtc/common_video/include/corevideo_frame_buffer.h"
32 #include "webrtc/common_video/rotation.h"
33
34 // TODO(denicija): add support for higher frame rates.
35 // See http://crbug/webrtc/6355 for more info.
36 static const int kFramesPerSecond = 30;
37
38 static inline BOOL IsMediaSubTypeSupported(FourCharCode mediaSubType) {
39 return (mediaSubType == kCVPixelFormatType_420YpCbCr8PlanarFullRange ||
40 mediaSubType == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange);
41 }
42
43 static inline BOOL IsFrameRateWithinRange(int fps, AVFrameRateRange *range) {
44 return range.minFrameRate <= fps && range.maxFrameRate >= fps;
45 }
46
47 // Returns a filtered array of device formats based on predefined constraints our
48 // stack imposes.
49 static NSArray<AVCaptureDeviceFormat *> *GetEligibleDeviceFormats(
50 const AVCaptureDevice *device,
51 int supportedFps) {
52 NSMutableArray<AVCaptureDeviceFormat *> *eligibleDeviceFormats =
53 [NSMutableArray array];
54
55 for (AVCaptureDeviceFormat *format in device.formats) {
56 // Filter out subTypes that we currently don't support in the stack
57 FourCharCode mediaSubType =
58 CMFormatDescriptionGetMediaSubType(format.formatDescription);
59 if (!IsMediaSubTypeSupported(mediaSubType)) {
60 continue;
61 }
62
63 // Filter out frame rate ranges that we currently don't support in the stack
64 for (AVFrameRateRange *frameRateRange in format.videoSupportedFrameRateRanges) {
65 if (IsFrameRateWithinRange(supportedFps, frameRateRange)) {
66 [eligibleDeviceFormats addObject:format];
67 break;
68 }
69 }
70 }
71
72 return [eligibleDeviceFormats copy];
73 }
74
75 // Mapping from cricket::VideoFormat to AVCaptureDeviceFormat.
76 static AVCaptureDeviceFormat *GetDeviceFormatForVideoFormat(
77 const AVCaptureDevice *device,
78 const cricket::VideoFormat &videoFormat) {
79 AVCaptureDeviceFormat *desiredDeviceFormat = nil;
80 NSArray<AVCaptureDeviceFormat *> *eligibleFormats =
81 GetEligibleDeviceFormats(device, videoFormat.framerate());
82
83 for (AVCaptureDeviceFormat *deviceFormat in eligibleFormats) {
84 CMVideoDimensions dimension =
85 CMVideoFormatDescriptionGetDimensions(deviceFormat.formatDescription);
86 FourCharCode mediaSubType =
87 CMFormatDescriptionGetMediaSubType(deviceFormat.formatDescription);
88
89 if (videoFormat.width == dimension.width &&
90 videoFormat.height == dimension.height) {
91 if (mediaSubType == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) {
92 // This is the preferred format so no need to wait for a better option.
93 return deviceFormat;
94 } else {
95 // This is a good candidate, but let's wait for something better.
96 desiredDeviceFormat = deviceFormat;
97 }
98 }
99 }
100
101 return desiredDeviceFormat;
102 }
103
104 // Mapping from AVCaptureDeviceFormat to cricket::VideoFormat for a given input
105 // device.
106 static std::set<cricket::VideoFormat> GetSupportedVideoFormatsForDevice(
107 AVCaptureDevice *device) {
108 std::set<cricket::VideoFormat> supportedFormats;
109
110 NSArray<AVCaptureDeviceFormat *> *eligibleFormats =
111 GetEligibleDeviceFormats(device, kFramesPerSecond);
112
113 for (AVCaptureDeviceFormat *deviceFormat in eligibleFormats) {
114 CMVideoDimensions dimension =
115 CMVideoFormatDescriptionGetDimensions(deviceFormat.formatDescription);
116 cricket::VideoFormat format = cricket::VideoFormat(
117 dimension.width, dimension.height,
118 cricket::VideoFormat::FpsToInterval(kFramesPerSecond),
119 cricket::FOURCC_NV12);
120 supportedFormats.insert(format);
121 }
122
123 return supportedFormats;
124 }
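
Note on FpsToInterval above: cricket::VideoFormat stores its interval field as nanoseconds per frame, so FpsToInterval and IntervalToFps are reciprocal integer conversions. A minimal sketch of that arithmetic, assuming the usual nanoseconds-per-second constant (illustration only, not the authoritative implementation):

    #include <cstdint>

    constexpr int64_t kNumNanosecsPerSec = 1000000000;

    // Frame interval in nanoseconds per frame for a given frame rate.
    constexpr int64_t FpsToInterval(int fps) {
      return fps > 0 ? kNumNanosecsPerSec / fps : kNumNanosecsPerSec;
    }

    // Inverse conversion: frame rate for a given interval.
    constexpr int IntervalToFps(int64_t interval) {
      return interval > 0 ? static_cast<int>(kNumNanosecsPerSec / interval) : 0;
    }

    static_assert(FpsToInterval(30) == 33333333, "30 fps ~= 33.3 ms per frame");
    static_assert(IntervalToFps(FpsToInterval(30)) == 30, "round trip");
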
125
126 // Sets device format for the provided capture device. Returns YES/NO depending on success.
127 // TODO(denicija): When this file is split, this static method should be reconsidered.
128 // Perhaps adding a category on AVCaptureDevice would be better.
129 static BOOL SetFormatForCaptureDevice(AVCaptureDevice *device,
130 AVCaptureSession *session,
131 const cricket::VideoFormat &format) {
132 AVCaptureDeviceFormat *deviceFormat =
133 GetDeviceFormatForVideoFormat(device, format);
134 const int fps = cricket::VideoFormat::IntervalToFps(format.interval);
135
136 NSError *error = nil;
137 BOOL success = YES;
138 [session beginConfiguration];
139 if ([device lockForConfiguration:&error]) {
140 @try {
141 device.activeFormat = deviceFormat;
142 device.activeVideoMinFrameDuration = CMTimeMake(1, fps);
143 } @catch (NSException *exception) {
144 RTCLogError(
145 @"Failed to set active format!\n User info:%@",
146 exception.userInfo);
147 success = NO;
148 }
149
150 [device unlockForConfiguration];
151 } else {
152 RTCLogError(
153 @"Failed to lock device %@. Error: %@",
154 device, error.userInfo);
155 success = NO;
156 }
157 [session commitConfiguration];
158
159 return success;
160 }
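
Regarding the TODO above about adding a category on AVCaptureDevice: a rough sketch of what that refactoring might look like (hypothetical names, not part of this CL; the caller would still bracket the call with beginConfiguration/commitConfiguration on the session):

    // Hypothetical category sketch; rtc_setActiveFormat:fps: is not an
    // existing WebRTC API.
    @interface AVCaptureDevice (RTCConfiguration)
    - (BOOL)rtc_setActiveFormat:(AVCaptureDeviceFormat *)format fps:(int)fps;
    @end

    @implementation AVCaptureDevice (RTCConfiguration)
    - (BOOL)rtc_setActiveFormat:(AVCaptureDeviceFormat *)format fps:(int)fps {
      NSError *error = nil;
      if (![self lockForConfiguration:&error]) {
        RTCLogError(@"Failed to lock device %@. Error: %@", self, error.userInfo);
        return NO;
      }
      BOOL success = YES;
      @try {
        self.activeFormat = format;
        self.activeVideoMinFrameDuration = CMTimeMake(1, fps);
      } @catch (NSException *exception) {
        RTCLogError(@"Failed to set active format!\n User info:%@",
                    exception.userInfo);
        success = NO;
      }
      [self unlockForConfiguration];
      return success;
    }
    @end
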
161
162 // This class is used to capture frames using AVFoundation APIs on iOS. It is
163 // meant to be owned by an instance of AVFoundationVideoCapturer. The reason is
164 // that other webrtc objects own cricket::VideoCapturer, which is not
165 // ref counted. To prevent bad behavior we do not expose this class directly.
166 @interface RTCAVFoundationVideoCapturerInternal : NSObject
167 <AVCaptureVideoDataOutputSampleBufferDelegate>
168
169 @property(nonatomic, readonly) AVCaptureSession *captureSession;
170 @property(nonatomic, readonly) dispatch_queue_t frameQueue;
171 @property(nonatomic, readonly) BOOL canUseBackCamera;
172 @property(nonatomic, assign) BOOL useBackCamera; // Defaults to NO.
173 @property(atomic, assign) BOOL isRunning; // Whether the capture session is running.
174 @property(atomic, assign) BOOL hasStarted; // Whether we have an unmatched start.
175
176 // We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it
177 // when we receive frames. This is safe because this object should be owned by
178 // it.
179 - (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer;
180 - (AVCaptureDevice *)getActiveCaptureDevice;
181
182 - (nullable AVCaptureDevice *)frontCaptureDevice;
183 - (nullable AVCaptureDevice *)backCaptureDevice;
184
185 // Starts and stops the capture session asynchronously. We cannot do this
186 // synchronously without blocking a WebRTC thread.
187 - (void)start;
188 - (void)stop;
189
190 @end
191
192 @implementation RTCAVFoundationVideoCapturerInternal { 23 @implementation RTCAVFoundationVideoCapturerInternal {
193 // Keep pointers to inputs for convenience. 24 // Keep pointers to inputs for convenience.
194 AVCaptureDeviceInput *_frontCameraInput; 25 AVCaptureDeviceInput *_frontCameraInput;
195 AVCaptureDeviceInput *_backCameraInput; 26 AVCaptureDeviceInput *_backCameraInput;
196 AVCaptureVideoDataOutput *_videoDataOutput; 27 AVCaptureVideoDataOutput *_videoDataOutput;
197 // The cricket::VideoCapturer that owns this class. Should never be NULL. 28 // The cricket::VideoCapturer that owns this class. Should never be NULL.
198 webrtc::AVFoundationVideoCapturer *_capturer; 29 webrtc::AVFoundationVideoCapturer *_capturer;
199 webrtc::VideoRotation _rotation; 30 webrtc::VideoRotation _rotation;
200 BOOL _hasRetriedOnFatalError; 31 BOOL _hasRetriedOnFatalError;
201 BOOL _isRunning; 32 BOOL _isRunning;
(...skipping 60 matching lines...)
262 RTC_DCHECK(!self.hasStarted); 93 RTC_DCHECK(!self.hasStarted);
263 [[NSNotificationCenter defaultCenter] removeObserver:self]; 94 [[NSNotificationCenter defaultCenter] removeObserver:self];
264 _capturer = nullptr; 95 _capturer = nullptr;
265 } 96 }
266 97
267 - (AVCaptureSession *)captureSession { 98 - (AVCaptureSession *)captureSession {
268 return _captureSession; 99 return _captureSession;
269 } 100 }
270 101
271 - (AVCaptureDevice *)getActiveCaptureDevice { 102 - (AVCaptureDevice *)getActiveCaptureDevice {
272 return self.useBackCamera ? _backCameraInput.device : _frontCameraInput.device; 103 return self.useBackCamera ? _backCameraInput.device
104 : _frontCameraInput.device;
273 } 105 }
274 106
275 - (AVCaptureDevice *)frontCaptureDevice { 107 - (nullable AVCaptureDevice *)frontCaptureDevice {
276 return _frontCameraInput.device; 108 return _frontCameraInput.device;
277 } 109 }
278 110
279 - (AVCaptureDevice *)backCaptureDevice { 111 - (nullable AVCaptureDevice *)backCaptureDevice {
280 return _backCameraInput.device; 112 return _backCameraInput.device;
281 } 113 }
282 114
283 - (dispatch_queue_t)frameQueue { 115 - (dispatch_queue_t)frameQueue {
284 if (!_frameQueue) { 116 if (!_frameQueue) {
285 _frameQueue = 117 _frameQueue = dispatch_queue_create(
286 dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video", 118 "org.webrtc.avfoundationvideocapturer.video", DISPATCH_QUEUE_SERIAL);
287 DISPATCH_QUEUE_SERIAL);
288 dispatch_set_target_queue( 119 dispatch_set_target_queue(
289 _frameQueue, 120 _frameQueue,
290 dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0)); 121 dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
291 } 122 }
292 return _frameQueue; 123 return _frameQueue;
293 } 124 }
294 125
295 // Called from any thread (likely main thread). 126 // Called from any thread (likely main thread).
296 - (BOOL)canUseBackCamera { 127 - (BOOL)canUseBackCamera {
297 return _backCameraInput != nil; 128 return _backCameraInput != nil;
298 } 129 }
299 130
300 // Called from any thread (likely main thread). 131 // Called from any thread (likely main thread).
301 - (BOOL)useBackCamera { 132 - (BOOL)useBackCamera {
302 @synchronized(self) { 133 @synchronized(self) {
303 return _useBackCamera; 134 return _useBackCamera;
304 } 135 }
305 } 136 }
306 137
307 // Called from any thread (likely main thread). 138 // Called from any thread (likely main thread).
308 - (void)setUseBackCamera:(BOOL)useBackCamera { 139 - (void)setUseBackCamera:(BOOL)useBackCamera {
309 if (!self.canUseBackCamera) { 140 if (!self.canUseBackCamera) {
310 if (useBackCamera) { 141 if (useBackCamera) {
311 RTCLogWarning(@"No rear-facing camera exists or it cannot be used;" 142 RTCLogWarning(@"No rear-facing camera exists or it cannot be used;"
312 " not switching."); 143 " not switching.");
313 } 144 }
314 return; 145 return;
315 } 146 }
316 @synchronized(self) { 147 @synchronized(self) {
317 if (_useBackCamera == useBackCamera) { 148 if (_useBackCamera == useBackCamera) {
318 return; 149 return;
319 } 150 }
320 _useBackCamera = useBackCamera; 151 _useBackCamera = useBackCamera;
321 [self updateSessionInputForUseBackCamera:useBackCamera]; 152 [self updateSessionInputForUseBackCamera:useBackCamera];
322 } 153 }
323 } 154 }
324 155
325 // Called from WebRTC thread. 156 // Called from WebRTC thread.
326 - (void)start { 157 - (void)start {
327 if (self.hasStarted) { 158 if (self.hasStarted) {
328 return; 159 return;
329 } 160 }
330 self.hasStarted = YES; 161 self.hasStarted = YES;
331 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession 162 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
332 block:^{ 163 block:^{
333 #if TARGET_OS_IPHONE 164 #if TARGET_OS_IPHONE
334 // Default to portrait orientation on iPhone. This will be reset in 165 // Default to portrait orientation on iPhone. This will be reset in
335 // updateOrientation unless orientation is unknown/faceup/facedown. 166 // updateOrientation unless orientation is unknown/faceup/facedown.
336 _rotation = webrtc::kVideoRotation_90; 167 _rotation = webrtc::kVideoRotation_90;
337 #else 168 #else
338 // No rotation on Mac. 169 // No rotation on Mac.
339 _rotation = webrtc::kVideoRotation_0; 170 _rotation = webrtc::kVideoRotation_0;
340 #endif 171 #endif
341 [self updateOrientation]; 172 [self updateOrientation];
342 #if TARGET_OS_IPHONE 173 #if TARGET_OS_IPHONE
343 [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications]; 174 [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
magjed_webrtc 2016/11/10 13:22:45 This line is too long now. Is this how it should be?
daniela-webrtc 2016/11/11 13:08:59 Done.
344 #endif 175 #endif
345 AVCaptureSession *captureSession = self.captureSession; 176 AVCaptureSession *captureSession = self.captureSession;
346 [captureSession startRunning]; 177 [captureSession startRunning];
347 }]; 178 }];
348 } 179 }
349 180
350 // Called from same thread as start. 181 // Called from same thread as start.
351 - (void)stop { 182 - (void)stop {
352 if (!self.hasStarted) { 183 if (!self.hasStarted) {
353 return; 184 return;
354 } 185 }
355 self.hasStarted = NO; 186 self.hasStarted = NO;
356 // Due to this async block, it's possible that the ObjC object outlives the 187 // Due to this async block, it's possible that the ObjC object outlives the
357 // C++ one. In order to not invoke functions on the C++ object, we set 188 // C++ one. In order to not invoke functions on the C++ object, we set
358 // hasStarted immediately instead of dispatching it async. 189 // hasStarted immediately instead of dispatching it async.
359 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession 190 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
360 block:^{ 191 block:^{
361 [_videoDataOutput setSampleBufferDelegate:nil queue:nullptr]; 192 [_videoDataOutput setSampleBufferDelegate:nil queue:nullptr];
362 [_captureSession stopRunning]; 193 [_captureSession stopRunning];
363 #if TARGET_OS_IPHONE 194 #if TARGET_OS_IPHONE
364 [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications]; 195 [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
365 #endif 196 #endif
366 }]; 197 }];
367 } 198 }
368 199
369 #pragma mark iOS notifications 200 #pragma mark iOS notifications
370 201
371 #if TARGET_OS_IPHONE 202 #if TARGET_OS_IPHONE
372 - (void)deviceOrientationDidChange:(NSNotification *)notification { 203 - (void)deviceOrientationDidChange:(NSNotification *)notification {
373 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession 204 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
374 block:^{ 205 block:^{
375 [self updateOrientation]; 206 [self updateOrientation];
376 }]; 207 }];
377 } 208 }
378 #endif 209 #endif
379 210
380 #pragma mark AVCaptureVideoDataOutputSampleBufferDelegate 211 #pragma mark AVCaptureVideoDataOutputSampleBufferDelegate
381 212
382 - (void)captureOutput:(AVCaptureOutput *)captureOutput 213 - (void)captureOutput:(AVCaptureOutput *)captureOutput
383 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer 214 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
384 fromConnection:(AVCaptureConnection *)connection { 215 fromConnection:(AVCaptureConnection *)connection {
385 NSParameterAssert(captureOutput == _videoDataOutput); 216 NSParameterAssert(captureOutput == _videoDataOutput);
386 if (!self.hasStarted) { 217 if (!self.hasStarted) {
387 return; 218 return;
388 } 219 }
389 _capturer->CaptureSampleBuffer(sampleBuffer, _rotation); 220 _capturer->CaptureSampleBuffer(sampleBuffer, _rotation);
390 } 221 }
391 222
392 - (void)captureOutput:(AVCaptureOutput *)captureOutput 223 - (void)captureOutput:(AVCaptureOutput *)captureOutput
393 didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer 224 didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
394 fromConnection:(AVCaptureConnection *)connection { 225 fromConnection:(AVCaptureConnection *)connection {
395 RTCLogError(@"Dropped sample buffer."); 226 RTCLogError(@"Dropped sample buffer.");
396 } 227 }
397 228
398 #pragma mark - AVCaptureSession notifications 229 #pragma mark - AVCaptureSession notifications
399 230
400 - (void)handleCaptureSessionInterruption:(NSNotification *)notification { 231 - (void)handleCaptureSessionInterruption:(NSNotification *)notification {
401 NSString *reasonString = nil; 232 NSString *reasonString = nil;
402 #if defined(__IPHONE_9_0) && defined(__IPHONE_OS_VERSION_MAX_ALLOWED) \ 233 #if defined(__IPHONE_9_0) && defined(__IPHONE_OS_VERSION_MAX_ALLOWED) \
403 && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_9_0 234 && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_9_0
magjed_webrtc 2016/11/10 13:22:45 I don't think this indentation is right?
daniela-webrtc 2016/11/11 13:08:59 Done.
404 NSNumber *reason = 235 NSNumber *reason =
405 notification.userInfo[AVCaptureSessionInterruptionReasonKey]; 236 notification.userInfo[AVCaptureSessionInterruptionReasonKey];
magjed_webrtc 2016/11/10 13:22:45 need to indent 4 spaces
daniela-webrtc 2016/11/11 13:08:59 Done.
406 if (reason) { 237 if (reason) {
407 switch (reason.intValue) { 238 switch (reason.intValue) {
408 case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground: 239 case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground:
409 reasonString = @"VideoDeviceNotAvailableInBackground"; 240 reasonString = @"VideoDeviceNotAvailableInBackground";
410 break; 241 break;
411 case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient: 242 case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient:
412 reasonString = @"AudioDeviceInUseByAnotherClient"; 243 reasonString = @"AudioDeviceInUseByAnotherClient";
413 break; 244 break;
414 case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient: 245 case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient:
415 reasonString = @"VideoDeviceInUseByAnotherClient"; 246 reasonString = @"VideoDeviceInUseByAnotherClient";
416 break; 247 break;
417 case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps: 248 case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps:
418 reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps"; 249 reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps";
419 break; 250 break;
420 } 251 }
421 } 252 }
422 #endif 253 #endif
423 RTCLog(@"Capture session interrupted: %@", reasonString); 254 RTCLog(@"Capture session interrupted: %@", reasonString);
424 // TODO(tkchin): Handle this case. 255 // TODO(tkchin): Handle this case.
425 } 256 }
426 257
427 - (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification { 258 - (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification {
428 RTCLog(@"Capture session interruption ended."); 259 RTCLog(@"Capture session interruption ended.");
429 // TODO(tkchin): Handle this case. 260 // TODO(tkchin): Handle this case.
430 } 261 }
431 262
432 - (void)handleCaptureSessionRuntimeError:(NSNotification *)notification { 263 - (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
433 NSError *error = 264 NSError *error =
434 [notification.userInfo objectForKey:AVCaptureSessionErrorKey]; 265 [notification.userInfo objectForKey:AVCaptureSessionErrorKey];
magjed_webrtc 2016/11/10 13:22:45 ditto
daniela-webrtc 2016/11/11 13:08:59 Done.
435 RTCLogError(@"Capture session runtime error: %@", error); 266 RTCLogError(@"Capture session runtime error: %@", error);
436 267
437 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession 268 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
438 block:^{ 269 block:^{
439 #if TARGET_OS_IPHONE 270 #if TARGET_OS_IPHONE
440 if (error.code == AVErrorMediaServicesWereReset) { 271 if (error.code == AVErrorMediaServicesWereReset) {
441 [self handleNonFatalError]; 272 [self handleNonFatalError];
442 } else { 273 } else {
443 [self handleFatalError]; 274 [self handleFatalError];
444 } 275 }
445 #else 276 #else
446 [self handleFatalError]; 277 [self handleFatalError];
447 #endif 278 #endif
448 }]; 279 }];
449 } 280 }
450 281
451 - (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification { 282 - (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification {
452 RTCLog(@"Capture session started."); 283 RTCLog(@"Capture session started.");
453 284
454 self.isRunning = YES; 285 self.isRunning = YES;
455 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession 286 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
456 block:^{ 287 block:^{
457 // If we successfully restarted after an unknown error, allow future 288 // If we successfully restarted after an unknown error,
458 // retries on fatal errors. 289 // allow future retries on fatal errors.
459 _hasRetriedOnFatalError = NO; 290 _hasRetriedOnFatalError = NO;
460 }]; 291 }];
461 } 292 }
462 293
463 - (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification { 294 - (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
464 RTCLog(@"Capture session stopped."); 295 RTCLog(@"Capture session stopped.");
465 self.isRunning = NO; 296 self.isRunning = NO;
466 } 297 }
467 298
468 - (void)handleFatalError { 299 - (void)handleFatalError {
469 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession 300 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
470 block:^{ 301 block:^{
471 if (!_hasRetriedOnFatalError) { 302 if (!_hasRetriedOnFatalError) {
472 RTCLogWarning(@"Attempting to recover from fatal capture error."); 303 RTCLogWarning(@"Attempting to recover from fatal capture error.");
473 [self handleNonFatalError]; 304 [self handleNonFatalError];
474 _hasRetriedOnFatalError = YES; 305 _hasRetriedOnFatalError = YES;
475 } else { 306 } else {
476 RTCLogError(@"Previous fatal error recovery failed."); 307 RTCLogError(@"Previous fatal error recovery failed.");
477 } 308 }
478 }]; 309 }];
479 } 310 }
480 311
481 - (void)handleNonFatalError { 312 - (void)handleNonFatalError {
482 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession 313 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
483 block:^{ 314 block:^{
484 if (self.hasStarted) { 315 if (self.hasStarted) {
485 RTCLog(@"Restarting capture session after error."); 316 RTCLog(@"Restarting capture session after error.");
486 [self.captureSession startRunning]; 317 [self.captureSession startRunning];
487 } 318 }
488 }]; 319 }];
489 } 320 }
490 321
491 #if TARGET_OS_IPHONE 322 #if TARGET_OS_IPHONE
492 323
493 #pragma mark - UIApplication notifications 324 #pragma mark - UIApplication notifications
494 325
495 - (void)handleApplicationDidBecomeActive:(NSNotification *)notification { 326 - (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
496 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession 327 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
497 block:^{ 328 block:^{
498 if (self.hasStarted && !self.captureSession.isRunning) { 329 if (self.hasStarted && !self.captureSession.isRunning) {
499 RTCLog(@"Restarting capture session on active."); 330 RTCLog(@"Restarting capture session on active.");
500 [self.captureSession startRunning]; 331 [self.captureSession startRunning];
501 } 332 }
502 }]; 333 }];
503 } 334 }
504 335
505 #endif // TARGET_OS_IPHONE 336 #endif // TARGET_OS_IPHONE
506 337
507 #pragma mark - Private 338 #pragma mark - Private
508 339
509 - (BOOL)setupCaptureSession { 340 - (BOOL)setupCaptureSession {
510 AVCaptureSession *captureSession = [[AVCaptureSession alloc] init]; 341 AVCaptureSession *captureSession = [[AVCaptureSession alloc] init];
511 #if defined(WEBRTC_IOS) 342 #if defined(WEBRTC_IOS)
512 captureSession.usesApplicationAudioSession = NO; 343 captureSession.usesApplicationAudioSession = NO;
(...skipping 15 matching lines...)
528 return NO; 359 return NO;
529 } 360 }
530 361
531 // Add the inputs. 362 // Add the inputs.
532 if (![captureSession canAddInput:frontCameraInput] || 363 if (![captureSession canAddInput:frontCameraInput] ||
533 (backCameraInput && ![captureSession canAddInput:backCameraInput])) { 364 (backCameraInput && ![captureSession canAddInput:backCameraInput])) {
534 RTCLogError(@"Session does not support capture inputs."); 365 RTCLogError(@"Session does not support capture inputs.");
535 return NO; 366 return NO;
536 } 367 }
537 AVCaptureDeviceInput *input = self.useBackCamera ? 368 AVCaptureDeviceInput *input = self.useBackCamera ?
538 backCameraInput : frontCameraInput; 369 backCameraInput : frontCameraInput;
539 [captureSession addInput:input]; 370 [captureSession addInput:input];
540 371
541 _captureSession = captureSession; 372 _captureSession = captureSession;
542 return YES; 373 return YES;
543 } 374 }
544 375
545 - (AVCaptureVideoDataOutput *)videoDataOutput { 376 - (AVCaptureVideoDataOutput *)videoDataOutput {
546 if (!_videoDataOutput) { 377 if (!_videoDataOutput) {
547 // Make the capturer output NV12. Ideally we want I420 but that's not 378 // Make the capturer output NV12. Ideally we want I420 but that's not
548 // currently supported on iPhone / iPad. 379 // currently supported on iPhone / iPad.
549 AVCaptureVideoDataOutput *videoDataOutput = 380 AVCaptureVideoDataOutput *videoDataOutput =
550 [[AVCaptureVideoDataOutput alloc] init]; 381 [[AVCaptureVideoDataOutput alloc] init];
551 videoDataOutput.videoSettings = @{ 382 videoDataOutput.videoSettings = @{
552 (NSString *)kCVPixelBufferPixelFormatTypeKey : 383 (NSString *)kCVPixelBufferPixelFormatTypeKey :
553 @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) 384 @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
554 }; 385 };
555 videoDataOutput.alwaysDiscardsLateVideoFrames = NO; 386 videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
556 [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue]; 387 [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue];
557 _videoDataOutput = videoDataOutput; 388 _videoDataOutput = videoDataOutput;
558 } 389 }
559 return _videoDataOutput; 390 return _videoDataOutput;
560 } 391 }
561 392
562 - (AVCaptureDevice *)videoCaptureDeviceForPosition: 393 - (AVCaptureDevice *)videoCaptureDeviceForPosition:
563 (AVCaptureDevicePosition)position { 394 (AVCaptureDevicePosition)position {
564 for (AVCaptureDevice *captureDevice in 395 for (AVCaptureDevice *captureDevice in
565 [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) { 396 [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
566 if (captureDevice.position == position) { 397 if (captureDevice.position == position) {
567 return captureDevice; 398 return captureDevice;
568 } 399 }
569 } 400 }
570 return nil; 401 return nil;
571 } 402 }
572 403
573 - (AVCaptureDeviceInput *)frontCameraInput { 404 - (AVCaptureDeviceInput *)frontCameraInput {
574 if (!_frontCameraInput) { 405 if (!_frontCameraInput) {
575 #if TARGET_OS_IPHONE 406 #if TARGET_OS_IPHONE
576 AVCaptureDevice *frontCameraDevice = 407 AVCaptureDevice *frontCameraDevice =
577 [self videoCaptureDeviceForPosition:AVCaptureDevicePositionFront]; 408 [self videoCaptureDeviceForPosition:AVCaptureDevicePositionFront];
578 #else 409 #else
579 AVCaptureDevice *frontCameraDevice = 410 AVCaptureDevice *frontCameraDevice =
580 [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; 411 [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
581 #endif 412 #endif
582 if (!frontCameraDevice) { 413 if (!frontCameraDevice) {
583 RTCLogWarning(@"Failed to find front capture device."); 414 RTCLogWarning(@"Failed to find front capture device.");
584 return nil; 415 return nil;
585 } 416 }
586 NSError *error = nil; 417 NSError *error = nil;
587 AVCaptureDeviceInput *frontCameraInput = 418 AVCaptureDeviceInput *frontCameraInput =
588 [AVCaptureDeviceInput deviceInputWithDevice:frontCameraDevice 419 [AVCaptureDeviceInput deviceInputWithDevice:frontCameraDevice
589 error:&error]; 420 error:&error];
590 if (!frontCameraInput) { 421 if (!frontCameraInput) {
591 RTCLogError(@"Failed to create front camera input: %@", 422 RTCLogError(@"Failed to create front camera input: %@",
592 error.localizedDescription); 423 error.localizedDescription);
593 return nil; 424 return nil;
594 } 425 }
595 _frontCameraInput = frontCameraInput; 426 _frontCameraInput = frontCameraInput;
596 } 427 }
597 return _frontCameraInput; 428 return _frontCameraInput;
598 } 429 }
599 430
600 - (AVCaptureDeviceInput *)backCameraInput { 431 - (AVCaptureDeviceInput *)backCameraInput {
601 if (!_backCameraInput) { 432 if (!_backCameraInput) {
602 AVCaptureDevice *backCameraDevice = 433 AVCaptureDevice *backCameraDevice =
603 [self videoCaptureDeviceForPosition:AVCaptureDevicePositionBack]; 434 [self videoCaptureDeviceForPosition:AVCaptureDevicePositionBack];
604 if (!backCameraDevice) { 435 if (!backCameraDevice) {
605 RTCLogWarning(@"Failed to find back capture device."); 436 RTCLogWarning(@"Failed to find back capture device.");
606 return nil; 437 return nil;
607 } 438 }
608 NSError *error = nil; 439 NSError *error = nil;
609 AVCaptureDeviceInput *backCameraInput = 440 AVCaptureDeviceInput *backCameraInput =
610 [AVCaptureDeviceInput deviceInputWithDevice:backCameraDevice 441 [AVCaptureDeviceInput deviceInputWithDevice:backCameraDevice
611 error:&error]; 442 error:&error];
612 if (!backCameraInput) { 443 if (!backCameraInput) {
613 RTCLogError(@"Failed to create back camera input: %@", 444 RTCLogError(@"Failed to create back camera input: %@",
614 error.localizedDescription); 445 error.localizedDescription);
615 return nil; 446 return nil;
616 } 447 }
617 _backCameraInput = backCameraInput; 448 _backCameraInput = backCameraInput;
618 } 449 }
619 return _backCameraInput; 450 return _backCameraInput;
620 } 451 }
621 452
622 // Called from capture session queue. 453 // Called from capture session queue.
623 - (void)updateOrientation { 454 - (void)updateOrientation {
624 #if TARGET_OS_IPHONE 455 #if TARGET_OS_IPHONE
625 switch ([UIDevice currentDevice].orientation) { 456 switch ([UIDevice currentDevice].orientation) {
626 case UIDeviceOrientationPortrait: 457 case UIDeviceOrientationPortrait:
627 _rotation = webrtc::kVideoRotation_90; 458 _rotation = webrtc::kVideoRotation_90;
628 break; 459 break;
629 case UIDeviceOrientationPortraitUpsideDown: 460 case UIDeviceOrientationPortraitUpsideDown:
630 _rotation = webrtc::kVideoRotation_270; 461 _rotation = webrtc::kVideoRotation_270;
631 break; 462 break;
632 case UIDeviceOrientationLandscapeLeft: 463 case UIDeviceOrientationLandscapeLeft:
633 _rotation = _capturer->GetUseBackCamera() ? webrtc::kVideoRotation_0 464 _rotation = _capturer->GetUseBackCamera() ? webrtc::kVideoRotation_0
634 : webrtc::kVideoRotation_180; 465 : webrtc::kVideoRotation_180;
635 break; 466 break;
636 case UIDeviceOrientationLandscapeRight: 467 case UIDeviceOrientationLandscapeRight:
637 _rotation = _capturer->GetUseBackCamera() ? webrtc::kVideoRotation_180 468 _rotation = _capturer->GetUseBackCamera() ? webrtc::kVideoRotation_180
638 : webrtc::kVideoRotation_0; 469 : webrtc::kVideoRotation_0;
639 break; 470 break;
640 case UIDeviceOrientationFaceUp: 471 case UIDeviceOrientationFaceUp:
641 case UIDeviceOrientationFaceDown: 472 case UIDeviceOrientationFaceDown:
642 case UIDeviceOrientationUnknown: 473 case UIDeviceOrientationUnknown:
643 // Ignore. 474 // Ignore.
644 break; 475 break;
645 } 476 }
646 #endif 477 #endif
647 } 478 }
648 479
649 // Update the current session input to match what's stored in _useBackCamera. 480 // Update the current session input to match what's stored in _useBackCamera.
650 - (void)updateSessionInputForUseBackCamera:(BOOL)useBackCamera { 481 - (void)updateSessionInputForUseBackCamera:(BOOL)useBackCamera {
651 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession 482 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
652 block:^{ 483 block:^{
653 [_captureSession beginConfiguration]; 484 [_captureSession beginConfiguration];
654 AVCaptureDeviceInput *oldInput = _backCameraInput; 485 AVCaptureDeviceInput *oldInput = _backCameraInput;
655 AVCaptureDeviceInput *newInput = _frontCameraInput; 486 AVCaptureDeviceInput *newInput = _frontCameraInput;
656 if (useBackCamera) { 487 if (useBackCamera) {
657 oldInput = _frontCameraInput; 488 oldInput = _frontCameraInput;
658 newInput = _backCameraInput; 489 newInput = _backCameraInput;
659 } 490 }
660 if (oldInput) { 491 if (oldInput) {
661 // Ok to remove this even if it's not attached. Will be a no-op. 492 // Ok to remove this even if it's not attached. Will be a no-op.
662 [_captureSession removeInput:oldInput]; 493 [_captureSession removeInput:oldInput];
663 } 494 }
664 if (newInput) { 495 if (newInput) {
665 [_captureSession addInput:newInput]; 496 [_captureSession addInput:newInput];
666 } 497 }
667 [self updateOrientation]; 498 [self updateOrientation];
668 AVCaptureDevice *newDevice = newInput.device; 499 AVCaptureDevice *newDevice = newInput.device;
669 const cricket::VideoFormat *format = _capturer->GetCaptureFormat(); 500 const cricket::VideoFormat *format = _capturer->GetCaptureFormat();
670 SetFormatForCaptureDevice(newDevice, _captureSession, *format); 501 SetFormatForCaptureDevice(newDevice, _captureSession, *format);
671 [_captureSession commitConfiguration]; 502 [_captureSession commitConfiguration];
672 }]; 503 }];
673 } 504 }
674 505
675 @end 506 @end
676 507
677 namespace webrtc { 508 NS_ASSUME_NONNULL_END
678
679 enum AVFoundationVideoCapturerMessageType : uint32_t {
680 kMessageTypeFrame,
681 };
682
683 AVFoundationVideoCapturer::AVFoundationVideoCapturer() : _capturer(nil) {
684 _capturer =
685 [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];
686
687 std::set<cricket::VideoFormat> front_camera_video_formats =
688 GetSupportedVideoFormatsForDevice([_capturer frontCaptureDevice]);
689
690 std::set<cricket::VideoFormat> back_camera_video_formats =
691 GetSupportedVideoFormatsForDevice([_capturer backCaptureDevice]);
692
693 std::vector<cricket::VideoFormat> intersection_video_formats;
694 if (back_camera_video_formats.empty()) {
695 intersection_video_formats.assign(front_camera_video_formats.begin(),
696 front_camera_video_formats.end());
697
698 } else if (front_camera_video_formats.empty()) {
699 intersection_video_formats.assign(back_camera_video_formats.begin(),
700 back_camera_video_formats.end());
701 } else {
702 std::set_intersection(
703 front_camera_video_formats.begin(), front_camera_video_formats.end(),
704 back_camera_video_formats.begin(), back_camera_video_formats.end(),
705 std::back_inserter(intersection_video_formats));
706 }
707 SetSupportedFormats(intersection_video_formats);
708 }
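
The constructor above advertises only formats that both cameras support, falling back to whichever camera has formats when the other has none. std::set_intersection requires sorted input ranges, which std::set guarantees; a self-contained sketch with stand-in integer "formats":

    #include <algorithm>
    #include <iterator>
    #include <set>
    #include <vector>

    int main() {
      // Stand-ins for the front/back camera format sets above.
      std::set<int> front_formats = {480, 720, 1080};
      std::set<int> back_formats = {480, 720};
      std::vector<int> common;
      // Both ranges are sorted, as std::set_intersection requires.
      std::set_intersection(front_formats.begin(), front_formats.end(),
                            back_formats.begin(), back_formats.end(),
                            std::back_inserter(common));
      // common == {480, 720}: only formats both cameras support remain.
      return 0;
    }
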
709
710 AVFoundationVideoCapturer::~AVFoundationVideoCapturer() {
711 _capturer = nil;
712 }
713
714 cricket::CaptureState AVFoundationVideoCapturer::Start(
715 const cricket::VideoFormat& format) {
716 if (!_capturer) {
717 LOG(LS_ERROR) << "Failed to create AVFoundation capturer.";
718 return cricket::CaptureState::CS_FAILED;
719 }
720 if (_capturer.isRunning) {
721 LOG(LS_ERROR) << "The capturer is already running.";
722 return cricket::CaptureState::CS_FAILED;
723 }
724
725 AVCaptureDevice* device = [_capturer getActiveCaptureDevice];
726 AVCaptureSession* session = _capturer.captureSession;
727
728 if (!SetFormatForCaptureDevice(device, session, format)) {
729 return cricket::CaptureState::CS_FAILED;
730 }
731
732 SetCaptureFormat(&format);
733 // This isn't super accurate because it takes a while for the AVCaptureSession
734 // to spin up, and this call returns asynchronously.
735 // TODO(tkchin): make this better.
736 [_capturer start];
737 SetCaptureState(cricket::CaptureState::CS_RUNNING);
738
739 return cricket::CaptureState::CS_STARTING;
740 }
741
742 void AVFoundationVideoCapturer::Stop() {
743 [_capturer stop];
744 SetCaptureFormat(NULL);
745 }
746
747 bool AVFoundationVideoCapturer::IsRunning() {
748 return _capturer.isRunning;
749 }
750
751 AVCaptureSession* AVFoundationVideoCapturer::GetCaptureSession() {
752 return _capturer.captureSession;
753 }
754
755 bool AVFoundationVideoCapturer::CanUseBackCamera() const {
756 return _capturer.canUseBackCamera;
757 }
758
759 void AVFoundationVideoCapturer::SetUseBackCamera(bool useBackCamera) {
760 _capturer.useBackCamera = useBackCamera;
761 }
762
763 bool AVFoundationVideoCapturer::GetUseBackCamera() const {
764 return _capturer.useBackCamera;
765 }
766
767 void AVFoundationVideoCapturer::CaptureSampleBuffer(
768 CMSampleBufferRef sample_buffer, VideoRotation rotation) {
769 if (CMSampleBufferGetNumSamples(sample_buffer) != 1 ||
770 !CMSampleBufferIsValid(sample_buffer) ||
771 !CMSampleBufferDataIsReady(sample_buffer)) {
772 return;
773 }
774
775 CVImageBufferRef image_buffer = CMSampleBufferGetImageBuffer(sample_buffer);
776 if (image_buffer == NULL) {
777 return;
778 }
779
780 const int captured_width = CVPixelBufferGetWidth(image_buffer);
781 const int captured_height = CVPixelBufferGetHeight(image_buffer);
782
783 int adapted_width;
784 int adapted_height;
785 int crop_width;
786 int crop_height;
787 int crop_x;
788 int crop_y;
789 int64_t translated_camera_time_us;
790
791 if (!AdaptFrame(captured_width, captured_height,
792 rtc::TimeNanos() / rtc::kNumNanosecsPerMicrosec,
793 rtc::TimeMicros(), &adapted_width, &adapted_height,
794 &crop_width, &crop_height, &crop_x, &crop_y,
795 &translated_camera_time_us)) {
796 return;
797 }
798
799 rtc::scoped_refptr<VideoFrameBuffer> buffer =
800 new rtc::RefCountedObject<CoreVideoFrameBuffer>(
801 image_buffer,
802 adapted_width, adapted_height,
803 crop_width, crop_height,
804 crop_x, crop_y);
805
806 // Applying rotation is only supported for legacy reasons and performance is
807 // not critical here.
808 if (apply_rotation() && rotation != kVideoRotation_0) {
809 buffer = buffer->NativeToI420Buffer();
810 rtc::scoped_refptr<I420Buffer> rotated_buffer =
811 (rotation == kVideoRotation_180)
812 ? I420Buffer::Create(adapted_width, adapted_height)
813 : I420Buffer::Create(adapted_height, adapted_width);
814 libyuv::I420Rotate(
815 buffer->DataY(), buffer->StrideY(),
816 buffer->DataU(), buffer->StrideU(),
817 buffer->DataV(), buffer->StrideV(),
818 rotated_buffer->MutableDataY(), rotated_buffer->StrideY(),
819 rotated_buffer->MutableDataU(), rotated_buffer->StrideU(),
820 rotated_buffer->MutableDataV(), rotated_buffer->StrideV(),
821 buffer->width(), buffer->height(),
822 static_cast<libyuv::RotationMode>(rotation));
823 buffer = rotated_buffer;
824 }
825
826 OnFrame(webrtc::VideoFrame(buffer, rotation, translated_camera_time_us),
827 captured_width, captured_height);
828 }
829
830 } // namespace webrtc