Chromium Code Reviews

Side by Side Diff: webrtc/sdk/objc/Framework/Classes/RTCAVFoundationVideoCapturerInternal.mm

Issue 2488973002: Split avfoundationcapturer classes in separate files. (Closed)
Patch Set: Fix comments. Created 4 years ago
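For orientation before the diff: the new file imports "RTCAVFoundationVideoCapturerInternal.h", which is not shown on this page. A minimal sketch of what that header presumably declares, assuming it simply carries over the @interface block this patch removes from the .mm (plus the nullability annotations visible in the new column), is:

// RTCAVFoundationVideoCapturerInternal.h (hypothetical reconstruction,
// not copied from the actual CL).
#import <AVFoundation/AVFoundation.h>

#include "avfoundationvideocapturer.h"

NS_ASSUME_NONNULL_BEGIN

// Captures frames on behalf of an owning webrtc::AVFoundationVideoCapturer,
// which is not ref counted, so this class is not exposed directly.
@interface RTCAVFoundationVideoCapturerInternal
    : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate>

@property(nonatomic, readonly) AVCaptureSession *captureSession;
@property(nonatomic, readonly) dispatch_queue_t frameQueue;
@property(nonatomic, readonly) BOOL canUseBackCamera;
@property(nonatomic, assign) BOOL useBackCamera;  // Defaults to NO.
@property(atomic, assign) BOOL isRunning;    // Whether the session is running.
@property(atomic, assign) BOOL hasStarted;   // Whether we have an unmatched start.

- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer;
- (AVCaptureDevice *)getActiveCaptureDevice;
- (nullable AVCaptureDevice *)frontCaptureDevice;
- (nullable AVCaptureDevice *)backCaptureDevice;

// Starts and stops the capture session asynchronously.
- (void)start;
- (void)stop;

@end

NS_ASSUME_NONNULL_END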
1 /* 1 /*
2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. 2 * Copyright 2016 The WebRTC project authors. All Rights Reserved.
3 * 3 *
4 * Use of this source code is governed by a BSD-style license 4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source 5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found 6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may 7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree. 8 * be found in the AUTHORS file in the root of the source tree.
9 */ 9 */
10 10
11 #include "avfoundationvideocapturer.h" 11 #import "RTCAVFoundationVideoCapturerInternal.h"
12 12
13 #import <AVFoundation/AVFoundation.h>
14 #import <Foundation/Foundation.h> 13 #import <Foundation/Foundation.h>
15 #if TARGET_OS_IPHONE 14 #if TARGET_OS_IPHONE
16 #import <UIKit/UIKit.h> 15 #import <UIKit/UIKit.h>
17 #endif 16 #endif
18 17
19 #import "RTCDispatcher+Private.h" 18 #import "RTCDispatcher+Private.h"
19 #import "RTCAVFoundationFormatMapper.h"
20 #import "WebRTC/RTCLogging.h" 20 #import "WebRTC/RTCLogging.h"
21 #if TARGET_OS_IPHONE
22 #import "WebRTC/UIDevice+RTCDevice.h"
23 #endif
24 21
25 #include "libyuv/rotate.h" 22 NS_ASSUME_NONNULL_BEGIN
26
27 #include "webrtc/base/bind.h"
28 #include "webrtc/base/checks.h"
29 #include "webrtc/base/logging.h"
30 #include "webrtc/base/thread.h"
31 #include "webrtc/common_video/include/corevideo_frame_buffer.h"
32 #include "webrtc/common_video/rotation.h"
33
34 // TODO(denicija): add support for higher frame rates.
35 // See http://crbug.com/webrtc/6355 for more info.
36 static const int kFramesPerSecond = 30;
37
38 static inline BOOL IsMediaSubTypeSupported(FourCharCode mediaSubType) {
39 return (mediaSubType == kCVPixelFormatType_420YpCbCr8PlanarFullRange ||
40 mediaSubType == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange);
41 }
42
43 static inline BOOL IsFrameRateWithinRange(int fps, AVFrameRateRange *range) {
44 return range.minFrameRate <= fps && range.maxFrameRate >= fps;
45 }
46
47 // Returns a filtered array of device formats, based on the predefined
48 // constraints our stack imposes.
49 static NSArray<AVCaptureDeviceFormat *> *GetEligibleDeviceFormats(
50 const AVCaptureDevice *device,
51 int supportedFps) {
52 NSMutableArray<AVCaptureDeviceFormat *> *eligibleDeviceFormats =
53 [NSMutableArray array];
54
55 for (AVCaptureDeviceFormat *format in device.formats) {
56 // Filter out subTypes that we currently don't support in the stack.
57 FourCharCode mediaSubType =
58 CMFormatDescriptionGetMediaSubType(format.formatDescription);
59 if (!IsMediaSubTypeSupported(mediaSubType)) {
60 continue;
61 }
62
63 // Filter out frame rate ranges that we currently don't support in the stack.
64 for (AVFrameRateRange *frameRateRange in format.videoSupportedFrameRateRanges) {
65 if (IsFrameRateWithinRange(supportedFps, frameRateRange)) {
66 [eligibleDeviceFormats addObject:format];
67 break;
68 }
69 }
70 }
71
72 return [eligibleDeviceFormats copy];
73 }
74
75 // Mapping from cricket::VideoFormat to AVCaptureDeviceFormat.
76 static AVCaptureDeviceFormat *GetDeviceFormatForVideoFormat(
77 const AVCaptureDevice *device,
78 const cricket::VideoFormat &videoFormat) {
79 AVCaptureDeviceFormat *desiredDeviceFormat = nil;
80 NSArray<AVCaptureDeviceFormat *> *eligibleFormats =
81 GetEligibleDeviceFormats(device, videoFormat.framerate());
82
83 for (AVCaptureDeviceFormat *deviceFormat in eligibleFormats) {
84 CMVideoDimensions dimension =
85 CMVideoFormatDescriptionGetDimensions(deviceFormat.formatDescription);
86 FourCharCode mediaSubType =
87 CMFormatDescriptionGetMediaSubType(deviceFormat.formatDescription);
88
89 if (videoFormat.width == dimension.width &&
90 videoFormat.height == dimension.height) {
91 if (mediaSubType == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) {
92 // This is the preferred format, so no need to wait for a better option.
93 return deviceFormat;
94 } else {
95 // This is a good candidate, but let's wait for something better.
96 desiredDeviceFormat = deviceFormat;
97 }
98 }
99 }
100
101 return desiredDeviceFormat;
102 }
103
104 // Mapping from AVCaptureDeviceFormat to cricket::VideoFormat for a given
105 // input device.
106 static std::set<cricket::VideoFormat> GetSupportedVideoFormatsForDevice(
107 AVCaptureDevice *device) {
108 std::set<cricket::VideoFormat> supportedFormats;
109
110 NSArray<AVCaptureDeviceFormat *> *eligibleFormats =
111 GetEligibleDeviceFormats(device, kFramesPerSecond);
112
113 for (AVCaptureDeviceFormat *deviceFormat in eligibleFormats) {
114 CMVideoDimensions dimension =
115 CMVideoFormatDescriptionGetDimensions(deviceFormat.formatDescription);
116 cricket::VideoFormat format = cricket::VideoFormat(
117 dimension.width, dimension.height,
118 cricket::VideoFormat::FpsToInterval(kFramesPerSecond),
119 cricket::FOURCC_NV12);
120 supportedFormats.insert(format);
121 }
122
123 return supportedFormats;
124 }
125
126 // Sets the device format for the provided capture device. Returns YES/NO depending on success.
127 // TODO(denicija): When this file is split this static method should be reconsidered.
128 // Perhaps adding a category on AVCaptureDevice would be better.
129 static BOOL SetFormatForCaptureDevice(AVCaptureDevice *device,
130 AVCaptureSession *session,
131 const cricket::VideoFormat &format) {
132 AVCaptureDeviceFormat *deviceFormat =
133 GetDeviceFormatForVideoFormat(device, format);
134 const int fps = cricket::VideoFormat::IntervalToFps(format.interval);
135
136 NSError *error = nil;
137 BOOL success = YES;
138 [session beginConfiguration];
139 if ([device lockForConfiguration:&error]) {
140 @try {
141 device.activeFormat = deviceFormat;
142 device.activeVideoMinFrameDuration = CMTimeMake(1, fps);
143 } @catch (NSException *exception) {
144 RTCLogError(
145 @"Failed to set active format!\n User info:%@",
146 exception.userInfo);
147 success = NO;
148 }
149
150 [device unlockForConfiguration];
151 } else {
152 RTCLogError(
153 @"Failed to lock device %@. Error: %@",
154 device, error.userInfo);
155 success = NO;
156 }
157 [session commitConfiguration];
158
159 return success;
160 }
161
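The TODO above suggests a category on AVCaptureDevice as an alternative to this free function. A minimal sketch of that idea, with a hypothetical name that is not part of this CL:

// AVCaptureDevice+RTCVideoFormat.h (hypothetical, not in this CL).
@interface AVCaptureDevice (RTCVideoFormat)
// Would mirror SetFormatForCaptureDevice() above: lock the device, apply the
// format and minimum frame duration inside the session's
// begin/commitConfiguration pair, and return YES on success.
- (BOOL)rtc_setCaptureFormat:(const cricket::VideoFormat &)format
                   inSession:(AVCaptureSession *)session;
@end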
162 // This class is used to capture frames using AVFoundation APIs on iOS. It is
163 // meant to be owned by an instance of AVFoundationVideoCapturer. The reason is
164 // that other webrtc objects own cricket::VideoCapturer, which is not
165 // ref counted. To prevent bad behavior we do not expose this class directly.
166 @interface RTCAVFoundationVideoCapturerInternal : NSObject
167 <AVCaptureVideoDataOutputSampleBufferDelegate>
168
169 @property(nonatomic, readonly) AVCaptureSession *captureSession;
170 @property(nonatomic, readonly) dispatch_queue_t frameQueue;
171 @property(nonatomic, readonly) BOOL canUseBackCamera;
172 @property(nonatomic, assign) BOOL useBackCamera; // Defaults to NO.
173 @property(atomic, assign) BOOL isRunning; // Whether the capture session is running.
174 @property(atomic, assign) BOOL hasStarted; // Whether we have an unmatched start.
175
176 // We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it
177 // when we receive frames. This is safe because this object should be owned by
178 // it.
179 - (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer;
180 - (AVCaptureDevice *)getActiveCaptureDevice;
181
182 - (nullable AVCaptureDevice *)frontCaptureDevice;
183 - (nullable AVCaptureDevice *)backCaptureDevice;
184
185 // Starts and stops the capture session asynchronously. We cannot do this
186 // synchronously without blocking a WebRTC thread.
187 - (void)start;
188 - (void)stop;
189
190 @end
191 23
192 @implementation RTCAVFoundationVideoCapturerInternal { 24 @implementation RTCAVFoundationVideoCapturerInternal {
193 // Keep pointers to inputs for convenience. 25 // Keep pointers to inputs for convenience.
194 AVCaptureDeviceInput *_frontCameraInput; 26 AVCaptureDeviceInput *_frontCameraInput;
195 AVCaptureDeviceInput *_backCameraInput; 27 AVCaptureDeviceInput *_backCameraInput;
196 AVCaptureVideoDataOutput *_videoDataOutput; 28 AVCaptureVideoDataOutput *_videoDataOutput;
197 // The cricket::VideoCapturer that owns this class. Should never be NULL. 29 // The cricket::VideoCapturer that owns this class. Should never be NULL.
198 webrtc::AVFoundationVideoCapturer *_capturer; 30 webrtc::AVFoundationVideoCapturer *_capturer;
199 webrtc::VideoRotation _rotation; 31 webrtc::VideoRotation _rotation;
200 BOOL _hasRetriedOnFatalError; 32 BOOL _hasRetriedOnFatalError;
(...skipping 64 matching lines...)
265 } 97 }
266 98
267 - (AVCaptureSession *)captureSession { 99 - (AVCaptureSession *)captureSession {
268 return _captureSession; 100 return _captureSession;
269 } 101 }
270 102
271 - (AVCaptureDevice *)getActiveCaptureDevice { 103 - (AVCaptureDevice *)getActiveCaptureDevice {
272 return self.useBackCamera ? _backCameraInput.device : _frontCameraInput.device; 104 return self.useBackCamera ? _backCameraInput.device : _frontCameraInput.device;
273 } 105 }
274 106
275 - (AVCaptureDevice *)frontCaptureDevice { 107 - (nullable AVCaptureDevice *)frontCaptureDevice {
276 return _frontCameraInput.device; 108 return _frontCameraInput.device;
277 } 109 }
278 110
279 - (AVCaptureDevice *)backCaptureDevice { 111 - (nullable AVCaptureDevice *)backCaptureDevice {
280 return _backCameraInput.device; 112 return _backCameraInput.device;
281 } 113 }
282 114
283 - (dispatch_queue_t)frameQueue { 115 - (dispatch_queue_t)frameQueue {
284 if (!_frameQueue) { 116 if (!_frameQueue) {
285 _frameQueue = 117 _frameQueue =
286 dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video", 118 dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video", DISPATCH_QUEUE_SERIAL);
287 DISPATCH_QUEUE_SERIAL); 119 dispatch_set_target_queue(_frameQueue,
288 dispatch_set_target_queue( 120 dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
289 _frameQueue,
290 dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
291 } 121 }
292 return _frameQueue; 122 return _frameQueue;
293 } 123 }
294 124
295 // Called from any thread (likely main thread). 125 // Called from any thread (likely main thread).
296 - (BOOL)canUseBackCamera { 126 - (BOOL)canUseBackCamera {
297 return _backCameraInput != nil; 127 return _backCameraInput != nil;
298 } 128 }
299 129
300 // Called from any thread (likely main thread). 130 // Called from any thread (likely main thread).
301 - (BOOL)useBackCamera { 131 - (BOOL)useBackCamera {
302 @synchronized(self) { 132 @synchronized(self) {
303 return _useBackCamera; 133 return _useBackCamera;
304 } 134 }
305 } 135 }
306 136
307 // Called from any thread (likely main thread). 137 // Called from any thread (likely main thread).
308 - (void)setUseBackCamera:(BOOL)useBackCamera { 138 - (void)setUseBackCamera:(BOOL)useBackCamera {
309 if (!self.canUseBackCamera) { 139 if (!self.canUseBackCamera) {
310 if (useBackCamera) { 140 if (useBackCamera) {
311 RTCLogWarning(@"No rear-facing camera exists or it cannot be used;" 141 RTCLogWarning(@"No rear-facing camera exists or it cannot be used;"
312 " not switching."); 142 " not switching.");
313 } 143 }
314 return; 144 return;
315 } 145 }
316 @synchronized(self) { 146 @synchronized(self) {
317 if (_useBackCamera == useBackCamera) { 147 if (_useBackCamera == useBackCamera) {
318 return; 148 return;
319 } 149 }
320 _useBackCamera = useBackCamera; 150 _useBackCamera = useBackCamera;
321 [self updateSessionInputForUseBackCamera:useBackCamera]; 151 [self updateSessionInputForUseBackCamera:useBackCamera];
322 } 152 }
323 } 153 }
324 154
325 // Called from WebRTC thread. 155 // Called from WebRTC thread.
326 - (void)start { 156 - (void)start {
327 if (self.hasStarted) { 157 if (self.hasStarted) {
328 return; 158 return;
329 } 159 }
330 self.hasStarted = YES; 160 self.hasStarted = YES;
331 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession 161 [RTCDispatcher
332 block:^{ 162 dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
163 block:^{
333 #if TARGET_OS_IPHONE 164 #if TARGET_OS_IPHONE
334 // Default to portrait orientation on iPhone. This will be reset in 165 // Default to portrait orientation on iPhone. This will be reset in
335 // updateOrientation unless orientation is unknown/faceup/facedown. 166 // updateOrientation unless orientation is unknown/faceup/facedown.
336 _rotation = webrtc::kVideoRotation_90; 167 _rotation = webrtc::kVideoRotation_90;
337 #else 168 #else
338 // No rotation on Mac. 169 // No rotation on Mac.
339 _rotation = webrtc::kVideoRotation_0; 170 _rotation = webrtc::kVideoRotation_0;
340 #endif 171 #endif
341 [self updateOrientation]; 172 [self updateOrientation];
342 #if TARGET_OS_IPHONE 173 #if TARGET_OS_IPHONE
343 [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications]; 174 [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
344 #endif 175 #endif
345 AVCaptureSession *captureSession = self.captureSession; 176 AVCaptureSession *captureSession = self.captureSession;
346 [captureSession startRunning]; 177 [captureSession startRunning];
347 }]; 178 }];
348 } 179 }
349 180
350 // Called from same thread as start. 181 // Called from same thread as start.
351 - (void)stop { 182 - (void)stop {
352 if (!self.hasStarted) { 183 if (!self.hasStarted) {
353 return; 184 return;
354 } 185 }
355 self.hasStarted = NO; 186 self.hasStarted = NO;
356 // Due to this async block, it's possible that the ObjC object outlives the 187 // Due to this async block, it's possible that the ObjC object outlives the
357 // C++ one. In order to not invoke functions on the C++ object, we set 188 // C++ one. In order to not invoke functions on the C++ object, we set
358 // hasStarted immediately instead of dispatching it async. 189 // hasStarted immediately instead of dispatching it async.
359 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession 190 [RTCDispatcher
360 block:^{ 191 dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
361 [_videoDataOutput setSampleBufferDelegate:nil queue:nullptr]; 192 block:^{
362 [_captureSession stopRunning]; 193 [_videoDataOutput setSampleBufferDelegate:nil queue:nullptr];
194 [_captureSession stopRunning];
363 #if TARGET_OS_IPHONE 195 #if TARGET_OS_IPHONE
364 [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications]; 196 [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
365 #endif 197 #endif
366 }]; 198 }];
367 } 199 }
368 200
369 #pragma mark iOS notifications 201 #pragma mark iOS notifications
370 202
371 #if TARGET_OS_IPHONE 203 #if TARGET_OS_IPHONE
372 - (void)deviceOrientationDidChange:(NSNotification *)notification { 204 - (void)deviceOrientationDidChange:(NSNotification *)notification {
373 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession 205 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
374 block:^{ 206 block:^{
375 [self updateOrientation]; 207 [self updateOrientation];
376 }]; 208 }];
377 } 209 }
378 #endif 210 #endif
379 211
380 #pragma mark AVCaptureVideoDataOutputSampleBufferDelegate 212 #pragma mark AVCaptureVideoDataOutputSampleBufferDelegate
381 213
382 - (void)captureOutput:(AVCaptureOutput *)captureOutput 214 - (void)captureOutput:(AVCaptureOutput *)captureOutput
383 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer 215 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
384 fromConnection:(AVCaptureConnection *)connection { 216 fromConnection:(AVCaptureConnection *)connection {
385 NSParameterAssert(captureOutput == _videoDataOutput); 217 NSParameterAssert(captureOutput == _videoDataOutput);
386 if (!self.hasStarted) { 218 if (!self.hasStarted) {
387 return; 219 return;
388 } 220 }
389 _capturer->CaptureSampleBuffer(sampleBuffer, _rotation); 221 _capturer->CaptureSampleBuffer(sampleBuffer, _rotation);
390 } 222 }
391 223
392 - (void)captureOutput:(AVCaptureOutput *)captureOutput 224 - (void)captureOutput:(AVCaptureOutput *)captureOutput
393 didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer 225 didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
394 fromConnection:(AVCaptureConnection *)connection { 226 fromConnection:(AVCaptureConnection *)connection {
395 RTCLogError(@"Dropped sample buffer."); 227 RTCLogError(@"Dropped sample buffer.");
396 } 228 }
397 229
398 #pragma mark - AVCaptureSession notifications 230 #pragma mark - AVCaptureSession notifications
399 231
400 - (void)handleCaptureSessionInterruption:(NSNotification *)notification { 232 - (void)handleCaptureSessionInterruption:(NSNotification *)notification {
401 NSString *reasonString = nil; 233 NSString *reasonString = nil;
402 #if defined(__IPHONE_9_0) && defined(__IPHONE_OS_VERSION_MAX_ALLOWED) \ 234 #if defined(__IPHONE_9_0) && defined(__IPHONE_OS_VERSION_MAX_ALLOWED) && \
403 && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_9_0 235 __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_9_0
404 NSNumber *reason = 236 NSNumber *reason = notification.userInfo[AVCaptureSessionInterruptionReasonKey];
405 notification.userInfo[AVCaptureSessionInterruptionReasonKey];
406 if (reason) { 237 if (reason) {
407 switch (reason.intValue) { 238 switch (reason.intValue) {
408 case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground: 239 case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground:
409 reasonString = @"VideoDeviceNotAvailableInBackground"; 240 reasonString = @"VideoDeviceNotAvailableInBackground";
410 break; 241 break;
411 case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient: 242 case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient:
412 reasonString = @"AudioDeviceInUseByAnotherClient"; 243 reasonString = @"AudioDeviceInUseByAnotherClient";
413 break; 244 break;
414 case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient: 245 case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient:
415 reasonString = @"VideoDeviceInUseByAnotherClient"; 246 reasonString = @"VideoDeviceInUseByAnotherClient";
416 break; 247 break;
417 case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps: 248 case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps:
418 reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps"; 249 reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps";
419 break; 250 break;
420 } 251 }
421 } 252 }
422 #endif 253 #endif
423 RTCLog(@"Capture session interrupted: %@", reasonString); 254 RTCLog(@"Capture session interrupted: %@", reasonString);
424 // TODO(tkchin): Handle this case. 255 // TODO(tkchin): Handle this case.
425 } 256 }
426 257
427 - (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification { 258 - (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification {
428 RTCLog(@"Capture session interruption ended."); 259 RTCLog(@"Capture session interruption ended.");
429 // TODO(tkchin): Handle this case. 260 // TODO(tkchin): Handle this case.
430 } 261 }
431 262
432 - (void)handleCaptureSessionRuntimeError:(NSNotification *)notification { 263 - (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
433 NSError *error = 264 NSError *error = [notification.userInfo objectForKey:AVCaptureSessionErrorKey];
434 [notification.userInfo objectForKey:AVCaptureSessionErrorKey];
435 RTCLogError(@"Capture session runtime error: %@", error); 265 RTCLogError(@"Capture session runtime error: %@", error);
436 266
437 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession 267 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
438 block:^{ 268 block:^{
439 #if TARGET_OS_IPHONE 269 #if TARGET_OS_IPHONE
440 if (error.code == AVErrorMediaServicesWereReset) { 270 if (error.code == AVErrorMediaServicesWereReset) {
441 [self handleNonFatalError]; 271 [self handleNonFatalError];
442 } else { 272 } else {
443 [self handleFatalError]; 273 [self handleFatalError];
444 } 274 }
445 #else 275 #else
446 [self handleFatalError]; 276 [self handleFatalError];
447 #endif 277 #endif
448 }]; 278 }];
449 } 279 }
450 280
451 - (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification { 281 - (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification {
452 RTCLog(@"Capture session started."); 282 RTCLog(@"Capture session started.");
453 283
454 self.isRunning = YES; 284 self.isRunning = YES;
455 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession 285 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
456 block:^{ 286 block:^{
457 // If we successfully restarted after an unknown error, allow future 287 // If we successfully restarted after an unknown error,
458 // retries on fatal errors. 288 // allow future retries on fatal errors.
459 _hasRetriedOnFatalError = NO; 289 _hasRetriedOnFatalError = NO;
460 }]; 290 }];
461 } 291 }
462 292
463 - (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification { 293 - (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
464 RTCLog(@"Capture session stopped."); 294 RTCLog(@"Capture session stopped.");
465 self.isRunning = NO; 295 self.isRunning = NO;
466 } 296 }
467 297
468 - (void)handleFatalError { 298 - (void)handleFatalError {
469 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession 299 [RTCDispatcher
470 block:^{ 300 dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
471 if (!_hasRetriedOnFatalError) { 301 block:^{
472 RTCLogWarning(@"Attempting to recover from fatal capture error."); 302 if (!_hasRetriedOnFatalError) {
473 [self handleNonFatalError]; 303 RTCLogWarning(@"Attempting to recover from fatal capture error.");
474 _hasRetriedOnFatalError = YES; 304 [self handleNonFatalError];
475 } else { 305 _hasRetriedOnFatalError = YES;
476 RTCLogError(@"Previous fatal error recovery failed."); 306 } else {
477 } 307 RTCLogError(@"Previous fatal error recovery failed.");
478 }]; 308 }
309 }];
479 } 310 }
480 311
481 - (void)handleNonFatalError { 312 - (void)handleNonFatalError {
482 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession 313 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
483 block:^{ 314 block:^{
484 if (self.hasStarted) { 315 if (self.hasStarted) {
485 RTCLog(@"Restarting capture session after error."); 316 RTCLog(@"Restarting capture session after e rror.");
486 [self.captureSession startRunning]; 317 [self.captureSession startRunning];
487 } 318 }
488 }]; 319 }];
489 } 320 }
490 321
491 #if TARGET_OS_IPHONE 322 #if TARGET_OS_IPHONE
492 323
493 #pragma mark - UIApplication notifications 324 #pragma mark - UIApplication notifications
494 325
495 - (void)handleApplicationDidBecomeActive:(NSNotification *)notification { 326 - (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
496 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession 327 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
497 block:^{ 328 block:^{
498 if (self.hasStarted && !self.captureSession.isRunning) { 329 if (self.hasStarted && !self.captureSession.isRunning) {
499 RTCLog(@"Restarting capture session on active."); 330 RTCLog(@"Restarting capture session on active.");
500 [self.captureSession startRunning]; 331 [self.captureSession startRunning];
501 } 332 }
502 }]; 333 }];
503 } 334 }
504 335
505 #endif // TARGET_OS_IPHONE 336 #endif // TARGET_OS_IPHONE
506 337
507 #pragma mark - Private 338 #pragma mark - Private
508 339
509 - (BOOL)setupCaptureSession { 340 - (BOOL)setupCaptureSession {
510 AVCaptureSession *captureSession = [[AVCaptureSession alloc] init]; 341 AVCaptureSession *captureSession = [[AVCaptureSession alloc] init];
511 #if defined(WEBRTC_IOS) 342 #if defined(WEBRTC_IOS)
512 captureSession.usesApplicationAudioSession = NO; 343 captureSession.usesApplicationAudioSession = NO;
(...skipping 14 matching lines...)
527 RTCLogError(@"No front camera for capture session."); 358 RTCLogError(@"No front camera for capture session.");
528 return NO; 359 return NO;
529 } 360 }
530 361
531 // Add the inputs. 362 // Add the inputs.
532 if (![captureSession canAddInput:frontCameraInput] || 363 if (![captureSession canAddInput:frontCameraInput] ||
533 (backCameraInput && ![captureSession canAddInput:backCameraInput])) { 364 (backCameraInput && ![captureSession canAddInput:backCameraInput])) {
534 RTCLogError(@"Session does not support capture inputs."); 365 RTCLogError(@"Session does not support capture inputs.");
535 return NO; 366 return NO;
536 } 367 }
537 AVCaptureDeviceInput *input = self.useBackCamera ? 368 AVCaptureDeviceInput *input = self.useBackCamera ? backCameraInput : frontCameraInput;
538 backCameraInput : frontCameraInput;
539 [captureSession addInput:input]; 369 [captureSession addInput:input];
540 370
541 _captureSession = captureSession; 371 _captureSession = captureSession;
542 return YES; 372 return YES;
543 } 373 }
544 374
545 - (AVCaptureVideoDataOutput *)videoDataOutput { 375 - (AVCaptureVideoDataOutput *)videoDataOutput {
546 if (!_videoDataOutput) { 376 if (!_videoDataOutput) {
547 // Make the capturer output NV12. Ideally we want I420 but that's not 377 // Make the capturer output NV12. Ideally we want I420 but that's not
548 // currently supported on iPhone / iPad. 378 // currently supported on iPhone / iPad.
549 AVCaptureVideoDataOutput *videoDataOutput = 379 AVCaptureVideoDataOutput *videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
550 [[AVCaptureVideoDataOutput alloc] init];
551 videoDataOutput.videoSettings = @{ 380 videoDataOutput.videoSettings = @{
552 (NSString *)kCVPixelBufferPixelFormatTypeKey : 381 (NSString *)
553 @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) 382 //TODO(denicija): Remove this color conversion and use the original capture format directly.
magjed_webrtc 2016/11/21 13:57:52 nit: Add space between // and TODO.
daniela-webrtc 2016/11/21 16:29:50 Done.
383 kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
554 }; 384 };
555 videoDataOutput.alwaysDiscardsLateVideoFrames = NO; 385 videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
556 [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue]; 386 [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue];
557 _videoDataOutput = videoDataOutput; 387 _videoDataOutput = videoDataOutput;
558 } 388 }
559 return _videoDataOutput; 389 return _videoDataOutput;
560 } 390 }
561 391
562 - (AVCaptureDevice *)videoCaptureDeviceForPosition: 392 - (AVCaptureDevice *)videoCaptureDeviceForPosition:(AVCaptureDevicePosition)position {
563 (AVCaptureDevicePosition)position { 393 for (AVCaptureDevice *captureDevice in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
564 for (AVCaptureDevice *captureDevice in
565 [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
566 if (captureDevice.position == position) { 394 if (captureDevice.position == position) {
567 return captureDevice; 395 return captureDevice;
568 } 396 }
569 } 397 }
570 return nil; 398 return nil;
571 } 399 }
572 400
573 - (AVCaptureDeviceInput *)frontCameraInput { 401 - (AVCaptureDeviceInput *)frontCameraInput {
574 if (!_frontCameraInput) { 402 if (!_frontCameraInput) {
575 #if TARGET_OS_IPHONE 403 #if TARGET_OS_IPHONE
576 AVCaptureDevice *frontCameraDevice = 404 AVCaptureDevice *frontCameraDevice =
577 [self videoCaptureDeviceForPosition:AVCaptureDevicePositionFront]; 405 [self videoCaptureDeviceForPosition:AVCaptureDevicePositionFront];
578 #else 406 #else
579 AVCaptureDevice *frontCameraDevice = 407 AVCaptureDevice *frontCameraDevice =
580 [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; 408 [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
581 #endif 409 #endif
582 if (!frontCameraDevice) { 410 if (!frontCameraDevice) {
583 RTCLogWarning(@"Failed to find front capture device."); 411 RTCLogWarning(@"Failed to find front capture device.");
584 return nil; 412 return nil;
585 } 413 }
586 NSError *error = nil; 414 NSError *error = nil;
587 AVCaptureDeviceInput *frontCameraInput = 415 AVCaptureDeviceInput *frontCameraInput =
588 [AVCaptureDeviceInput deviceInputWithDevice:frontCameraDevice 416 [AVCaptureDeviceInput deviceInputWithDevice:frontCameraDevice error:&error];
589 error:&error];
590 if (!frontCameraInput) { 417 if (!frontCameraInput) {
591 RTCLogError(@"Failed to create front camera input: %@", 418 RTCLogError(@"Failed to create front camera input: %@", error.localizedDescription);
592 error.localizedDescription);
593 return nil; 419 return nil;
594 } 420 }
595 _frontCameraInput = frontCameraInput; 421 _frontCameraInput = frontCameraInput;
596 } 422 }
597 return _frontCameraInput; 423 return _frontCameraInput;
598 } 424 }
599 425
600 - (AVCaptureDeviceInput *)backCameraInput { 426 - (AVCaptureDeviceInput *)backCameraInput {
601 if (!_backCameraInput) { 427 if (!_backCameraInput) {
602 AVCaptureDevice *backCameraDevice = 428 AVCaptureDevice *backCameraDevice =
603 [self videoCaptureDeviceForPosition:AVCaptureDevicePositionBack]; 429 [self videoCaptureDeviceForPosition:AVCaptureDevicePositionBack];
604 if (!backCameraDevice) { 430 if (!backCameraDevice) {
605 RTCLogWarning(@"Failed to find back capture device."); 431 RTCLogWarning(@"Failed to find back capture device.");
606 return nil; 432 return nil;
607 } 433 }
608 NSError *error = nil; 434 NSError *error = nil;
609 AVCaptureDeviceInput *backCameraInput = 435 AVCaptureDeviceInput *backCameraInput =
610 [AVCaptureDeviceInput deviceInputWithDevice:backCameraDevice 436 [AVCaptureDeviceInput deviceInputWithDevice:backCameraDevice error:&error];
611 error:&error];
612 if (!backCameraInput) { 437 if (!backCameraInput) {
613 RTCLogError(@"Failed to create back camera input: %@", 438 RTCLogError(@"Failed to create back camera input: %@", error.localizedDescription);
614 error.localizedDescription);
615 return nil; 439 return nil;
616 } 440 }
617 _backCameraInput = backCameraInput; 441 _backCameraInput = backCameraInput;
618 } 442 }
619 return _backCameraInput; 443 return _backCameraInput;
620 } 444 }
621 445
622 // Called from capture session queue. 446 // Called from capture session queue.
623 - (void)updateOrientation { 447 - (void)updateOrientation {
624 #if TARGET_OS_IPHONE 448 #if TARGET_OS_IPHONE
625 switch ([UIDevice currentDevice].orientation) { 449 switch ([UIDevice currentDevice].orientation) {
626 case UIDeviceOrientationPortrait: 450 case UIDeviceOrientationPortrait:
627 _rotation = webrtc::kVideoRotation_90; 451 _rotation = webrtc::kVideoRotation_90;
628 break; 452 break;
629 case UIDeviceOrientationPortraitUpsideDown: 453 case UIDeviceOrientationPortraitUpsideDown:
630 _rotation = webrtc::kVideoRotation_270; 454 _rotation = webrtc::kVideoRotation_270;
631 break; 455 break;
632 case UIDeviceOrientationLandscapeLeft: 456 case UIDeviceOrientationLandscapeLeft:
633 _rotation = _capturer->GetUseBackCamera() ? webrtc::kVideoRotation_0 457 _rotation =
634 : webrtc::kVideoRotation_180; 458 _capturer->GetUseBackCamera() ? webrtc::kVideoRotation_0 : webrtc::kVideoRotation_180;
635 break; 459 break;
636 case UIDeviceOrientationLandscapeRight: 460 case UIDeviceOrientationLandscapeRight:
637 _rotation = _capturer->GetUseBackCamera() ? webrtc::kVideoRotation_180 461 _rotation =
638 : webrtc::kVideoRotation_0; 462 _capturer->GetUseBackCamera() ? webrtc::kVideoRotation_180 : webrtc::kVideoRotation_0;
639 break; 463 break;
640 case UIDeviceOrientationFaceUp: 464 case UIDeviceOrientationFaceUp:
641 case UIDeviceOrientationFaceDown: 465 case UIDeviceOrientationFaceDown:
642 case UIDeviceOrientationUnknown: 466 case UIDeviceOrientationUnknown:
643 // Ignore. 467 // Ignore.
644 break; 468 break;
645 } 469 }
646 #endif 470 #endif
647 } 471 }
648 472
649 // Update the current session input to match what's stored in _useBackCamera. 473 // Update the current session input to match what's stored in _useBackCamera.
650 - (void)updateSessionInputForUseBackCamera:(BOOL)useBackCamera { 474 - (void)updateSessionInputForUseBackCamera:(BOOL)useBackCamera {
651 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession 475 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
652 block:^{ 476 block:^{
653 [_captureSession beginConfiguration]; 477 [_captureSession beginConfiguration];
654 AVCaptureDeviceInput *oldInput = _backCameraInput; 478 AVCaptureDeviceInput *oldInput = _backCameraInput;
655 AVCaptureDeviceInput *newInput = _frontCameraInput; 479 AVCaptureDeviceInput *newInput = _frontCameraInput;
656 if (useBackCamera) { 480 if (useBackCamera) {
657 oldInput = _frontCameraInput; 481 oldInput = _frontCameraInput;
658 newInput = _backCameraInput; 482 newInput = _backCameraInput;
659 } 483 }
660 if (oldInput) { 484 if (oldInput) {
661 // Ok to remove this even if it's not attached. Will be a no-op. 485 // Ok to remove this even if it's not attached. Will be a no-op.
662 [_captureSession removeInput:oldInput]; 486 [_captureSession removeInput:oldInput];
663 } 487 }
664 if (newInput) { 488 if (newInput) {
665 [_captureSession addInput:newInput]; 489 [_captureSession addInput:newInput];
666 } 490 }
667 [self updateOrientation]; 491 [self updateOrientation];
668 AVCaptureDevice *newDevice = newInput.device; 492 AVCaptureDevice *newDevice = newInput.device;
669 const cricket::VideoFormat *format = _capturer->GetCaptureFormat(); 493 const cricket::VideoFormat *format =
670 SetFormatForCaptureDevice(newDevice, _captureSession, *format); 494 _capturer->GetCaptureFormat();
magjed_webrtc 2016/11/21 13:57:52 nit: The indentation here should be 4 spaces.
daniela-webrtc 2016/11/21 16:29:50 All these classes use 2 space indentation throughout.
magjed_webrtc 2016/11/21 16:55:35 No, 2 spaces is used for indenting after e.g. an i…
671 [_captureSession commitConfiguration]; 495 [RTCAVFoundationFormatMapper setFormat:*format
672 }]; 496 forCaptureDevice:newDevice
497 captureSession:_captureSession];
498 [_captureSession commitConfiguration];
499 }];
673 } 500 }
674 501
675 @end 502 @end
676 503
677 namespace webrtc { 504 NS_ASSUME_NONNULL_END
678
679 enum AVFoundationVideoCapturerMessageType : uint32_t {
680 kMessageTypeFrame,
681 };
682
683 AVFoundationVideoCapturer::AVFoundationVideoCapturer() : _capturer(nil) {
684 _capturer =
685 [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];
686
687 std::set<cricket::VideoFormat> front_camera_video_formats =
688 GetSupportedVideoFormatsForDevice([_capturer frontCaptureDevice]);
689
690 std::set<cricket::VideoFormat> back_camera_video_formats =
691 GetSupportedVideoFormatsForDevice([_capturer backCaptureDevice]);
692
693 std::vector<cricket::VideoFormat> intersection_video_formats;
694 if (back_camera_video_formats.empty()) {
695 intersection_video_formats.assign(front_camera_video_formats.begin(),
696 front_camera_video_formats.end());
697
698 } else if (front_camera_video_formats.empty()) {
699 intersection_video_formats.assign(back_camera_video_formats.begin(),
700 back_camera_video_formats.end());
701 } else {
702 std::set_intersection(
703 front_camera_video_formats.begin(), front_camera_video_formats.end(),
704 back_camera_video_formats.begin(), back_camera_video_formats.end(),
705 std::back_inserter(intersection_video_formats));
706 }
707 SetSupportedFormats(intersection_video_formats);
708 }
709
710 AVFoundationVideoCapturer::~AVFoundationVideoCapturer() {
711 _capturer = nil;
712 }
713
714 cricket::CaptureState AVFoundationVideoCapturer::Start(
715 const cricket::VideoFormat& format) {
716 if (!_capturer) {
717 LOG(LS_ERROR) << "Failed to create AVFoundation capturer.";
718 return cricket::CaptureState::CS_FAILED;
719 }
720 if (_capturer.isRunning) {
721 LOG(LS_ERROR) << "The capturer is already running.";
722 return cricket::CaptureState::CS_FAILED;
723 }
724
725 AVCaptureDevice* device = [_capturer getActiveCaptureDevice];
726 AVCaptureSession* session = _capturer.captureSession;
727
728 if (!SetFormatForCaptureDevice(device, session, format)) {
729 return cricket::CaptureState::CS_FAILED;
730 }
731
732 SetCaptureFormat(&format);
733 // This isn't super accurate because it takes a while for the AVCaptureSession
734 // to spin up, and this call returns async.
735 // TODO(tkchin): make this better.
736 [_capturer start];
737 SetCaptureState(cricket::CaptureState::CS_RUNNING);
738
739 return cricket::CaptureState::CS_STARTING;
740 }
741
742 void AVFoundationVideoCapturer::Stop() {
743 [_capturer stop];
744 SetCaptureFormat(NULL);
745 }
746
747 bool AVFoundationVideoCapturer::IsRunning() {
748 return _capturer.isRunning;
749 }
750
751 AVCaptureSession* AVFoundationVideoCapturer::GetCaptureSession() {
752 return _capturer.captureSession;
753 }
754
755 bool AVFoundationVideoCapturer::CanUseBackCamera() const {
756 return _capturer.canUseBackCamera;
757 }
758
759 void AVFoundationVideoCapturer::SetUseBackCamera(bool useBackCamera) {
760 _capturer.useBackCamera = useBackCamera;
761 }
762
763 bool AVFoundationVideoCapturer::GetUseBackCamera() const {
764 return _capturer.useBackCamera;
765 }
766
767 void AVFoundationVideoCapturer::CaptureSampleBuffer(
768 CMSampleBufferRef sample_buffer, VideoRotation rotation) {
769 if (CMSampleBufferGetNumSamples(sample_buffer) != 1 ||
770 !CMSampleBufferIsValid(sample_buffer) ||
771 !CMSampleBufferDataIsReady(sample_buffer)) {
772 return;
773 }
774
775 CVImageBufferRef image_buffer = CMSampleBufferGetImageBuffer(sample_buffer);
776 if (image_buffer == NULL) {
777 return;
778 }
779
780 const int captured_width = CVPixelBufferGetWidth(image_buffer);
781 const int captured_height = CVPixelBufferGetHeight(image_buffer);
782
783 int adapted_width;
784 int adapted_height;
785 int crop_width;
786 int crop_height;
787 int crop_x;
788 int crop_y;
789 int64_t translated_camera_time_us;
790
791 if (!AdaptFrame(captured_width, captured_height,
792 rtc::TimeNanos() / rtc::kNumNanosecsPerMicrosec,
793 rtc::TimeMicros(), &adapted_width, &adapted_height,
794 &crop_width, &crop_height, &crop_x, &crop_y,
795 &translated_camera_time_us)) {
796 return;
797 }
798
799 rtc::scoped_refptr<VideoFrameBuffer> buffer =
800 new rtc::RefCountedObject<CoreVideoFrameBuffer>(
801 image_buffer,
802 adapted_width, adapted_height,
803 crop_width, crop_height,
804 crop_x, crop_y);
805
806 // Applying rotation is only supported for legacy reasons and performance is
807 // not critical here.
808 if (apply_rotation() && rotation != kVideoRotation_0) {
809 buffer = buffer->NativeToI420Buffer();
810 rtc::scoped_refptr<I420Buffer> rotated_buffer =
811 (rotation == kVideoRotation_180)
812 ? I420Buffer::Create(adapted_width, adapted_height)
813 : I420Buffer::Create(adapted_height, adapted_width);
814 libyuv::I420Rotate(
815 buffer->DataY(), buffer->StrideY(),
816 buffer->DataU(), buffer->StrideU(),
817 buffer->DataV(), buffer->StrideV(),
818 rotated_buffer->MutableDataY(), rotated_buffer->StrideY(),
819 rotated_buffer->MutableDataU(), rotated_buffer->StrideU(),
820 rotated_buffer->MutableDataV(), rotated_buffer->StrideV(),
821 buffer->width(), buffer->height(),
822 static_cast<libyuv::RotationMode>(rotation));
823 buffer = rotated_buffer;
824 }
825
826 OnFrame(webrtc::VideoFrame(buffer, rotation, translated_camera_time_us),
827 captured_width, captured_height);
828 }
829
830 } // namespace webrtc
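The new code in this file delegates format selection to RTCAVFoundationFormatMapper (see the call in updateSessionInputForUseBackCamera:). Its header is not part of this diff page; judging only from that call site, a rough sketch of the expected interface, with the return type assumed to mirror the BOOL that the removed SetFormatForCaptureDevice() returned, is:

// RTCAVFoundationFormatMapper.h (shape inferred from the call site, not
// copied from the actual file in this CL).
#import <AVFoundation/AVFoundation.h>

#include "webrtc/media/base/videocommon.h"  // cricket::VideoFormat (path assumed).

@interface RTCAVFoundationFormatMapper : NSObject

// Presumably picks a matching AVCaptureDeviceFormat for |videoFormat| and
// applies it to |device| within |session|'s begin/commitConfiguration pair.
+ (BOOL)setFormat:(const cricket::VideoFormat &)videoFormat
    forCaptureDevice:(AVCaptureDevice *)device
      captureSession:(AVCaptureSession *)session;

@end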