Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(1088)

Side by Side Diff: webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCCameraVideoCapturer.mm

Issue 2988783002: Removed file RTCCameraVideoCapturer.mm that isn't needed (Closed)
Patch Set: Added changes from review of 2964703002 after commit. Created 3 years, 4 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
(Empty)
1 /*
2 * Copyright 2017 The WebRTC project authors. All Rights Reserved.
3 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
11 #import <Foundation/Foundation.h>
12
13 #import "WebRTC/RTCCameraVideoCapturer.h"
14 #import "WebRTC/RTCLogging.h"
15 #import "WebRTC/RTCVideoFrameBuffer.h"
16
17 #if TARGET_OS_IPHONE
18 #import "WebRTC/UIDevice+RTCDevice.h"
19 #endif
20
21 #import "AVCaptureSession+Device.h"
22 #import "RTCDispatcher+Private.h"
23
// Conversion factor from CMTime seconds to the nanosecond timestamps WebRTC uses.
const int64_t kNanosecondsPerSecond = 1000000000;

// Returns YES if the given CoreMedia pixel-format subtype is one the capture
// stack currently consumes (8-bit 4:2:0 planar full-range or bi-planar video-range).
static inline BOOL IsMediaSubTypeSupported(FourCharCode mediaSubType) {
  return (mediaSubType == kCVPixelFormatType_420YpCbCr8PlanarFullRange ||
          mediaSubType == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange);
}
30
// Private class extension: the capturer is the sample-buffer delegate of its
// AVCaptureVideoDataOutput, and owns a lazily-created serial delivery queue.
@interface RTCCameraVideoCapturer ()<AVCaptureVideoDataOutputSampleBufferDelegate>
@property(nonatomic, readonly) dispatch_queue_t frameQueue;
@end
34
@implementation RTCCameraVideoCapturer {
  AVCaptureVideoDataOutput *_videoDataOutput;
  AVCaptureSession *_captureSession;
  AVCaptureDevice *_currentDevice;
  BOOL _hasRetriedOnFatalError;
  BOOL _isRunning;
  // Will the session be running once all asynchronous operations have been completed?
  BOOL _willBeRunning;
#if TARGET_OS_IPHONE
  // Last known device orientation; read when rotating outgoing frames.
  UIDeviceOrientation _orientation;
#endif
}

@synthesize frameQueue = _frameQueue;
@synthesize captureSession = _captureSession;
50
// Designated initializer. Builds the capture session and its video output up
// front and registers for the AVFoundation/UIKit notifications the capturer
// reacts to. Returns nil if the video data output cannot be added.
- (instancetype)initWithDelegate:(__weak id<RTCVideoCapturerDelegate>)delegate {
  if (self = [super initWithDelegate:delegate]) {
    // Create the capture session and all relevant inputs and outputs. We need
    // to do this in init because the application may want the capture session
    // before we start the capturer for e.g. AVCapturePreviewLayer. All objects
    // created here are retained until dealloc and never recreated.
    if (![self setupCaptureSession]) {
      return nil;
    }
    NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
#if TARGET_OS_IPHONE
    _orientation = UIDeviceOrientationPortrait;
    [center addObserver:self
               selector:@selector(deviceOrientationDidChange:)
                   name:UIDeviceOrientationDidChangeNotification
                 object:nil];
    [center addObserver:self
               selector:@selector(handleCaptureSessionInterruption:)
                   name:AVCaptureSessionWasInterruptedNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionInterruptionEnded:)
                   name:AVCaptureSessionInterruptionEndedNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleApplicationDidBecomeActive:)
                   name:UIApplicationDidBecomeActiveNotification
                 object:[UIApplication sharedApplication]];
#endif
    [center addObserver:self
               selector:@selector(handleCaptureSessionRuntimeError:)
                   name:AVCaptureSessionRuntimeErrorNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionDidStartRunning:)
                   name:AVCaptureSessionDidStartRunningNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionDidStopRunning:)
                   name:AVCaptureSessionDidStopRunningNotification
                 object:_captureSession];
  }
  return self;
}
95
// Asserts that capture was stopped before the capturer is destroyed, then
// deregisters all notification observers (the only non-memory cleanup needed).
- (void)dealloc {
  NSAssert(
      !_willBeRunning,
      @"Session was still running in RTCCameraVideoCapturer dealloc. Forgot to call stopCapture?");
  [[NSNotificationCenter defaultCenter] removeObserver:self];
}
102
// Returns all video capture devices currently available on this system.
+ (NSArray<AVCaptureDevice *> *)captureDevices {
  return [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
}
106
// Returns the subset of |device|'s formats whose pixel format the WebRTC
// stack can consume (see IsMediaSubTypeSupported).
+ (NSArray<AVCaptureDeviceFormat *> *)supportedFormatsForDevice:(AVCaptureDevice *)device {
  NSMutableArray<AVCaptureDeviceFormat *> *eligibleDeviceFormats = [NSMutableArray array];

  for (AVCaptureDeviceFormat *format in device.formats) {
    // Filter out subTypes that we currently don't support in the stack
    FourCharCode mediaSubType = CMFormatDescriptionGetMediaSubType(format.formatDescription);
    if (IsMediaSubTypeSupported(mediaSubType)) {
      [eligibleDeviceFormats addObject:format];
    }
  }

  return eligibleDeviceFormats;
}
120
// Starts capture from |device| with the given |format| at |fps| frames per
// second. All AVCaptureSession work is performed asynchronously on the
// capture-session dispatcher queue. NOTE(review): if lockForConfiguration:
// fails we return early with _willBeRunning still YES — presumably acceptable
// since a later stopCapture resets it, but worth confirming upstream.
- (void)startCaptureWithDevice:(AVCaptureDevice *)device
                        format:(AVCaptureDeviceFormat *)format
                           fps:(NSInteger)fps {
  _willBeRunning = YES;
  [RTCDispatcher
      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                    block:^{
                      RTCLogInfo("startCaptureWithDevice %@ @ %ld fps", format, (long)fps);

#if TARGET_OS_IPHONE
                      [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
#endif

                      _currentDevice = device;

                      NSError *error = nil;
                      if (![_currentDevice lockForConfiguration:&error]) {
                        RTCLogError(
                            @"Failed to lock device %@. Error: %@", _currentDevice, error.userInfo);
                        return;
                      }
                      [self reconfigureCaptureSessionInput];
                      [self updateOrientation];
                      [_captureSession startRunning];
                      [self updateDeviceCaptureFormat:format fps:fps];
                      [_currentDevice unlockForConfiguration];
                      _isRunning = YES;
                    }];
}
150
// Stops capture: removes all session inputs, stops the session and (on iOS)
// ends device-orientation notifications. Runs asynchronously on the
// capture-session dispatcher queue.
- (void)stopCapture {
  _willBeRunning = NO;
  [RTCDispatcher
      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                    block:^{
                      RTCLogInfo("Stop");
                      _currentDevice = nil;
                      // Copy so the collection isn't mutated while enumerating.
                      for (AVCaptureDeviceInput *oldInput in [_captureSession.inputs copy]) {
                        [_captureSession removeInput:oldInput];
                      }
                      [_captureSession stopRunning];

#if TARGET_OS_IPHONE
                      [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
#endif
                      _isRunning = NO;
                    }];
}
169
#pragma mark iOS notifications

#if TARGET_OS_IPHONE
// Re-reads the device orientation on the capture queue whenever UIKit reports
// an orientation change.
- (void)deviceOrientationDidChange:(NSNotification *)notification {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
                                 [self updateOrientation];
                               }];
}
#endif
180
#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate

// Called on the frame queue for every captured sample buffer. Wraps the pixel
// buffer in an RTCVideoFrame (with rotation derived from device orientation
// and camera position on iOS) and forwards it to the delegate.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection {
  NSParameterAssert(captureOutput == _videoDataOutput);

  // Drop malformed or not-yet-ready buffers.
  if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) ||
      !CMSampleBufferDataIsReady(sampleBuffer)) {
    return;
  }

  CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
  if (pixelBuffer == nil) {
    return;
  }

#if TARGET_OS_IPHONE
  // Default to portrait orientation on iPhone.
  RTCVideoRotation rotation = RTCVideoRotation_90;
  BOOL usingFrontCamera;
  // Check the image's EXIF for the camera the image came from as the image could have been
  // delayed as we set alwaysDiscardsLateVideoFrames to NO.
  AVCaptureDevicePosition cameraPosition =
      [AVCaptureSession devicePositionForSampleBuffer:sampleBuffer];
  if (cameraPosition != AVCaptureDevicePositionUnspecified) {
    usingFrontCamera = AVCaptureDevicePositionFront == cameraPosition;
  } else {
    // Fall back to the connection's input device when EXIF has no position.
    AVCaptureDeviceInput *deviceInput =
        (AVCaptureDeviceInput *)((AVCaptureInputPort *)connection.inputPorts.firstObject).input;
    usingFrontCamera = AVCaptureDevicePositionFront == deviceInput.device.position;
  }

  switch (_orientation) {
    case UIDeviceOrientationPortrait:
      rotation = RTCVideoRotation_90;
      break;
    case UIDeviceOrientationPortraitUpsideDown:
      rotation = RTCVideoRotation_270;
      break;
    case UIDeviceOrientationLandscapeLeft:
      rotation = usingFrontCamera ? RTCVideoRotation_180 : RTCVideoRotation_0;
      break;
    case UIDeviceOrientationLandscapeRight:
      rotation = usingFrontCamera ? RTCVideoRotation_0 : RTCVideoRotation_180;
      break;
    case UIDeviceOrientationFaceUp:
    case UIDeviceOrientationFaceDown:
    case UIDeviceOrientationUnknown:
      // Ignore.
      break;
  }
#else
  // No rotation on Mac.
  RTCVideoRotation rotation = RTCVideoRotation_0;
#endif

  RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer];
  int64_t timeStampNs =
      CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) *
      kNanosecondsPerSecond;
  RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer
                                                           rotation:rotation
                                                        timeStampNs:timeStampNs];
  [self.delegate capturer:self didCaptureVideoFrame:videoFrame];
}
246
// Called when AVFoundation drops a frame (e.g. the frame queue was busy).
// We only log; no frame is forwarded.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
         fromConnection:(AVCaptureConnection *)connection {
  RTCLogError(@"Dropped sample buffer.");
}
252
#pragma mark - AVCaptureSession notifications

// Logs the reason the capture session was interrupted (iOS 9+ provides a
// machine-readable reason in the notification's userInfo).
- (void)handleCaptureSessionInterruption:(NSNotification *)notification {
  NSString *reasonString = nil;
#if defined(__IPHONE_9_0) && defined(__IPHONE_OS_VERSION_MAX_ALLOWED) && \
    __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_9_0
  if ([UIDevice isIOS9OrLater]) {
    NSNumber *reason = notification.userInfo[AVCaptureSessionInterruptionReasonKey];
    if (reason) {
      switch (reason.intValue) {
        case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground:
          reasonString = @"VideoDeviceNotAvailableInBackground";
          break;
        case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient:
          reasonString = @"AudioDeviceInUseByAnotherClient";
          break;
        case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient:
          reasonString = @"VideoDeviceInUseByAnotherClient";
          break;
        case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps:
          reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps";
          break;
      }
    }
  }
#endif
  RTCLog(@"Capture session interrupted: %@", reasonString);
}
281
// Logs the end of a capture-session interruption; AVFoundation resumes the
// session itself, so no further action is needed here.
- (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification {
  RTCLog(@"Capture session interruption ended.");
}
285
// Routes a session runtime error to the appropriate recovery path: a media
// services reset is recoverable (restart the session); anything else goes
// through the fatal-error path which retries at most once.
- (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
  NSError *error = [notification.userInfo objectForKey:AVCaptureSessionErrorKey];
  RTCLogError(@"Capture session runtime error: %@", error);

  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
#if TARGET_OS_IPHONE
                                 if (error.code == AVErrorMediaServicesWereReset) {
                                   [self handleNonFatalError];
                                 } else {
                                   [self handleFatalError];
                                 }
#else
                                 [self handleFatalError];
#endif
                               }];
}
303
// Resets the fatal-error retry latch once the session is running again.
- (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification {
  RTCLog(@"Capture session started.");

  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
                                 // If we successfully restarted after an unknown error,
                                 // allow future retries on fatal errors.
                                 _hasRetriedOnFatalError = NO;
                               }];
}
314
// Logs that the session stopped; state bookkeeping happens in stopCapture.
- (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
  RTCLog(@"Capture session stopped.");
}
318
// Attempts a single recovery from a fatal capture error; subsequent fatal
// errors are only logged (the latch is cleared when the session restarts).
- (void)handleFatalError {
  [RTCDispatcher
      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                    block:^{
                      if (!_hasRetriedOnFatalError) {
                        RTCLogWarning(@"Attempting to recover from fatal capture error.");
                        [self handleNonFatalError];
                        _hasRetriedOnFatalError = YES;
                      } else {
                        RTCLogError(@"Previous fatal error recovery failed.");
                      }
                    }];
}
332
// Restarts the capture session after a recoverable error, but only if capture
// is supposed to be running.
- (void)handleNonFatalError {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
                                 RTCLog(@"Restarting capture session after error.");
                                 if (_isRunning) {
                                   [_captureSession startRunning];
                                 }
                               }];
}
342
#if TARGET_OS_IPHONE

#pragma mark - UIApplication notifications

// Restarts the session on foregrounding if it should be running but was
// stopped (e.g. by a background interruption that never cleanly ended).
- (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
                                 if (_isRunning && !_captureSession.isRunning) {
                                   RTCLog(@"Restarting capture session on active.");
                                   [_captureSession startRunning];
                                 }
                               }];
}

#endif  // TARGET_OS_IPHONE
358
#pragma mark - Private

// Lazily creates the serial queue on which sample buffers are delivered,
// targeted at the high-priority global queue to minimize capture latency.
- (dispatch_queue_t)frameQueue {
  if (!_frameQueue) {
    _frameQueue =
        dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video", DISPATCH_QUEUE_SERIAL);
    dispatch_set_target_queue(_frameQueue,
                              dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
  }
  return _frameQueue;
}
370
// Creates the capture session and attaches the video data output.
// Returns NO (and leaves the capturer unusable) if the output is unsupported.
- (BOOL)setupCaptureSession {
  NSAssert(_captureSession == nil, @"Setup capture session called twice.");
  _captureSession = [[AVCaptureSession alloc] init];
#if defined(WEBRTC_IOS)
  _captureSession.sessionPreset = AVCaptureSessionPresetInputPriority;
  _captureSession.usesApplicationAudioSession = NO;
#endif
  [self setupVideoDataOutput];
  // Add the output.
  if (![_captureSession canAddOutput:_videoDataOutput]) {
    RTCLogError(@"Video data output unsupported.");
    return NO;
  }
  [_captureSession addOutput:_videoDataOutput];

  return YES;
}
388
// Creates the video data output delivering NV12 buffers to the frame queue.
- (void)setupVideoDataOutput {
  NSAssert(_videoDataOutput == nil, @"Setup video data output called twice.");
  // Make the capturer output NV12. Ideally we want I420 but that's not
  // currently supported on iPhone / iPad.
  AVCaptureVideoDataOutput *videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
  videoDataOutput.videoSettings = @{
    (NSString *)
    // TODO(denicija): Remove this color conversion and use the original capture format directly.
    kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
  };
  // Keep late frames so we can still deliver them (see didOutputSampleBuffer).
  videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
  [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue];
  _videoDataOutput = videoDataOutput;
}
403
#pragma mark - Private, called inside capture queue

// Applies |format| and a fixed |fps| frame-duration to the current device.
// The device must already be locked for configuration by the caller.
// Setting activeFormat can throw on misuse, hence the @try guard.
- (void)updateDeviceCaptureFormat:(AVCaptureDeviceFormat *)format fps:(NSInteger)fps {
  NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession],
           @"updateDeviceCaptureFormat must be called on the capture queue.");
  @try {
    _currentDevice.activeFormat = format;
    _currentDevice.activeVideoMinFrameDuration = CMTimeMake(1, fps);
    _currentDevice.activeVideoMaxFrameDuration = CMTimeMake(1, fps);
  } @catch (NSException *exception) {
    RTCLogError(@"Failed to set active format!\n User info:%@", exception.userInfo);
    return;
  }
}
418
// Replaces the session's current input(s) with one for _currentDevice, inside
// a begin/commitConfiguration pair so the change is applied atomically.
- (void)reconfigureCaptureSessionInput {
  NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession],
           @"reconfigureCaptureSessionInput must be called on the capture queue.");
  NSError *error = nil;
  AVCaptureDeviceInput *input =
      [AVCaptureDeviceInput deviceInputWithDevice:_currentDevice error:&error];
  if (!input) {
    RTCLogError(@"Failed to create front camera input: %@", error.localizedDescription);
    return;
  }
  [_captureSession beginConfiguration];
  // Copy so the collection isn't mutated while enumerating.
  for (AVCaptureDeviceInput *oldInput in [_captureSession.inputs copy]) {
    [_captureSession removeInput:oldInput];
  }
  if ([_captureSession canAddInput:input]) {
    [_captureSession addInput:input];
  } else {
    RTCLogError(@"Cannot add camera as an input to the session.");
  }
  [_captureSession commitConfiguration];
}
440
// Caches the current UIDevice orientation (iOS only); read when rotating
// outgoing frames in the sample-buffer callback.
- (void)updateOrientation {
  NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession],
           @"updateOrientation must be called on the capture queue.");
#if TARGET_OS_IPHONE
  _orientation = [UIDevice currentDevice].orientation;
#endif
}

@end
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698