Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(32)

Side by Side Diff: webrtc/sdk/objc/Framework/Classes/RTCCameraVideoCapturer.m

Issue 2776703002: New RTCCameraVideoCapturer. (Closed)
Patch Set: Make RTCCameraVideoCapturer pure Objective-C. Created 3 years, 8 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « webrtc/sdk/BUILD.gn ('k') | webrtc/sdk/objc/Framework/Headers/WebRTC/RTCCameraVideoCapturer.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
(Empty)
1 /*
2 * Copyright 2017 The WebRTC project authors. All Rights Reserved.
3 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
11 #import <Foundation/Foundation.h>
12
13 #import "WebRTC/RTCCameraVideoCapturer.h"
14 #import "WebRTC/RTCLogging.h"
15
16 #if TARGET_OS_IPHONE
17 #import "WebRTC/UIDevice+RTCDevice.h"
18 #endif
19
20 #import "RTCDispatcher+Private.h"
21
// Conversion factor from CMTime seconds to the nanosecond timestamps WebRTC
// uses for RTCVideoFrame. `static` keeps the symbol file-local: without it the
// constant has external linkage and can collide with an identically named
// symbol elsewhere in the framework.
static const int64_t kNanosecondsPerSecond = 1000000000;
23
// Returns YES when the given pixel-format FourCC is one the WebRTC capture
// stack can consume: planar full-range 4:2:0 or bi-planar video-range 4:2:0.
static inline BOOL IsMediaSubTypeSupported(FourCharCode mediaSubType) {
  switch (mediaSubType) {
    case kCVPixelFormatType_420YpCbCr8PlanarFullRange:
    case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange:
      return YES;
    default:
      return NO;
  }
}
28
// Private class extension: adopts the sample-buffer delegate protocol and
// declares the serial queue frames are delivered on. (Identifiers were
// reconstructed from a line-wrapped dump, e.g. "Delegat e" -> "Delegate".)
@interface RTCCameraVideoCapturer ()<AVCaptureVideoDataOutputSampleBufferDelegate>
@property(nonatomic, readonly) dispatch_queue_t frameQueue;
@end

@implementation RTCCameraVideoCapturer {
  AVCaptureVideoDataOutput *_videoDataOutput;
  AVCaptureSession *_captureSession;
  AVCaptureDevice *_currentDevice;
  RTCVideoRotation _rotation;
  // Set after the first automatic recovery attempt from a fatal session error;
  // prevents endless retry loops.
  BOOL _hasRetriedOnFatalError;
  BOOL _isRunning;
  // Will the session be running once all asynchronous operations have been completed?
  BOOL _willBeRunning;
}

@synthesize frameQueue = _frameQueue;
@synthesize captureSession = _captureSession;
46
// Designated entry point. Builds the capture session eagerly and registers for
// every session/device notification the capturer reacts to.
- (instancetype)initWithDelegate:(__weak id<RTCVideoCapturerDelegate>)delegate {
  if (self = [super initWithDelegate:delegate]) {
    // The capture session, inputs and outputs are created here rather than at
    // start time because clients may want the session before capturing begins
    // (e.g. for an AVCapturePreviewLayer). Everything created here is retained
    // until dealloc and never recreated.
    if (![self setupCaptureSession]) {
      return nil;
    }
    NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter];
#if TARGET_OS_IPHONE
    [notificationCenter addObserver:self
                           selector:@selector(deviceOrientationDidChange:)
                               name:UIDeviceOrientationDidChangeNotification
                             object:nil];
    [notificationCenter addObserver:self
                           selector:@selector(handleCaptureSessionInterruption:)
                               name:AVCaptureSessionWasInterruptedNotification
                             object:_captureSession];
    [notificationCenter addObserver:self
                           selector:@selector(handleCaptureSessionInterruptionEnded:)
                               name:AVCaptureSessionInterruptionEndedNotification
                             object:_captureSession];
    [notificationCenter addObserver:self
                           selector:@selector(handleApplicationDidBecomeActive:)
                               name:UIApplicationDidBecomeActiveNotification
                             object:[UIApplication sharedApplication]];
#endif
    [notificationCenter addObserver:self
                           selector:@selector(handleCaptureSessionRuntimeError:)
                               name:AVCaptureSessionRuntimeErrorNotification
                             object:_captureSession];
    [notificationCenter addObserver:self
                           selector:@selector(handleCaptureSessionDidStartRunning:)
                               name:AVCaptureSessionDidStartRunningNotification
                             object:_captureSession];
    [notificationCenter addObserver:self
                           selector:@selector(handleCaptureSessionDidStopRunning:)
                               name:AVCaptureSessionDidStopRunningNotification
                             object:_captureSession];
  }
  return self;
}
90
// Cleanup only: unhook notifications. Asserts that the client called
// stopCapture before releasing the capturer.
- (void)dealloc {
  NSAssert(
      !_willBeRunning,
      @"Session was still running in RTCCameraVideoCapturer dealloc. Forgot to call stopCapture?");
  [[NSNotificationCenter defaultCenter] removeObserver:self];
}
97
// All video capture devices currently attached to the system.
// NOTE(review): devicesWithMediaType: is deprecated on iOS 10+ in favor of
// AVCaptureDeviceDiscoverySession — consider migrating; verify deployment target.
+ (NSArray<AVCaptureDevice *> *)captureDevices {
  return [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
}
101
// Returns the subset of `device.formats` whose pixel format the WebRTC stack
// supports (see IsMediaSubTypeSupported).
+ (NSArray<AVCaptureDeviceFormat *> *)supportedFormatsForDevice:(AVCaptureDevice *)device {
  NSMutableArray<AVCaptureDeviceFormat *> *supportedFormats = [NSMutableArray array];
  for (AVCaptureDeviceFormat *format in device.formats) {
    // Skip media subtypes the stack cannot currently consume.
    FourCharCode subType = CMFormatDescriptionGetMediaSubType(format.formatDescription);
    if (!IsMediaSubTypeSupported(subType)) {
      continue;
    }
    [supportedFormats addObject:format];
  }
  return supportedFormats;
}
115
// Starts capturing from `device` using `format` at `fps` frames per second.
// The heavy lifting happens asynchronously on the capture-session dispatcher
// queue; `_willBeRunning` records the intended state immediately.
// Fix: assign ObjC BOOLs with YES/NO rather than C++ true/false — this file is
// pure Objective-C per the patch description.
- (void)startCaptureWithDevice:(AVCaptureDevice *)device
                        format:(AVCaptureDeviceFormat *)format
                           fps:(int)fps {
  _willBeRunning = YES;
  [RTCDispatcher
      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                    block:^{
                      RTCLogInfo("startCaptureWithDevice %@ @ %d fps", format, fps);

#if TARGET_OS_IPHONE
                      [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
#endif

                      _currentDevice = device;
                      // updateDeviceCaptureFormat locks the device for
                      // configuration; the matching unlock is below, after the
                      // session has started running.
                      [self updateDeviceCaptureFormat:format fps:fps];
                      [self reconfigureCaptureSessionInput];
                      [self updateOrientation];
                      [_captureSession startRunning];
                      [_currentDevice unlockForConfiguration];
                      _isRunning = YES;
                    }];
}
138
// Stops capturing: removes all session inputs and halts the session on the
// capture-session dispatcher queue. Must be called before dealloc.
// Fix: assign ObjC BOOLs with NO rather than C++ false.
- (void)stopCapture {
  _willBeRunning = NO;
  [RTCDispatcher
      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                    block:^{
                      RTCLogInfo("Stop");
                      _currentDevice = nil;
                      for (AVCaptureDeviceInput *oldInput in _captureSession.inputs) {
                        [_captureSession removeInput:oldInput];
                      }
                      [_captureSession stopRunning];

#if TARGET_OS_IPHONE
                      [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
#endif
                      _isRunning = NO;
                    }];
}
157
#pragma mark iOS notifications

#if TARGET_OS_IPHONE
// Recomputes the frame rotation whenever the device orientation changes.
- (void)deviceOrientationDidChange:(NSNotification *)notification {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
                                 [self updateOrientation];
                               }];
}
#endif
168
#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate

// Called on `frameQueue` for every captured frame. Wraps the pixel buffer in
// an RTCVideoFrame (with the current rotation and a nanosecond timestamp) and
// forwards it to the delegate.
// Fix: CVPixelBufferRef is a CoreFoundation type — compare against NULL, not nil.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection {
  NSParameterAssert(captureOutput == _videoDataOutput);

  // Ignore malformed or not-yet-ready buffers.
  if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) ||
      !CMSampleBufferDataIsReady(sampleBuffer)) {
    return;
  }

  CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
  if (pixelBuffer == NULL) {
    return;
  }

  // Presentation time (seconds, double) scaled to nanoseconds. NOTE(review):
  // the double multiply can lose sub-nanosecond precision for large
  // timestamps — acceptable here, but worth confirming.
  int64_t timeStampNs = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) *
      kNanosecondsPerSecond;
  RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithPixelBuffer:pixelBuffer
                                                                rotation:_rotation
                                                             timeStampNs:timeStampNs];
  [self.delegate capturer:self didCaptureVideoFrame:videoFrame];
}
193
// Invoked when AVFoundation drops a frame (e.g. the queue was busy). Logged
// only; no recovery is attempted.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
         fromConnection:(AVCaptureConnection *)connection {
  RTCLogError(@"Dropped sample buffer.");
}
199
#pragma mark - AVCaptureSession notifications

// Logs why the capture session was interrupted. The interruption reason key is
// only available on iOS 9+ SDKs, hence the availability guards.
- (void)handleCaptureSessionInterruption:(NSNotification *)notification {
  NSString *reasonString = nil;
#if defined(__IPHONE_9_0) && defined(__IPHONE_OS_VERSION_MAX_ALLOWED) && \
    __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_9_0
  if ([UIDevice isIOS9OrLater]) {
    NSNumber *reason = notification.userInfo[AVCaptureSessionInterruptionReasonKey];
    if (reason != nil) {
      switch (reason.intValue) {
        case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground:
          reasonString = @"VideoDeviceNotAvailableInBackground";
          break;
        case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient:
          reasonString = @"AudioDeviceInUseByAnotherClient";
          break;
        case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient:
          reasonString = @"VideoDeviceInUseByAnotherClient";
          break;
        case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps:
          reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps";
          break;
      }
    }
  }
#endif
  RTCLog(@"Capture session interrupted: %@", reasonString);
  // TODO(tkchin): Handle this case.
}
229
// Log-only handler for the end of a session interruption.
- (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification {
  RTCLog(@"Capture session interruption ended.");
  // TODO(tkchin): Handle this case.
}
234
// Routes session runtime errors to the appropriate recovery path. A media
// services reset (iOS) is recoverable; anything else is treated as fatal.
- (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
  NSError *error = notification.userInfo[AVCaptureSessionErrorKey];
  RTCLogError(@"Capture session runtime error: %@", error);

  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
#if TARGET_OS_IPHONE
                                 if (error.code == AVErrorMediaServicesWereReset) {
                                   [self handleNonFatalError];
                                 } else {
                                   [self handleFatalError];
                                 }
#else
                                 [self handleFatalError];
#endif
                               }];
}
252
// The session came (back) up; re-arm the fatal-error retry budget.
- (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification {
  RTCLog(@"Capture session started.");

  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
                                 // A successful (re)start means future fatal
                                 // errors may be retried again.
                                 _hasRetriedOnFatalError = NO;
                               }];
}
263
// Log-only handler; stop bookkeeping happens in stopCapture.
- (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
  RTCLog(@"Capture session stopped.");
}
267
// Attempts exactly one recovery from a fatal session error by restarting the
// session; a second fatal error before a successful start is only logged.
- (void)handleFatalError {
  [RTCDispatcher
      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                    block:^{
                      if (_hasRetriedOnFatalError) {
                        RTCLogError(@"Previous fatal error recovery failed.");
                      } else {
                        RTCLogWarning(@"Attempting to recover from fatal capture error.");
                        [self handleNonFatalError];
                        _hasRetriedOnFatalError = YES;
                      }
                    }];
}
281
// Restarts the session after a recoverable error, but only if the client still
// expects capture to be running.
- (void)handleNonFatalError {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
                                 RTCLog(@"Restarting capture session after error.");
                                 if (_isRunning) {
                                   [_captureSession startRunning];
                                 }
                               }];
}
291
#if TARGET_OS_IPHONE

#pragma mark - UIApplication notifications

// Defensive restart: if the app returns to the foreground while we believe we
// should be capturing but the session is not running, kick it back up.
// (Per the review thread this is a theoretical fix kept intentionally.)
// Fix: review-page comment lines were interleaved inside this method in the
// dump; the method body is reconstructed without them.
- (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
                                 if (_isRunning && !_captureSession.isRunning) {
                                   RTCLog(@"Restarting capture session on active.");
                                   [_captureSession startRunning];
                                 }
                               }];
}

#endif  // TARGET_OS_IPHONE
307
#pragma mark - Private

// Lazily creates the serial queue that sample buffers are delivered on,
// targeted at the high-priority global queue.
// NOTE(review): the lazy init is not guarded (no dispatch_once); it appears to
// be first touched on a single thread during setup — confirm before relying on
// concurrent access.
- (dispatch_queue_t)frameQueue {
  if (_frameQueue == nil) {
    dispatch_queue_t queue =
        dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video", DISPATCH_QUEUE_SERIAL);
    dispatch_set_target_queue(queue, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
    _frameQueue = queue;
  }
  return _frameQueue;
}
319
// Creates the capture session and attaches the video data output.
// Returns NO (and leaves the session without an output) if the output cannot
// be added. Called exactly once, from init.
- (BOOL)setupCaptureSession {
  NSAssert(_captureSession == nil, @"Setup capture session called twice.");
  _captureSession = [[AVCaptureSession alloc] init];
#if defined(WEBRTC_IOS)
  // Let the active device format drive the session configuration, and keep
  // WebRTC's own audio session handling in charge.
  _captureSession.sessionPreset = AVCaptureSessionPresetInputPriority;
  _captureSession.usesApplicationAudioSession = NO;
#endif
  [self setupVideoDataOutput];
  if (![_captureSession canAddOutput:_videoDataOutput]) {
    RTCLogError(@"Video data output unsupported.");
    return NO;
  }
  [_captureSession addOutput:_videoDataOutput];
  return YES;
}
337
// Creates the NV12 video data output and points its delegate at self on the
// frame queue. Called exactly once, from setupCaptureSession.
- (void)setupVideoDataOutput {
  NSAssert(_videoDataOutput == nil, @"Setup video data output called twice.");
  // Make the capturer output NV12. Ideally we want I420 but that's not
  // currently supported on iPhone / iPad.
  // TODO(denicija): Remove this color conversion and use the original capture format directly.
  AVCaptureVideoDataOutput *videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
  videoDataOutput.videoSettings = @{
    (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
  };
  videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
  [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue];
  _videoDataOutput = videoDataOutput;
}
352
#pragma mark - Private, called inside capture queue

// Locks _currentDevice for configuration and applies `format` and the minimum
// frame duration for `fps`. On success the device is left LOCKED on purpose:
// the matching unlockForConfiguration is performed by the caller
// (startCaptureWithDevice:format:fps:) after the session has started.
- (void)updateDeviceCaptureFormat:(AVCaptureDeviceFormat *)format fps:(int)fps {
  NSError *error = nil;
  if (![_currentDevice lockForConfiguration:&error]) {
    RTCLogError(@"Failed to lock device %@. Error: %@", _currentDevice, error.userInfo);
    return;
  }
  @try {
    _currentDevice.activeFormat = format;
    _currentDevice.activeVideoMinFrameDuration = CMTimeMake(1, fps);
  } @catch (NSException *exception) {
    // Setting an unsupported format throws; log and bail out.
    RTCLogError(@"Failed to set active format!\n User info:%@", exception.userInfo);
    return;
  }
}
370
// Replaces the session's inputs with a single input built from _currentDevice.
// Fix: the failure log previously said "front camera input", but
// _currentDevice can be any capture device — the message is now device-neutral.
- (void)reconfigureCaptureSessionInput {
  NSError *error = nil;
  AVCaptureDeviceInput *input =
      [AVCaptureDeviceInput deviceInputWithDevice:_currentDevice error:&error];
  if (!input) {
    RTCLogError(@"Failed to create camera input: %@", error.localizedDescription);
    return;
  }
  // Drop any previously attached inputs before adding the new one.
  for (AVCaptureDeviceInput *oldInput in _captureSession.inputs) {
    [_captureSession removeInput:oldInput];
  }
  if ([_captureSession canAddInput:input]) {
    [_captureSession addInput:input];
  } else {
    RTCLogError(@"Cannot add camera as an input to the session.");
    return;
  }
}
389
// Maps the current UIDevice orientation to the RTCVideoRotation applied to
// outgoing frames. Front-camera frames are mirrored, so landscape rotations
// are swapped for the front camera. No-op on non-iPhone targets and for
// face-up/face-down/unknown orientations.
- (void)updateOrientation {
#if TARGET_OS_IPHONE
  BOOL usingFrontCamera = _currentDevice.position == AVCaptureDevicePositionFront;
  UIDeviceOrientation orientation = [UIDevice currentDevice].orientation;
  switch (orientation) {
    case UIDeviceOrientationPortrait:
      _rotation = RTCVideoRotation_90;
      break;
    case UIDeviceOrientationPortraitUpsideDown:
      _rotation = RTCVideoRotation_270;
      break;
    case UIDeviceOrientationLandscapeLeft:
      _rotation = usingFrontCamera ? RTCVideoRotation_180 : RTCVideoRotation_0;
      break;
    case UIDeviceOrientationLandscapeRight:
      _rotation = usingFrontCamera ? RTCVideoRotation_0 : RTCVideoRotation_180;
      break;
    case UIDeviceOrientationFaceUp:
    case UIDeviceOrientationFaceDown:
    case UIDeviceOrientationUnknown:
      // Keep the previous rotation.
      break;
  }
#endif
}
414
415 @end
OLDNEW
« no previous file with comments | « webrtc/sdk/BUILD.gn ('k') | webrtc/sdk/objc/Framework/Headers/WebRTC/RTCCameraVideoCapturer.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698