Chromium Code Reviews

Side by Side Diff: webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCCameraVideoCapturer.m

Issue 2964703002: [iOS] Fix incorrectly oriented frames when rapidly switching between cameras. (Closed)
Patch Set: use gatekeeper for getting EXIF. Created 3 years, 4 months ago
/*
 * Copyright 2017 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#import <Foundation/Foundation.h>

#import "WebRTC/RTCCameraVideoCapturer.h"
#import "WebRTC/RTCLogging.h"
#import "WebRTC/RTCVideoFrameBuffer.h"

#if TARGET_OS_IPHONE
#import "WebRTC/UIDevice+RTCDevice.h"
#endif

#import "RTCDispatcher+Private.h"

const int64_t kNanosecondsPerSecond = 1000000000;

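// Only 4:2:0 planar full-range and NV12 (bi-planar) video-range pixel formats
// are currently supported by the rest of the capture stack.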
static inline BOOL IsMediaSubTypeSupported(FourCharCode mediaSubType) {
  return (mediaSubType == kCVPixelFormatType_420YpCbCr8PlanarFullRange ||
          mediaSubType == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange);
}

@interface RTCCameraVideoCapturer ()<AVCaptureVideoDataOutputSampleBufferDelegate>
@property(nonatomic, readonly) dispatch_queue_t frameQueue;
@end

@implementation RTCCameraVideoCapturer {
  AVCaptureVideoDataOutput *_videoDataOutput;
  AVCaptureSession *_captureSession;
  AVCaptureDevice *_currentDevice;
  RTCVideoRotation _rotation;
  BOOL _hasRetriedOnFatalError;
  BOOL _isRunning;
  // Will the session be running once all asynchronous operations have been completed?
  BOOL _willBeRunning;
}

@synthesize frameQueue = _frameQueue;
@synthesize captureSession = _captureSession;

- (instancetype)initWithDelegate:(__weak id<RTCVideoCapturerDelegate>)delegate {
  if (self = [super initWithDelegate:delegate]) {
    // Create the capture session and all relevant inputs and outputs. We need
    // to do this in init because the application may want the capture session
    // before we start the capturer for e.g. AVCaptureVideoPreviewLayer. All objects
    // created here are retained until dealloc and never recreated.
    if (![self setupCaptureSession]) {
      return nil;
    }
    NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
#if TARGET_OS_IPHONE
    [center addObserver:self
               selector:@selector(deviceOrientationDidChange:)
                   name:UIDeviceOrientationDidChangeNotification
                 object:nil];
    [center addObserver:self
               selector:@selector(handleCaptureSessionInterruption:)
                   name:AVCaptureSessionWasInterruptedNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionInterruptionEnded:)
                   name:AVCaptureSessionInterruptionEndedNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleApplicationDidBecomeActive:)
                   name:UIApplicationDidBecomeActiveNotification
                 object:[UIApplication sharedApplication]];
#endif
    [center addObserver:self
               selector:@selector(handleCaptureSessionRuntimeError:)
                   name:AVCaptureSessionRuntimeErrorNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionDidStartRunning:)
                   name:AVCaptureSessionDidStartRunningNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionDidStopRunning:)
                   name:AVCaptureSessionDidStopRunningNotification
                 object:_captureSession];
  }
  return self;
}

- (void)dealloc {
  NSAssert(
      !_willBeRunning,
      @"Session was still running in RTCCameraVideoCapturer dealloc. Forgot to call stopCapture?");
  [[NSNotificationCenter defaultCenter] removeObserver:self];
}

+ (NSArray<AVCaptureDevice *> *)captureDevices {
  return [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
}

+ (NSArray<AVCaptureDeviceFormat *> *)supportedFormatsForDevice:(AVCaptureDevice *)device {
  NSMutableArray<AVCaptureDeviceFormat *> *eligibleDeviceFormats = [NSMutableArray array];

  for (AVCaptureDeviceFormat *format in device.formats) {
    // Filter out subTypes that we currently don't support in the stack.
    FourCharCode mediaSubType = CMFormatDescriptionGetMediaSubType(format.formatDescription);
    if (IsMediaSubTypeSupported(mediaSubType)) {
      [eligibleDeviceFormats addObject:format];
    }
  }

  return eligibleDeviceFormats;
}

- (void)startCaptureWithDevice:(AVCaptureDevice *)device
                        format:(AVCaptureDeviceFormat *)format
                           fps:(NSInteger)fps {
  _willBeRunning = true;
  [RTCDispatcher
      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                    block:^{
                      RTCLogInfo("startCaptureWithDevice %@ @ %zd fps", format, fps);

#if TARGET_OS_IPHONE
                      [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
#endif

                      _currentDevice = device;

                      NSError *error = nil;
                      if (![_currentDevice lockForConfiguration:&error]) {
                        RTCLogError(
                            @"Failed to lock device %@. Error: %@", _currentDevice, error.userInfo);
                        return;
                      }

                      [self reconfigureCaptureSessionInput];
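                      // Recompute the frame rotation for the newly selected
                      // device before the session starts delivering frames
                      // (see the issue description above).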
                      [self updateOrientation];
                      [_captureSession startRunning];
                      [self updateDeviceCaptureFormat:format fps:fps];
                      [_currentDevice unlockForConfiguration];
                      _isRunning = true;
                    }];
}

- (void)stopCapture {
  _willBeRunning = false;
  [RTCDispatcher
      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                    block:^{
                      RTCLogInfo("Stop");
                      _currentDevice = nil;
                      for (AVCaptureDeviceInput *oldInput in [_captureSession.inputs copy]) {
                        [_captureSession removeInput:oldInput];
                      }
                      [_captureSession stopRunning];

#if TARGET_OS_IPHONE
                      [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
#endif
                      _isRunning = false;
                    }];
}

#pragma mark iOS notifications

#if TARGET_OS_IPHONE
- (void)deviceOrientationDidChange:(NSNotification *)notification {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
                                 [self updateOrientation];
                               }];
}
#endif

#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate

- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection {
  NSParameterAssert(captureOutput == _videoDataOutput);

  if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) ||
      !CMSampleBufferDataIsReady(sampleBuffer)) {
    return;
  }

  CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
  if (pixelBuffer == nil) {
    return;
  }

  RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer];
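  // Convert the presentation timestamp from CMTime seconds to the integer
  // nanosecond timestamp that RTCVideoFrame expects.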
  int64_t timeStampNs = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) *
      kNanosecondsPerSecond;
  RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer
                                                           rotation:_rotation
                                                        timeStampNs:timeStampNs];
  [self.delegate capturer:self didCaptureVideoFrame:videoFrame];
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
         fromConnection:(AVCaptureConnection *)connection {
  RTCLogError(@"Dropped sample buffer.");
}

#pragma mark - AVCaptureSession notifications

- (void)handleCaptureSessionInterruption:(NSNotification *)notification {
  NSString *reasonString = nil;
#if defined(__IPHONE_9_0) && defined(__IPHONE_OS_VERSION_MAX_ALLOWED) && \
    __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_9_0
  if ([UIDevice isIOS9OrLater]) {
    NSNumber *reason = notification.userInfo[AVCaptureSessionInterruptionReasonKey];
    if (reason) {
      switch (reason.intValue) {
        case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground:
          reasonString = @"VideoDeviceNotAvailableInBackground";
          break;
        case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient:
          reasonString = @"AudioDeviceInUseByAnotherClient";
          break;
        case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient:
          reasonString = @"VideoDeviceInUseByAnotherClient";
          break;
        case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps:
          reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps";
          break;
      }
    }
  }
#endif
  RTCLog(@"Capture session interrupted: %@", reasonString);
}

- (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification {
  RTCLog(@"Capture session interruption ended.");
}

- (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
  NSError *error = [notification.userInfo objectForKey:AVCaptureSessionErrorKey];
  RTCLogError(@"Capture session runtime error: %@", error);

  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
#if TARGET_OS_IPHONE
                                 if (error.code == AVErrorMediaServicesWereReset) {
                                   [self handleNonFatalError];
                                 } else {
                                   [self handleFatalError];
                                 }
#else
                                 [self handleFatalError];
#endif
                               }];
}

- (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification {
  RTCLog(@"Capture session started.");

  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
                                 // If we successfully restarted after an unknown error,
                                 // allow future retries on fatal errors.
                                 _hasRetriedOnFatalError = NO;
                               }];
}

- (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
  RTCLog(@"Capture session stopped.");
}

- (void)handleFatalError {
  [RTCDispatcher
      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                    block:^{
                      if (!_hasRetriedOnFatalError) {
                        RTCLogWarning(@"Attempting to recover from fatal capture error.");
                        [self handleNonFatalError];
                        _hasRetriedOnFatalError = YES;
                      } else {
                        RTCLogError(@"Previous fatal error recovery failed.");
                      }
                    }];
}

- (void)handleNonFatalError {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
                                 RTCLog(@"Restarting capture session after error.");
                                 if (_isRunning) {
                                   [_captureSession startRunning];
                                 }
                               }];
}

#if TARGET_OS_IPHONE

#pragma mark - UIApplication notifications

- (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
                                 if (_isRunning && !_captureSession.isRunning) {
                                   RTCLog(@"Restarting capture session on active.");
                                   [_captureSession startRunning];
                                 }
                               }];
}

#endif  // TARGET_OS_IPHONE

#pragma mark - Private

- (dispatch_queue_t)frameQueue {
  if (!_frameQueue) {
    _frameQueue =
        dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video", DISPATCH_QUEUE_SERIAL);
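    // The queue stays serial, but retargeting it at the high-priority global
    // queue lets frame callbacks run ahead of default-priority work.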
    dispatch_set_target_queue(_frameQueue,
                              dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
  }
  return _frameQueue;
}

- (BOOL)setupCaptureSession {
  NSAssert(_captureSession == nil, @"Setup capture session called twice.");
  _captureSession = [[AVCaptureSession alloc] init];
#if defined(WEBRTC_IOS)
  _captureSession.sessionPreset = AVCaptureSessionPresetInputPriority;
  _captureSession.usesApplicationAudioSession = NO;
#endif
  [self setupVideoDataOutput];
  // Add the output.
  if (![_captureSession canAddOutput:_videoDataOutput]) {
    RTCLogError(@"Video data output unsupported.");
    return NO;
  }
  [_captureSession addOutput:_videoDataOutput];

  return YES;
}

- (void)setupVideoDataOutput {
  NSAssert(_videoDataOutput == nil, @"Setup video data output called twice.");
  // Make the capturer output NV12. Ideally we want I420 but that's not
  // currently supported on iPhone / iPad.
  AVCaptureVideoDataOutput *videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
  videoDataOutput.videoSettings = @{
    (NSString *)
    // TODO(denicija): Remove this color conversion and use the original capture format directly.
    kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
  };
  videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
  [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue];
  _videoDataOutput = videoDataOutput;
}

#pragma mark - Private, called inside capture queue

- (void)updateDeviceCaptureFormat:(AVCaptureDeviceFormat *)format fps:(NSInteger)fps {
  NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession],
           @"updateDeviceCaptureFormat must be called on the capture queue.");
  @try {
    _currentDevice.activeFormat = format;
    _currentDevice.activeVideoMinFrameDuration = CMTimeMake(1, fps);
    _currentDevice.activeVideoMaxFrameDuration = CMTimeMake(1, fps);
  } @catch (NSException *exception) {
    RTCLogError(@"Failed to set active format!\n User info:%@", exception.userInfo);
    return;
  }
}

- (void)reconfigureCaptureSessionInput {
  NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession],
           @"reconfigureCaptureSessionInput must be called on the capture queue.");
  NSError *error = nil;
  AVCaptureDeviceInput *input =
      [AVCaptureDeviceInput deviceInputWithDevice:_currentDevice error:&error];
  if (!input) {
    RTCLogError(@"Failed to create camera input: %@", error.localizedDescription);
    return;
  }
  [_captureSession beginConfiguration];
  for (AVCaptureDeviceInput *oldInput in [_captureSession.inputs copy]) {
    [_captureSession removeInput:oldInput];
  }
  if ([_captureSession canAddInput:input]) {
    [_captureSession addInput:input];
  } else {
    RTCLogError(@"Cannot add camera as an input to the session.");
  }
  [_captureSession commitConfiguration];
}

- (void)updateOrientation {
  NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession],
           @"updateOrientation must be called on the capture queue.");
#if TARGET_OS_IPHONE
  BOOL usingFrontCamera = _currentDevice.position == AVCaptureDevicePositionFront;
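  // Map the device orientation to the rotation the frame consumer should
  // apply. The front and back sensors are mounted 180 degrees apart, so the
  // landscape rotations are swapped when the front camera is in use.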
  switch ([UIDevice currentDevice].orientation) {
    case UIDeviceOrientationPortrait:
      _rotation = RTCVideoRotation_90;
      break;
    case UIDeviceOrientationPortraitUpsideDown:
      _rotation = RTCVideoRotation_270;
      break;
    case UIDeviceOrientationLandscapeLeft:
      _rotation = usingFrontCamera ? RTCVideoRotation_180 : RTCVideoRotation_0;
      break;
    case UIDeviceOrientationLandscapeRight:
      _rotation = usingFrontCamera ? RTCVideoRotation_0 : RTCVideoRotation_180;
      break;
    case UIDeviceOrientationFaceUp:
    case UIDeviceOrientationFaceDown:
    case UIDeviceOrientationUnknown:
      // Ignore.
      break;
  }
#endif
}

@end
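
For reference, a minimal sketch of how an application might drive the public API in this file. The delegate object and the choices of front camera, first supported format, and 30 fps are illustrative assumptions, not part of this change:

// Illustrative only: 'delegate' is any object implementing RTCVideoCapturerDelegate.
RTCCameraVideoCapturer *capturer =
    [[RTCCameraVideoCapturer alloc] initWithDelegate:delegate];

// Pick the front camera, if one is available.
AVCaptureDevice *device = nil;
for (AVCaptureDevice *candidate in [RTCCameraVideoCapturer captureDevices]) {
  if (candidate.position == AVCaptureDevicePositionFront) {
    device = candidate;
    break;
  }
}

// Start capturing with the first supported format at an assumed 30 fps.
NSArray<AVCaptureDeviceFormat *> *formats =
    [RTCCameraVideoCapturer supportedFormatsForDevice:device];
[capturer startCaptureWithDevice:device format:formats.firstObject fps:30];

// Stop before releasing the capturer; dealloc asserts if capture is still running.
[capturer stopCapture];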