Chromium Code Reviews

Side by Side Diff: webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.mm

Issue 2488973002: Split avfoundationcapturer classes in separate files. (Closed)
Patch Set: Created 4 years, 1 month ago
1 /* 1 /*
2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
3 * 3 *
4 * Use of this source code is governed by a BSD-style license 4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source 5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found 6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may 7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree. 8 * be found in the AUTHORS file in the root of the source tree.
9 */ 9 */
10 10
11 #include "avfoundationvideocapturer.h" 11 #include "avfoundationvideocapturer.h"
12 12
13 #import <AVFoundation/AVFoundation.h> 13 #import <AVFoundation/AVFoundation.h>
14 #import <Foundation/Foundation.h>
15 #if TARGET_OS_IPHONE
16 #import <UIKit/UIKit.h>
17 #endif
18 14
15 #import "RTCAVFoundationVideoCapturerInternal.h"
19 #import "RTCDispatcher+Private.h" 16 #import "RTCDispatcher+Private.h"
20 #import "WebRTC/RTCLogging.h" 17 #import "WebRTC/RTCLogging.h"
21 #if TARGET_OS_IPHONE
22 #import "WebRTC/UIDevice+RTCDevice.h"
23 #endif
24 18
25 #include "libyuv/rotate.h" 19 #include "libyuv/rotate.h"
26 20
27 #include "webrtc/base/bind.h" 21 #include "webrtc/base/bind.h"
28 #include "webrtc/base/checks.h" 22 #include "webrtc/base/checks.h"
29 #include "webrtc/base/logging.h" 23 #include "webrtc/base/logging.h"
30 #include "webrtc/base/thread.h" 24 #include "webrtc/base/thread.h"
31 #include "webrtc/common_video/include/corevideo_frame_buffer.h" 25 #include "webrtc/common_video/include/corevideo_frame_buffer.h"
32 #include "webrtc/common_video/rotation.h" 26 #include "webrtc/common_video/rotation.h"
33 27
(...skipping 84 matching lines...)
118 cricket::VideoFormat::FpsToInterval(kFramesPerSecond), 112 cricket::VideoFormat::FpsToInterval(kFramesPerSecond),
119 cricket::FOURCC_NV12); 113 cricket::FOURCC_NV12);
120 supportedFormats.insert(format); 114 supportedFormats.insert(format);
121 } 115 }
122 116
123 return supportedFormats; 117 return supportedFormats;
124 } 118 }
125 119
126 // Sets device format for the provided capture device. Returns YES/NO depending on success. 120 // Sets device format for the provided capture device. Returns YES/NO depending on success.
127 // TODO(denicija): When this file is split this static method should be reconsidered. 121 // TODO(denicija): When this file is split this static method should be reconsidered.
128 // Perhaps adding a category on AVCaptureDevice would be better. 122 // Perhaps adding a category on AVCaptureDevice would be better.
kthelgason 2016/11/10 09:28:10 Update TODO, since this file is now split :)
daniela-webrtc 2016/11/11 13:23:10 Done.
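A rough sketch of the category approach the TODO floats, for discussion only; the category and method names here are hypothetical and not part of this CL:

    // AVCaptureDevice+RTCVideoFormat.h (hypothetical; ObjC++ since it takes a
    // cricket type). Wraps the lockForConfiguration / activeFormat /
    // frame-duration sequence from SetFormatForCaptureDevice as an instance
    // method on the device itself.
    @interface AVCaptureDevice (RTCVideoFormat)
    - (BOOL)rtc_setActiveFormatForVideoFormat:(const cricket::VideoFormat &)format
                                      session:(AVCaptureSession *)session;
    @end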
129 static BOOL SetFormatForCaptureDevice(AVCaptureDevice *device, 123 BOOL SetFormatForCaptureDevice(AVCaptureDevice* device,
130 AVCaptureSession *session, 124 AVCaptureSession* session,
kthelgason 2016/11/10 09:28:10 Change this back, star should be on the right for Objective-C code.
daniela-webrtc 2016/11/11 13:23:10 Done.
131 const cricket::VideoFormat &format) { 125 const cricket::VideoFormat& format) {
132 AVCaptureDeviceFormat *deviceFormat = 126 AVCaptureDeviceFormat *deviceFormat =
133 GetDeviceFormatForVideoFormat(device, format); 127 GetDeviceFormatForVideoFormat(device, format);
134 const int fps = cricket::VideoFormat::IntervalToFps(format.interval); 128 const int fps = cricket::VideoFormat::IntervalToFps(format.interval);
135 129
136 NSError *error = nil; 130 NSError *error = nil;
137 BOOL success = YES; 131 BOOL success = YES;
138 [session beginConfiguration]; 132 [session beginConfiguration];
139 if ([device lockForConfiguration:&error]) { 133 if ([device lockForConfiguration:&error]) {
140 @try { 134 @try {
141 device.activeFormat = deviceFormat; 135 device.activeFormat = deviceFormat;
(...skipping 10 matching lines...)
152 RTCLogError( 146 RTCLogError(
153 @"Failed to lock device %@. Error: %@", 147 @"Failed to lock device %@. Error: %@",
154 device, error.userInfo); 148 device, error.userInfo);
155 success = NO; 149 success = NO;
156 } 150 }
157 [session commitConfiguration]; 151 [session commitConfiguration];
158 152
159 return success; 153 return success;
160 } 154 }
161 155
162 // This class is used to capture frames using AVFoundation APIs on iOS. It is meant
163 // to be owned by an instance of AVFoundationVideoCapturer. The reason for this is
164 // that other webrtc objects own cricket::VideoCapturer, which is not
165 // ref counted. To prevent bad behavior we do not expose this class directly.
166 @interface RTCAVFoundationVideoCapturerInternal : NSObject
167 <AVCaptureVideoDataOutputSampleBufferDelegate>
168
169 @property(nonatomic, readonly) AVCaptureSession *captureSession;
170 @property(nonatomic, readonly) dispatch_queue_t frameQueue;
171 @property(nonatomic, readonly) BOOL canUseBackCamera;
172 @property(nonatomic, assign) BOOL useBackCamera; // Defaults to NO.
173 @property(atomic, assign) BOOL isRunning; // Whether the capture session is running.
174 @property(atomic, assign) BOOL hasStarted; // Whether we have an unmatched start.
175
176 // We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it
177 // when we receive frames. This is safe because this object should be owned by
178 // it.
179 - (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer;
180 - (AVCaptureDevice *)getActiveCaptureDevice;
181
182 - (nullable AVCaptureDevice *)frontCaptureDevice;
183 - (nullable AVCaptureDevice *)backCaptureDevice;
184
185 // Starts and stops the capture session asynchronously. We cannot do this
186 // synchronously without blocking a WebRTC thread.
187 - (void)start;
188 - (void)stop;
189
190 @end
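The ownership scheme described above is worth spelling out: the C++ AVFoundationVideoCapturer holds the only strong reference to this helper, and the helper keeps a raw back-pointer. A condensed, illustrative view of the pattern (the real code appears in this file's constructor and dealloc further down):

    // C++ owner: the single strong (ARC) reference to the ObjC helper.
    _capturer =
        [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];

    // ObjC helper ivar: raw pointer back to the owner. Safe because the
    // helper never outlives the owner, and dealloc nils it out.
    webrtc::AVFoundationVideoCapturer *_capturer;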
191
192 @implementation RTCAVFoundationVideoCapturerInternal {
193 // Keep pointers to inputs for convenience.
194 AVCaptureDeviceInput *_frontCameraInput;
195 AVCaptureDeviceInput *_backCameraInput;
196 AVCaptureVideoDataOutput *_videoDataOutput;
197 // The cricket::VideoCapturer that owns this class. Should never be NULL.
198 webrtc::AVFoundationVideoCapturer *_capturer;
199 webrtc::VideoRotation _rotation;
200 BOOL _hasRetriedOnFatalError;
201 BOOL _isRunning;
202 BOOL _hasStarted;
203 rtc::CriticalSection _crit;
204 }
205
206 @synthesize captureSession = _captureSession;
207 @synthesize frameQueue = _frameQueue;
208 @synthesize useBackCamera = _useBackCamera;
209
210 @synthesize isRunning = _isRunning;
211 @synthesize hasStarted = _hasStarted;
212
213 // This is called from the thread that creates the video source, which is likely
214 // the main thread.
215 - (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer {
216 RTC_DCHECK(capturer);
217 if (self = [super init]) {
218 _capturer = capturer;
219 // Create the capture session and all relevant inputs and outputs. We need
220 // to do this in init because the application may want the capture session
221 // before we start the capturer, e.g. for AVCaptureVideoPreviewLayer. All objects
222 // created here are retained until dealloc and never recreated.
223 if (![self setupCaptureSession]) {
224 return nil;
225 }
226 NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
227 #if TARGET_OS_IPHONE
228 [center addObserver:self
229 selector:@selector(deviceOrientationDidChange:)
230 name:UIDeviceOrientationDidChangeNotification
231 object:nil];
232 [center addObserver:self
233 selector:@selector(handleCaptureSessionInterruption:)
234 name:AVCaptureSessionWasInterruptedNotification
235 object:_captureSession];
236 [center addObserver:self
237 selector:@selector(handleCaptureSessionInterruptionEnded:)
238 name:AVCaptureSessionInterruptionEndedNotification
239 object:_captureSession];
240 [center addObserver:self
241 selector:@selector(handleApplicationDidBecomeActive:)
242 name:UIApplicationDidBecomeActiveNotification
243 object:[UIApplication sharedApplication]];
244 #endif
245 [center addObserver:self
246 selector:@selector(handleCaptureSessionRuntimeError:)
247 name:AVCaptureSessionRuntimeErrorNotification
248 object:_captureSession];
249 [center addObserver:self
250 selector:@selector(handleCaptureSessionDidStartRunning:)
251 name:AVCaptureSessionDidStartRunningNotification
252 object:_captureSession];
253 [center addObserver:self
254 selector:@selector(handleCaptureSessionDidStopRunning:)
255 name:AVCaptureSessionDidStopRunningNotification
256 object:_captureSession];
257 }
258 return self;
259 }
260
261 - (void)dealloc {
262 RTC_DCHECK(!self.hasStarted);
263 [[NSNotificationCenter defaultCenter] removeObserver:self];
264 _capturer = nullptr;
265 }
266
267 - (AVCaptureSession *)captureSession {
268 return _captureSession;
269 }
270
271 - (AVCaptureDevice *)getActiveCaptureDevice {
272 return self.useBackCamera ? _backCameraInput.device : _frontCameraInput.device;
273 }
274
275 - (AVCaptureDevice *)frontCaptureDevice {
276 return _frontCameraInput.device;
277 }
278
279 - (AVCaptureDevice *)backCaptureDevice {
280 return _backCameraInput.device;
281 }
282
283 - (dispatch_queue_t)frameQueue {
284 if (!_frameQueue) {
285 _frameQueue =
286 dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video",
287 DISPATCH_QUEUE_SERIAL);
288 dispatch_set_target_queue(
289 _frameQueue,
290 dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
291 }
292 return _frameQueue;
293 }
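For context on the getter above: dispatch_set_target_queue keeps the frame queue serial (frames are delivered in order) while borrowing the high-priority global queue's scheduling. On iOS 8+/macOS 10.10+ the same effect can be expressed at creation time with a QoS attribute; a hedged alternative sketch, not part of this CL:

    dispatch_queue_attr_t attr = dispatch_queue_attr_make_with_qos_class(
        DISPATCH_QUEUE_SERIAL, QOS_CLASS_USER_INTERACTIVE, 0);
    dispatch_queue_t queue = dispatch_queue_create(
        "org.webrtc.avfoundationvideocapturer.video", attr);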
294
295 // Called from any thread (likely main thread).
296 - (BOOL)canUseBackCamera {
297 return _backCameraInput != nil;
298 }
299
300 // Called from any thread (likely main thread).
301 - (BOOL)useBackCamera {
302 @synchronized(self) {
303 return _useBackCamera;
304 }
305 }
306
307 // Called from any thread (likely main thread).
308 - (void)setUseBackCamera:(BOOL)useBackCamera {
309 if (!self.canUseBackCamera) {
310 if (useBackCamera) {
311 RTCLogWarning(@"No rear-facing camera exists or it cannot be used;"
312 "not switching.");
313 }
314 return;
315 }
316 @synchronized(self) {
317 if (_useBackCamera == useBackCamera) {
318 return;
319 }
320 _useBackCamera = useBackCamera;
321 [self updateSessionInputForUseBackCamera:useBackCamera];
322 }
323 }
324
325 // Called from WebRTC thread.
326 - (void)start {
327 if (self.hasStarted) {
328 return;
329 }
330 self.hasStarted = YES;
331 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
332 block:^{
333 #if TARGET_OS_IPHONE
334 // Default to portrait orientation on iPhone. This will be reset in
335 // updateOrientation unless orientation is unknown/faceup/facedown.
336 _rotation = webrtc::kVideoRotation_90;
337 #else
338 // No rotation on Mac.
339 _rotation = webrtc::kVideoRotation_0;
340 #endif
341 [self updateOrientation];
342 #if TARGET_OS_IPHONE
343 [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
344 #endif
345 AVCaptureSession *captureSession = self.captureSession;
346 [captureSession startRunning];
347 }];
348 }
349
350 // Called from same thread as start.
351 - (void)stop {
352 if (!self.hasStarted) {
353 return;
354 }
355 self.hasStarted = NO;
356 // Due to this async block, it's possible that the ObjC object outlives the
357 // C++ one. In order to not invoke functions on the C++ object, we set
358 // hasStarted immediately instead of dispatching it async.
359 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
360 block:^{
361 [_videoDataOutput setSampleBufferDelegate:nil queue:nullptr];
362 [_captureSession stopRunning];
363 #if TARGET_OS_IPHONE
364 [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
365 #endif
366 }];
367 }
368
369 #pragma mark iOS notifications
370
371 #if TARGET_OS_IPHONE
372 - (void)deviceOrientationDidChange:(NSNotification *)notification {
373 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
374 block:^{
375 [self updateOrientation];
376 }];
377 }
378 #endif
379
380 #pragma mark AVCaptureVideoDataOutputSampleBufferDelegate
381
382 - (void)captureOutput:(AVCaptureOutput *)captureOutput
383 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
384 fromConnection:(AVCaptureConnection *)connection {
385 NSParameterAssert(captureOutput == _videoDataOutput);
386 if (!self.hasStarted) {
387 return;
388 }
389 _capturer->CaptureSampleBuffer(sampleBuffer, _rotation);
390 }
391
392 - (void)captureOutput:(AVCaptureOutput *)captureOutput
393 didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
394 fromConnection:(AVCaptureConnection *)connection {
395 RTCLogError(@"Dropped sample buffer.");
396 }
397
398 #pragma mark - AVCaptureSession notifications
399
400 - (void)handleCaptureSessionInterruption:(NSNotification *)notification {
401 NSString *reasonString = nil;
402 #if defined(__IPHONE_9_0) && defined(__IPHONE_OS_VERSION_MAX_ALLOWED) \
403 && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_9_0
404 NSNumber *reason =
405 notification.userInfo[AVCaptureSessionInterruptionReasonKey];
406 if (reason) {
407 switch (reason.intValue) {
408 case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground:
409 reasonString = @"VideoDeviceNotAvailableInBackground";
410 break;
411 case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient:
412 reasonString = @"AudioDeviceInUseByAnotherClient";
413 break;
414 case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient:
415 reasonString = @"VideoDeviceInUseByAnotherClient";
416 break;
417 case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps:
418 reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps";
419 break;
420 }
421 }
422 #endif
423 RTCLog(@"Capture session interrupted: %@", reasonString);
424 // TODO(tkchin): Handle this case.
425 }
426
427 - (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification {
428 RTCLog(@"Capture session interruption ended.");
429 // TODO(tkchin): Handle this case.
430 }
431
432 - (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
433 NSError *error =
434 [notification.userInfo objectForKey:AVCaptureSessionErrorKey];
435 RTCLogError(@"Capture session runtime error: %@", error);
436
437 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
438 block:^{
439 #if TARGET_OS_IPHONE
440 if (error.code == AVErrorMediaServicesWereReset) {
441 [self handleNonFatalError];
442 } else {
443 [self handleFatalError];
444 }
445 #else
446 [self handleFatalError];
447 #endif
448 }];
449 }
450
451 - (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification {
452 RTCLog(@"Capture session started.");
453
454 self.isRunning = YES;
455 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
456 block:^{
457 // If we successfully restarted after an unknown error, allow future
458 // retries on fatal errors.
459 _hasRetriedOnFatalError = NO;
460 }];
461 }
462
463 - (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
464 RTCLog(@"Capture session stopped.");
465 self.isRunning = NO;
466 }
467
468 - (void)handleFatalError {
469 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
470 block:^{
471 if (!_hasRetriedOnFatalError) {
472 RTCLogWarning(@"Attempting to recover from fatal capture error.");
473 [self handleNonFatalError];
474 _hasRetriedOnFatalError = YES;
475 } else {
476 RTCLogError(@"Previous fatal error recovery failed.");
477 }
478 }];
479 }
480
481 - (void)handleNonFatalError {
482 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
483 block:^{
484 if (self.hasStarted) {
485 RTCLog(@"Restarting capture session after error.");
486 [self.captureSession startRunning];
487 }
488 }];
489 }
490
491 #if TARGET_OS_IPHONE
492
493 #pragma mark - UIApplication notifications
494
495 - (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
496 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
497 block:^{
498 if (self.hasStarted && !self.captureSession.isRunning) {
499 RTCLog(@"Restarting capture session on active.");
500 [self.captureSession startRunning];
501 }
502 }];
503 }
504
505 #endif // TARGET_OS_IPHONE
506
507 #pragma mark - Private
508
509 - (BOOL)setupCaptureSession {
510 AVCaptureSession *captureSession = [[AVCaptureSession alloc] init];
511 #if defined(WEBRTC_IOS)
512 captureSession.usesApplicationAudioSession = NO;
513 #endif
514 // Add the output.
515 AVCaptureVideoDataOutput *videoDataOutput = [self videoDataOutput];
516 if (![captureSession canAddOutput:videoDataOutput]) {
517 RTCLogError(@"Video data output unsupported.");
518 return NO;
519 }
520 [captureSession addOutput:videoDataOutput];
521
522 // Get the front and back cameras. If there isn't a front camera
523 // give up.
524 AVCaptureDeviceInput *frontCameraInput = [self frontCameraInput];
525 AVCaptureDeviceInput *backCameraInput = [self backCameraInput];
526 if (!frontCameraInput) {
527 RTCLogError(@"No front camera for capture session.");
528 return NO;
529 }
530
531 // Add the inputs.
532 if (![captureSession canAddInput:frontCameraInput] ||
533 (backCameraInput && ![captureSession canAddInput:backCameraInput])) {
534 RTCLogError(@"Session does not support capture inputs.");
535 return NO;
536 }
537 AVCaptureDeviceInput *input = self.useBackCamera ?
538 backCameraInput : frontCameraInput;
539 [captureSession addInput:input];
540
541 _captureSession = captureSession;
542 return YES;
543 }
544
545 - (AVCaptureVideoDataOutput *)videoDataOutput {
546 if (!_videoDataOutput) {
547 // Make the capturer output NV12. Ideally we want I420 but that's not
548 // currently supported on iPhone / iPad.
549 AVCaptureVideoDataOutput *videoDataOutput =
550 [[AVCaptureVideoDataOutput alloc] init];
551 videoDataOutput.videoSettings = @{
552 (NSString *)kCVPixelBufferPixelFormatTypeKey :
553 @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
554 };
555 videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
556 [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue];
557 _videoDataOutput = videoDataOutput;
558 }
559 return _videoDataOutput;
560 }
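Re the NV12 comment above: if a consumer did need I420, libyuv (already a dependency of this file via rotate.h) can convert the two NV12 planes. A rough sketch, assuming a bi-planar 420YpCbCr8 pixel buffer and caller-allocated I420 destination planes; this helper is illustrative, not part of this CL:

    #include "libyuv/convert.h"  // libyuv::NV12ToI420

    // Hypothetical helper: converts a bi-planar NV12 CVPixelBuffer into
    // caller-allocated I420 planes. Returns true on success.
    static bool CopyNV12BufferToI420(CVPixelBufferRef pixelBuffer,
                                     uint8_t *dstY, int dstStrideY,
                                     uint8_t *dstU, int dstStrideU,
                                     uint8_t *dstV, int dstStrideV) {
      if (CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly)
          != kCVReturnSuccess) {
        return false;
      }
      const uint8_t *srcY = static_cast<const uint8_t *>(
          CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0));
      const uint8_t *srcUV = static_cast<const uint8_t *>(
          CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1));
      const int ret = libyuv::NV12ToI420(
          srcY, (int)CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0),
          srcUV, (int)CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1),
          dstY, dstStrideY, dstU, dstStrideU, dstV, dstStrideV,
          (int)CVPixelBufferGetWidth(pixelBuffer),
          (int)CVPixelBufferGetHeight(pixelBuffer));
      CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
      return ret == 0;
    }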
561
562 - (AVCaptureDevice *)videoCaptureDeviceForPosition:
563 (AVCaptureDevicePosition)position {
564 for (AVCaptureDevice *captureDevice in
565 [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
566 if (captureDevice.position == position) {
567 return captureDevice;
568 }
569 }
570 return nil;
571 }
572
573 - (AVCaptureDeviceInput *)frontCameraInput {
574 if (!_frontCameraInput) {
575 #if TARGET_OS_IPHONE
576 AVCaptureDevice *frontCameraDevice =
577 [self videoCaptureDeviceForPosition:AVCaptureDevicePositionFront];
578 #else
579 AVCaptureDevice *frontCameraDevice =
580 [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
581 #endif
582 if (!frontCameraDevice) {
583 RTCLogWarning(@"Failed to find front capture device.");
584 return nil;
585 }
586 NSError *error = nil;
587 AVCaptureDeviceInput *frontCameraInput =
588 [AVCaptureDeviceInput deviceInputWithDevice:frontCameraDevice
589 error:&error];
590 if (!frontCameraInput) {
591 RTCLogError(@"Failed to create front camera input: %@",
592 error.localizedDescription);
593 return nil;
594 }
595 _frontCameraInput = frontCameraInput;
596 }
597 return _frontCameraInput;
598 }
599
600 - (AVCaptureDeviceInput *)backCameraInput {
601 if (!_backCameraInput) {
602 AVCaptureDevice *backCameraDevice =
603 [self videoCaptureDeviceForPosition:AVCaptureDevicePositionBack];
604 if (!backCameraDevice) {
605 RTCLogWarning(@"Failed to find front capture device.");
606 return nil;
607 }
608 NSError *error = nil;
609 AVCaptureDeviceInput *backCameraInput =
610 [AVCaptureDeviceInput deviceInputWithDevice:backCameraDevice
611 error:&error];
612 if (!backCameraInput) {
613 RTCLogError(@"Failed to create front camera input: %@",
614 error.localizedDescription);
615 return nil;
616 }
617 _backCameraInput = backCameraInput;
618 }
619 return _backCameraInput;
620 }
621
622 // Called from capture session queue.
623 - (void)updateOrientation {
624 #if TARGET_OS_IPHONE
625 switch ([UIDevice currentDevice].orientation) {
626 case UIDeviceOrientationPortrait:
627 _rotation = webrtc::kVideoRotation_90;
628 break;
629 case UIDeviceOrientationPortraitUpsideDown:
630 _rotation = webrtc::kVideoRotation_270;
631 break;
632 case UIDeviceOrientationLandscapeLeft:
633 _rotation = _capturer->GetUseBackCamera() ? webrtc::kVideoRotation_0
634 : webrtc::kVideoRotation_180;
635 break;
636 case UIDeviceOrientationLandscapeRight:
637 _rotation = _capturer->GetUseBackCamera() ? webrtc::kVideoRotation_180
638 : webrtc::kVideoRotation_0;
639 break;
640 case UIDeviceOrientationFaceUp:
641 case UIDeviceOrientationFaceDown:
642 case UIDeviceOrientationUnknown:
643 // Ignore.
644 break;
645 }
646 #endif
647 }
648
649 // Update the current session input to match what's stored in _useBackCamera.
650 - (void)updateSessionInputForUseBackCamera:(BOOL)useBackCamera {
651 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
652 block:^{
653 [_captureSession beginConfiguration];
654 AVCaptureDeviceInput *oldInput = _backCameraInput;
655 AVCaptureDeviceInput *newInput = _frontCameraInput;
656 if (useBackCamera) {
657 oldInput = _frontCameraInput;
658 newInput = _backCameraInput;
659 }
660 if (oldInput) {
661 // Ok to remove this even if it's not attached. Will be no-op.
662 [_captureSession removeInput:oldInput];
663 }
664 if (newInput) {
665 [_captureSession addInput:newInput];
666 }
667 [self updateOrientation];
668 AVCaptureDevice *newDevice = newInput.device;
669 const cricket::VideoFormat *format = _capturer->GetCaptureFormat();
670 SetFormatForCaptureDevice(newDevice, _captureSession, *format);
671 [_captureSession commitConfiguration];
672 }];
673 }
674
675 @end
676
677 namespace webrtc { 156 namespace webrtc {
678 157
679 enum AVFoundationVideoCapturerMessageType : uint32_t { 158 enum AVFoundationVideoCapturerMessageType : uint32_t {
680 kMessageTypeFrame, 159 kMessageTypeFrame,
681 }; 160 };
682 161
683 AVFoundationVideoCapturer::AVFoundationVideoCapturer() : _capturer(nil) { 162 AVFoundationVideoCapturer::AVFoundationVideoCapturer() : _capturer(nil) {
684 _capturer = 163 _capturer =
685 [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this]; 164 [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];
686 165
(...skipping 134 matching lines...)
821 buffer->width(), buffer->height(), 300 buffer->width(), buffer->height(),
822 static_cast<libyuv::RotationMode>(rotation)); 301 static_cast<libyuv::RotationMode>(rotation));
823 buffer = rotated_buffer; 302 buffer = rotated_buffer;
824 } 303 }
825 304
826 OnFrame(webrtc::VideoFrame(buffer, rotation, translated_camera_time_us), 305 OnFrame(webrtc::VideoFrame(buffer, rotation, translated_camera_time_us),
827 captured_width, captured_height); 306 captured_width, captured_height);
828 } 307 }
829 308
830 } // namespace webrtc 309 } // namespace webrtc
