Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(313)

Side by Side Diff: webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.mm

Issue 2349223002: Replace SessionPresets with AVCaptureDeviceFormats (Closed)
Patch Set: Address reviewer's comments Created 4 years, 2 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « no previous file | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 /* 1 /*
2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
3 * 3 *
4 * Use of this source code is governed by a BSD-style license 4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source 5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found 6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may 7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree. 8 * be found in the AUTHORS file in the root of the source tree.
9 */ 9 */
10 10
(...skipping 12 matching lines...) Expand all
23 #endif 23 #endif
24 24
25 #include "libyuv/rotate.h" 25 #include "libyuv/rotate.h"
26 26
27 #include "webrtc/base/bind.h" 27 #include "webrtc/base/bind.h"
28 #include "webrtc/base/checks.h" 28 #include "webrtc/base/checks.h"
29 #include "webrtc/base/thread.h" 29 #include "webrtc/base/thread.h"
30 #include "webrtc/common_video/include/corevideo_frame_buffer.h" 30 #include "webrtc/common_video/include/corevideo_frame_buffer.h"
31 #include "webrtc/common_video/rotation.h" 31 #include "webrtc/common_video/rotation.h"
32 32
33 struct AVCaptureSessionPresetResolution { 33 static const int kFramesPerSecond = 30;
magjed_webrtc 2016/09/27 14:36:10 Can you add a todo, something like: // TODO(denici
daniela-webrtc 2016/09/28 15:29:26 Done.
34 NSString *sessionPreset;
35 int width;
36 int height;
37 };
38 34
 39 #if TARGET_OS_IPHONE 35 // Mapping from cricket::VideoFormat to AVCaptureDeviceFormat.
tkchin_webrtc 2016/09/27 13:39:34 nit: Mapping
daniela-webrtc 2016/09/28 15:29:27 Done.
40 static const AVCaptureSessionPresetResolution kAvailablePresets[] = { 36 static AVCaptureDeviceFormat* GetDeviceFormatForVideoFormat(
41 { AVCaptureSessionPreset352x288, 352, 288}, 37 const AVCaptureDevice* device,
42 { AVCaptureSessionPreset640x480, 640, 480}, 38 const cricket::VideoFormat& videoFormat) {
43 { AVCaptureSessionPreset1280x720, 1280, 720}, 39 AVCaptureDeviceFormat* desiredDeviceFormat = nil;
tkchin_webrtc 2016/09/27 13:39:34 if treating this as a C/C++ function, should be de
daniela-webrtc 2016/09/28 15:29:26 Done.
44 { AVCaptureSessionPreset1920x1080, 1920, 1080}, 40 for (AVCaptureDeviceFormat* deviceFormat in [device formats]) {
tkchin_webrtc 2016/09/27 13:39:33 dot syntax for properties. Here and elsewhere
daniela-webrtc 2016/09/28 15:29:27 Done.
45 }; 41 CMVideoDimensions dimension =
46 #else // macOS 42 CMVideoFormatDescriptionGetDimensions([deviceFormat formatDescription]);
47 static const AVCaptureSessionPresetResolution kAvailablePresets[] = { 43 FourCharCode code =
48 { AVCaptureSessionPreset320x240, 320, 240}, 44 CMFormatDescriptionGetMediaSubType([deviceFormat formatDescription]);
49 { AVCaptureSessionPreset352x288, 352, 288},
50 { AVCaptureSessionPreset640x480, 640, 480},
51 { AVCaptureSessionPreset960x540, 960, 540},
52 { AVCaptureSessionPreset1280x720, 1280, 720},
53 };
54 #endif
55 45
56 // Mapping from cricket::VideoFormat to AVCaptureSession presets. 46 if (code != kCVPixelFormatType_420YpCbCr8BiPlanarFullRange &&
tkchin_webrtc 2016/09/27 13:39:33 If we care about only selectively picking out NV12
magjed_webrtc 2016/09/27 14:36:10 Ah, true. We hardcode the pixel format to FullRang
daniela-webrtc 2016/09/28 15:29:26 Not sure I follow. The reason why we allow both fu
57 static NSString *GetSessionPresetForVideoFormat( 47 code != kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange) {
58 const cricket::VideoFormat& format) { 48 continue;
59 for (const auto preset : kAvailablePresets) { 49 }
60 // Check both orientations 50
61 if ((format.width == preset.width && format.height == preset.height) || 51 if (videoFormat.width == dimension.width &&
62 (format.width == preset.height && format.height == preset.width)) { 52 videoFormat.height == dimension.height) {
63 return preset.sessionPreset; 53 for (AVFrameRateRange* framerate in
54 [deviceFormat videoSupportedFrameRateRanges]) {
55 if (!(framerate.minFrameRate >= kFramesPerSecond &&
tkchin_webrtc 2016/09/27 13:39:34 perhaps frameRate.minFrameRate < kFramesPerSecond
daniela-webrtc 2016/09/28 15:29:26 Great catch. Thanks
56 framerate.maxFrameRate <= kFramesPerSecond)) {
magjed_webrtc 2016/09/27 14:36:10 I think this check is incorrect, the comparison is
daniela-webrtc 2016/09/28 15:29:26 Done.
57 continue;
58 }
59 }
60 if (code == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) {
 61 // This is the preferred format, so no need to wait for a better option.
tkchin_webrtc 2016/09/27 13:39:34 nit: Capitalize first letter of comment and end wi
daniela-webrtc 2016/09/28 15:29:26 Done.
62 return deviceFormat;
63 } else {
 64 // This is a good candidate, but let's wait for something better.
65 desiredDeviceFormat = deviceFormat;
magjed_webrtc 2016/09/27 14:36:10 I'm wondering how we should select between multipl
66 }
64 } 67 }
65 } 68 }
66 // If no matching preset is found, use a default one. 69 return desiredDeviceFormat;
67 return AVCaptureSessionPreset640x480; 70 }
71
72 // Mapping from AVCaptureDeviceFormat to cricket::VideoFormat for given input
73 // device.
74 static std::set<cricket::VideoFormat> GetSupportedVideoFormatsForDevice(
75 AVCaptureDevice* device) {
tkchin_webrtc 2016/09/27 13:39:33 ditto pick one of C++/ObjC style conventions to us
daniela-webrtc 2016/09/28 15:29:26 Done.
76 std::set<cricket::VideoFormat> supportedFormats;
77
78 for (AVCaptureDeviceFormat* deviceFormat in [device formats]) {
79 FourCharCode code =
80 CMFormatDescriptionGetMediaSubType([deviceFormat formatDescription]);
81
82 if (code != kCVPixelFormatType_420YpCbCr8BiPlanarFullRange &&
tkchin_webrtc 2016/09/27 13:39:33 I would've expected this code to output all possib
magjed_webrtc 2016/09/27 14:36:10 FYI - We currently only support NV12 because the c
83 code != kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange) {
84 continue;
85 }
86
87 CMVideoDimensions dimension =
88 CMVideoFormatDescriptionGetDimensions([deviceFormat formatDescription]);
89
magjed_webrtc 2016/09/27 14:36:10 I think we should have the fps check here as well,
daniela-webrtc 2016/09/28 15:29:26 Agreed. I don't like how I handled the filtering o
daniela-webrtc 2016/09/30 11:35:59 I've uploaded a patch with this change. Please let
90 cricket::VideoFormat format = cricket::VideoFormat(
91 dimension.width, dimension.height,
92 cricket::VideoFormat::FpsToInterval(kFramesPerSecond),
93 cricket::FOURCC_NV12);
94 supportedFormats.insert(format);
95 }
96
97 return supportedFormats;
68 } 98 }
69 99
70 // This class used to capture frames using AVFoundation APIs on iOS. It is meant 100 // This class used to capture frames using AVFoundation APIs on iOS. It is meant
71 // to be owned by an instance of AVFoundationVideoCapturer. The reason for this 101 // to be owned by an instance of AVFoundationVideoCapturer. The reason for this
72 // because other webrtc objects own cricket::VideoCapturer, which is not 102 // because other webrtc objects own cricket::VideoCapturer, which is not
73 // ref counted. To prevent bad behavior we do not expose this class directly. 103 // ref counted. To prevent bad behavior we do not expose this class directly.
74 @interface RTCAVFoundationVideoCapturerInternal : NSObject 104 @interface RTCAVFoundationVideoCapturerInternal : NSObject
75 <AVCaptureVideoDataOutputSampleBufferDelegate> 105 <AVCaptureVideoDataOutputSampleBufferDelegate>
76 106
77 @property(nonatomic, readonly) AVCaptureSession *captureSession; 107 @property(nonatomic, readonly) AVCaptureSession *captureSession;
78 @property(nonatomic, readonly) dispatch_queue_t frameQueue; 108 @property(nonatomic, readonly) dispatch_queue_t frameQueue;
79 @property(nonatomic, readonly) BOOL canUseBackCamera; 109 @property(nonatomic, readonly) BOOL canUseBackCamera;
80 @property(nonatomic, assign) BOOL useBackCamera; // Defaults to NO. 110 @property(nonatomic, assign) BOOL useBackCamera; // Defaults to NO.
81 @property(nonatomic, assign) BOOL isRunning; // Whether the capture session is running. 111 @property(nonatomic, assign) BOOL isRunning; // Whether the capture session is running.
82 @property(atomic, assign) BOOL hasStarted; // Whether we have an unmatched star t. 112 @property(atomic, assign) BOOL hasStarted; // Whether we have an unmatched star t.
83 113
84 // We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it 114 // We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it
85 // when we receive frames. This is safe because this object should be owned by 115 // when we receive frames. This is safe because this object should be owned by
86 // it. 116 // it.
87 - (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer; 117 - (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer;
88 - (AVCaptureDevice *)getActiveCaptureDevice; 118 - (AVCaptureDevice *)getActiveCaptureDevice;
89 119
120 - (nullable AVCaptureDevice*)frontCaptureDevice;
tkchin_webrtc 2016/09/27 13:39:34 not sure we need nullability annotations for inter
daniela-webrtc 2016/09/28 15:29:26 In general I like nullable/nonnull annotations as
121 - (nullable AVCaptureDevice*)backCaptureDevice;
122
90 // Starts and stops the capture session asynchronously. We cannot do this 123 // Starts and stops the capture session asynchronously. We cannot do this
91 // synchronously without blocking a WebRTC thread. 124 // synchronously without blocking a WebRTC thread.
92 - (void)start; 125 - (void)start;
93 - (void)stop; 126 - (void)stop;
94 127
95 @end 128 @end
96 129
97 @implementation RTCAVFoundationVideoCapturerInternal { 130 @implementation RTCAVFoundationVideoCapturerInternal {
98 // Keep pointers to inputs for convenience. 131 // Keep pointers to inputs for convenience.
99 AVCaptureDeviceInput *_frontCameraInput; 132 AVCaptureDeviceInput *_frontCameraInput;
(...skipping 68 matching lines...) Expand 10 before | Expand all | Expand 10 after
168 } 201 }
169 202
170 - (AVCaptureSession *)captureSession { 203 - (AVCaptureSession *)captureSession {
171 return _captureSession; 204 return _captureSession;
172 } 205 }
173 206
174 - (AVCaptureDevice *)getActiveCaptureDevice { 207 - (AVCaptureDevice *)getActiveCaptureDevice {
175 return self.useBackCamera ? _backCameraInput.device : _frontCameraInput.device ; 208 return self.useBackCamera ? _backCameraInput.device : _frontCameraInput.device ;
176 } 209 }
177 210
211 - (AVCaptureDevice*)frontCaptureDevice {
212 return _frontCameraInput.device;
tkchin_webrtc 2016/09/27 13:39:33 nit: Device *) here and below
daniela-webrtc 2016/09/28 15:29:26 Done.
213 }
214
215 - (AVCaptureDevice*)backCaptureDevice {
216 return _backCameraInput.device;
217 }
218
178 - (dispatch_queue_t)frameQueue { 219 - (dispatch_queue_t)frameQueue {
179 if (!_frameQueue) { 220 if (!_frameQueue) {
180 _frameQueue = 221 _frameQueue =
181 dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video", 222 dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video",
182 DISPATCH_QUEUE_SERIAL); 223 DISPATCH_QUEUE_SERIAL);
183 dispatch_set_target_queue( 224 dispatch_set_target_queue(
184 _frameQueue, 225 _frameQueue,
185 dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0)); 226 dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
186 } 227 }
187 return _frameQueue; 228 return _frameQueue;
(...skipping 331 matching lines...) Expand 10 before | Expand all | Expand 10 after
519 if (!backCameraInput) { 560 if (!backCameraInput) {
520 RTCLogError(@"Failed to create front camera input: %@", 561 RTCLogError(@"Failed to create front camera input: %@",
521 error.localizedDescription); 562 error.localizedDescription);
522 return nil; 563 return nil;
523 } 564 }
524 _backCameraInput = backCameraInput; 565 _backCameraInput = backCameraInput;
525 } 566 }
526 return _backCameraInput; 567 return _backCameraInput;
527 } 568 }
528 569
529 - (void)setMinFrameDuration:(CMTime)minFrameDuration
530 forDevice:(AVCaptureDevice *)device {
531 NSError *error = nil;
532 if (![device lockForConfiguration:&error]) {
533 RTCLogError(@"Failed to lock device for configuration. Error: %@", error.loc alizedDescription);
534 return;
535 }
536 device.activeVideoMinFrameDuration = minFrameDuration;
537 [device unlockForConfiguration];
538 }
539
540 // Called from capture session queue. 570 // Called from capture session queue.
541 - (void)updateOrientation { 571 - (void)updateOrientation {
542 #if TARGET_OS_IPHONE 572 #if TARGET_OS_IPHONE
543 switch ([UIDevice currentDevice].orientation) { 573 switch ([UIDevice currentDevice].orientation) {
544 case UIDeviceOrientationPortrait: 574 case UIDeviceOrientationPortrait:
545 _rotation = webrtc::kVideoRotation_90; 575 _rotation = webrtc::kVideoRotation_90;
546 break; 576 break;
547 case UIDeviceOrientationPortraitUpsideDown: 577 case UIDeviceOrientationPortraitUpsideDown:
548 _rotation = webrtc::kVideoRotation_270; 578 _rotation = webrtc::kVideoRotation_270;
549 break; 579 break;
(...skipping 24 matching lines...) Expand all
574 newInput = _backCameraInput; 604 newInput = _backCameraInput;
575 } 605 }
576 if (oldInput) { 606 if (oldInput) {
577 // Ok to remove this even if it's not attached. Will be no-op. 607 // Ok to remove this even if it's not attached. Will be no-op.
578 [_captureSession removeInput:oldInput]; 608 [_captureSession removeInput:oldInput];
579 } 609 }
580 if (newInput) { 610 if (newInput) {
581 [_captureSession addInput:newInput]; 611 [_captureSession addInput:newInput];
582 } 612 }
583 [self updateOrientation]; 613 [self updateOrientation];
614 AVCaptureDevice* newDevice = [newInput device];
tkchin_webrtc 2016/09/27 13:39:34 Device *newDevice here and below
daniela-webrtc 2016/09/28 15:29:26 Done.
615
616 NSError* error = nil;
617 const cricket::VideoFormat* format = _capturer->GetCaptureFormat();
tkchin_webrtc 2016/09/27 13:39:34 is this thread safe? (Calling from different queue
daniela-webrtc 2016/09/29 11:02:31 Not sure. If i understand correctly the capture fo
618 AVCaptureDeviceFormat* newFormat =
619 GetDeviceFormatForVideoFormat(newDevice, *format);
tkchin_webrtc 2016/09/27 13:39:33 How expensive is this operation? Is it slow to que
daniela-webrtc 2016/09/29 11:02:31 Shouldn't be too expensive and ideally shouldn't h
620 const auto fps = cricket::VideoFormat::IntervalToFps(_capturer->GetCaptureFo rmat()->interval);
621 if ([newDevice lockForConfiguration:&error]) {
622 @try {
623 [newDevice setActiveFormat:newFormat];
tkchin_webrtc 2016/09/27 13:39:33 Use dot syntax where able .activeFormat =
daniela-webrtc 2016/09/28 15:29:26 Done.
624 [newDevice setActiveVideoMinFrameDuration:CMTimeMake(1, fps)];
625 } @catch (NSException* exception) {
626 LOG(LS_ERROR) << [NSString stringWithFormat:@"Exception occured while "
tkchin_webrtc 2016/09/27 13:39:34 We've used RTCLogError as a convenience
daniela-webrtc 2016/09/28 15:29:26 Done.
daniela-webrtc 2016/09/29 11:02:31 For future reference, do we use RTCLogError in Obj
627 @"setting active "
628 @"format!\n User info:%@",
629 exception.userInfo];
630 }
631 [newDevice unlockForConfiguration];
632 }
584 [_captureSession commitConfiguration]; 633 [_captureSession commitConfiguration];
585
586 const auto fps = cricket::VideoFormat::IntervalToFps(_capturer->GetCaptureFo rmat()->interval);
587 [self setMinFrameDuration:CMTimeMake(1, fps)forDevice:newInput.device];
588 }]; 634 }];
589 } 635 }
590 636
591 @end 637 @end
592 638
593 namespace webrtc { 639 namespace webrtc {
594 640
595 enum AVFoundationVideoCapturerMessageType : uint32_t { 641 enum AVFoundationVideoCapturerMessageType : uint32_t {
596 kMessageTypeFrame, 642 kMessageTypeFrame,
597 }; 643 };
598 644
599 AVFoundationVideoCapturer::AVFoundationVideoCapturer() : _capturer(nil) { 645 AVFoundationVideoCapturer::AVFoundationVideoCapturer() : _capturer(nil) {
600 // Set our supported formats. This matches kAvailablePresets.
601 _capturer = 646 _capturer =
602 [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this]; 647 [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];
603 648
604 std::vector<cricket::VideoFormat> supported_formats; 649 std::set<cricket::VideoFormat> frontCameraSupportedVideoFormats =
tkchin_webrtc 2016/09/27 13:39:34 c++ style here so front_camera_supported_video_for
daniela-webrtc 2016/09/28 15:29:26 Done.
605 int framerate = 30; 650 GetSupportedVideoFormatsForDevice([_capturer frontCaptureDevice]);
606 651
607 #if TARGET_OS_IPHONE 652 std::set<cricket::VideoFormat> backCameraSupportedVideoFormats =
608 if ([UIDevice deviceType] == RTCDeviceTypeIPhone4S) { 653 GetSupportedVideoFormatsForDevice([_capturer backCaptureDevice]);
609 set_enable_video_adapter(false);
610 framerate = 15;
611 }
612 #endif
613 654
614 for (const auto preset : kAvailablePresets) { 655 std::vector<cricket::VideoFormat> intersectionVideoFormats;
615 if ([_capturer.captureSession canSetSessionPreset:preset.sessionPreset]) { 656 if (backCameraSupportedVideoFormats.size() == 0) {
616 const auto format = cricket::VideoFormat( 657 std::copy(frontCameraSupportedVideoFormats.begin(),
617 preset.width, 658 frontCameraSupportedVideoFormats.end(),
618 preset.height, 659 std::back_inserter(intersectionVideoFormats));
619 cricket::VideoFormat::FpsToInterval(framerate), 660 SetSupportedFormats(intersectionVideoFormats);
620 cricket::FOURCC_NV12); 661 return;
621 supported_formats.push_back(format);
622 }
623 } 662 }
624 663
625 SetSupportedFormats(supported_formats); 664 if (frontCameraSupportedVideoFormats.size() == 0) {
665 std::copy(backCameraSupportedVideoFormats.begin(),
666 backCameraSupportedVideoFormats.end(),
667 std::back_inserter(intersectionVideoFormats));
668 SetSupportedFormats(intersectionVideoFormats);
669 return;
670 }
671
672 std::set_intersection(frontCameraSupportedVideoFormats.begin(),
673 frontCameraSupportedVideoFormats.end(),
674 backCameraSupportedVideoFormats.begin(),
675 backCameraSupportedVideoFormats.end(),
676 std::back_inserter(intersectionVideoFormats));
677
678 SetSupportedFormats(intersectionVideoFormats);
626 } 679 }
627 680
628 AVFoundationVideoCapturer::~AVFoundationVideoCapturer() { 681 AVFoundationVideoCapturer::~AVFoundationVideoCapturer() {
629 _capturer = nil; 682 _capturer = nil;
630 } 683 }
631 684
632 cricket::CaptureState AVFoundationVideoCapturer::Start( 685 cricket::CaptureState AVFoundationVideoCapturer::Start(
633 const cricket::VideoFormat& format) { 686 const cricket::VideoFormat& format) {
634 if (!_capturer) { 687 if (!_capturer) {
635 LOG(LS_ERROR) << "Failed to create AVFoundation capturer."; 688 LOG(LS_ERROR) << "Failed to create AVFoundation capturer.";
636 return cricket::CaptureState::CS_FAILED; 689 return cricket::CaptureState::CS_FAILED;
637 } 690 }
638 if (_capturer.isRunning) { 691 if (_capturer.isRunning) {
639 LOG(LS_ERROR) << "The capturer is already running."; 692 LOG(LS_ERROR) << "The capturer is already running.";
640 return cricket::CaptureState::CS_FAILED; 693 return cricket::CaptureState::CS_FAILED;
641 } 694 }
642 695
643 NSString *desiredPreset = GetSessionPresetForVideoFormat(format); 696 AVCaptureDeviceFormat* deviceFormat =
644 RTC_DCHECK(desiredPreset); 697 GetDeviceFormatForVideoFormat([_capturer getActiveCaptureDevice], format);
698 const auto fps = cricket::VideoFormat::IntervalToFps(format.interval);
645 699
646 [_capturer.captureSession beginConfiguration]; 700 AVCaptureDevice* device = [_capturer getActiveCaptureDevice];
647 if (![_capturer.captureSession canSetSessionPreset:desiredPreset]) { 701 AVCaptureSession* session = [_capturer captureSession];
648 LOG(LS_ERROR) << "Unsupported video format."; 702 NSError* error = nil;
649 [_capturer.captureSession commitConfiguration]; 703
650 return cricket::CaptureState::CS_FAILED; 704 [session beginConfiguration]; // the session to which the receiver's
tkchin_webrtc 2016/09/27 13:39:34 nit: don't split comments out at end of line, plac
daniela-webrtc 2016/09/28 15:29:26 Actually this comment is not needed. I'll remove i
daniela-webrtc 2016/09/30 11:35:59 Done.
705 // AVCaptureDeviceInput is added.
706 if ([device lockForConfiguration:&error]) {
magjed_webrtc 2016/09/27 14:36:10 Can we extract this out in a common function: void
daniela-webrtc 2016/09/29 11:02:31 YES! I forgot to do this. Good catch. Thanks
daniela-webrtc 2016/09/30 11:35:59 Done.
707 @try {
708 [device setActiveFormat:deviceFormat];
709 [device setActiveVideoMinFrameDuration:CMTimeMake(1, fps)];
710 } @catch (NSException* exception) {
711 LOG(LS_ERROR) << [NSString
712 stringWithFormat:
713 @"Exception occured while setting active format!\n User info:%@",
714 exception.userInfo];
715 return cricket::CaptureState::CS_FAILED;
716 }
717
718 [device unlockForConfiguration];
651 } 719 }
652 _capturer.captureSession.sessionPreset = desiredPreset; 720
653 [_capturer.captureSession commitConfiguration]; 721 [session commitConfiguration];
654 722
655 SetCaptureFormat(&format); 723 SetCaptureFormat(&format);
656 // This isn't super accurate because it takes a while for the AVCaptureSession 724 // This isn't super accurate because it takes a while for the AVCaptureSession
657 // to spin up, and this call returns async. 725 // to spin up, and this call returns async.
658 // TODO(tkchin): make this better. 726 // TODO(tkchin): make this better.
659 [_capturer start]; 727 [_capturer start];
660 SetCaptureState(cricket::CaptureState::CS_RUNNING); 728 SetCaptureState(cricket::CaptureState::CS_RUNNING);
661 729
662 // Adjust the framerate for all capture devices.
663 const auto fps = cricket::VideoFormat::IntervalToFps(format.interval);
664 AVCaptureDevice *activeDevice = [_capturer getActiveCaptureDevice];
665 [_capturer setMinFrameDuration:CMTimeMake(1, fps)forDevice:activeDevice];
666
667 return cricket::CaptureState::CS_STARTING; 730 return cricket::CaptureState::CS_STARTING;
668 } 731 }
669 732
670 void AVFoundationVideoCapturer::Stop() { 733 void AVFoundationVideoCapturer::Stop() {
671 [_capturer stop]; 734 [_capturer stop];
672 SetCaptureFormat(NULL); 735 SetCaptureFormat(NULL);
673 } 736 }
674 737
675 bool AVFoundationVideoCapturer::IsRunning() { 738 bool AVFoundationVideoCapturer::IsRunning() {
676 return _capturer.isRunning; 739 return _capturer.isRunning;
(...skipping 79 matching lines...) Expand 10 before | Expand all | Expand 10 after
756 buffer = rotated_buffer; 819 buffer = rotated_buffer;
757 } 820 }
758 } 821 }
759 822
760 OnFrame(cricket::WebRtcVideoFrame(buffer, rotation, 823 OnFrame(cricket::WebRtcVideoFrame(buffer, rotation,
761 translated_camera_time_us, 0), 824 translated_camera_time_us, 0),
762 captured_width, captured_height); 825 captured_width, captured_height);
763 } 826 }
764 827
765 } // namespace webrtc 828 } // namespace webrtc
OLDNEW
« no previous file with comments | « no previous file | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698