Chromium Code Reviews

Unified diff: webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.mm

Issue 2349223002: Replace SessionPresets with AVCaptureDeviceFormats (Closed)
Patch Set: Address style comments (created 4 years, 2 months ago)
/*
 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

(...skipping 12 matching lines...)
#endif

#include "libyuv/rotate.h"

#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/thread.h"
#include "webrtc/common_video/include/corevideo_frame_buffer.h"
#include "webrtc/common_video/rotation.h"

-struct AVCaptureSessionPresetResolution {
-  NSString *sessionPreset;
-  int width;
-  int height;
-};
-
-#if TARGET_OS_IPHONE
-static const AVCaptureSessionPresetResolution kAvailablePresets[] = {
-  { AVCaptureSessionPreset352x288, 352, 288},
-  { AVCaptureSessionPreset640x480, 640, 480},
-  { AVCaptureSessionPreset1280x720, 1280, 720},
-  { AVCaptureSessionPreset1920x1080, 1920, 1080},
-};
-#else // macOS
-static const AVCaptureSessionPresetResolution kAvailablePresets[] = {
-  { AVCaptureSessionPreset320x240, 320, 240},
-  { AVCaptureSessionPreset352x288, 352, 288},
-  { AVCaptureSessionPreset640x480, 640, 480},
-  { AVCaptureSessionPreset960x540, 960, 540},
-  { AVCaptureSessionPreset1280x720, 1280, 720},
-};
-#endif
-
-// Mapping from cricket::VideoFormat to AVCaptureSession presets.
-static NSString *GetSessionPresetForVideoFormat(
-    const cricket::VideoFormat& format) {
-  for (const auto preset : kAvailablePresets) {
-    // Check both orientations
-    if ((format.width == preset.width && format.height == preset.height) ||
-        (format.width == preset.height && format.height == preset.width)) {
-      return preset.sessionPreset;
-    }
-  }
-  // If no matching preset is found, use a default one.
-  return AVCaptureSessionPreset640x480;
-}
+// TODO(denicija): add support for higher frame rates.
+// See http://crbug/webrtc/6355 for more info.
+static const int kFramesPerSecond = 30;
+
+static inline BOOL IsMediaSubTypeSupported(FourCharCode mediaSubType) {
+  return (mediaSubType == kCVPixelFormatType_420YpCbCr8PlanarFullRange ||
+          mediaSubType == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange);
+}
+
+static inline BOOL IsFrameRateWithinRange(int fps, AVFrameRateRange *range) {
+  return range.minFrameRate <= fps && range.maxFrameRate >= fps;
+}
+
+// Returns a filtered array of device formats based on predefined constraints
+// our stack imposes.
+static NSArray<AVCaptureDeviceFormat *> *GetEligibleDeviceFormats(
+    const AVCaptureDevice *device,
+    int supportedFps) {
+  NSMutableArray<AVCaptureDeviceFormat *> *eligibleDeviceFormats =
+      [NSMutableArray array];
+
+  for (AVCaptureDeviceFormat *format in device.formats) {
+    // Filter out subTypes that we currently don't support in the stack.
+    FourCharCode mediaSubType =
+        CMFormatDescriptionGetMediaSubType(format.formatDescription);
+    if (!IsMediaSubTypeSupported(mediaSubType)) {
+      continue;
+    }
+
+    // Filter out frame rate ranges that we currently don't support in the stack.
+    for (AVFrameRateRange *frameRateRange in format.videoSupportedFrameRateRanges) {
+      if (IsFrameRateWithinRange(supportedFps, frameRateRange)) {
+        [eligibleDeviceFormats addObject:format];
+        break;
+      }
+    }
+  }
+
+  return [eligibleDeviceFormats copy];
+}
+
+// Mapping from cricket::VideoFormat to AVCaptureDeviceFormat.
+static AVCaptureDeviceFormat *GetDeviceFormatForVideoFormat(
+    const AVCaptureDevice *device,
+    const cricket::VideoFormat &videoFormat) {
+  AVCaptureDeviceFormat *desiredDeviceFormat = nil;
+  NSArray<AVCaptureDeviceFormat *> *eligibleFormats =
+      GetEligibleDeviceFormats(device, videoFormat.framerate());
+
+  for (AVCaptureDeviceFormat *deviceFormat in eligibleFormats) {
+    CMVideoDimensions dimension =
+        CMVideoFormatDescriptionGetDimensions(deviceFormat.formatDescription);
+    FourCharCode mediaSubType =
+        CMFormatDescriptionGetMediaSubType(deviceFormat.formatDescription);
+
+    if (videoFormat.width == dimension.width &&
+        videoFormat.height == dimension.height) {
+      if (mediaSubType == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) {
+        // This is the preferred format so no need to wait for better option.
+        return deviceFormat;
+      } else {
+        // This is a good candidate, but let's wait for something better.
+        desiredDeviceFormat = deviceFormat;
+      }
+    }
+  }
+
+  return desiredDeviceFormat;
+}
+
+// Mapping from AVCaptureDeviceFormat to cricket::VideoFormat for a given input
+// device.
+static std::set<cricket::VideoFormat> GetSupportedVideoFormatsForDevice(
+    AVCaptureDevice *device) {
+  std::set<cricket::VideoFormat> supportedFormats;
+
+  NSArray<AVCaptureDeviceFormat *> *eligibleFormats =
+      GetEligibleDeviceFormats(device, kFramesPerSecond);
+
+  for (AVCaptureDeviceFormat *deviceFormat in eligibleFormats) {
+    CMVideoDimensions dimension =
+        CMVideoFormatDescriptionGetDimensions(deviceFormat.formatDescription);
+    cricket::VideoFormat format = cricket::VideoFormat(
+        dimension.width, dimension.height,
+        cricket::VideoFormat::FpsToInterval(kFramesPerSecond),
+        cricket::FOURCC_NV12);
+    supportedFormats.insert(format);
+  }
+
+  return supportedFormats;
+}
+
+// Sets the device format for the provided capture device. Returns YES/NO
+// depending on success.
+// TODO(denicija): When this file is split this static method should be
+// reconsidered. Perhaps adding a category on AVCaptureDevice would be better.
+static BOOL SetFormatForCaptureDevice(AVCaptureDevice *device,
+                                      AVCaptureSession *session,
+                                      const cricket::VideoFormat &format) {
+  AVCaptureDeviceFormat *deviceFormat =
+      GetDeviceFormatForVideoFormat(device, format);
+  const int fps = cricket::VideoFormat::IntervalToFps(format.interval);
+
+  NSError *error = nil;
+  BOOL success = YES;
+  [session beginConfiguration];
+  if ([device lockForConfiguration:&error]) {
+    @try {
+      device.activeFormat = deviceFormat;
+      device.activeVideoMinFrameDuration = CMTimeMake(1, fps);
+    } @catch (NSException *exception) {
+      RTCLogError(
+          @"Failed to set active format!\n User info:%@",
+          exception.userInfo);
+      success = NO;
+    }
+
+    [device unlockForConfiguration];
+  } else {
+    RTCLogError(
+        @"Failed to lock device %@. Error: %@",
+        device, error.userInfo);
+    success = NO;
+  }
+  [session commitConfiguration];
+
+  return success;
+}

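Note: the sketch below is not part of this CL; it only illustrates how the new helpers compose. The default-device lookup and the hard-coded 640x480 request are assumptions made for the example.

// Illustrative sketch only (not in the patch): select and apply a 640x480
// format at kFramesPerSecond on the default capture device via the helpers
// introduced above.
static void ApplyExampleFormat(AVCaptureSession *session) {
  AVCaptureDevice *device =
      [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
  cricket::VideoFormat format(
      640, 480, cricket::VideoFormat::FpsToInterval(kFramesPerSecond),
      cricket::FOURCC_NV12);
  if (!SetFormatForCaptureDevice(device, session, format)) {
    RTCLogError(@"Could not apply 640x480@%d fps to %@", kFramesPerSecond,
                device);
  }
}
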
// This class is used to capture frames using AVFoundation APIs on iOS. It is
// meant to be owned by an instance of AVFoundationVideoCapturer. The reason for
// this is that other webrtc objects own cricket::VideoCapturer, which is not
// ref counted. To prevent bad behavior we do not expose this class directly.
@interface RTCAVFoundationVideoCapturerInternal : NSObject
    <AVCaptureVideoDataOutputSampleBufferDelegate>

@property(nonatomic, readonly) AVCaptureSession *captureSession;
@property(nonatomic, readonly) dispatch_queue_t frameQueue;
@property(nonatomic, readonly) BOOL canUseBackCamera;
@property(nonatomic, assign) BOOL useBackCamera;  // Defaults to NO.
@property(atomic, assign) BOOL isRunning;  // Whether the capture session is running.
daniela-webrtc 2016/10/06 08:40:04 This is from the rebase
@property(atomic, assign) BOOL hasStarted;  // Whether we have an unmatched start.

// We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it
// when we receive frames. This is safe because this object should be owned by
// it.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer;
- (AVCaptureDevice *)getActiveCaptureDevice;

+- (nullable AVCaptureDevice *)frontCaptureDevice;
+- (nullable AVCaptureDevice *)backCaptureDevice;
+
// Starts and stops the capture session asynchronously. We cannot do this
// synchronously without blocking a WebRTC thread.
- (void)start;
- (void)stop;

@end

@implementation RTCAVFoundationVideoCapturerInternal {
  // Keep pointers to inputs for convenience.
  AVCaptureDeviceInput *_frontCameraInput;
  AVCaptureDeviceInput *_backCameraInput;
  AVCaptureVideoDataOutput *_videoDataOutput;
  // The cricket::VideoCapturer that owns this class. Should never be NULL.
  webrtc::AVFoundationVideoCapturer *_capturer;
  webrtc::VideoRotation _rotation;
  BOOL _hasRetriedOnFatalError;
  BOOL _isRunning;
  BOOL _hasStarted;
  rtc::CriticalSection _crit;
}

@synthesize captureSession = _captureSession;
@synthesize frameQueue = _frameQueue;
@synthesize useBackCamera = _useBackCamera;

@synthesize isRunning = _isRunning;
daniela-webrtc 2016/10/06 08:40:03 This is from the rebase.
@synthesize hasStarted = _hasStarted;

// This is called from the thread that creates the video source, which is likely
// the main thread.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer {
  RTC_DCHECK(capturer);
  if (self = [super init]) {
    _capturer = capturer;
    // Create the capture session and all relevant inputs and outputs. We need
    // to do this in init because the application may want the capture session
(...skipping 44 matching lines...)
}

- (AVCaptureSession *)captureSession {
  return _captureSession;
}

- (AVCaptureDevice *)getActiveCaptureDevice {
  return self.useBackCamera ? _backCameraInput.device : _frontCameraInput.device;
}

+- (AVCaptureDevice *)frontCaptureDevice {
+  return _frontCameraInput.device;
+}
+
+- (AVCaptureDevice *)backCaptureDevice {
+  return _backCameraInput.device;
+}
+
- (dispatch_queue_t)frameQueue {
  if (!_frameQueue) {
    _frameQueue =
        dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video",
                              DISPATCH_QUEUE_SERIAL);
    dispatch_set_target_queue(
        _frameQueue,
        dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
  }
  return _frameQueue;
(...skipping 210 matching lines...)
}

#endif // TARGET_OS_IPHONE

#pragma mark - Private

- (BOOL)setupCaptureSession {
  AVCaptureSession *captureSession = [[AVCaptureSession alloc] init];
#if defined(WEBRTC_IOS)
  captureSession.usesApplicationAudioSession = NO;
#endif
daniela-webrtc 2016/10/06 08:40:04 This is from the rebase
  // Add the output.
  AVCaptureVideoDataOutput *videoDataOutput = [self videoDataOutput];
  if (![captureSession canAddOutput:videoDataOutput]) {
    RTCLogError(@"Video data output unsupported.");
    return NO;
  }
  [captureSession addOutput:videoDataOutput];

  // Get the front and back cameras. If there isn't a front camera
  // give up.
(...skipping 88 matching lines...)
    if (!backCameraInput) {
      RTCLogError(@"Failed to create back camera input: %@",
                  error.localizedDescription);
      return nil;
    }
    _backCameraInput = backCameraInput;
  }
  return _backCameraInput;
}

-- (void)setMinFrameDuration:(CMTime)minFrameDuration
-                  forDevice:(AVCaptureDevice *)device {
-  NSError *error = nil;
-  if (![device lockForConfiguration:&error]) {
-    RTCLogError(@"Failed to lock device for configuration. Error: %@", error.localizedDescription);
-    return;
-  }
-  device.activeVideoMinFrameDuration = minFrameDuration;
-  [device unlockForConfiguration];
-}
-
// Called from capture session queue.
- (void)updateOrientation {
#if TARGET_OS_IPHONE
  switch ([UIDevice currentDevice].orientation) {
    case UIDeviceOrientationPortrait:
      _rotation = webrtc::kVideoRotation_90;
      break;
    case UIDeviceOrientationPortraitUpsideDown:
      _rotation = webrtc::kVideoRotation_270;
      break;
(...skipping 24 matching lines...)
      newInput = _backCameraInput;
    }
    if (oldInput) {
      // Ok to remove this even if it's not attached. Will be no-op.
      [_captureSession removeInput:oldInput];
    }
    if (newInput) {
      [_captureSession addInput:newInput];
    }
    [self updateOrientation];
+    AVCaptureDevice *newDevice = newInput.device;
+    const cricket::VideoFormat *format = _capturer->GetCaptureFormat();
+    SetFormatForCaptureDevice(newDevice, _captureSession, *format);
    [_captureSession commitConfiguration];
-
-    const auto fps = cricket::VideoFormat::IntervalToFps(_capturer->GetCaptureFormat()->interval);
-    [self setMinFrameDuration:CMTimeMake(1, fps)forDevice:newInput.device];
  }];
}

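Note: AVCaptureDevice.activeFormat is a per-device property, so after swapping inputs the negotiated format has to be re-applied to the newly selected device, which is what the added SetFormatForCaptureDevice() call above does. Below is a stand-alone sketch of that pattern; the function name and parameters are illustrative and not taken from this CL. The CL itself calls the helper inside an outer beginConfiguration/commitConfiguration pair, so the sketch does the same.

// Illustrative sketch only: swap capture inputs and re-apply the current
// format to the device behind the new input.
static void SwapInputAndReapplyFormat(AVCaptureSession *session,
                                      AVCaptureDeviceInput *oldInput,
                                      AVCaptureDeviceInput *newInput,
                                      const cricket::VideoFormat &format) {
  [session beginConfiguration];
  if (oldInput) {
    [session removeInput:oldInput];
  }
  if (newInput && [session canAddInput:newInput]) {
    [session addInput:newInput];
  }
  // SetFormatForCaptureDevice() opens its own begin/commit pair; nesting it
  // here mirrors what the patched switch-camera path does.
  SetFormatForCaptureDevice(newInput.device, session, format);
  [session commitConfiguration];
}
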
@end

namespace webrtc {

enum AVFoundationVideoCapturerMessageType : uint32_t {
  kMessageTypeFrame,
};

AVFoundationVideoCapturer::AVFoundationVideoCapturer() : _capturer(nil) {
-  // Set our supported formats. This matches kAvailablePresets.
  _capturer =
      [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];

-  std::vector<cricket::VideoFormat> supported_formats;
-  int framerate = 30;
+  std::set<cricket::VideoFormat> front_camera_video_formats =
+      GetSupportedVideoFormatsForDevice([_capturer frontCaptureDevice]);

-#if TARGET_OS_IPHONE
-  if ([UIDevice deviceType] == RTCDeviceTypeIPhone4S) {
-    set_enable_video_adapter(false);
-    framerate = 15;
+  std::set<cricket::VideoFormat> back_camera_video_formats =
+      GetSupportedVideoFormatsForDevice([_capturer backCaptureDevice]);
+
+  std::vector<cricket::VideoFormat> intersection_video_formats;
+  if (back_camera_video_formats.empty()) {
+    intersection_video_formats.assign(front_camera_video_formats.begin(),
+                                      front_camera_video_formats.end());
+
+  } else if (front_camera_video_formats.empty()) {
+    intersection_video_formats.assign(back_camera_video_formats.begin(),
+                                      back_camera_video_formats.end());
+  } else {
+    std::set_intersection(
+        front_camera_video_formats.begin(), front_camera_video_formats.end(),
+        back_camera_video_formats.begin(), back_camera_video_formats.end(),
+        std::back_inserter(intersection_video_formats));
  }
-#endif
-
-  for (const auto preset : kAvailablePresets) {
-    if ([_capturer.captureSession canSetSessionPreset:preset.sessionPreset]) {
-      const auto format = cricket::VideoFormat(
-          preset.width,
-          preset.height,
-          cricket::VideoFormat::FpsToInterval(framerate),
-          cricket::FOURCC_NV12);
-      supported_formats.push_back(format);
-    }
-  }
-
-  SetSupportedFormats(supported_formats);
+  SetSupportedFormats(intersection_video_formats);
}

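Note: std::set_intersection requires both input ranges to be sorted with the same ordering; that holds above because both operands are std::set&lt;cricket::VideoFormat&gt;, which keeps its elements ordered by cricket::VideoFormat's operator&lt; (the same ordering the intersection uses by default). A stand-alone restatement of the selection logic, with an illustrative helper name:

// Illustrative sketch only: if one camera is missing, advertise the other
// camera's formats; otherwise advertise only the formats common to both.
// Requires <algorithm>, <iterator>, <set> and <vector>.
static std::vector<cricket::VideoFormat> IntersectCameraFormats(
    const std::set<cricket::VideoFormat>& front,
    const std::set<cricket::VideoFormat>& back) {
  std::vector<cricket::VideoFormat> result;
  if (back.empty()) {
    result.assign(front.begin(), front.end());
  } else if (front.empty()) {
    result.assign(back.begin(), back.end());
  } else {
    std::set_intersection(front.begin(), front.end(), back.begin(), back.end(),
                          std::back_inserter(result));
  }
  return result;
}
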
AVFoundationVideoCapturer::~AVFoundationVideoCapturer() {
  _capturer = nil;
}

cricket::CaptureState AVFoundationVideoCapturer::Start(
    const cricket::VideoFormat& format) {
  if (!_capturer) {
    LOG(LS_ERROR) << "Failed to create AVFoundation capturer.";
    return cricket::CaptureState::CS_FAILED;
  }
  if (_capturer.isRunning) {
    LOG(LS_ERROR) << "The capturer is already running.";
    return cricket::CaptureState::CS_FAILED;
  }

-  NSString *desiredPreset = GetSessionPresetForVideoFormat(format);
-  RTC_DCHECK(desiredPreset);
+  AVCaptureDevice* device = [_capturer getActiveCaptureDevice];
+  AVCaptureSession* session = _capturer.captureSession;

-  [_capturer.captureSession beginConfiguration];
-  if (![_capturer.captureSession canSetSessionPreset:desiredPreset]) {
-    LOG(LS_ERROR) << "Unsupported video format.";
-    [_capturer.captureSession commitConfiguration];
+  if (!SetFormatForCaptureDevice(device, session, format)) {
    return cricket::CaptureState::CS_FAILED;
  }
-  _capturer.captureSession.sessionPreset = desiredPreset;
-  [_capturer.captureSession commitConfiguration];

  SetCaptureFormat(&format);
  // This isn't super accurate because it takes a while for the AVCaptureSession
  // to spin up, and this call returns async.
  // TODO(tkchin): make this better.
  [_capturer start];
  SetCaptureState(cricket::CaptureState::CS_RUNNING);

-  // Adjust the framerate for all capture devices.
-  const auto fps = cricket::VideoFormat::IntervalToFps(format.interval);
-  AVCaptureDevice *activeDevice = [_capturer getActiveCaptureDevice];
-  [_capturer setMinFrameDuration:CMTimeMake(1, fps)forDevice:activeDevice];
-
  return cricket::CaptureState::CS_STARTING;
}

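Note: for reference, a hedged example (not from this CL) of how a caller could drive Start() with one of the formats advertised by the constructor, assuming the GetSupportedFormats() accessor inherited from cricket::VideoCapturer:

// Illustrative sketch only: start capture using the first advertised format.
static void StartWithFirstSupportedFormat(
    webrtc::AVFoundationVideoCapturer* capturer) {
  const std::vector<cricket::VideoFormat>* formats =
      capturer->GetSupportedFormats();
  if (formats && !formats->empty()) {
    capturer->Start(formats->front());
  }
}
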
void AVFoundationVideoCapturer::Stop() {
  [_capturer stop];
  SetCaptureFormat(NULL);
}

bool AVFoundationVideoCapturer::IsRunning() {
  return _capturer.isRunning;
(...skipping 79 matching lines...)
      buffer = rotated_buffer;
    }
  }

  OnFrame(cricket::WebRtcVideoFrame(buffer, rotation,
                                    translated_camera_time_us, 0),
          captured_width, captured_height);
}

}  // namespace webrtc