Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(465)

Side by Side Diff: webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.mm

Issue 2349223002: Replace SessionPresets with AVCaptureDeviceFormats (Closed)
Patch Set: Extracting methods to avoid duplication of code. Created 4 years, 2 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « no previous file | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 /* 1 /*
2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
3 * 3 *
4 * Use of this source code is governed by a BSD-style license 4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source 5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found 6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may 7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree. 8 * be found in the AUTHORS file in the root of the source tree.
9 */ 9 */
10 10
(...skipping 12 matching lines...) Expand all
23 #endif 23 #endif
24 24
25 #include "libyuv/rotate.h" 25 #include "libyuv/rotate.h"
26 26
27 #include "webrtc/base/bind.h" 27 #include "webrtc/base/bind.h"
28 #include "webrtc/base/checks.h" 28 #include "webrtc/base/checks.h"
29 #include "webrtc/base/thread.h" 29 #include "webrtc/base/thread.h"
30 #include "webrtc/common_video/include/corevideo_frame_buffer.h" 30 #include "webrtc/common_video/include/corevideo_frame_buffer.h"
31 #include "webrtc/common_video/rotation.h" 31 #include "webrtc/common_video/rotation.h"
32 32
33 struct AVCaptureSessionPresetResolution { 33 // TODO(denicija): add support for higher frame rates
34 NSString *sessionPreset; 34 // See http://crbug/webrtc/6355 for more info.
35 int width; 35 static const int kFramesPerSecond = 30;
36 int height;
37 };
38 36
39 #if TARGET_OS_IPHONE 37 static inline BOOL IsMediaSubTypeSupported(FourCharCode mediaSubType) {
40 static const AVCaptureSessionPresetResolution kAvailablePresets[] = { 38 return (mediaSubType != kCVPixelFormatType_420YpCbCr8PlanarFullRange &&
 daniela-webrtc 2016/09/30 12:01:52 I've actually made a mistake. The check should be in
41 { AVCaptureSessionPreset352x288, 352, 288}, 39 mediaSubType != kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange);
42 { AVCaptureSessionPreset640x480, 640, 480}, 40 }
43 { AVCaptureSessionPreset1280x720, 1280, 720},
44 { AVCaptureSessionPreset1920x1080, 1920, 1080},
45 };
46 #else // macOS
47 static const AVCaptureSessionPresetResolution kAvailablePresets[] = {
48 { AVCaptureSessionPreset320x240, 320, 240},
49 { AVCaptureSessionPreset352x288, 352, 288},
50 { AVCaptureSessionPreset640x480, 640, 480},
51 { AVCaptureSessionPreset960x540, 960, 540},
52 { AVCaptureSessionPreset1280x720, 1280, 720},
53 };
54 #endif
55 41
56 // Mapping from cricket::VideoFormat to AVCaptureSession presets. 42 static inline BOOL IsFrameRateWithinRange(int fps, AVFrameRateRange* range) {
57 static NSString *GetSessionPresetForVideoFormat( 43 return range.minFrameRate <= fps && range.maxFrameRate >= fps;
58 const cricket::VideoFormat& format) { 44 }
59 for (const auto preset : kAvailablePresets) { 45
60 // Check both orientations 46 // Returns filtered array of device formats based on predefined constraints our
61 if ((format.width == preset.width && format.height == preset.height) || 47 // stack imposes.
62 (format.width == preset.height && format.height == preset.width)) { 48 static NSArray<AVCaptureDeviceFormat *>* GetEligibleDeviceFormats(
63 return preset.sessionPreset; 49 const AVCaptureDevice *device,
50 int supportedFps) {
51 NSMutableArray<AVCaptureDeviceFormat *> *eligibleDeviceFormats =
52 [NSMutableArray array];
53
54 for (AVCaptureDeviceFormat *format in device.formats) {
55 // Filter out subTypes that we currently don't support in the stack
56 FourCharCode mediaSubType =
57 CMFormatDescriptionGetMediaSubType(format.formatDescription);
58 if (!IsMediaSubTypeSupported(mediaSubType)) {
59 continue;
60 }
61
62 // Filter out frame rate ranges that we currently don't support in the stack
63 for (AVFrameRateRange *frameRateRange in format
64 .videoSupportedFrameRateRanges) {
65 if (IsFrameRateWithinRange(supportedFps, frameRateRange)) {
66 [eligibleDeviceFormats addObject:format];
67 continue;
68 }
64 } 69 }
65 } 70 }
66 // If no matching preset is found, use a default one. 71
67 return AVCaptureSessionPreset640x480; 72 return [eligibleDeviceFormats copy];
73 }
74
75 // Mapping from cricket::VideoFormat to AVCaptureDeviceFormat.
76 static AVCaptureDeviceFormat *GetDeviceFormatForVideoFormat(
77 const AVCaptureDevice *device,
78 const cricket::VideoFormat& videoFormat) {
79 AVCaptureDeviceFormat *desiredDeviceFormat = nil;
80 NSArray<AVCaptureDeviceFormat *>* eligibleFormats =
81 GetEligibleDeviceFormats(device, videoFormat.framerate());
82
83 for (AVCaptureDeviceFormat *deviceFormat in eligibleFormats) {
84 CMVideoDimensions dimension =
85 CMVideoFormatDescriptionGetDimensions(deviceFormat.formatDescription);
86 FourCharCode mediaSubType =
87 CMFormatDescriptionGetMediaSubType(deviceFormat.formatDescription);
88
89 if (videoFormat.width == dimension.width &&
90 videoFormat.height == dimension.height) {
91 if (mediaSubType == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) {
92 // This is the preferred format so no need to wait for better option.
93 return deviceFormat;
94 } else {
95 // This is good candidate, but let's wait for something better.
96 desiredDeviceFormat = deviceFormat;
97 }
98 }
99 }
100
101 return desiredDeviceFormat;
102 }
103
104 // Mapping from AVCaptureDeviceFormat to cricket::VideoFormat for given input
105 // device.
106 static std::set<cricket::VideoFormat> GetSupportedVideoFormatsForDevice(
107 AVCaptureDevice *device) {
108 std::set<cricket::VideoFormat> supportedFormats;
109
110 NSArray<AVCaptureDeviceFormat *>* eligibleFormats =
111 GetEligibleDeviceFormats(device, kFramesPerSecond);
112
113 for (AVCaptureDeviceFormat *deviceFormat in eligibleFormats) {
114 CMVideoDimensions dimension =
115 CMVideoFormatDescriptionGetDimensions(deviceFormat.formatDescription);
116 cricket::VideoFormat format = cricket::VideoFormat(
117 dimension.width, dimension.height,
118 cricket::VideoFormat::FpsToInterval(kFramesPerSecond),
119 cricket::FOURCC_NV12);
120 supportedFormats.insert(format);
121 }
122
123 return supportedFormats;
124 }
125
126 // Sets device format for the provided capture device. Returns YES/NO depending on success.
 127 // TODO(denicija): When this file is split this static method should be reconsidered.
128 // Perhaps adding a category on AVCaptureDevice would be better.
129 static BOOL SetFormatForCaptureDevice(AVCaptureDevice* device,
130 AVCaptureSession* session,
131 const cricket::VideoFormat& format) {
132
133 AVCaptureDeviceFormat* deviceFormat =
134 GetDeviceFormatForVideoFormat(device, format);
135 const auto fps = cricket::VideoFormat::IntervalToFps(format.interval);
136
137 NSError* error = nil;
138 BOOL success = YES;
139 [session beginConfiguration];
140 if ([device lockForConfiguration:&error]) {
141 @try {
142 device.activeFormat = deviceFormat;
143 device.activeVideoMinFrameDuration = CMTimeMake(1, fps);
144 } @catch (NSException* exception) {
145 RTCLogError(
146 @"Exception occured while setting active format!\n User info:%@",
147 exception.userInfo);
148 success = NO;
149 }
150
151 [device unlockForConfiguration];
152 } else {
153 RTCLogError(
154 @"Error occured while locking device %@. Underlying error: %@",
155 device, error.userInfo);
156 success = NO;
157 }
158 [session commitConfiguration];
159
160 return success;
68 } 161 }
69 162
70 // This class used to capture frames using AVFoundation APIs on iOS. It is meant 163 // This class used to capture frames using AVFoundation APIs on iOS. It is meant
71 // to be owned by an instance of AVFoundationVideoCapturer. The reason for this 164 // to be owned by an instance of AVFoundationVideoCapturer. The reason for this
72 // because other webrtc objects own cricket::VideoCapturer, which is not 165 // because other webrtc objects own cricket::VideoCapturer, which is not
73 // ref counted. To prevent bad behavior we do not expose this class directly. 166 // ref counted. To prevent bad behavior we do not expose this class directly.
74 @interface RTCAVFoundationVideoCapturerInternal : NSObject 167 @interface RTCAVFoundationVideoCapturerInternal : NSObject
75 <AVCaptureVideoDataOutputSampleBufferDelegate> 168 <AVCaptureVideoDataOutputSampleBufferDelegate>
76 169
77 @property(nonatomic, readonly) AVCaptureSession *captureSession; 170 @property(nonatomic, readonly) AVCaptureSession *captureSession;
78 @property(nonatomic, readonly) dispatch_queue_t frameQueue; 171 @property(nonatomic, readonly) dispatch_queue_t frameQueue;
79 @property(nonatomic, readonly) BOOL canUseBackCamera; 172 @property(nonatomic, readonly) BOOL canUseBackCamera;
80 @property(nonatomic, assign) BOOL useBackCamera; // Defaults to NO. 173 @property(nonatomic, assign) BOOL useBackCamera; // Defaults to NO.
81 @property(nonatomic, assign) BOOL isRunning; // Whether the capture session is running. 174 @property(nonatomic, assign) BOOL isRunning; // Whether the capture session is running.
 82 @property(atomic, assign) BOOL hasStarted; // Whether we have an unmatched start. 176 @property(atomic, assign) BOOL hasStarted; // Whether we have an unmatched start.
83 176
84 // We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it 177 // We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it
85 // when we receive frames. This is safe because this object should be owned by 178 // when we receive frames. This is safe because this object should be owned by
86 // it. 179 // it.
87 - (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer; 180 - (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer;
88 - (AVCaptureDevice *)getActiveCaptureDevice; 181 - (AVCaptureDevice *)getActiveCaptureDevice;
89 182
183 - (nonnull AVCaptureDevice *)frontCaptureDevice;
184 - (AVCaptureDevice *)backCaptureDevice;
185
90 // Starts and stops the capture session asynchronously. We cannot do this 186 // Starts and stops the capture session asynchronously. We cannot do this
91 // synchronously without blocking a WebRTC thread. 187 // synchronously without blocking a WebRTC thread.
92 - (void)start; 188 - (void)start;
93 - (void)stop; 189 - (void)stop;
94 190
95 @end 191 @end
96 192
97 @implementation RTCAVFoundationVideoCapturerInternal { 193 @implementation RTCAVFoundationVideoCapturerInternal {
98 // Keep pointers to inputs for convenience. 194 // Keep pointers to inputs for convenience.
99 AVCaptureDeviceInput *_frontCameraInput; 195 AVCaptureDeviceInput *_frontCameraInput;
(...skipping 68 matching lines...) Expand 10 before | Expand all | Expand 10 after
168 } 264 }
169 265
170 - (AVCaptureSession *)captureSession { 266 - (AVCaptureSession *)captureSession {
171 return _captureSession; 267 return _captureSession;
172 } 268 }
173 269
174 - (AVCaptureDevice *)getActiveCaptureDevice { 270 - (AVCaptureDevice *)getActiveCaptureDevice {
175 return self.useBackCamera ? _backCameraInput.device : _frontCameraInput.device ; 271 return self.useBackCamera ? _backCameraInput.device : _frontCameraInput.device ;
176 } 272 }
177 273
274 - (AVCaptureDevice *)frontCaptureDevice {
275 return _frontCameraInput.device;
276 }
277
278 - (AVCaptureDevice *)backCaptureDevice {
279 return _backCameraInput.device;
280 }
281
178 - (dispatch_queue_t)frameQueue { 282 - (dispatch_queue_t)frameQueue {
179 if (!_frameQueue) { 283 if (!_frameQueue) {
180 _frameQueue = 284 _frameQueue =
181 dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video", 285 dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video",
182 DISPATCH_QUEUE_SERIAL); 286 DISPATCH_QUEUE_SERIAL);
183 dispatch_set_target_queue( 287 dispatch_set_target_queue(
184 _frameQueue, 288 _frameQueue,
185 dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0)); 289 dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
186 } 290 }
187 return _frameQueue; 291 return _frameQueue;
(...skipping 331 matching lines...) Expand 10 before | Expand all | Expand 10 after
519 if (!backCameraInput) { 623 if (!backCameraInput) {
520 RTCLogError(@"Failed to create front camera input: %@", 624 RTCLogError(@"Failed to create front camera input: %@",
521 error.localizedDescription); 625 error.localizedDescription);
522 return nil; 626 return nil;
523 } 627 }
524 _backCameraInput = backCameraInput; 628 _backCameraInput = backCameraInput;
525 } 629 }
526 return _backCameraInput; 630 return _backCameraInput;
527 } 631 }
528 632
529 - (void)setMinFrameDuration:(CMTime)minFrameDuration
530 forDevice:(AVCaptureDevice *)device {
531 NSError *error = nil;
532 if (![device lockForConfiguration:&error]) {
 533 RTCLogError(@"Failed to lock device for configuration. Error: %@", error.localizedDescription);
534 return;
535 }
536 device.activeVideoMinFrameDuration = minFrameDuration;
537 [device unlockForConfiguration];
538 }
539
540 // Called from capture session queue. 633 // Called from capture session queue.
541 - (void)updateOrientation { 634 - (void)updateOrientation {
542 #if TARGET_OS_IPHONE 635 #if TARGET_OS_IPHONE
543 switch ([UIDevice currentDevice].orientation) { 636 switch ([UIDevice currentDevice].orientation) {
544 case UIDeviceOrientationPortrait: 637 case UIDeviceOrientationPortrait:
545 _rotation = webrtc::kVideoRotation_90; 638 _rotation = webrtc::kVideoRotation_90;
546 break; 639 break;
547 case UIDeviceOrientationPortraitUpsideDown: 640 case UIDeviceOrientationPortraitUpsideDown:
548 _rotation = webrtc::kVideoRotation_270; 641 _rotation = webrtc::kVideoRotation_270;
549 break; 642 break;
(...skipping 24 matching lines...) Expand all
574 newInput = _backCameraInput; 667 newInput = _backCameraInput;
575 } 668 }
576 if (oldInput) { 669 if (oldInput) {
577 // Ok to remove this even if it's not attached. Will be no-op. 670 // Ok to remove this even if it's not attached. Will be no-op.
578 [_captureSession removeInput:oldInput]; 671 [_captureSession removeInput:oldInput];
579 } 672 }
580 if (newInput) { 673 if (newInput) {
581 [_captureSession addInput:newInput]; 674 [_captureSession addInput:newInput];
582 } 675 }
583 [self updateOrientation]; 676 [self updateOrientation];
584 [_captureSession commitConfiguration]; 677 AVCaptureDevice *newDevice = newInput.device;
585 678 const cricket::VideoFormat* format = _capturer->GetCaptureFormat();
 586 const auto fps = cricket::VideoFormat::IntervalToFps(_capturer->GetCaptureFormat()->interval); 679 SetFormatForCaptureDevice(newDevice, _captureSession, *format);
587 [self setMinFrameDuration:CMTimeMake(1, fps)forDevice:newInput.device];
588 }]; 680 }];
589 } 681 }
590 682
591 @end 683 @end
592 684
593 namespace webrtc { 685 namespace webrtc {
594 686
595 enum AVFoundationVideoCapturerMessageType : uint32_t { 687 enum AVFoundationVideoCapturerMessageType : uint32_t {
596 kMessageTypeFrame, 688 kMessageTypeFrame,
597 }; 689 };
598 690
599 AVFoundationVideoCapturer::AVFoundationVideoCapturer() : _capturer(nil) { 691 AVFoundationVideoCapturer::AVFoundationVideoCapturer() : _capturer(nil) {
600 // Set our supported formats. This matches kAvailablePresets.
601 _capturer = 692 _capturer =
602 [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this]; 693 [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];
603 694
604 std::vector<cricket::VideoFormat> supported_formats; 695 std::set<cricket::VideoFormat> front_camera_video_formats =
605 int framerate = 30; 696 GetSupportedVideoFormatsForDevice([_capturer frontCaptureDevice]);
606 697
607 #if TARGET_OS_IPHONE 698 std::set<cricket::VideoFormat> back_camera_video_formats =
608 if ([UIDevice deviceType] == RTCDeviceTypeIPhone4S) { 699 GetSupportedVideoFormatsForDevice([_capturer backCaptureDevice]);
609 set_enable_video_adapter(false);
610 framerate = 15;
611 }
612 #endif
613 700
614 for (const auto preset : kAvailablePresets) { 701 std::vector<cricket::VideoFormat> intersection_video_formats;
615 if ([_capturer.captureSession canSetSessionPreset:preset.sessionPreset]) { 702 if (back_camera_video_formats.size() == 0) {
616 const auto format = cricket::VideoFormat( 703 std::copy(front_camera_video_formats.begin(),
617 preset.width, 704 front_camera_video_formats.end(),
618 preset.height, 705 std::back_inserter(intersection_video_formats));
619 cricket::VideoFormat::FpsToInterval(framerate), 706 SetSupportedFormats(intersection_video_formats);
620 cricket::FOURCC_NV12); 707 return;
621 supported_formats.push_back(format);
622 }
623 } 708 }
624 709
625 SetSupportedFormats(supported_formats); 710 if (front_camera_video_formats.size() == 0) {
711 std::copy(back_camera_video_formats.begin(),
712 back_camera_video_formats.end(),
713 std::back_inserter(intersection_video_formats));
714 SetSupportedFormats(intersection_video_formats);
715 return;
716 }
717
718 std::set_intersection(
719 front_camera_video_formats.begin(), front_camera_video_formats.end(),
720 back_camera_video_formats.begin(), back_camera_video_formats.end(),
721 std::back_inserter(intersection_video_formats));
722
723 SetSupportedFormats(intersection_video_formats);
626 } 724 }
627 725
628 AVFoundationVideoCapturer::~AVFoundationVideoCapturer() { 726 AVFoundationVideoCapturer::~AVFoundationVideoCapturer() {
629 _capturer = nil; 727 _capturer = nil;
630 } 728 }
631 729
632 cricket::CaptureState AVFoundationVideoCapturer::Start( 730 cricket::CaptureState AVFoundationVideoCapturer::Start(
633 const cricket::VideoFormat& format) { 731 const cricket::VideoFormat& format) {
634 if (!_capturer) { 732 if (!_capturer) {
635 LOG(LS_ERROR) << "Failed to create AVFoundation capturer."; 733 LOG(LS_ERROR) << "Failed to create AVFoundation capturer.";
636 return cricket::CaptureState::CS_FAILED; 734 return cricket::CaptureState::CS_FAILED;
637 } 735 }
638 if (_capturer.isRunning) { 736 if (_capturer.isRunning) {
639 LOG(LS_ERROR) << "The capturer is already running."; 737 LOG(LS_ERROR) << "The capturer is already running.";
640 return cricket::CaptureState::CS_FAILED; 738 return cricket::CaptureState::CS_FAILED;
641 } 739 }
642 740
643 NSString *desiredPreset = GetSessionPresetForVideoFormat(format); 741 AVCaptureDevice* device = [_capturer getActiveCaptureDevice];
644 RTC_DCHECK(desiredPreset); 742 AVCaptureSession* session = _capturer.captureSession;
645 743
646 [_capturer.captureSession beginConfiguration]; 744 if (!SetFormatForCaptureDevice(device, session, format)) {
647 if (![_capturer.captureSession canSetSessionPreset:desiredPreset]) {
648 LOG(LS_ERROR) << "Unsupported video format.";
649 [_capturer.captureSession commitConfiguration];
650 return cricket::CaptureState::CS_FAILED; 745 return cricket::CaptureState::CS_FAILED;
651 } 746 }
652 _capturer.captureSession.sessionPreset = desiredPreset;
653 [_capturer.captureSession commitConfiguration];
654 747
655 SetCaptureFormat(&format); 748 SetCaptureFormat(&format);
656 // This isn't super accurate because it takes a while for the AVCaptureSession 749 // This isn't super accurate because it takes a while for the AVCaptureSession
657 // to spin up, and this call returns async. 750 // to spin up, and this call returns async.
658 // TODO(tkchin): make this better. 751 // TODO(tkchin): make this better.
659 [_capturer start]; 752 [_capturer start];
660 SetCaptureState(cricket::CaptureState::CS_RUNNING); 753 SetCaptureState(cricket::CaptureState::CS_RUNNING);
661 754
662 // Adjust the framerate for all capture devices.
663 const auto fps = cricket::VideoFormat::IntervalToFps(format.interval);
664 AVCaptureDevice *activeDevice = [_capturer getActiveCaptureDevice];
665 [_capturer setMinFrameDuration:CMTimeMake(1, fps)forDevice:activeDevice];
666
667 return cricket::CaptureState::CS_STARTING; 755 return cricket::CaptureState::CS_STARTING;
668 } 756 }
669 757
670 void AVFoundationVideoCapturer::Stop() { 758 void AVFoundationVideoCapturer::Stop() {
671 [_capturer stop]; 759 [_capturer stop];
672 SetCaptureFormat(NULL); 760 SetCaptureFormat(NULL);
673 } 761 }
674 762
675 bool AVFoundationVideoCapturer::IsRunning() { 763 bool AVFoundationVideoCapturer::IsRunning() {
676 return _capturer.isRunning; 764 return _capturer.isRunning;
(...skipping 79 matching lines...) Expand 10 before | Expand all | Expand 10 after
756 buffer = rotated_buffer; 844 buffer = rotated_buffer;
757 } 845 }
758 } 846 }
759 847
760 OnFrame(cricket::WebRtcVideoFrame(buffer, rotation, 848 OnFrame(cricket::WebRtcVideoFrame(buffer, rotation,
761 translated_camera_time_us, 0), 849 translated_camera_time_us, 0),
762 captured_width, captured_height); 850 captured_width, captured_height);
763 } 851 }
764 852
765 } // namespace webrtc 853 } // namespace webrtc
OLDNEW
« no previous file with comments | « no previous file | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698