Index: webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.mm
diff --git a/webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.mm b/webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.mm
index 331db6d9557d4a6d74dd84d2692bc346ae396037..8dc6e99b1a375d861c1bb232d59482770edc7560 100644
--- a/webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.mm
+++ b/webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.mm
@@ -27,20 +27,47 @@
#include "webrtc/base/thread.h"
#include "webrtc/common_video/include/corevideo_frame_buffer.h"
-// TODO(tkchin): support other formats.
-static NSString *const kDefaultPreset = AVCaptureSessionPreset640x480;
-static NSString *const kIPhone4SPreset = AVCaptureSessionPreset352x288;
-static cricket::VideoFormat const kDefaultFormat =
-    cricket::VideoFormat(640,
-                         480,
-                         cricket::VideoFormat::FpsToInterval(30),
-                         cricket::FOURCC_NV12);
-// iPhone4S is too slow to handle 30fps.
-static cricket::VideoFormat const kIPhone4SFormat =
-    cricket::VideoFormat(352,
-                         288,
-                         cricket::VideoFormat::FpsToInterval(15),
-                         cricket::FOURCC_NV12);
+struct AVCaptureSessionPresetResolution {
+  NSString *sessionPreset;
+  int width;
+  int height;
+  cricket::VideoFormat getVideoFormat(int framerate) const {
magjed_webrtc 2016/08/15 10:10:23: Structs are not allowed to have methods providing…
+    return cricket::VideoFormat(width, height,
+                                cricket::VideoFormat::FpsToInterval(framerate),
+                                cricket::FOURCC_NV12);
+  }
+};
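On the struct-method nit above: Google C++ style treats structs as passive data carriers, so one way to address the comment would be a free helper instead of a member function. A minimal sketch only; GetVideoFormatForPreset is a hypothetical name, not part of the patch:

// Hypothetical free function replacing the struct member above.
static cricket::VideoFormat GetVideoFormatForPreset(
    const AVCaptureSessionPresetResolution& preset, int framerate) {
  return cricket::VideoFormat(preset.width, preset.height,
                              cricket::VideoFormat::FpsToInterval(framerate),
                              cricket::FOURCC_NV12);
}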
+
+#if TARGET_OS_IPHONE
+static const AVCaptureSessionPresetResolution kAvailablePresets[] = {
+  { AVCaptureSessionPreset352x288, 352, 288},
+  { AVCaptureSessionPreset640x480, 640, 480},
+  { AVCaptureSessionPreset1280x720, 1280, 720},
+  { AVCaptureSessionPreset1920x1080, 1920, 1080},
+};
+#else // macOS
+static const AVCaptureSessionPresetResolution kAvailablePresets[] = {
+  { AVCaptureSessionPreset320x240, 320, 240},
+  { AVCaptureSessionPreset352x288, 352, 288},
+  { AVCaptureSessionPreset640x480, 640, 480},
+  { AVCaptureSessionPreset960x540, 960, 540},
+  { AVCaptureSessionPreset1280x720, 1280, 720},
+};
+#endif
+
+// mapping from cricket::VideoFormat to AVCaptureSession presets
magjed_webrtc 2016/08/15 10:10:23: Super-nit: comments should have proper capitalization…
+static NSString *GetSessionPresetForVideoFormat(
+    const cricket::VideoFormat& format) {
+  for (const auto preset : kAvailablePresets) {
+    // Check both orientations
+    if ((format.width == preset.width && format.height == preset.height) ||
+        (format.width == preset.height && format.height == preset.width)) {
+      return preset.sessionPreset;
+    }
+  }
+  // Nothing found, use default
+  return AVCaptureSessionPreset640x480;
+}
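For illustration only (not part of the patch): with the mapping above, both orientations of a listed resolution resolve to the same preset, and an unlisted resolution falls back to the 640x480 default. A sketch, assuming it runs inside this file where GetSessionPresetForVideoFormat is visible:

// Sketch: 640x480 and its rotated 480x640 variant map to the same preset.
cricket::VideoFormat landscape(640, 480,
                               cricket::VideoFormat::FpsToInterval(30),
                               cricket::FOURCC_NV12);
cricket::VideoFormat portrait(480, 640,
                              cricket::VideoFormat::FpsToInterval(30),
                              cricket::FOURCC_NV12);
NSString *p1 = GetSessionPresetForVideoFormat(landscape);  // 640x480 preset.
NSString *p2 = GetSessionPresetForVideoFormat(portrait);   // 640x480 preset.
// A resolution not listed in kAvailablePresets falls back to the default.
cricket::VideoFormat unknown(400, 300,
                             cricket::VideoFormat::FpsToInterval(30),
                             cricket::FOURCC_NV12);
NSString *p3 = GetSessionPresetForVideoFormat(unknown);    // 640x480 preset.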
// This class is used to capture frames using AVFoundation APIs on iOS. It is meant
// to be owned by an instance of AVFoundationVideoCapturer. The reason for this
@@ -361,17 +388,6 @@ static cricket::VideoFormat const kIPhone4SFormat =
    captureSession.usesApplicationAudioSession = NO;
  }
#endif
-  NSString *preset = kDefaultPreset;
-#if TARGET_OS_IPHONE
-  if ([UIDevice deviceType] == RTCDeviceTypeIPhone4S) {
-    preset = kIPhone4SPreset;
-  }
-#endif
-  if (![captureSession canSetSessionPreset:preset]) {
-    RTCLogError(@"Session preset unsupported.");
-    return NO;
-  }
-  captureSession.sessionPreset = preset;
kthelgason 2016/08/15 08:43:37: I don't see why we set the sessionPreset here, and…
magjed_webrtc 2016/08/15 10:10:23: sessionPreset used to be set only from here, so you…
  // Add the output.
  AVCaptureVideoDataOutput *videoDataOutput = [self videoDataOutput];
@@ -577,21 +593,27 @@ struct AVFoundationFrame {
AVFoundationVideoCapturer::AVFoundationVideoCapturer()
    : _capturer(nil), _startThread(nullptr) {
-  // Set our supported formats. This matches preset.
+  // Set our supported formats. This matches kAvailablePresets
+  _capturer =
+      [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];
+
  std::vector<cricket::VideoFormat> supported_formats;
+  int framerate = 30;
+
#if TARGET_OS_IPHONE
  if ([UIDevice deviceType] == RTCDeviceTypeIPhone4S) {
-    supported_formats.push_back(cricket::VideoFormat(kIPhone4SFormat));
    set_enable_video_adapter(false);
-  } else {
-    supported_formats.push_back(cricket::VideoFormat(kDefaultFormat));
+    framerate = 15;
  }
-#else
-  supported_formats.push_back(cricket::VideoFormat(kDefaultFormat));
#endif
+
+  for (const auto preset : kAvailablePresets) {
+    if ([_capturer.captureSession canSetSessionPreset:preset.sessionPreset]) {
+      supported_formats.push_back(preset.getVideoFormat(framerate));
+    }
+  }
+
  SetSupportedFormats(supported_formats);
-  _capturer =
-      [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];
}
AVFoundationVideoCapturer::~AVFoundationVideoCapturer() {
@@ -608,10 +630,17 @@ cricket::CaptureState AVFoundationVideoCapturer::Start(
    LOG(LS_ERROR) << "The capturer is already running.";
    return cricket::CaptureState::CS_FAILED;
  }
-  if (format != kDefaultFormat && format != kIPhone4SFormat) {
-    LOG(LS_ERROR) << "Unsupported format provided.";
+
+  NSString *desiredPreset = GetSessionPresetForVideoFormat(format);
+
+  [_capturer.captureSession beginConfiguration];
+  if (![_capturer.captureSession canSetSessionPreset:desiredPreset]) {
+    LOG(LS_ERROR) << "Unsupported video format.";
+    [_capturer.captureSession commitConfiguration];
    return cricket::CaptureState::CS_FAILED;
  }
+  _capturer.captureSession.sessionPreset = desiredPreset;
+  [_capturer.captureSession commitConfiguration];
  // Keep track of which thread capture started on. This is the thread that
  // frames need to be sent to.
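For context, a hedged usage sketch of how a caller would exercise this path. The local name capturer is illustrative; GetSupportedFormats() and Start() are assumed to come from the cricket::VideoCapturer base class:

// Illustrative only, not part of the patch: pick an advertised format and
// start capturing with it. Error handling is omitted.
webrtc::AVFoundationVideoCapturer capturer;
const std::vector<cricket::VideoFormat>* formats = capturer.GetSupportedFormats();
if (formats && !formats->empty()) {
  // With this change, Start() resolves the format to an AVCaptureSession
  // preset via GetSessionPresetForVideoFormat() and applies it to the session.
  cricket::CaptureState state = capturer.Start(formats->front());
}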