Index: webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.mm |
diff --git a/webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.mm b/webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.mm |
index 331db6d9557d4a6d74dd84d2692bc346ae396037..701d0436bd5d6edc5a6aa92e81fde06535c313d5 100644 |
--- a/webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.mm |
+++ b/webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.mm |
@@ -27,20 +27,50 @@ |
#include "webrtc/base/thread.h" |
#include "webrtc/common_video/include/corevideo_frame_buffer.h" |
-// TODO(tkchin): support other formats. |
+// Maps an AVCaptureSession preset to the corresponding cricket::VideoFormat. |
+static cricket::VideoFormat GetVideoFormatForSessionPreset( |
+ NSString *preset, int framerate) { |
+ |
+  // Default to 640x480 when the preset is not recognized. |
+ int w = 640; |
+ int h = 480; |
+ |
+ if (preset == AVCaptureSessionPreset1280x720) { |
magjed_webrtc
2016/08/11 12:34:07
I would prefer if this logic was data driven inste
kthelgason
2016/08/11 13:14:29
I like that; that's a way better idea. Thanks!
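A data-driven version could look roughly like the sketch below; the struct and table names here are hypothetical and not part of this patch, but the idea is to replace the if/else chain with a lookup table:

    // Hypothetical lookup table pairing session presets with their dimensions.
    struct PresetResolution {
      NSString *sessionPreset;
      int width;
      int height;
    };
    static const PresetResolution kPresetResolutions[] = {
        {AVCaptureSessionPreset1280x720, 1280, 720},
        {AVCaptureSessionPreset640x480, 640, 480},
        {AVCaptureSessionPreset352x288, 352, 288},
    };

    static cricket::VideoFormat GetVideoFormatForSessionPreset(
        NSString *preset, int framerate) {
      for (const auto &resolution : kPresetResolutions) {
        if ([preset isEqualToString:resolution.sessionPreset]) {
          return cricket::VideoFormat(
              resolution.width, resolution.height,
              cricket::VideoFormat::FpsToInterval(framerate),
              cricket::FOURCC_NV12);
        }
      }
      // Unknown preset: fall back to the 640x480 default.
      return cricket::VideoFormat(
          640, 480, cricket::VideoFormat::FpsToInterval(framerate),
          cricket::FOURCC_NV12);
    }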
|
+ w = 1280; |
+ h = 720; |
+ } else if (preset == AVCaptureSessionPreset352x288) { |
+ w = 352; |
+ h = 288; |
+ } |
+ return cricket::VideoFormat(w, h, |
+ cricket::VideoFormat::FpsToInterval(framerate), |
+ cricket::FOURCC_NV12); |
+} |
+ |
static NSString *const kDefaultPreset = AVCaptureSessionPreset640x480; |
-static NSString *const kIPhone4SPreset = AVCaptureSessionPreset352x288; |
-static cricket::VideoFormat const kDefaultFormat = |
- cricket::VideoFormat(640, |
- 480, |
- cricket::VideoFormat::FpsToInterval(30), |
- cricket::FOURCC_NV12); |
// iPhone4S is too slow to handle 30fps. |
static cricket::VideoFormat const kIPhone4SFormat = |
magjed_webrtc
2016/08/11 12:34:07
This variable is unused now so you can remove it.
kthelgason
2016/08/11 13:14:29
Acknowledged.
|
- cricket::VideoFormat(352, |
- 288, |
- cricket::VideoFormat::FpsToInterval(15), |
- cricket::FOURCC_NV12); |
+ GetVideoFormatForSessionPreset(kDefaultPreset, 15); |
+ |
+// Maps a cricket::VideoFormat to the corresponding AVCaptureSession preset. |
+static NSString *GetSessionPresetForVideoFormat( |
+ const cricket::VideoFormat& format) { |
+ |
+ if (format.width == 1280 && format.height == 720) { |
+ return AVCaptureSessionPreset1280x720; |
+ } else if (format.width == 352 && format.height == 288) { |
+ return AVCaptureSessionPreset352x288; |
+ } |
+  // No match found; fall back to the default preset. |
+ return kDefaultPreset; |
+} |
+ |
+static NSString *const kSupportedPresets[] = { |
+ AVCaptureSessionPreset1280x720, |
+ AVCaptureSessionPreset640x480, |
+ AVCaptureSessionPreset352x288 |
+}; |
+ |
// This class used to capture frames using AVFoundation APIs on iOS. It is meant |
// to be owned by an instance of AVFoundationVideoCapturer. The reason for this |
@@ -577,21 +607,30 @@ struct AVFoundationFrame { |
AVFoundationVideoCapturer::AVFoundationVideoCapturer() |
: _capturer(nil), _startThread(nullptr) { |
- // Set our supported formats. This matches preset. |
+  // Set our supported formats. This matches kSupportedPresets. |
std::vector<cricket::VideoFormat> supported_formats; |
+ |
#if TARGET_OS_IPHONE |
if ([UIDevice deviceType] == RTCDeviceTypeIPhone4S) { |
- supported_formats.push_back(cricket::VideoFormat(kIPhone4SFormat)); |
set_enable_video_adapter(false); |
- } else { |
- supported_formats.push_back(cricket::VideoFormat(kDefaultFormat)); |
+ } |
+ |
+ for (auto preset : kSupportedPresets) { |
+ if ([UIDevice deviceType] == RTCDeviceTypeIPhone4S) { |
magjed_webrtc
2016/08/11 12:34:07
Can we use '_capturer.captureSession canSetSession
kthelgason
2016/08/11 13:14:29
I did that initially but decided against it as I t
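If the capture session is available at this point, the device-model check could, as the reviewer suggests, be replaced by querying the session directly. A rough sketch, assuming _capturer (and its captureSession) is already initialized before the supported formats are computed:

    // Sketch: let the session itself report which presets it supports,
    // instead of special-casing the iPhone 4S for 720p by device type.
    AVCaptureSession *session = _capturer.captureSession;
    for (NSString *preset : kSupportedPresets) {
      if (![session canSetSessionPreset:preset]) {
        continue;
      }
      // The 4S is still too slow for 30fps, so cap its framerate.
      int fps = ([UIDevice deviceType] == RTCDeviceTypeIPhone4S) ? 15 : 30;
      supported_formats.push_back(GetVideoFormatForSessionPreset(preset, fps));
    }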
|
+ // iPhone 4S does not support 720p |
+ if (preset == AVCaptureSessionPreset1280x720) { |
+ continue; |
+ } |
+ supported_formats.push_back(GetVideoFormatForSessionPreset(preset, 15)); |
+ } else { |
+ supported_formats.push_back(GetVideoFormatForSessionPreset(preset, 30)); |
+ } |
} |
#else |
- supported_formats.push_back(cricket::VideoFormat(kDefaultFormat)); |
+ supported_formats.push_back( |
+ GetVideoFormatForSessionPreset(kDefaultPreset, 30)); |
#endif |
SetSupportedFormats(supported_formats); |
- _capturer = |
- [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this]; |
magjed_webrtc
2016/08/11 12:34:07
This looks like critical code we can't just remove
kthelgason
2016/08/11 13:14:29
Ugh, sorry about that. I'd fixed this locally but
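For reference, keeping the removed allocation in place would mean the constructor still ends with something like the following sketch (reinstating the deleted lines, not the final patch):

    AVFoundationVideoCapturer::AVFoundationVideoCapturer()
        : _capturer(nil), _startThread(nullptr) {
      // ... build supported_formats and call SetSupportedFormats() as above ...
      // The internal capturer owns the AVCaptureSession, so it must still be
      // created here.
      _capturer =
          [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];
    }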
|
} |
AVFoundationVideoCapturer::~AVFoundationVideoCapturer() { |
@@ -608,10 +647,16 @@ cricket::CaptureState AVFoundationVideoCapturer::Start( |
LOG(LS_ERROR) << "The capturer is already running."; |
return cricket::CaptureState::CS_FAILED; |
} |
- if (format != kDefaultFormat && format != kIPhone4SFormat) { |
- LOG(LS_ERROR) << "Unsupported format provided."; |
+ |
+ NSString *desiredPreset = GetSessionPresetForVideoFormat(format); |
+ |
+  if (![_capturer.captureSession canSetSessionPreset:desiredPreset]) { |
+    LOG(LS_ERROR) << "Unsupported video format."; |
     return cricket::CaptureState::CS_FAILED; |
   } |
+  // Only enter the configuration block once the preset is known to be valid, |
+  // so the early return above cannot leave the session mid-configuration. |
+  [_capturer.captureSession beginConfiguration]; |
+  _capturer.captureSession.sessionPreset = desiredPreset; |
+  [_capturer.captureSession commitConfiguration]; |
// Keep track of which thread capture started on. This is the thread that |
// frames need to be sent to. |
@@ -619,6 +664,8 @@ cricket::CaptureState AVFoundationVideoCapturer::Start( |
_startThread = rtc::Thread::Current(); |
SetCaptureFormat(&format); |
+ |
// This isn't super accurate because it takes a while for the AVCaptureSession |
// to spin up, and this call returns async. |
// TODO(tkchin): make this better. |