Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(176)

Side by Side Diff: webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.mm

Issue 2231033002: Add support for more resolutions on iOS/macOS (Closed) Base URL: https://chromium.googlesource.com/external/webrtc.git@master
Patch Set: Persist device frame rate setting when switching inputs Created 4 years, 4 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « no previous file | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 /* 1 /*
2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
3 * 3 *
4 * Use of this source code is governed by a BSD-style license 4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source 5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found 6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may 7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree. 8 * be found in the AUTHORS file in the root of the source tree.
9 */ 9 */
10 10
11 #include "avfoundationvideocapturer.h" 11 #include "avfoundationvideocapturer.h"
12 12
13 #import <AVFoundation/AVFoundation.h> 13 #import <AVFoundation/AVFoundation.h>
14 #import <Foundation/Foundation.h> 14 #import <Foundation/Foundation.h>
15 #if TARGET_OS_IPHONE 15 #if TARGET_OS_IPHONE
16 #import <UIKit/UIKit.h> 16 #import <UIKit/UIKit.h>
17 #endif 17 #endif
18 18
19 #import "RTCDispatcher+Private.h" 19 #import "RTCDispatcher+Private.h"
20 #import "WebRTC/RTCLogging.h" 20 #import "WebRTC/RTCLogging.h"
21 #if TARGET_OS_IPHONE 21 #if TARGET_OS_IPHONE
22 #import "WebRTC/UIDevice+RTCDevice.h" 22 #import "WebRTC/UIDevice+RTCDevice.h"
23 #endif 23 #endif
24 24
25 #include "webrtc/base/bind.h" 25 #include "webrtc/base/bind.h"
26 #include "webrtc/base/checks.h" 26 #include "webrtc/base/checks.h"
27 #include "webrtc/base/thread.h" 27 #include "webrtc/base/thread.h"
28 #include "webrtc/common_video/include/corevideo_frame_buffer.h" 28 #include "webrtc/common_video/include/corevideo_frame_buffer.h"
29 29
30 // TODO(tkchin): support other formats. 30 struct AVCaptureSessionPresetResolution {
31 static NSString *const kDefaultPreset = AVCaptureSessionPreset640x480; 31 NSString *sessionPreset;
32 static NSString *const kIPhone4SPreset = AVCaptureSessionPreset352x288; 32 int width;
33 static cricket::VideoFormat const kDefaultFormat = 33 int height;
34 cricket::VideoFormat(640, 34 };
35 480, 35
36 cricket::VideoFormat::FpsToInterval(30), 36 #if TARGET_OS_IPHONE
37 cricket::FOURCC_NV12); 37 static const AVCaptureSessionPresetResolution kAvailablePresets[] = {
38 // iPhone4S is too slow to handle 30fps. 38 { AVCaptureSessionPreset352x288, 352, 288},
39 static cricket::VideoFormat const kIPhone4SFormat = 39 { AVCaptureSessionPreset640x480, 640, 480},
40 cricket::VideoFormat(352, 40 { AVCaptureSessionPreset1280x720, 1280, 720},
41 288, 41 { AVCaptureSessionPreset1920x1080, 1920, 1080},
42 cricket::VideoFormat::FpsToInterval(15), 42 };
43 cricket::FOURCC_NV12); 43 #else // macOS
44 static const AVCaptureSessionPresetResolution kAvailablePresets[] = {
45 { AVCaptureSessionPreset320x240, 320, 240},
46 { AVCaptureSessionPreset352x288, 352, 288},
47 { AVCaptureSessionPreset640x480, 640, 480},
48 { AVCaptureSessionPreset960x540, 960, 540},
49 { AVCaptureSessionPreset1280x720, 1280, 720},
50 };
51 #endif
52
53 // Mapping from cricket::VideoFormat to AVCaptureSession presets.
54 static NSString *GetSessionPresetForVideoFormat(
55 const cricket::VideoFormat& format) {
56 for (const auto preset : kAvailablePresets) {
57 // Check both orientations
58 if ((format.width == preset.width && format.height == preset.height) ||
59 (format.width == preset.height && format.height == preset.width)) {
60 return preset.sessionPreset;
61 }
62 }
63 // If no matching preset is found, use a default one.
64 return AVCaptureSessionPreset640x480;
65 }
44 66
45 // This class used to capture frames using AVFoundation APIs on iOS. It is meant 67 // This class used to capture frames using AVFoundation APIs on iOS. It is meant
46 // to be owned by an instance of AVFoundationVideoCapturer. The reason for this 68 // to be owned by an instance of AVFoundationVideoCapturer. The reason for this
47 // because other webrtc objects own cricket::VideoCapturer, which is not 69 // because other webrtc objects own cricket::VideoCapturer, which is not
48 // ref counted. To prevent bad behavior we do not expose this class directly. 70 // ref counted. To prevent bad behavior we do not expose this class directly.
49 @interface RTCAVFoundationVideoCapturerInternal : NSObject 71 @interface RTCAVFoundationVideoCapturerInternal : NSObject
50 <AVCaptureVideoDataOutputSampleBufferDelegate> 72 <AVCaptureVideoDataOutputSampleBufferDelegate>
51 73
52 @property(nonatomic, readonly) AVCaptureSession *captureSession; 74 @property(nonatomic, readonly) AVCaptureSession *captureSession;
53 @property(nonatomic, readonly) dispatch_queue_t frameQueue; 75 @property(nonatomic, readonly) dispatch_queue_t frameQueue;
54 @property(nonatomic, readonly) BOOL canUseBackCamera; 76 @property(nonatomic, readonly) BOOL canUseBackCamera;
55 @property(nonatomic, assign) BOOL useBackCamera; // Defaults to NO. 77 @property(nonatomic, assign) BOOL useBackCamera; // Defaults to NO.
56 @property(nonatomic, assign) BOOL isRunning; // Whether the capture session is running. 78 @property(nonatomic, assign) BOOL isRunning; // Whether the capture session is running.
57 @property(atomic, assign) BOOL hasStarted;  // Whether we have an unmatched start. 79 @property(atomic, assign) BOOL hasStarted;  // Whether we have an unmatched start.
58 80
59 // We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it 81 // We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it
60 // when we receive frames. This is safe because this object should be owned by 82 // when we receive frames. This is safe because this object should be owned by
61 // it. 83 // it.
62 - (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer; 84 - (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer;
85 - (AVCaptureDevice *)getActiveCaptureDevice;
63 86
64 // Starts and stops the capture session asynchronously. We cannot do this 87 // Starts and stops the capture session asynchronously. We cannot do this
65 // synchronously without blocking a WebRTC thread. 88 // synchronously without blocking a WebRTC thread.
66 - (void)start; 89 - (void)start;
67 - (void)stop; 90 - (void)stop;
68 91
69 @end 92 @end
70 93
71 @implementation RTCAVFoundationVideoCapturerInternal { 94 @implementation RTCAVFoundationVideoCapturerInternal {
72 // Keep pointers to inputs for convenience. 95 // Keep pointers to inputs for convenience.
(...skipping 61 matching lines...) Expand 10 before | Expand all | Expand 10 after
134 - (void)dealloc { 157 - (void)dealloc {
135 RTC_DCHECK(!self.hasStarted); 158 RTC_DCHECK(!self.hasStarted);
136 [[NSNotificationCenter defaultCenter] removeObserver:self]; 159 [[NSNotificationCenter defaultCenter] removeObserver:self];
137 _capturer = nullptr; 160 _capturer = nullptr;
138 } 161 }
139 162
140 - (AVCaptureSession *)captureSession { 163 - (AVCaptureSession *)captureSession {
141 return _captureSession; 164 return _captureSession;
142 } 165 }
143 166
167 - (AVCaptureDevice *)getActiveCaptureDevice {
168   return self.useBackCamera ? _backCameraInput.device : _frontCameraInput.device;
169 }
170
144 - (dispatch_queue_t)frameQueue { 171 - (dispatch_queue_t)frameQueue {
145 if (!_frameQueue) { 172 if (!_frameQueue) {
146 _frameQueue = 173 _frameQueue =
147 dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video", 174 dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video",
148 DISPATCH_QUEUE_SERIAL); 175 DISPATCH_QUEUE_SERIAL);
149 dispatch_set_target_queue( 176 dispatch_set_target_queue(
150 _frameQueue, 177 _frameQueue,
151 dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0)); 178 dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
152 } 179 }
153 return _frameQueue; 180 return _frameQueue;
(...skipping 200 matching lines...) Expand 10 before | Expand all | Expand 10 after
354 #pragma mark - Private 381 #pragma mark - Private
355 382
356 - (BOOL)setupCaptureSession { 383 - (BOOL)setupCaptureSession {
357 AVCaptureSession *captureSession = [[AVCaptureSession alloc] init]; 384 AVCaptureSession *captureSession = [[AVCaptureSession alloc] init];
358 #if defined(__IPHONE_7_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_7_0 385 #if defined(__IPHONE_7_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_7_0
359 NSString *version = [[UIDevice currentDevice] systemVersion]; 386 NSString *version = [[UIDevice currentDevice] systemVersion];
360 if ([version integerValue] >= 7) { 387 if ([version integerValue] >= 7) {
361 captureSession.usesApplicationAudioSession = NO; 388 captureSession.usesApplicationAudioSession = NO;
362 } 389 }
363 #endif 390 #endif
364 NSString *preset = kDefaultPreset;
365 #if TARGET_OS_IPHONE
366 if ([UIDevice deviceType] == RTCDeviceTypeIPhone4S) {
367 preset = kIPhone4SPreset;
368 }
369 #endif
370 if (![captureSession canSetSessionPreset:preset]) {
371 RTCLogError(@"Session preset unsupported.");
372 return NO;
373 }
374 captureSession.sessionPreset = preset;
375 391
376 // Add the output. 392 // Add the output.
377 AVCaptureVideoDataOutput *videoDataOutput = [self videoDataOutput]; 393 AVCaptureVideoDataOutput *videoDataOutput = [self videoDataOutput];
378 if (![captureSession canAddOutput:videoDataOutput]) { 394 if (![captureSession canAddOutput:videoDataOutput]) {
379 RTCLogError(@"Video data output unsupported."); 395 RTCLogError(@"Video data output unsupported.");
380 return NO; 396 return NO;
381 } 397 }
382 [captureSession addOutput:videoDataOutput]; 398 [captureSession addOutput:videoDataOutput];
383 399
384 // Get the front and back cameras. If there isn't a front camera 400 // Get the front and back cameras. If there isn't a front camera
385 // give up. 401 // give up.
386 AVCaptureDeviceInput *frontCameraInput = [self frontCameraInput]; 402 AVCaptureDeviceInput *frontCameraInput = [self frontCameraInput];
387 AVCaptureDeviceInput *backCameraInput = [self backCameraInput]; 403 AVCaptureDeviceInput *backCameraInput = [self backCameraInput];
388 if (!frontCameraInput) { 404 if (!frontCameraInput) {
389 RTCLogError(@"No front camera for capture session."); 405 RTCLogError(@"No front camera for capture session.");
390 return NO; 406 return NO;
391 } 407 }
392 408
393 // Add the inputs. 409 // Add the inputs.
394 if (![captureSession canAddInput:frontCameraInput] || 410 if (![captureSession canAddInput:frontCameraInput] ||
395 (backCameraInput && ![captureSession canAddInput:backCameraInput])) { 411 (backCameraInput && ![captureSession canAddInput:backCameraInput])) {
396 RTCLogError(@"Session does not support capture inputs."); 412 RTCLogError(@"Session does not support capture inputs.");
397 return NO; 413 return NO;
398 } 414 }
399 AVCaptureDeviceInput *input = self.useBackCamera ? 415 AVCaptureDeviceInput *input = self.useBackCamera ?
400 backCameraInput : frontCameraInput; 416 backCameraInput : frontCameraInput;
401 [captureSession addInput:input]; 417 [captureSession addInput:input];
402 #if TARGET_OS_IPHONE 418
403 if ([UIDevice deviceType] == RTCDeviceTypeIPhone4S) {
404 [self setMinFrameDuration:CMTimeMake(1, 15) forDevice:input.device];
405 }
406 #endif
407 _captureSession = captureSession; 419 _captureSession = captureSession;
408 return YES; 420 return YES;
409 } 421 }
410 422
411 - (AVCaptureVideoDataOutput *)videoDataOutput { 423 - (AVCaptureVideoDataOutput *)videoDataOutput {
412 if (!_videoDataOutput) { 424 if (!_videoDataOutput) {
413 // Make the capturer output NV12. Ideally we want I420 but that's not 425 // Make the capturer output NV12. Ideally we want I420 but that's not
414 // currently supported on iPhone / iPad. 426 // currently supported on iPhone / iPad.
415 AVCaptureVideoDataOutput *videoDataOutput = 427 AVCaptureVideoDataOutput *videoDataOutput =
416 [[AVCaptureVideoDataOutput alloc] init]; 428 [[AVCaptureVideoDataOutput alloc] init];
(...skipping 128 matching lines...) Expand 10 before | Expand all | Expand 10 after
545 } 557 }
546 if (oldInput) { 558 if (oldInput) {
547 // Ok to remove this even if it's not attached. Will be no-op. 559 // Ok to remove this even if it's not attached. Will be no-op.
548 [_captureSession removeInput:oldInput]; 560 [_captureSession removeInput:oldInput];
549 } 561 }
550 if (newInput) { 562 if (newInput) {
551 [_captureSession addInput:newInput]; 563 [_captureSession addInput:newInput];
552 } 564 }
553 [self updateOrientation]; 565 [self updateOrientation];
554 [_captureSession commitConfiguration]; 566 [_captureSession commitConfiguration];
555 #if TARGET_OS_IPHONE 567
556 if ([UIDevice deviceType] == RTCDeviceTypeIPhone4S) { 568 const auto fps = cricket::VideoFormat::IntervalToFps(_capturer->GetCaptureFo rmat()->interval);
557 [self setMinFrameDuration:CMTimeMake(1, 15) forDevice:newInput.device]; 569 [self setMinFrameDuration:CMTimeMake(1, fps)forDevice:newInput.device];
558 }
559 #endif
560 }]; 570 }];
561 } 571 }
562 572
563 @end 573 @end
564 574
565 namespace webrtc { 575 namespace webrtc {
566 576
567 enum AVFoundationVideoCapturerMessageType : uint32_t { 577 enum AVFoundationVideoCapturerMessageType : uint32_t {
568 kMessageTypeFrame, 578 kMessageTypeFrame,
569 }; 579 };
570 580
571 struct AVFoundationFrame { 581 struct AVFoundationFrame {
572 AVFoundationFrame(CVImageBufferRef buffer, int64_t time) 582 AVFoundationFrame(CVImageBufferRef buffer, int64_t time)
573 : image_buffer(buffer), capture_time(time) {} 583 : image_buffer(buffer), capture_time(time) {}
574 CVImageBufferRef image_buffer; 584 CVImageBufferRef image_buffer;
575 int64_t capture_time; 585 int64_t capture_time;
576 }; 586 };
577 587
578 AVFoundationVideoCapturer::AVFoundationVideoCapturer() 588 AVFoundationVideoCapturer::AVFoundationVideoCapturer()
579 : _capturer(nil), _startThread(nullptr) { 589 : _capturer(nil), _startThread(nullptr) {
580 // Set our supported formats. This matches preset. 590 // Set our supported formats. This matches kAvailablePresets.
591 _capturer =
592 [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];
593
581 std::vector<cricket::VideoFormat> supported_formats; 594 std::vector<cricket::VideoFormat> supported_formats;
595 int framerate = 30;
596
582 #if TARGET_OS_IPHONE 597 #if TARGET_OS_IPHONE
583 if ([UIDevice deviceType] == RTCDeviceTypeIPhone4S) { 598 if ([UIDevice deviceType] == RTCDeviceTypeIPhone4S) {
584 supported_formats.push_back(cricket::VideoFormat(kIPhone4SFormat));
585 set_enable_video_adapter(false); 599 set_enable_video_adapter(false);
586 } else { 600 framerate = 15;
587 supported_formats.push_back(cricket::VideoFormat(kDefaultFormat));
588 } 601 }
589 #else
590 supported_formats.push_back(cricket::VideoFormat(kDefaultFormat));
591 #endif 602 #endif
603
604 for (const auto preset : kAvailablePresets) {
605 if ([_capturer.captureSession canSetSessionPreset:preset.sessionPreset]) {
606 const auto format = cricket::VideoFormat(
607 preset.width,
608 preset.height,
609 cricket::VideoFormat::FpsToInterval(framerate),
610 cricket::FOURCC_NV12);
611 supported_formats.push_back(format);
612 }
613 }
614
592 SetSupportedFormats(supported_formats); 615 SetSupportedFormats(supported_formats);
593 _capturer =
594 [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];
595 } 616 }
596 617
597 AVFoundationVideoCapturer::~AVFoundationVideoCapturer() { 618 AVFoundationVideoCapturer::~AVFoundationVideoCapturer() {
598 _capturer = nil; 619 _capturer = nil;
599 } 620 }
600 621
601 cricket::CaptureState AVFoundationVideoCapturer::Start( 622 cricket::CaptureState AVFoundationVideoCapturer::Start(
602 const cricket::VideoFormat& format) { 623 const cricket::VideoFormat& format) {
603 if (!_capturer) { 624 if (!_capturer) {
604 LOG(LS_ERROR) << "Failed to create AVFoundation capturer."; 625 LOG(LS_ERROR) << "Failed to create AVFoundation capturer.";
605 return cricket::CaptureState::CS_FAILED; 626 return cricket::CaptureState::CS_FAILED;
606 } 627 }
607 if (_capturer.isRunning) { 628 if (_capturer.isRunning) {
608 LOG(LS_ERROR) << "The capturer is already running."; 629 LOG(LS_ERROR) << "The capturer is already running.";
609 return cricket::CaptureState::CS_FAILED; 630 return cricket::CaptureState::CS_FAILED;
610 } 631 }
611 if (format != kDefaultFormat && format != kIPhone4SFormat) { 632
612 LOG(LS_ERROR) << "Unsupported format provided."; 633 NSString *desiredPreset = GetSessionPresetForVideoFormat(format);
634 RTC_DCHECK(desiredPreset);
635
636 [_capturer.captureSession beginConfiguration];
637 if (![_capturer.captureSession canSetSessionPreset:desiredPreset]) {
638 LOG(LS_ERROR) << "Unsupported video format.";
639 [_capturer.captureSession commitConfiguration];
613 return cricket::CaptureState::CS_FAILED; 640 return cricket::CaptureState::CS_FAILED;
614 } 641 }
642 _capturer.captureSession.sessionPreset = desiredPreset;
643 [_capturer.captureSession commitConfiguration];
615 644
616 // Keep track of which thread capture started on. This is the thread that 645 // Keep track of which thread capture started on. This is the thread that
617 // frames need to be sent to. 646 // frames need to be sent to.
618 RTC_DCHECK(!_startThread); 647 RTC_DCHECK(!_startThread);
619 _startThread = rtc::Thread::Current(); 648 _startThread = rtc::Thread::Current();
620 649
621 SetCaptureFormat(&format); 650 SetCaptureFormat(&format);
622 // This isn't super accurate because it takes a while for the AVCaptureSession 651 // This isn't super accurate because it takes a while for the AVCaptureSession
623 // to spin up, and this call returns async. 652 // to spin up, and this call returns async.
624 // TODO(tkchin): make this better. 653 // TODO(tkchin): make this better.
625 [_capturer start]; 654 [_capturer start];
626 SetCaptureState(cricket::CaptureState::CS_RUNNING); 655 SetCaptureState(cricket::CaptureState::CS_RUNNING);
627 656
657 // Adjust the framerate for all capture devices.
658 const auto fps = cricket::VideoFormat::IntervalToFps(format.interval);
659 AVCaptureDevice *activeDevice = [_capturer getActiveCaptureDevice];
660 [_capturer setMinFrameDuration:CMTimeMake(1, fps)forDevice:activeDevice];
661
628 return cricket::CaptureState::CS_STARTING; 662 return cricket::CaptureState::CS_STARTING;
629 } 663 }
630 664
631 void AVFoundationVideoCapturer::Stop() { 665 void AVFoundationVideoCapturer::Stop() {
632 [_capturer stop]; 666 [_capturer stop];
633 SetCaptureFormat(NULL); 667 SetCaptureFormat(NULL);
634 _startThread = nullptr; 668 _startThread = nullptr;
635 } 669 }
636 670
637 bool AVFoundationVideoCapturer::IsRunning() { 671 bool AVFoundationVideoCapturer::IsRunning() {
(...skipping 88 matching lines...) Expand 10 before | Expand all | Expand 10 after
726 } 760 }
727 761
728 OnFrame(cricket::WebRtcVideoFrame(buffer, webrtc::kVideoRotation_0, 762 OnFrame(cricket::WebRtcVideoFrame(buffer, webrtc::kVideoRotation_0,
729 translated_camera_time_us, 0), 763 translated_camera_time_us, 0),
730 captured_width, captured_height); 764 captured_width, captured_height);
731 765
732 CVBufferRelease(image_buffer); 766 CVBufferRelease(image_buffer);
733 } 767 }
734 768
735 } // namespace webrtc 769 } // namespace webrtc
OLDNEW
« no previous file with comments | « no previous file | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698