Chromium Code Reviews

Unified Diff: webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.mm

Issue 2231033002: Add support for more resolutions on iOS/macOS (Closed) Base URL: https://chromium.googlesource.com/external/webrtc.git@master
Patch Set: Set capture device framerate from VideoFormat. Created 4 years, 4 months ago.
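In short, after this change a client can request any preset-backed resolution instead of only 640x480 (or 352x288 on iPhone 4S). A hypothetical call site, where capturer is a webrtc::AVFoundationVideoCapturer* (illustrative only; not part of the patch):

    // Ask the capturer for 1280x720 NV12 at 30 fps.
    cricket::VideoFormat hd_format(1280, 720,
                                   cricket::VideoFormat::FpsToInterval(30),
                                   cricket::FOURCC_NV12);
    cricket::CaptureState state = capturer->Start(hd_format);
    // Start() maps the format to AVCaptureSessionPreset1280x720 and applies
    // the 30 fps cap through activeVideoMinFrameDuration.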
/*
 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "avfoundationvideocapturer.h"

#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
#if TARGET_OS_IPHONE
#import <UIKit/UIKit.h>
#endif

#import "RTCDispatcher+Private.h"
#import "WebRTC/RTCLogging.h"
#if TARGET_OS_IPHONE
#import "WebRTC/UIDevice+RTCDevice.h"
#endif

#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/thread.h"
#include "webrtc/common_video/include/corevideo_frame_buffer.h"

-// TODO(tkchin): support other formats.
-static NSString *const kDefaultPreset = AVCaptureSessionPreset640x480;
-static NSString *const kIPhone4SPreset = AVCaptureSessionPreset352x288;
-static cricket::VideoFormat const kDefaultFormat =
-    cricket::VideoFormat(640,
-                         480,
-                         cricket::VideoFormat::FpsToInterval(30),
-                         cricket::FOURCC_NV12);
-// iPhone4S is too slow to handle 30fps.
-static cricket::VideoFormat const kIPhone4SFormat =
-    cricket::VideoFormat(352,
-                         288,
-                         cricket::VideoFormat::FpsToInterval(15),
-                         cricket::FOURCC_NV12);
+struct AVCaptureSessionPresetResolution {
+  NSString *sessionPreset;
+  int width;
+  int height;
+};
+
+#if TARGET_OS_IPHONE
+static const AVCaptureSessionPresetResolution kAvailablePresets[] = {
+  { AVCaptureSessionPreset352x288, 352, 288},
+  { AVCaptureSessionPreset640x480, 640, 480},
+  { AVCaptureSessionPreset1280x720, 1280, 720},
+  { AVCaptureSessionPreset1920x1080, 1920, 1080},
+};
+#else  // macOS
+static const AVCaptureSessionPresetResolution kAvailablePresets[] = {
+  { AVCaptureSessionPreset320x240, 320, 240},
+  { AVCaptureSessionPreset352x288, 352, 288},
+  { AVCaptureSessionPreset640x480, 640, 480},
+  { AVCaptureSessionPreset960x540, 960, 540},
+  { AVCaptureSessionPreset1280x720, 1280, 720},
+};
+#endif
+
+// Mapping from cricket::VideoFormat to AVCaptureSession presets.
+static NSString *GetSessionPresetForVideoFormat(
+    const cricket::VideoFormat& format) {
+  for (const auto preset : kAvailablePresets) {
+    // Check both orientations.
+    if ((format.width == preset.width && format.height == preset.height) ||
+        (format.width == preset.height && format.height == preset.width)) {
+      return preset.sessionPreset;
+    }
+  }
+  // If no matching preset is found, use a default one.
+  return AVCaptureSessionPreset640x480;
+}

// This class used to capture frames using AVFoundation APIs on iOS. It is meant
// to be owned by an instance of AVFoundationVideoCapturer. The reason for this
// because other webrtc objects own cricket::VideoCapturer, which is not
// ref counted. To prevent bad behavior we do not expose this class directly.
@interface RTCAVFoundationVideoCapturerInternal : NSObject
    <AVCaptureVideoDataOutputSampleBufferDelegate>

@property(nonatomic, readonly) AVCaptureSession *captureSession;
@property(nonatomic, readonly) dispatch_queue_t frameQueue;
(...skipping 300 matching lines...)
#pragma mark - Private

- (BOOL)setupCaptureSession {
  AVCaptureSession *captureSession = [[AVCaptureSession alloc] init];
#if defined(__IPHONE_7_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_7_0
  NSString *version = [[UIDevice currentDevice] systemVersion];
  if ([version integerValue] >= 7) {
    captureSession.usesApplicationAudioSession = NO;
  }
#endif
-  NSString *preset = kDefaultPreset;
-#if TARGET_OS_IPHONE
-  if ([UIDevice deviceType] == RTCDeviceTypeIPhone4S) {
-    preset = kIPhone4SPreset;
-  }
-#endif
-  if (![captureSession canSetSessionPreset:preset]) {
-    RTCLogError(@"Session preset unsupported.");
-    return NO;
-  }
-  captureSession.sessionPreset = preset;

  // Add the output.
  AVCaptureVideoDataOutput *videoDataOutput = [self videoDataOutput];
  if (![captureSession canAddOutput:videoDataOutput]) {
    RTCLogError(@"Video data output unsupported.");
    return NO;
  }
  [captureSession addOutput:videoDataOutput];

  // Get the front and back cameras. If there isn't a front camera
  // give up.
  AVCaptureDeviceInput *frontCameraInput = [self frontCameraInput];
  AVCaptureDeviceInput *backCameraInput = [self backCameraInput];
  if (!frontCameraInput) {
    RTCLogError(@"No front camera for capture session.");
    return NO;
  }

  // Add the inputs.
  if (![captureSession canAddInput:frontCameraInput] ||
      (backCameraInput && ![captureSession canAddInput:backCameraInput])) {
    RTCLogError(@"Session does not support capture inputs.");
    return NO;
  }
  AVCaptureDeviceInput *input = self.useBackCamera ?
      backCameraInput : frontCameraInput;
  [captureSession addInput:input];
-#if TARGET_OS_IPHONE
-  if ([UIDevice deviceType] == RTCDeviceTypeIPhone4S) {
-    [self setMinFrameDuration:CMTimeMake(1, 15) forDevice:input.device];
-  }
-#endif
+
  _captureSession = captureSession;
  return YES;
}

- (AVCaptureVideoDataOutput *)videoDataOutput {
  if (!_videoDataOutput) {
    // Make the capturer output NV12. Ideally we want I420 but that's not
    // currently supported on iPhone / iPad.
    AVCaptureVideoDataOutput *videoDataOutput =
        [[AVCaptureVideoDataOutput alloc] init];
(...skipping 62 matching lines...)
    if (!backCameraInput) {
      RTCLogError(@"Failed to create front camera input: %@",
                  error.localizedDescription);
      return nil;
    }
    _backCameraInput = backCameraInput;
  }
  return _backCameraInput;
}

-- (void)setMinFrameDuration:(CMTime)minFrameDuration
-                  forDevice:(AVCaptureDevice *)device {
+- (void)setMinFrameDuration:(CMTime)minFrameDuration {
  NSError *error = nil;
-  if (![device lockForConfiguration:&error]) {
-    RTCLogError(@"Failed to lock device for configuration. Error: %@",
-                error.localizedDescription);
-    return;
+  for (AVCaptureDevice *device in
+       [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {

tkchin_webrtc 2016/08/18 15:17:24: Seems odd to do this for every capture device. Cou…

+    if (![device lockForConfiguration:&error]) {
+      RTCLogError(@"Failed to lock device for configuration. Error: %@",
+                  error.localizedDescription);
+      return;

magjed_webrtc 2016/08/18 09:27:37: nit: continue might be more appropriate than retur…

+    }
+    device.activeVideoMinFrameDuration = minFrameDuration;
+    [device unlockForConfiguration];
  }
-  device.activeVideoMinFrameDuration = minFrameDuration;
-  [device unlockForConfiguration];
}
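Taking the two review comments above together, a possible follow-up shape for this helper would only touch devices backing the session's own inputs and would skip, rather than abort on, a device that cannot be locked. A sketch of the suggestion, not code from the patch:

    - (void)setMinFrameDuration:(CMTime)minFrameDuration {
      // Sketch: restrict the change to devices attached to our own session.
      for (AVCaptureInput *input in _captureSession.inputs) {
        if (![input isKindOfClass:[AVCaptureDeviceInput class]]) {
          continue;
        }
        AVCaptureDevice *device = ((AVCaptureDeviceInput *)input).device;
        if (![device hasMediaType:AVMediaTypeVideo]) {
          continue;
        }
        NSError *error = nil;
        if (![device lockForConfiguration:&error]) {
          RTCLogError(@"Failed to lock device for configuration. Error: %@",
                      error.localizedDescription);
          continue;  // Per the review nit: keep going instead of returning.
        }
        device.activeVideoMinFrameDuration = minFrameDuration;
        [device unlockForConfiguration];
      }
    }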

// Called from capture session queue.
- (void)updateOrientation {
  AVCaptureConnection *connection =
      [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
  if (!connection.supportsVideoOrientation) {
    // TODO(tkchin): set rotation bit on frames.
    return;
  }
(...skipping 37 matching lines...)
    }
    if (oldInput) {
      // Ok to remove this even if it's not attached. Will be no-op.
      [_captureSession removeInput:oldInput];
    }
    if (newInput) {
      [_captureSession addInput:newInput];
    }
    [self updateOrientation];
    [_captureSession commitConfiguration];
-#if TARGET_OS_IPHONE
-    if ([UIDevice deviceType] == RTCDeviceTypeIPhone4S) {
-      [self setMinFrameDuration:CMTimeMake(1, 15) forDevice:newInput.device];
-    }
-#endif
  }];
}

@end

namespace webrtc {

enum AVFoundationVideoCapturerMessageType : uint32_t {
  kMessageTypeFrame,
};

struct AVFoundationFrame {
  AVFoundationFrame(CVImageBufferRef buffer, int64_t time)
      : image_buffer(buffer), capture_time(time) {}
  CVImageBufferRef image_buffer;
  int64_t capture_time;
};

AVFoundationVideoCapturer::AVFoundationVideoCapturer()
    : _capturer(nil), _startThread(nullptr) {
-  // Set our supported formats. This matches preset.
+  // Set our supported formats. This matches kAvailablePresets.
+  _capturer =
+      [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];
+
  std::vector<cricket::VideoFormat> supported_formats;
+  int framerate = 30;
+
#if TARGET_OS_IPHONE
  if ([UIDevice deviceType] == RTCDeviceTypeIPhone4S) {
-    supported_formats.push_back(cricket::VideoFormat(kIPhone4SFormat));
    set_enable_video_adapter(false);
-  } else {
-    supported_formats.push_back(cricket::VideoFormat(kDefaultFormat));
+    framerate = 15;
  }
-#else
-  supported_formats.push_back(cricket::VideoFormat(kDefaultFormat));
#endif
+
+  for (const auto preset : kAvailablePresets) {
+    if ([_capturer.captureSession canSetSessionPreset:preset.sessionPreset]) {
+      const auto format = cricket::VideoFormat(
+          preset.width,
+          preset.height,
+          cricket::VideoFormat::FpsToInterval(framerate),
+          cricket::FOURCC_NV12);
+      supported_formats.push_back(format);
+    }
+  }
+
  SetSupportedFormats(supported_formats);
-  _capturer =
-      [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];
}
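To make the constructor change concrete, a caller-side sketch (not part of the CL) of what the advertised formats look like after construction; the actual entries depend on which presets the device's session accepts, and it assumes the usual rtc logging macros already used in this file:

    webrtc::AVFoundationVideoCapturer capturer;
    const std::vector<cricket::VideoFormat>* formats =
        capturer.GetSupportedFormats();
    for (const cricket::VideoFormat& f : *formats) {
      // e.g. "352x288 @ 30 fps", "640x480 @ 30 fps", "1280x720 @ 30 fps", ...
      LOG(LS_INFO) << f.width << "x" << f.height << " @ "
                   << cricket::VideoFormat::IntervalToFps(f.interval) << " fps";
    }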

AVFoundationVideoCapturer::~AVFoundationVideoCapturer() {
  _capturer = nil;
}

cricket::CaptureState AVFoundationVideoCapturer::Start(
    const cricket::VideoFormat& format) {
  if (!_capturer) {
    LOG(LS_ERROR) << "Failed to create AVFoundation capturer.";
    return cricket::CaptureState::CS_FAILED;
  }
  if (_capturer.isRunning) {
    LOG(LS_ERROR) << "The capturer is already running.";
    return cricket::CaptureState::CS_FAILED;
  }
-  if (format != kDefaultFormat && format != kIPhone4SFormat) {
-    LOG(LS_ERROR) << "Unsupported format provided.";
+
+  NSString *desiredPreset = GetSessionPresetForVideoFormat(format);
+  RTC_DCHECK(desiredPreset);
+
+  [_capturer.captureSession beginConfiguration];
+  if (![_capturer.captureSession canSetSessionPreset:desiredPreset]) {
+    LOG(LS_ERROR) << "Unsupported video format.";
+    [_capturer.captureSession commitConfiguration];
    return cricket::CaptureState::CS_FAILED;
  }
+  _capturer.captureSession.sessionPreset = desiredPreset;
+  [_capturer.captureSession commitConfiguration];

  // Keep track of which thread capture started on. This is the thread that
  // frames need to be sent to.
  RTC_DCHECK(!_startThread);
  _startThread = rtc::Thread::Current();

  SetCaptureFormat(&format);
  // This isn't super accurate because it takes a while for the AVCaptureSession
  // to spin up, and this call returns async.
  // TODO(tkchin): make this better.
  [_capturer start];
  SetCaptureState(cricket::CaptureState::CS_RUNNING);

+  // Adjust the framerate for all capture devices.
+  const auto fps = cricket::VideoFormat::IntervalToFps(format.interval);

magjed_webrtc 2016/08/18 09:27:37: nit: use int instead of auto here. We typically on…

+  [_capturer setMinFrameDuration:CMTimeMake(1, fps)];
+
  return cricket::CaptureState::CS_STARTING;
}
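Following the style nit above, the framerate lines would presumably end up as (a sketch of the suggested change only):

    // Spell out the type instead of auto, per the review comment.
    const int fps = cricket::VideoFormat::IntervalToFps(format.interval);
    // A minimum frame duration of 1/fps seconds caps capture at fps frames/sec.
    [_capturer setMinFrameDuration:CMTimeMake(1, fps)];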

void AVFoundationVideoCapturer::Stop() {
  [_capturer stop];
  SetCaptureFormat(NULL);
  _startThread = nullptr;
}

bool AVFoundationVideoCapturer::IsRunning() {
(...skipping 88 matching lines...)
  }

  OnFrame(cricket::WebRtcVideoFrame(buffer, webrtc::kVideoRotation_0,
                                    translated_camera_time_us, 0),
          captured_width, captured_height);

  CVBufferRelease(image_buffer);
}

}  // namespace webrtc