Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 /* | 1 /* |
| 2 * libjingle | 2 * libjingle |
| 3 * Copyright 2015 Google Inc. | 3 * Copyright 2015 Google Inc. |
| 4 * | 4 * |
| 5 * Redistribution and use in source and binary forms, with or without | 5 * Redistribution and use in source and binary forms, with or without |
| 6 * modification, are permitted provided that the following conditions are met: | 6 * modification, are permitted provided that the following conditions are met: |
| 7 * | 7 * |
| 8 * 1. Redistributions of source code must retain the above copyright notice, | 8 * 1. Redistributions of source code must retain the above copyright notice, |
| 9 * this list of conditions and the following disclaimer. | 9 * this list of conditions and the following disclaimer. |
| 10 * 2. Redistributions in binary form must reproduce the above copyright notice, | 10 * 2. Redistributions in binary form must reproduce the above copyright notice, |
| (...skipping 10 matching lines...) | |
| 21 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; | 21 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; |
| 22 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, | 22 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, |
| 23 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR | 23 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR |
| 24 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF | 24 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF |
| 25 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 25 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 26 */ | 26 */ |
| 27 | 27 |
| 28 #include "talk/app/webrtc/objc/avfoundationvideocapturer.h" | 28 #include "talk/app/webrtc/objc/avfoundationvideocapturer.h" |
| 29 | 29 |
| 30 #include "webrtc/base/bind.h" | 30 #include "webrtc/base/bind.h" |
| | 31 #import "webrtc/base/objc/RTCDispatcher.h" |
tkchin_webrtc (2015/12/07 18:27:07): nit: move this under the #import section
Chuck (2015/12/07 19:34:31): Done.
| 31 | 32 |
| 32 #import <AVFoundation/AVFoundation.h> | 33 #import <AVFoundation/AVFoundation.h> |
| 33 #import <Foundation/Foundation.h> | 34 #import <Foundation/Foundation.h> |
| 34 #import <UIKit/UIKit.h> | 35 #import <UIKit/UIKit.h> |
| 35 | 36 |
| 36 // TODO(tkchin): support other formats. | 37 // TODO(tkchin): support other formats. |
| 37 static NSString* const kDefaultPreset = AVCaptureSessionPreset640x480; | 38 static NSString* const kDefaultPreset = AVCaptureSessionPreset640x480; |
| 38 static cricket::VideoFormat const kDefaultFormat = | 39 static cricket::VideoFormat const kDefaultFormat = |
| 39 cricket::VideoFormat(640, | 40 cricket::VideoFormat(640, |
| 40 480, | 41 480, |
| 41 cricket::VideoFormat::FpsToInterval(30), | 42 cricket::VideoFormat::FpsToInterval(30), |
| 42 cricket::FOURCC_NV12); | 43 cricket::FOURCC_NV12); |
| 43 | 44 |
| 44 // This queue is used to start and stop the capturer without blocking the | |
| 45 // calling thread. -[AVCaptureSession startRunning] blocks until the camera is | |
| 46 // running. | |
| 47 static dispatch_queue_t kBackgroundQueue = nil; | |
| 48 | |
| 49 // This class is used to capture frames using AVFoundation APIs on iOS. It is meant | 45 // This class is used to capture frames using AVFoundation APIs on iOS. It is meant |
| 50 // to be owned by an instance of AVFoundationVideoCapturer. The reason for this | 46 // to be owned by an instance of AVFoundationVideoCapturer. The reason for this |
| 51 // is that other webrtc objects own cricket::VideoCapturer, which is not | 47 // is that other webrtc objects own cricket::VideoCapturer, which is not |
| 52 // ref counted. To prevent bad behavior we do not expose this class directly. | 48 // ref counted. To prevent bad behavior we do not expose this class directly. |
| 53 @interface RTCAVFoundationVideoCapturerInternal : NSObject | 49 @interface RTCAVFoundationVideoCapturerInternal : NSObject |
| 54 <AVCaptureVideoDataOutputSampleBufferDelegate> | 50 <AVCaptureVideoDataOutputSampleBufferDelegate> |
| 55 | 51 |
| 56 @property(nonatomic, readonly) AVCaptureSession* captureSession; | 52 @property(nonatomic, readonly) AVCaptureSession* captureSession; |
| 57 @property(nonatomic, readonly) BOOL isRunning; | 53 @property(nonatomic, readonly) BOOL isRunning; |
| 58 @property(nonatomic, assign) BOOL useBackCamera; // Defaults to NO. | 54 @property(nonatomic, assign) BOOL useBackCamera; // Defaults to NO. |
| (...skipping 14 matching lines...) | |
| 73 AVCaptureVideoDataOutput* _videoOutput; | 69 AVCaptureVideoDataOutput* _videoOutput; |
| 74 // The cricket::VideoCapturer that owns this class. Should never be NULL. | 70 // The cricket::VideoCapturer that owns this class. Should never be NULL. |
| 75 webrtc::AVFoundationVideoCapturer* _capturer; | 71 webrtc::AVFoundationVideoCapturer* _capturer; |
| 76 BOOL _orientationHasChanged; | 72 BOOL _orientationHasChanged; |
| 77 } | 73 } |
| 78 | 74 |
| 79 @synthesize captureSession = _captureSession; | 75 @synthesize captureSession = _captureSession; |
| 80 @synthesize useBackCamera = _useBackCamera; | 76 @synthesize useBackCamera = _useBackCamera; |
| 81 @synthesize isRunning = _isRunning; | 77 @synthesize isRunning = _isRunning; |
| 82 | 78 |
| 83 + (void)initialize { | |
| 84 static dispatch_once_t onceToken; | |
| 85 dispatch_once(&onceToken, ^{ | |
| 86 kBackgroundQueue = dispatch_queue_create( | |
| 87 "com.google.webrtc.RTCAVFoundationCapturerBackground", | |
| 88 DISPATCH_QUEUE_SERIAL); | |
| 89 }); | |
| 90 } | |
| 91 | |
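Note on the removed hunks above: the old code lazily created its own private serial queue (kBackgroundQueue, set up in +initialize) just to keep -[AVCaptureSession startRunning]/stopRunning off the calling thread. The CL centralizes that queue in RTCDispatcher instead. As a rough sketch of the lazy-initialization idiom a shared dispatcher would presumably keep internally (RTCDispatcher's real implementation is not part of this diff; the function name and queue label below are illustrative only):

```objc
#import <Foundation/Foundation.h>

// Sketch only: not taken from the actual RTCDispatcher sources. This is the
// same dispatch_once pattern as the removed +initialize, just owned by a
// single shared dispatcher instead of each capturer class.
static dispatch_queue_t CaptureSessionQueue(void) {
  static dispatch_queue_t queue = nil;
  static dispatch_once_t onceToken;
  dispatch_once(&onceToken, ^{
    queue = dispatch_queue_create("org.webrtc.capturesession.example",  // illustrative label
                                  DISPATCH_QUEUE_SERIAL);
  });
  return queue;
}
```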
| 92 - (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer*)capturer { | 79 - (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer*)capturer { |
| 93 NSParameterAssert(capturer); | 80 NSParameterAssert(capturer); |
| 94 if (self = [super init]) { | 81 if (self = [super init]) { |
| 95 _capturer = capturer; | 82 _capturer = capturer; |
| 96 if (![self setupCaptureSession]) { | 83 if (![self setupCaptureSession]) { |
| 97 return nil; | 84 return nil; |
| 98 } | 85 } |
| 99 NSNotificationCenter* center = [NSNotificationCenter defaultCenter]; | 86 NSNotificationCenter* center = [NSNotificationCenter defaultCenter]; |
| 100 [center addObserver:self | 87 [center addObserver:self |
| 101 selector:@selector(deviceOrientationDidChange:) | 88 selector:@selector(deviceOrientationDidChange:) |
| (...skipping 23 matching lines...) | |
| 125 [self updateSessionInput]; | 112 [self updateSessionInput]; |
| 126 } | 113 } |
| 127 | 114 |
| 128 - (void)startCaptureAsync { | 115 - (void)startCaptureAsync { |
| 129 if (_isRunning) { | 116 if (_isRunning) { |
| 130 return; | 117 return; |
| 131 } | 118 } |
| 132 _orientationHasChanged = NO; | 119 _orientationHasChanged = NO; |
| 133 [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications]; | 120 [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications]; |
| 134 AVCaptureSession* session = _captureSession; | 121 AVCaptureSession* session = _captureSession; |
| 135 dispatch_async(kBackgroundQueue, ^{ | 122 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession |
| | 123 block:^{ |
| 136 [session startRunning]; | 124 [session startRunning]; |
| 137 }); | 125 }]; |
| 138 _isRunning = YES; | 126 _isRunning = YES; |
| 139 } | 127 } |
| 140 | 128 |
| 141 - (void)stopCaptureAsync { | 129 - (void)stopCaptureAsync { |
| 142 if (!_isRunning) { | 130 if (!_isRunning) { |
| 143 return; | 131 return; |
| 144 } | 132 } |
| 145 [_videoOutput setSampleBufferDelegate:nil queue:nullptr]; | 133 [_videoOutput setSampleBufferDelegate:nil queue:nullptr]; |
| 146 AVCaptureSession* session = _captureSession; | 134 AVCaptureSession* session = _captureSession; |
| 147 dispatch_async(kBackgroundQueue, ^{ | 135 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession |
| | 136 block:^{ |
| 148 [session stopRunning]; | 137 [session stopRunning]; |
| 149 }); | 138 }]; |
| 150 [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications]; | 139 [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications]; |
| 151 _isRunning = NO; | 140 _isRunning = NO; |
| 152 } | 141 } |
| 153 | 142 |
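Both capture hunks above swap dispatch_async(kBackgroundQueue, ...) for RTCDispatcher. For readers following the review, here is a sketch of the interface those calls assume, inferred only from the usage visible in this diff; the authoritative declaration lives in webrtc/base/objc/RTCDispatcher.h and may define additional queue types:

```objc
#import <Foundation/Foundation.h>

// Inferred from the call sites in this CL; not copied from the real header.
// Only the queue type exercised here is listed.
typedef NS_ENUM(NSInteger, RTCDispatcherQueueType) {
  RTCDispatcherTypeCaptureSession,
};

@interface RTCDispatcher : NSObject

// Dispatches |block| asynchronously onto the serial queue associated with
// |dispatchType|, so calls such as -[AVCaptureSession startRunning], which
// blocks until the camera is running, stay off the calling thread.
+ (void)dispatchAsyncOnType:(RTCDispatcherQueueType)dispatchType
                      block:(dispatch_block_t)block;

@end
```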
| 154 #pragma mark AVCaptureVideoDataOutputSampleBufferDelegate | 143 #pragma mark AVCaptureVideoDataOutputSampleBufferDelegate |
| 155 | 144 |
| 156 - (void)captureOutput:(AVCaptureOutput*)captureOutput | 145 - (void)captureOutput:(AVCaptureOutput*)captureOutput |
| 157 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer | 146 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer |
| 158 fromConnection:(AVCaptureConnection*)connection { | 147 fromConnection:(AVCaptureConnection*)connection { |
| 159 NSParameterAssert(captureOutput == _videoOutput); | 148 NSParameterAssert(captureOutput == _videoOutput); |
| (...skipping 278 matching lines...) | |
| 438 | 427 |
| 439 void AVFoundationVideoCapturer::SignalFrameCapturedOnStartThread( | 428 void AVFoundationVideoCapturer::SignalFrameCapturedOnStartThread( |
| 440 const cricket::CapturedFrame* frame) { | 429 const cricket::CapturedFrame* frame) { |
| 441 RTC_DCHECK(_startThread->IsCurrent()); | 430 RTC_DCHECK(_startThread->IsCurrent()); |
| 442 // This will call a superclass method that will perform the frame conversion | 431 // This will call a superclass method that will perform the frame conversion |
| 443 // to I420. | 432 // to I420. |
| 444 SignalFrameCaptured(this, frame); | 433 SignalFrameCaptured(this, frame); |
| 445 } | 434 } |
| 446 | 435 |
| 447 } // namespace webrtc | 436 } // namespace webrtc |
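SignalFrameCapturedOnStartThread must run on the thread that started the capturer, while AVFoundation delivers sample buffers on its own capture queue. The forwarding code sits in the elided portion of this file; below is a hedged sketch of the kind of thread hop the webrtc/base/bind.h include at the top supports. The helper name is hypothetical, and rtc::Thread::Invoke is synchronous, which is what keeps the frame pointer valid across the hop.

```cpp
// Hypothetical helper inside namespace webrtc (the real call site is in the
// skipped lines above). Requires webrtc/base/bind.h and webrtc/base/thread.h,
// both already pulled in by this file. Invoke<void> blocks until the bound
// call returns, so |frame| only needs to outlive this function call.
void AVFoundationVideoCapturer::ForwardFrameToStartThread(
    const cricket::CapturedFrame* frame) {
  _startThread->Invoke<void>(
      rtc::Bind(&AVFoundationVideoCapturer::SignalFrameCapturedOnStartThread,
                this, frame));
}
```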