Chromium Code Reviews
Diff: talk/app/webrtc/objc/avfoundationvideocapturer.mm

Issue 1838933004: Improve iOS frame capture threading. (Closed) Base URL: https://chromium.googlesource.com/external/webrtc.git@master
Patch Set: Fix (created 4 years, 8 months ago)
 /*
  * libjingle
  * Copyright 2015 Google Inc.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions are met:
  *
  * 1. Redistributions of source code must retain the above copyright notice,
  *    this list of conditions and the following disclaimer.
  * 2. Redistributions in binary form must reproduce the above copyright notice,
(...skipping 10 matching lines...)
  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
  * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
  * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
  * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
  * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  */

 #include "talk/app/webrtc/objc/avfoundationvideocapturer.h"

 #include "webrtc/base/bind.h"
+#include "webrtc/base/checks.h"
 #include "webrtc/base/thread.h"

 #import <AVFoundation/AVFoundation.h>
 #import <Foundation/Foundation.h>
 #import <UIKit/UIKit.h>

-#import "webrtc/base/objc/RTCDispatcher.h"
+#import "webrtc/base/objc/RTCDispatcher+Private.h"
 #import "webrtc/base/objc/RTCLogging.h"

 // TODO(tkchin): support other formats.
-static NSString* const kDefaultPreset = AVCaptureSessionPreset640x480;
+static NSString *const kDefaultPreset = AVCaptureSessionPreset640x480;
 static cricket::VideoFormat const kDefaultFormat =
     cricket::VideoFormat(640,
                          480,
                          cricket::VideoFormat::FpsToInterval(30),
                          cricket::FOURCC_NV12);

 // This class is used to capture frames using AVFoundation APIs on iOS. It is
 // meant to be owned by an instance of AVFoundationVideoCapturer. The reason
 // for this is that other webrtc objects own cricket::VideoCapturer, which is
 // not ref counted. To prevent bad behavior we do not expose this class
 // directly.
 @interface RTCAVFoundationVideoCapturerInternal : NSObject
     <AVCaptureVideoDataOutputSampleBufferDelegate>

-@property(nonatomic, readonly) AVCaptureSession* captureSession;
+@property(nonatomic, readonly) AVCaptureSession *captureSession;
 @property(nonatomic, readonly) BOOL isRunning;
 @property(nonatomic, readonly) BOOL canUseBackCamera;
 @property(nonatomic, assign) BOOL useBackCamera;  // Defaults to NO.

 // We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it
 // when we receive frames. This is safe because this object should be owned by
 // it.
-- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer*)capturer;
-- (void)startCaptureAsync;
-- (void)stopCaptureAsync;
+- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer;
+
+// Starts and stops the capture session asynchronously. We cannot do this
+// synchronously without blocking a WebRTC thread.
+- (void)start;
+- (void)stop;

 @end

 @implementation RTCAVFoundationVideoCapturerInternal {
   // Keep pointers to inputs for convenience.
-  AVCaptureDeviceInput* _frontDeviceInput;
-  AVCaptureDeviceInput* _backDeviceInput;
-  AVCaptureVideoDataOutput* _videoOutput;
+  AVCaptureDeviceInput *_frontCameraInput;
+  AVCaptureDeviceInput *_backCameraInput;
+  AVCaptureVideoDataOutput *_videoDataOutput;
   // The cricket::VideoCapturer that owns this class. Should never be NULL.
-  webrtc::AVFoundationVideoCapturer* _capturer;
+  webrtc::AVFoundationVideoCapturer *_capturer;
   BOOL _orientationHasChanged;
 }

 @synthesize captureSession = _captureSession;
+@synthesize isRunning = _isRunning;
 @synthesize useBackCamera = _useBackCamera;
-@synthesize isRunning = _isRunning;

-- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer*)capturer {
-  NSParameterAssert(capturer);
+// This is called from the thread that creates the video source, which is
+// likely the main thread.
+- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer {
+  RTC_DCHECK(capturer);
   if (self = [super init]) {
     _capturer = capturer;
+    // Create the capture session and all relevant inputs and outputs. We need
+    // to do this in init because the application may want the capture session
+    // before we start the capturer for e.g. AVCaptureVideoPreviewLayer. All
+    // objects created here are retained until dealloc and never recreated.
     if (![self setupCaptureSession]) {
       return nil;
     }
-    NSNotificationCenter* center = [NSNotificationCenter defaultCenter];
+    NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
     [center addObserver:self
                selector:@selector(deviceOrientationDidChange:)
                    name:UIDeviceOrientationDidChangeNotification
                  object:nil];
     [center addObserverForName:AVCaptureSessionRuntimeErrorNotification
                         object:nil
                          queue:nil
-                    usingBlock:^(NSNotification* notification) {
-      NSLog(@"Capture session error: %@", notification.userInfo);
+                    usingBlock:^(NSNotification *notification) {
+      RTCLogError(@"Capture session error: %@", notification.userInfo);
     }];
   }
   return self;
 }

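The new init comment explains why the session is built eagerly: a client can attach a preview before capture starts. A minimal sketch of such client code, assuming a hypothetical `capturer` object exposing the captureSession property and a host `view`:

    AVCaptureVideoPreviewLayer *previewLayer =
        [AVCaptureVideoPreviewLayer layerWithSession:capturer.captureSession];
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    previewLayer.frame = view.bounds;
    [view.layer addSublayer:previewLayer];  // Renders before -start is called.
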
 - (void)dealloc {
-  [self stopCaptureAsync];
+  RTC_DCHECK(!_isRunning);
   [[NSNotificationCenter defaultCenter] removeObserver:self];
   _capturer = nullptr;
 }

+- (AVCaptureSession *)captureSession {
+  return _captureSession;
+}
+
+// Called from any thread (likely main thread).
 - (BOOL)canUseBackCamera {
-  return _backDeviceInput != nil;
+  return _backCameraInput != nil;
 }

+// Called from any thread (likely main thread).
+- (BOOL)useBackCamera {
+  @synchronized(self) {
+    return _useBackCamera;
+  }
+}
+
+// Called from any thread (likely main thread).
 - (void)setUseBackCamera:(BOOL)useBackCamera {
-  if (_useBackCamera == useBackCamera) {
-    return;
-  }
   if (!self.canUseBackCamera) {
-    RTCLog(@"No rear-facing camera exists or it cannot be used;"
-           "not switching.");
+    if (useBackCamera) {
+      RTCLogWarning(@"No rear-facing camera exists or it cannot be used;"
+                    " not switching.");
+    }
     return;
   }
-  _useBackCamera = useBackCamera;
-  [self updateSessionInput];
+  @synchronized(self) {
+    if (_useBackCamera == useBackCamera) {
+      return;
+    }
+    _useBackCamera = useBackCamera;
+    [self updateSessionInputForUseBackCamera:useBackCamera];
+  }
 }

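Because the useBackCamera getter and setter both take @synchronized(self), a main-thread toggle stays consistent with reads from other threads. A sketch of a hypothetical UI action driving it (`_capturerInternal` is an assumed ivar holding this object):

    - (IBAction)didTapSwitchCamera:(id)sender {
      // Reads and writes are serialized by the @synchronized blocks above.
      _capturerInternal.useBackCamera = !_capturerInternal.useBackCamera;
    }
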
-- (void)startCaptureAsync {
+// Called from WebRTC thread.
+- (void)start {
   if (_isRunning) {
     return;
   }
-  _orientationHasChanged = NO;
-  [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
-  AVCaptureSession* session = _captureSession;
+  _isRunning = YES;
   [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                                block:^{
-    [session startRunning];
+    _orientationHasChanged = NO;
+    [self updateOrientation];
+    [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
+    AVCaptureSession *captureSession = self.captureSession;
+    [captureSession startRunning];
   }];
-  _isRunning = YES;
 }

-- (void)stopCaptureAsync {
+// Called from same thread as start.
+- (void)stop {
   if (!_isRunning) {
     return;
   }
-  [_videoOutput setSampleBufferDelegate:nil queue:nullptr];
-  AVCaptureSession* session = _captureSession;
+  _isRunning = NO;
   [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                                block:^{
-    [session stopRunning];
+    [_videoDataOutput setSampleBufferDelegate:nil queue:nullptr];
+    [_captureSession stopRunning];
+    [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
   }];
-  [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
-  _isRunning = NO;
 }

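-startRunning and -stopRunning can block for a long time, which is why both methods only flip _isRunning on the calling thread and push the heavy work onto the dispatcher. RTCDispatcher's capture-session type is assumed here to wrap a private serial dispatch queue, so the pattern is roughly equivalent to this GCD sketch (the queue name is made up):

    static dispatch_queue_t CaptureSessionQueue(void) {
      static dispatch_queue_t queue;
      static dispatch_once_t once;
      dispatch_once(&once, ^{
        queue = dispatch_queue_create("org.webrtc.example.capturesession",
                                      DISPATCH_QUEUE_SERIAL);
      });
      return queue;
    }

    // -startRunning blocks while the session spins up, so keep it off the
    // WebRTC thread that called -start.
    dispatch_async(CaptureSessionQueue(), ^{
      [session startRunning];
    });
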
 #pragma mark AVCaptureVideoDataOutputSampleBufferDelegate

-- (void)captureOutput:(AVCaptureOutput*)captureOutput
+- (void)captureOutput:(AVCaptureOutput *)captureOutput
     didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
-           fromConnection:(AVCaptureConnection*)connection {
-  NSParameterAssert(captureOutput == _videoOutput);
+           fromConnection:(AVCaptureConnection *)connection {
+  NSParameterAssert(captureOutput == _videoDataOutput);
   if (!_isRunning) {
     return;
   }
   _capturer->CaptureSampleBuffer(sampleBuffer);
 }

-- (void)captureOutput:(AVCaptureOutput*)captureOutput
+- (void)captureOutput:(AVCaptureOutput *)captureOutput
     didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
-         fromConnection:(AVCaptureConnection*)connection {
-  NSLog(@"Dropped sample buffer.");
+         fromConnection:(AVCaptureConnection *)connection {
+  RTCLogError(@"Dropped sample buffer.");
 }

 #pragma mark - Private

 - (BOOL)setupCaptureSession {
-  _captureSession = [[AVCaptureSession alloc] init];
+  AVCaptureSession *captureSession = [[AVCaptureSession alloc] init];
 #if defined(__IPHONE_7_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_7_0
-  NSString* version = [[UIDevice currentDevice] systemVersion];
+  NSString *version = [[UIDevice currentDevice] systemVersion];
   if ([version integerValue] >= 7) {
-    _captureSession.usesApplicationAudioSession = NO;
+    captureSession.usesApplicationAudioSession = NO;
   }
 #endif
-  if (![_captureSession canSetSessionPreset:kDefaultPreset]) {
-    NSLog(@"Default video capture preset unsupported.");
+  if (![captureSession canSetSessionPreset:kDefaultPreset]) {
+    RTCLogError(@"Session preset unsupported.");
     return NO;
   }
-  _captureSession.sessionPreset = kDefaultPreset;
+  captureSession.sessionPreset = kDefaultPreset;

-  // Make the capturer output NV12. Ideally we want I420 but that's not
-  // currently supported on iPhone / iPad.
-  _videoOutput = [[AVCaptureVideoDataOutput alloc] init];
-  _videoOutput.videoSettings = @{
-    (NSString*)kCVPixelBufferPixelFormatTypeKey :
-      @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
-  };
-  _videoOutput.alwaysDiscardsLateVideoFrames = NO;
-  [_videoOutput setSampleBufferDelegate:self
-                                  queue:dispatch_get_main_queue()];
-  if (![_captureSession canAddOutput:_videoOutput]) {
-    NSLog(@"Default video capture output unsupported.");
-    return NO;
-  }
-  [_captureSession addOutput:_videoOutput];
+  // Add the output.
+  AVCaptureVideoDataOutput *videoDataOutput = [self videoDataOutput];
+  if (![captureSession canAddOutput:videoDataOutput]) {
+    RTCLogError(@"Video data output unsupported.");
+    return NO;
+  }
+  [captureSession addOutput:videoDataOutput];
+
+  // Get the front and back cameras. If there isn't a front camera
+  // give up.
+  AVCaptureDeviceInput *frontCameraInput = [self frontCameraInput];
+  AVCaptureDeviceInput *backCameraInput = [self backCameraInput];
+  if (!frontCameraInput) {
+    RTCLogError(@"No front camera for capture session.");
+    return NO;
+  }
+
+  // Add the inputs.
+  if (![captureSession canAddInput:frontCameraInput] ||
+      (backCameraInput && ![captureSession canAddInput:backCameraInput])) {
+    RTCLogError(@"Session does not support capture inputs.");
+    return NO;
+  }
+  AVCaptureDeviceInput *input = self.useBackCamera ?
+      backCameraInput : frontCameraInput;
+  [captureSession addInput:input];
+  _captureSession = captureSession;
+  return YES;
+}
+
+- (AVCaptureVideoDataOutput *)videoDataOutput {
+  if (!_videoDataOutput) {
+    // Make the capturer output NV12. Ideally we want I420 but that's not
+    // currently supported on iPhone / iPad.
+    AVCaptureVideoDataOutput *videoDataOutput =
+        [[AVCaptureVideoDataOutput alloc] init];
+    videoDataOutput.videoSettings = @{
+      (NSString *)kCVPixelBufferPixelFormatTypeKey :
+        @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
+    };
+    videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
+    dispatch_queue_t queue =
+        [RTCDispatcher dispatchQueueForType:RTCDispatcherTypeCaptureSession];
+    [videoDataOutput setSampleBufferDelegate:self queue:queue];
+    _videoDataOutput = videoDataOutput;
+  }
+  return _videoDataOutput;
+}

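Because the delegate queue is now the capture-session queue rather than the main queue, sample buffers are delivered off the main thread. If one wanted to double-check that frames really arrive in the NV12 format requested above, a small assertion could be added in the delegate callback (a sketch, not part of this patch):

    CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    OSType format = CVPixelBufferGetPixelFormatType(pixelBuffer);
    NSAssert(format == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange,
             @"Expected NV12 full-range frames");
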
-  // Find the capture devices.
-  AVCaptureDevice* frontCaptureDevice = nil;
-  AVCaptureDevice* backCaptureDevice = nil;
-  for (AVCaptureDevice* captureDevice in
-       [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
-    if (captureDevice.position == AVCaptureDevicePositionBack) {
-      backCaptureDevice = captureDevice;
-    }
-    if (captureDevice.position == AVCaptureDevicePositionFront) {
-      frontCaptureDevice = captureDevice;
-    }
-  }
-  if (!frontCaptureDevice) {
-    RTCLog(@"Failed to get front capture device.");
-    return NO;
-  }
-  if (!backCaptureDevice) {
-    RTCLog(@"Failed to get back capture device");
-    // Don't return NO here because devices exist (16GB 5th generation iPod
-    // Touch) that don't have a rear-facing camera.
-  }
-
-  // Set up the session inputs.
-  NSError* error = nil;
-  _frontDeviceInput =
-      [AVCaptureDeviceInput deviceInputWithDevice:frontCaptureDevice
-                                            error:&error];
-  if (!_frontDeviceInput) {
-    NSLog(@"Failed to get capture device input: %@",
-          error.localizedDescription);
-    return NO;
-  }
-  if (backCaptureDevice) {
-    error = nil;
-    _backDeviceInput =
-        [AVCaptureDeviceInput deviceInputWithDevice:backCaptureDevice
-                                              error:&error];
-    if (error) {
-      RTCLog(@"Failed to get capture device input: %@",
-             error.localizedDescription);
-      _backDeviceInput = nil;
-    }
-  }
-
-  // Add the inputs.
-  if (![_captureSession canAddInput:_frontDeviceInput] ||
-      (_backDeviceInput && ![_captureSession canAddInput:_backDeviceInput])) {
-    NSLog(@"Session does not support capture inputs.");
-    return NO;
-  }
-  [self updateSessionInput];
-
-  return YES;
-}
-
-- (void)deviceOrientationDidChange:(NSNotification*)notification {
-  _orientationHasChanged = YES;
-  [self updateOrientation];
-}
+- (AVCaptureDevice *)videoCaptureDeviceForPosition:
+    (AVCaptureDevicePosition)position {
+  for (AVCaptureDevice *captureDevice in
+       [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
+    if (captureDevice.position == position) {
+      return captureDevice;
+    }
+  }
+  return nil;
+}
+
+- (AVCaptureDeviceInput *)frontCameraInput {
+  if (!_frontCameraInput) {
+    AVCaptureDevice *frontCameraDevice =
+        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionFront];
+    if (!frontCameraDevice) {
+      RTCLogWarning(@"Failed to find front capture device.");
+      return nil;
+    }
+    NSError *error = nil;
+    AVCaptureDeviceInput *frontCameraInput =
+        [AVCaptureDeviceInput deviceInputWithDevice:frontCameraDevice
+                                              error:&error];
+    if (!frontCameraInput) {
+      RTCLogError(@"Failed to create front camera input: %@",
+                  error.localizedDescription);
+      return nil;
+    }
+    _frontCameraInput = frontCameraInput;
+  }
+  return _frontCameraInput;
+}
+
+- (AVCaptureDeviceInput *)backCameraInput {
+  if (!_backCameraInput) {
+    AVCaptureDevice *backCameraDevice =
+        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionBack];
+    if (!backCameraDevice) {
+      RTCLogWarning(@"Failed to find back capture device.");
+      return nil;
+    }
+    NSError *error = nil;
+    AVCaptureDeviceInput *backCameraInput =
+        [AVCaptureDeviceInput deviceInputWithDevice:backCameraDevice
+                                              error:&error];
+    if (!backCameraInput) {
+      RTCLogError(@"Failed to create back camera input: %@",
+                  error.localizedDescription);
+      return nil;
+    }
+    _backCameraInput = backCameraInput;
+  }
+  return _backCameraInput;
+}

+- (void)deviceOrientationDidChange:(NSNotification *)notification {
+  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+                               block:^{
+    _orientationHasChanged = YES;
+    [self updateOrientation];
+  }];
+}
+
+// Called from capture session queue.
 - (void)updateOrientation {
-  AVCaptureConnection* connection =
-      [_videoOutput connectionWithMediaType:AVMediaTypeVideo];
+  AVCaptureConnection *connection =
+      [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
   if (!connection.supportsVideoOrientation) {
     // TODO(tkchin): set rotation bit on frames.
     return;
   }
   AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait;
   switch ([UIDevice currentDevice].orientation) {
     case UIDeviceOrientationPortrait:
       orientation = AVCaptureVideoOrientationPortrait;
       break;
     case UIDeviceOrientationPortraitUpsideDown:
       orientation = AVCaptureVideoOrientationPortraitUpsideDown;
       break;
     case UIDeviceOrientationLandscapeLeft:
       orientation = AVCaptureVideoOrientationLandscapeRight;
       break;
     case UIDeviceOrientationLandscapeRight:
       orientation = AVCaptureVideoOrientationLandscapeLeft;
       break;
     case UIDeviceOrientationFaceUp:
     case UIDeviceOrientationFaceDown:
     case UIDeviceOrientationUnknown:
       if (!_orientationHasChanged) {
         connection.videoOrientation = orientation;
       }
       return;
   }
   connection.videoOrientation = orientation;
 }

-- (void)updateSessionInput {
-  // Update the current session input to match what's stored in _useBackCamera.
-  [_captureSession beginConfiguration];
-  AVCaptureDeviceInput* oldInput = _backDeviceInput;
-  AVCaptureDeviceInput* newInput = _frontDeviceInput;
-  if (_useBackCamera) {
-    oldInput = _frontDeviceInput;
-    newInput = _backDeviceInput;
-  }
-  // Ok to remove this even if it's not attached. Will be no-op.
-  [_captureSession removeInput:oldInput];
-  [_captureSession addInput:newInput];
-  [self updateOrientation];
-  [_captureSession commitConfiguration];
+// Update the current session input to match what's stored in _useBackCamera.
+- (void)updateSessionInputForUseBackCamera:(BOOL)useBackCamera {
+  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+                               block:^{
+    [_captureSession beginConfiguration];
+    AVCaptureDeviceInput *oldInput = _backCameraInput;
+    AVCaptureDeviceInput *newInput = _frontCameraInput;
+    if (useBackCamera) {
+      oldInput = _frontCameraInput;
+      newInput = _backCameraInput;
+    }
+    if (oldInput) {
+      // Ok to remove this even if it's not attached. Will be no-op.
+      [_captureSession removeInput:oldInput];
+    }
+    if (newInput) {
+      [_captureSession addInput:newInput];
+    }
+    [self updateOrientation];
+    [_captureSession commitConfiguration];
+  }];
 }

 @end

 namespace webrtc {

+enum AVFoundationVideoCapturerMessageType : uint32_t {
+  kMessageTypeFrame,
+};
+
+struct AVFoundationFrame {
+  AVFoundationFrame(CVImageBufferRef buffer, int64_t time)
+      : image_buffer(buffer), capture_time(time) {}
+  CVImageBufferRef image_buffer;
+  int64_t capture_time;
+};
+
 AVFoundationVideoCapturer::AVFoundationVideoCapturer()
     : _capturer(nil), _startThread(nullptr) {
   // Set our supported formats. This matches kDefaultPreset.
   std::vector<cricket::VideoFormat> supportedFormats;
   supportedFormats.push_back(cricket::VideoFormat(kDefaultFormat));
   SetSupportedFormats(supportedFormats);
   _capturer =
       [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];
 }

(...skipping 18 matching lines...)

   // Keep track of which thread capture started on. This is the thread that
   // frames need to be sent to.
   RTC_DCHECK(!_startThread);
   _startThread = rtc::Thread::Current();

   SetCaptureFormat(&format);
   // This isn't super accurate because it takes a while for the AVCaptureSession
   // to spin up, and this call returns async.
   // TODO(tkchin): make this better.
-  [_capturer startCaptureAsync];
+  [_capturer start];
   SetCaptureState(cricket::CaptureState::CS_RUNNING);

   return cricket::CaptureState::CS_STARTING;
 }

 void AVFoundationVideoCapturer::Stop() {
-  [_capturer stopCaptureAsync];
+  [_capturer stop];
   SetCaptureFormat(NULL);
   _startThread = nullptr;
 }

 bool AVFoundationVideoCapturer::IsRunning() {
   return _capturer.isRunning;
 }

 AVCaptureSession* AVFoundationVideoCapturer::GetCaptureSession() {
   return _capturer.captureSession;
(...skipping 12 matching lines...)
 }

 void AVFoundationVideoCapturer::CaptureSampleBuffer(
     CMSampleBufferRef sampleBuffer) {
   if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 ||
       !CMSampleBufferIsValid(sampleBuffer) ||
       !CMSampleBufferDataIsReady(sampleBuffer)) {
     return;
   }

-  CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
-  if (imageBuffer == NULL) {
+  CVImageBufferRef image_buffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+  if (image_buffer == NULL) {
     return;
   }

+  // Retain the buffer and post it to the webrtc thread. It will be released
+  // after it has successfully been signaled.
+  CVBufferRetain(image_buffer);
+  AVFoundationFrame frame(image_buffer, rtc::TimeNanos());
+  _startThread->Post(this, kMessageTypeFrame,
+                     new rtc::TypedMessageData<AVFoundationFrame>(frame));
+}
+
+void AVFoundationVideoCapturer::OnMessage(rtc::Message *msg) {
+  switch (msg->message_id) {
+    case kMessageTypeFrame: {
+      rtc::TypedMessageData<AVFoundationFrame>* data =
+          static_cast<rtc::TypedMessageData<AVFoundationFrame>*>(msg->pdata);
+      const AVFoundationFrame& frame = data->data();
+      OnFrameMessage(frame.image_buffer, frame.capture_time);
+      delete data;
+      break;
+    }
+  }
+}
+
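The enum-plus-TypedMessageData handoff above is the standard rtc::Thread message-passing pattern of this era. Stripped to its essentials it looks like the following sketch (the Ping names are hypothetical):

    enum : uint32_t { kMessageTypePing };

    struct Ping { int64_t sent_at_ns; };

    class Pinger : public rtc::MessageHandler {
     public:
      void PostPing(rtc::Thread* target) {
        // Ownership of the message data transfers to the message queue.
        target->Post(this, kMessageTypePing,
                     new rtc::TypedMessageData<Ping>({rtc::TimeNanos()}));
      }
      void OnMessage(rtc::Message* msg) override {
        rtc::TypedMessageData<Ping>* data =
            static_cast<rtc::TypedMessageData<Ping>*>(msg->pdata);
        // ... handle data->data() on the target thread ...
        delete data;  // The receiver is responsible for deletion.
      }
    };
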
+void AVFoundationVideoCapturer::OnFrameMessage(CVImageBufferRef image_buffer,
+                                               int64_t capture_time) {
+  RTC_DCHECK(_startThread->IsCurrent());
+
   // Base address must be unlocked to access frame data.
-  CVOptionFlags lockFlags = kCVPixelBufferLock_ReadOnly;
-  CVReturn ret = CVPixelBufferLockBaseAddress(imageBuffer, lockFlags);
+  CVOptionFlags lock_flags = kCVPixelBufferLock_ReadOnly;
+  CVReturn ret = CVPixelBufferLockBaseAddress(image_buffer, lock_flags);
   if (ret != kCVReturnSuccess) {
     return;
   }

   static size_t const kYPlaneIndex = 0;
   static size_t const kUVPlaneIndex = 1;
-  uint8_t* yPlaneAddress =
-      (uint8_t*)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, kYPlaneIndex);
-  size_t yPlaneHeight =
-      CVPixelBufferGetHeightOfPlane(imageBuffer, kYPlaneIndex);
-  size_t yPlaneWidth =
-      CVPixelBufferGetWidthOfPlane(imageBuffer, kYPlaneIndex);
-  size_t yPlaneBytesPerRow =
-      CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, kYPlaneIndex);
-  size_t uvPlaneHeight =
-      CVPixelBufferGetHeightOfPlane(imageBuffer, kUVPlaneIndex);
-  size_t uvPlaneBytesPerRow =
-      CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, kUVPlaneIndex);
-  size_t frameSize =
-      yPlaneBytesPerRow * yPlaneHeight + uvPlaneBytesPerRow * uvPlaneHeight;
+  uint8_t* y_plane_address =
+      static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(image_buffer,
+                                                               kYPlaneIndex));
+  size_t y_plane_height =
+      CVPixelBufferGetHeightOfPlane(image_buffer, kYPlaneIndex);
+  size_t y_plane_width =
+      CVPixelBufferGetWidthOfPlane(image_buffer, kYPlaneIndex);
+  size_t y_plane_bytes_per_row =
+      CVPixelBufferGetBytesPerRowOfPlane(image_buffer, kYPlaneIndex);
+  size_t uv_plane_height =
+      CVPixelBufferGetHeightOfPlane(image_buffer, kUVPlaneIndex);
+  size_t uv_plane_bytes_per_row =
+      CVPixelBufferGetBytesPerRowOfPlane(image_buffer, kUVPlaneIndex);
+  size_t frame_size = y_plane_bytes_per_row * y_plane_height +
+                      uv_plane_bytes_per_row * uv_plane_height;

   // Sanity check assumption that planar bytes are contiguous.
-  uint8_t* uvPlaneAddress =
-      (uint8_t*)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, kUVPlaneIndex);
-  RTC_DCHECK(
-      uvPlaneAddress == yPlaneAddress + yPlaneHeight * yPlaneBytesPerRow);
+  uint8_t* uv_plane_address =
+      static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(image_buffer,
+                                                               kUVPlaneIndex));
+  RTC_DCHECK(uv_plane_address ==
+             y_plane_address + y_plane_height * y_plane_bytes_per_row);

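For the default 640x480 NV12 buffer, and assuming each plane's bytes-per-row equals its width (no row padding), the size arithmetic works out to 1.5 bytes per pixel:

    // Y plane:  640 * 480 = 307200 bytes (full-resolution luma)
    // UV plane: 640 * 240 = 153600 bytes (Cb/Cr interleaved; 2x2 subsampling
    //           halves the rows but keeps full-width rows of alternating bytes)
    // frame_size = 307200 + 153600 = 460800 bytes = 640 * 480 * 3 / 2
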
   // Stuff data into a cricket::CapturedFrame.
-  int64_t currentTime = rtc::TimeNanos();
   cricket::CapturedFrame frame;
-  frame.width = yPlaneWidth;
-  frame.height = yPlaneHeight;
+  frame.width = y_plane_width;
+  frame.height = y_plane_height;
   frame.pixel_width = 1;
   frame.pixel_height = 1;
   frame.fourcc = static_cast<uint32_t>(cricket::FOURCC_NV12);
-  frame.time_stamp = currentTime;
-  frame.data = yPlaneAddress;
-  frame.data_size = frameSize;
+  frame.time_stamp = capture_time;
+  frame.data = y_plane_address;
+  frame.data_size = frame_size;

-  if (_startThread->IsCurrent()) {
-    SignalFrameCaptured(this, &frame);
-  } else {
-    _startThread->Invoke<void>(
-        rtc::Bind(&AVFoundationVideoCapturer::SignalFrameCapturedOnStartThread,
-                  this, &frame));
-  }
-  CVPixelBufferUnlockBaseAddress(imageBuffer, lockFlags);
-}
-
-void AVFoundationVideoCapturer::SignalFrameCapturedOnStartThread(
-    const cricket::CapturedFrame* frame) {
-  RTC_DCHECK(_startThread->IsCurrent());
   // This will call a superclass method that will perform the frame conversion
   // to I420.
-  SignalFrameCaptured(this, frame);
+  SignalFrameCaptured(this, &frame);
+
+  CVPixelBufferUnlockBaseAddress(image_buffer, lock_flags);
+  CVBufferRelease(image_buffer);
 }

 }  // namespace webrtc