Chromium Code Reviews

Unified Diff: webrtc/api/objc/avfoundationvideocapturer.mm

Issue 1838933004: Improve iOS frame capture threading. (Closed) Base URL: https://chromium.googlesource.com/external/webrtc.git@master
Patch Set: Fix (created 4 years, 8 months ago)
 /*
  * Copyright 2015 The WebRTC project authors. All Rights Reserved.
  *
  * Use of this source code is governed by a BSD-style license
  * that can be found in the LICENSE file in the root of the source
  * tree. An additional intellectual property rights grant can be found
  * in the file PATENTS. All contributing project authors may
  * be found in the AUTHORS file in the root of the source tree.
  */

 #include "webrtc/api/objc/avfoundationvideocapturer.h"

 #include "webrtc/base/bind.h"
+#include "webrtc/base/checks.h"
 #include "webrtc/base/thread.h"

 #import <AVFoundation/AVFoundation.h>
 #import <Foundation/Foundation.h>
 #import <UIKit/UIKit.h>

-#import "webrtc/base/objc/RTCDispatcher.h"
+#import "webrtc/base/objc/RTCDispatcher+Private.h"
 #import "webrtc/base/objc/RTCLogging.h"

 // TODO(tkchin): support other formats.
-static NSString* const kDefaultPreset = AVCaptureSessionPreset640x480;
+static NSString *const kDefaultPreset = AVCaptureSessionPreset640x480;
 static cricket::VideoFormat const kDefaultFormat =
     cricket::VideoFormat(640,
                          480,
                          cricket::VideoFormat::FpsToInterval(30),
                          cricket::FOURCC_NV12);

 // This class is used to capture frames using AVFoundation APIs on iOS. It is
 // meant to be owned by an instance of AVFoundationVideoCapturer. The reason
 // for this is that other webrtc objects own cricket::VideoCapturer, which is
 // not ref counted. To prevent bad behavior we do not expose this class
 // directly.
 @interface RTCAVFoundationVideoCapturerInternal : NSObject
     <AVCaptureVideoDataOutputSampleBufferDelegate>

 @property(nonatomic, readonly) AVCaptureSession *captureSession;
 @property(nonatomic, readonly) BOOL isRunning;
 @property(nonatomic, readonly) BOOL canUseBackCamera;
 @property(nonatomic, assign) BOOL useBackCamera;  // Defaults to NO.

 // We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it
 // when we receive frames. This is safe because this object should be owned by
 // it.
 - (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer;
-- (void)startCaptureAsync;
-- (void)stopCaptureAsync;
+
+// Starts and stops the capture session asynchronously. We cannot do this
+// synchronously without blocking a WebRTC thread.
+- (void)start;
+- (void)stop;

 @end

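Note on the new start/stop pair: the CL moves all AVCaptureSession work onto RTCDispatcher's capture-session queue so a WebRTC thread never blocks on -[AVCaptureSession startRunning]. RTCDispatcher+Private.h is not part of this file; a minimal sketch of the pattern it presumably wraps (names here are illustrative, not the actual implementation):

    // Sketch only: assumes one serial GCD queue per dispatcher type.
    static dispatch_queue_t gCaptureSessionQueue;
    static dispatch_once_t gOnceToken;

    static dispatch_queue_t CaptureSessionQueue(void) {
      dispatch_once(&gOnceToken, ^{
        gCaptureSessionQueue = dispatch_queue_create(
            "org.webrtc.RTCDispatcherCaptureSession", DISPATCH_QUEUE_SERIAL);
      });
      return gCaptureSessionQueue;
    }

    // -[AVCaptureSession startRunning] is blocking; dispatching it
    // asynchronously keeps -start cheap on the calling (WebRTC) thread.
    static void StartSessionAsync(AVCaptureSession *session) {
      dispatch_async(CaptureSessionQueue(), ^{
        [session startRunning];
      });
    }
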
 @implementation RTCAVFoundationVideoCapturerInternal {
   // Keep pointers to inputs for convenience.
-  AVCaptureDeviceInput *_frontDeviceInput;
-  AVCaptureDeviceInput *_backDeviceInput;
-  AVCaptureVideoDataOutput *_videoOutput;
+  AVCaptureDeviceInput *_frontCameraInput;
+  AVCaptureDeviceInput *_backCameraInput;
+  AVCaptureVideoDataOutput *_videoDataOutput;
   // The cricket::VideoCapturer that owns this class. Should never be NULL.
   webrtc::AVFoundationVideoCapturer *_capturer;
   BOOL _orientationHasChanged;
 }

 @synthesize captureSession = _captureSession;
+@synthesize isRunning = _isRunning;
 @synthesize useBackCamera = _useBackCamera;
-@synthesize isRunning = _isRunning;

+// This is called from the thread that creates the video source, which is
+// likely the main thread.
 - (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer {
-  NSParameterAssert(capturer);
+  RTC_DCHECK(capturer);
   if (self = [super init]) {
     _capturer = capturer;
+    // Create the capture session and all relevant inputs and outputs. We need
+    // to do this in init because the application may want the capture session
+    // before we start the capturer, e.g. for AVCaptureVideoPreviewLayer. All
+    // objects created here are retained until dealloc and never recreated.
     if (![self setupCaptureSession]) {
       return nil;
     }
     NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
     [center addObserver:self
                selector:@selector(deviceOrientationDidChange:)
                    name:UIDeviceOrientationDidChangeNotification
                  object:nil];
     [center addObserverForName:AVCaptureSessionRuntimeErrorNotification
                         object:nil
                          queue:nil
                     usingBlock:^(NSNotification *notification) {
-      NSLog(@"Capture session error: %@", notification.userInfo);
+      RTCLogError(@"Capture session error: %@", notification.userInfo);
     }];
   }
   return self;
 }

 - (void)dealloc {
-  [self stopCaptureAsync];
+  RTC_DCHECK(!_isRunning);
   [[NSNotificationCenter defaultCenter] removeObserver:self];
   _capturer = nullptr;
 }

-- (BOOL)canUseBackCamera {
-  return _backDeviceInput != nil;
+- (AVCaptureSession *)captureSession {
+  return _captureSession;
 }

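The comment in -initWithCapturer: explains why the session is created eagerly: callers may want to render a preview before capture starts. A hypothetical caller-side sketch using the standard AVCaptureVideoPreviewLayer API (`capturer` and `view` are assumed to exist and are not part of this CL):

    // Sketch: attach the capturer's session to an on-screen preview layer.
    AVCaptureSession *session = capturer->GetCaptureSession();
    AVCaptureVideoPreviewLayer *previewLayer =
        [AVCaptureVideoPreviewLayer layerWithSession:session];
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    previewLayer.frame = view.bounds;
    [view.layer addSublayer:previewLayer];
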
+// Called from any thread (likely main thread).
+- (BOOL)canUseBackCamera {
+  return _backCameraInput != nil;
+}
+
+// Called from any thread (likely main thread).
+- (BOOL)useBackCamera {
+  @synchronized(self) {
+    return _useBackCamera;
+  }
+}
+
+// Called from any thread (likely main thread).
 - (void)setUseBackCamera:(BOOL)useBackCamera {
-  if (_useBackCamera == useBackCamera) {
+  if (!self.canUseBackCamera) {
+    if (useBackCamera) {
+      RTCLogWarning(@"No rear-facing camera exists or it cannot be used;"
+                    " not switching.");
+    }
     return;
   }
-  if (!self.canUseBackCamera) {
-    RTCLog(@"No rear-facing camera exists or it cannot be used;"
-           "not switching.");
-    return;
+  @synchronized(self) {
+    if (_useBackCamera == useBackCamera) {
+      return;
+    }
+    _useBackCamera = useBackCamera;
+    [self updateSessionInputForUseBackCamera:useBackCamera];
   }
-  _useBackCamera = useBackCamera;
-  [self updateSessionInput];
 }

-- (void)startCaptureAsync {
+// Called from WebRTC thread.
+- (void)start {
   if (_isRunning) {
     return;
   }
-  _orientationHasChanged = NO;
-  [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
-  AVCaptureSession* session = _captureSession;
+  _isRunning = YES;
   [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                                block:^{
-    [session startRunning];
+    _orientationHasChanged = NO;
+    [self updateOrientation];
+    [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
+    AVCaptureSession *captureSession = self.captureSession;
+    [captureSession startRunning];
   }];
-  _isRunning = YES;
 }

-- (void)stopCaptureAsync {
+// Called from same thread as start.
+- (void)stop {
   if (!_isRunning) {
     return;
   }
-  [_videoOutput setSampleBufferDelegate:nil queue:nullptr];
-  AVCaptureSession* session = _captureSession;
+  _isRunning = NO;
   [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                                block:^{
-    [session stopRunning];
+    [_videoDataOutput setSampleBufferDelegate:nil queue:nullptr];
+    [_captureSession stopRunning];
+    [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
   }];
-  [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
-  _isRunning = NO;
 }

 #pragma mark AVCaptureVideoDataOutputSampleBufferDelegate

 - (void)captureOutput:(AVCaptureOutput *)captureOutput
     didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
            fromConnection:(AVCaptureConnection *)connection {
-  NSParameterAssert(captureOutput == _videoOutput);
+  NSParameterAssert(captureOutput == _videoDataOutput);
   if (!_isRunning) {
     return;
   }
   _capturer->CaptureSampleBuffer(sampleBuffer);
 }

 - (void)captureOutput:(AVCaptureOutput *)captureOutput
     didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
          fromConnection:(AVCaptureConnection *)connection {
-  NSLog(@"Dropped sample buffer.");
+  RTCLogError(@"Dropped sample buffer.");
 }

 #pragma mark - Private

 - (BOOL)setupCaptureSession {
-  _captureSession = [[AVCaptureSession alloc] init];
+  AVCaptureSession *captureSession = [[AVCaptureSession alloc] init];
 #if defined(__IPHONE_7_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_7_0
   NSString *version = [[UIDevice currentDevice] systemVersion];
   if ([version integerValue] >= 7) {
-    _captureSession.usesApplicationAudioSession = NO;
+    captureSession.usesApplicationAudioSession = NO;
   }
 #endif
-  if (![_captureSession canSetSessionPreset:kDefaultPreset]) {
-    NSLog(@"Default video capture preset unsupported.");
+  if (![captureSession canSetSessionPreset:kDefaultPreset]) {
+    RTCLogError(@"Session preset unsupported.");
     return NO;
   }
-  _captureSession.sessionPreset = kDefaultPreset;
+  captureSession.sessionPreset = kDefaultPreset;

-  // Make the capturer output NV12. Ideally we want I420 but that's not
-  // currently supported on iPhone / iPad.
-  _videoOutput = [[AVCaptureVideoDataOutput alloc] init];
-  _videoOutput.videoSettings = @{
-    (NSString *)kCVPixelBufferPixelFormatTypeKey :
-      @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
-  };
-  _videoOutput.alwaysDiscardsLateVideoFrames = NO;
-  [_videoOutput setSampleBufferDelegate:self
-                                  queue:dispatch_get_main_queue()];
-  if (![_captureSession canAddOutput:_videoOutput]) {
-    NSLog(@"Default video capture output unsupported.");
+  // Add the output.
+  AVCaptureVideoDataOutput *videoDataOutput = [self videoDataOutput];
+  if (![captureSession canAddOutput:videoDataOutput]) {
+    RTCLogError(@"Video data output unsupported.");
     return NO;
   }
-  [_captureSession addOutput:_videoOutput];
+  [captureSession addOutput:videoDataOutput];

-  // Find the capture devices.
-  AVCaptureDevice *frontCaptureDevice = nil;
-  AVCaptureDevice *backCaptureDevice = nil;
-  for (AVCaptureDevice *captureDevice in
-       [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
-    if (captureDevice.position == AVCaptureDevicePositionBack) {
-      backCaptureDevice = captureDevice;
-    }
-    if (captureDevice.position == AVCaptureDevicePositionFront) {
-      frontCaptureDevice = captureDevice;
-    }
-  }
-  if (!frontCaptureDevice) {
-    RTCLog(@"Failed to get front capture device.");
+  // Get the front and back cameras. If there isn't a front camera
+  // give up.
+  AVCaptureDeviceInput *frontCameraInput = [self frontCameraInput];
+  AVCaptureDeviceInput *backCameraInput = [self backCameraInput];
+  if (!frontCameraInput) {
+    RTCLogError(@"No front camera for capture session.");
     return NO;
   }
-  if (!backCaptureDevice) {
-    RTCLog(@"Failed to get back capture device");
-    // Don't return NO here because devices exist (16GB 5th generation iPod
-    // Touch) that don't have a rear-facing camera.
-  }
-
-  // Set up the session inputs.
-  NSError *error = nil;
-  _frontDeviceInput =
-      [AVCaptureDeviceInput deviceInputWithDevice:frontCaptureDevice
-                                            error:&error];
-  if (!_frontDeviceInput) {
-    NSLog(@"Failed to get capture device input: %@",
-          error.localizedDescription);
-    return NO;
-  }
-  if (backCaptureDevice) {
-    error = nil;
-    _backDeviceInput =
-        [AVCaptureDeviceInput deviceInputWithDevice:backCaptureDevice
-                                              error:&error];
-    if (error) {
-      RTCLog(@"Failed to get capture device input: %@",
-             error.localizedDescription);
-      _backDeviceInput = nil;
-    }
-  }

   // Add the inputs.
-  if (![_captureSession canAddInput:_frontDeviceInput] ||
-      (_backDeviceInput && ![_captureSession canAddInput:_backDeviceInput])) {
-    NSLog(@"Session does not support capture inputs.");
+  if (![captureSession canAddInput:frontCameraInput] ||
+      (backCameraInput && ![captureSession canAddInput:backCameraInput])) {
+    RTCLogError(@"Session does not support capture inputs.");
     return NO;
   }
-  [self updateSessionInput];
-
+  AVCaptureDeviceInput *input = self.useBackCamera ?
+      backCameraInput : frontCameraInput;
+  [captureSession addInput:input];
+  _captureSession = captureSession;
   return YES;
 }

-- (void)deviceOrientationDidChange:(NSNotification *)notification {
-  _orientationHasChanged = YES;
-  [self updateOrientation];
+- (AVCaptureVideoDataOutput *)videoDataOutput {
+  if (!_videoDataOutput) {
+    // Make the capturer output NV12. Ideally we want I420 but that's not
+    // currently supported on iPhone / iPad.
+    AVCaptureVideoDataOutput *videoDataOutput =
+        [[AVCaptureVideoDataOutput alloc] init];
+    videoDataOutput.videoSettings = @{
+      (NSString *)kCVPixelBufferPixelFormatTypeKey :
+        @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
+    };
+    videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
+    dispatch_queue_t queue =
+        [RTCDispatcher dispatchQueueForType:RTCDispatcherTypeCaptureSession];
+    [videoDataOutput setSampleBufferDelegate:self queue:queue];
+    _videoDataOutput = videoDataOutput;
+  }
+  return _videoDataOutput;
 }

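kCVPixelFormatType_420YpCbCr8BiPlanarFullRange is the full-range biplanar 4:2:0 format (fourcc '420f', NV12 layout: one Y plane plus one interleaved CbCr plane), matching the cricket::FOURCC_NV12 declared at the top of the file. If format support ever needs checking at runtime, AVCaptureVideoDataOutput lists what it can emit; a small sketch:

    // Sketch: confirm NV12 full-range is offered before forcing it.
    AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
    NSNumber *nv12 = @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange);
    if ([output.availableVideoCVPixelFormatTypes containsObject:nv12]) {
      output.videoSettings =
          @{(NSString *)kCVPixelBufferPixelFormatTypeKey : nv12};
    }
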
+- (AVCaptureDevice *)videoCaptureDeviceForPosition:
+    (AVCaptureDevicePosition)position {
+  for (AVCaptureDevice *captureDevice in
+       [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
+    if (captureDevice.position == position) {
+      return captureDevice;
+    }
+  }
+  return nil;
+}
+
+- (AVCaptureDeviceInput *)frontCameraInput {
+  if (!_frontCameraInput) {
+    AVCaptureDevice *frontCameraDevice =
+        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionFront];
+    if (!frontCameraDevice) {
+      RTCLogWarning(@"Failed to find front capture device.");
+      return nil;
+    }
+    NSError *error = nil;
+    AVCaptureDeviceInput *frontCameraInput =
+        [AVCaptureDeviceInput deviceInputWithDevice:frontCameraDevice
+                                              error:&error];
+    if (!frontCameraInput) {
+      RTCLogError(@"Failed to create front camera input: %@",
+                  error.localizedDescription);
+      return nil;
+    }
+    _frontCameraInput = frontCameraInput;
+  }
+  return _frontCameraInput;
+}
+
+- (AVCaptureDeviceInput *)backCameraInput {
+  if (!_backCameraInput) {
+    AVCaptureDevice *backCameraDevice =
+        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionBack];
+    if (!backCameraDevice) {
+      RTCLogWarning(@"Failed to find back capture device.");
+      return nil;
+    }
+    NSError *error = nil;
+    AVCaptureDeviceInput *backCameraInput =
+        [AVCaptureDeviceInput deviceInputWithDevice:backCameraDevice
+                                              error:&error];
+    if (!backCameraInput) {
+      RTCLogError(@"Failed to create back camera input: %@",
+                  error.localizedDescription);
+      return nil;
+    }
+    _backCameraInput = backCameraInput;
+  }
+  return _backCameraInput;
+}
+
+- (void)deviceOrientationDidChange:(NSNotification *)notification {
+  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+                               block:^{
+    _orientationHasChanged = YES;
+    [self updateOrientation];
+  }];
+}
+
+// Called from capture session queue.
 - (void)updateOrientation {
   AVCaptureConnection *connection =
-      [_videoOutput connectionWithMediaType:AVMediaTypeVideo];
+      [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
   if (!connection.supportsVideoOrientation) {
     // TODO(tkchin): set rotation bit on frames.
     return;
   }
   AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait;
   switch ([UIDevice currentDevice].orientation) {
     case UIDeviceOrientationPortrait:
       orientation = AVCaptureVideoOrientationPortrait;
       break;
     case UIDeviceOrientationPortraitUpsideDown:
       orientation = AVCaptureVideoOrientationPortraitUpsideDown;
       break;
     case UIDeviceOrientationLandscapeLeft:
       orientation = AVCaptureVideoOrientationLandscapeRight;
       break;
     case UIDeviceOrientationLandscapeRight:
       orientation = AVCaptureVideoOrientationLandscapeLeft;
       break;
     case UIDeviceOrientationFaceUp:
     case UIDeviceOrientationFaceDown:
     case UIDeviceOrientationUnknown:
       if (!_orientationHasChanged) {
         connection.videoOrientation = orientation;
       }
       return;
   }
   connection.videoOrientation = orientation;
 }

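The crossed landscape cases above are intentional: UIDeviceOrientationLandscapeLeft describes the device rotated so the home button is on the right, which is what AVFoundation calls AVCaptureVideoOrientationLandscapeRight, and vice versa. The same mapping as a standalone helper, for illustration only:

    // Sketch: device orientation -> capture orientation. Returns NO for
    // FaceUp/FaceDown/Unknown, where the current orientation should be kept.
    static BOOL CaptureOrientation(UIDeviceOrientation device,
                                   AVCaptureVideoOrientation *out) {
      switch (device) {
        case UIDeviceOrientationPortrait:
          *out = AVCaptureVideoOrientationPortrait;
          return YES;
        case UIDeviceOrientationPortraitUpsideDown:
          *out = AVCaptureVideoOrientationPortraitUpsideDown;
          return YES;
        case UIDeviceOrientationLandscapeLeft:
          *out = AVCaptureVideoOrientationLandscapeRight;  // crossed
          return YES;
        case UIDeviceOrientationLandscapeRight:
          *out = AVCaptureVideoOrientationLandscapeLeft;   // crossed
          return YES;
        default:
          return NO;
      }
    }
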
-- (void)updateSessionInput {
-  // Update the current session input to match what's stored in _useBackCamera.
-  [_captureSession beginConfiguration];
-  AVCaptureDeviceInput *oldInput = _backDeviceInput;
-  AVCaptureDeviceInput *newInput = _frontDeviceInput;
-  if (_useBackCamera) {
-    oldInput = _frontDeviceInput;
-    newInput = _backDeviceInput;
-  }
-  // Ok to remove this even if it's not attached. Will be no-op.
-  [_captureSession removeInput:oldInput];
-  [_captureSession addInput:newInput];
-  [self updateOrientation];
-  [_captureSession commitConfiguration];
+// Update the current session input to match what's stored in _useBackCamera.
+- (void)updateSessionInputForUseBackCamera:(BOOL)useBackCamera {
+  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+                               block:^{
+    [_captureSession beginConfiguration];
+    AVCaptureDeviceInput *oldInput = _backCameraInput;
+    AVCaptureDeviceInput *newInput = _frontCameraInput;
+    if (useBackCamera) {
+      oldInput = _frontCameraInput;
+      newInput = _backCameraInput;
+    }
+    if (oldInput) {
+      // Ok to remove this even if it's not attached. Will be no-op.
+      [_captureSession removeInput:oldInput];
+    }
+    if (newInput) {
+      [_captureSession addInput:newInput];
+    }
+    [self updateOrientation];
+    [_captureSession commitConfiguration];
+  }];
 }

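beginConfiguration/commitConfiguration batch the input swap and the orientation update into one atomic session change, so a running session never observes an intermediate state with no input attached. The general pattern, independent of this class:

    // Sketch: atomically swap inputs on a (possibly running) session.
    [session beginConfiguration];
    [session removeInput:oldInput];  // no-op if oldInput is not attached
    if ([session canAddInput:newInput]) {
      [session addInput:newInput];
    }
    [session commitConfiguration];   // all changes take effect together
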
 @end

 namespace webrtc {

+enum AVFoundationVideoCapturerMessageType : uint32_t {
+  kMessageTypeFrame,
+};
+
+struct AVFoundationFrame {
+  AVFoundationFrame(CVImageBufferRef buffer, int64_t time)
+      : image_buffer(buffer), capture_time(time) {}
+  CVImageBufferRef image_buffer;
+  int64_t capture_time;
+};
+
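The enum and struct above are the plumbing for handing frames from the capture queue over to the WebRTC start thread. The generic rtc::Thread pattern they feed, as exercised by CaptureSampleBuffer and OnMessage further down, is roughly:

    // Sketch of the message-passing pattern. Payload is a stand-in type;
    // the real code posts AVFoundationFrame with id kMessageTypeFrame.
    struct Payload {
      int value;
    };

    void PostToThread(rtc::Thread* thread, rtc::MessageHandler* handler,
                      const Payload& payload) {
      // Ownership of the TypedMessageData transfers to the receiving
      // thread, which must delete it after unpacking (as OnMessage does).
      thread->Post(handler, 0 /* message_id */,
                   new rtc::TypedMessageData<Payload>(payload));
    }
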
 AVFoundationVideoCapturer::AVFoundationVideoCapturer()
     : _capturer(nil), _startThread(nullptr) {
   // Set our supported formats. This matches kDefaultPreset.
   std::vector<cricket::VideoFormat> supportedFormats;
   supportedFormats.push_back(cricket::VideoFormat(kDefaultFormat));
   SetSupportedFormats(supportedFormats);
   _capturer =
       [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];
 }

(...skipping 18 matching lines...)

   // Keep track of which thread capture started on. This is the thread that
   // frames need to be sent to.
   RTC_DCHECK(!_startThread);
   _startThread = rtc::Thread::Current();

   SetCaptureFormat(&format);
   // This isn't super accurate because it takes a while for the
   // AVCaptureSession to spin up, and this call returns async.
   // TODO(tkchin): make this better.
-  [_capturer startCaptureAsync];
+  [_capturer start];
   SetCaptureState(cricket::CaptureState::CS_RUNNING);

   return cricket::CaptureState::CS_STARTING;
 }

 void AVFoundationVideoCapturer::Stop() {
-  [_capturer stopCaptureAsync];
+  [_capturer stop];
   SetCaptureFormat(NULL);
   _startThread = nullptr;
 }

 bool AVFoundationVideoCapturer::IsRunning() {
   return _capturer.isRunning;
 }

 AVCaptureSession* AVFoundationVideoCapturer::GetCaptureSession() {
   return _capturer.captureSession;
(...skipping 12 matching lines...)
 }

 void AVFoundationVideoCapturer::CaptureSampleBuffer(
     CMSampleBufferRef sampleBuffer) {
   if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 ||
       !CMSampleBufferIsValid(sampleBuffer) ||
       !CMSampleBufferDataIsReady(sampleBuffer)) {
     return;
   }

-  CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
-  if (imageBuffer == NULL) {
+  CVImageBufferRef image_buffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+  if (image_buffer == NULL) {
     return;
   }

+  // Retain the buffer and post it to the webrtc thread. It will be released
+  // after it has successfully been signaled.
+  CVBufferRetain(image_buffer);
+  AVFoundationFrame frame(image_buffer, rtc::TimeNanos());
+  _startThread->Post(this, kMessageTypeFrame,
+                     new rtc::TypedMessageData<AVFoundationFrame>(frame));
+}
+
+void AVFoundationVideoCapturer::OnMessage(rtc::Message *msg) {
+  switch (msg->message_id) {
+    case kMessageTypeFrame: {
+      rtc::TypedMessageData<AVFoundationFrame>* data =
+          static_cast<rtc::TypedMessageData<AVFoundationFrame>*>(msg->pdata);
+      const AVFoundationFrame& frame = data->data();
+      OnFrameMessage(frame.image_buffer, frame.capture_time);
+      delete data;
+      break;
+    }
+  }
+}
+
+void AVFoundationVideoCapturer::OnFrameMessage(CVImageBufferRef image_buffer,
+                                               int64_t capture_time) {
+  RTC_DCHECK(_startThread->IsCurrent());
+
   // Base address must be unlocked to access frame data.
-  CVOptionFlags lockFlags = kCVPixelBufferLock_ReadOnly;
-  CVReturn ret = CVPixelBufferLockBaseAddress(imageBuffer, lockFlags);
+  CVOptionFlags lock_flags = kCVPixelBufferLock_ReadOnly;
+  CVReturn ret = CVPixelBufferLockBaseAddress(image_buffer, lock_flags);
   if (ret != kCVReturnSuccess) {
     return;
   }

   static size_t const kYPlaneIndex = 0;
   static size_t const kUVPlaneIndex = 1;
-  uint8_t *yPlaneAddress =
-      (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, kYPlaneIndex);
-  size_t yPlaneHeight =
-      CVPixelBufferGetHeightOfPlane(imageBuffer, kYPlaneIndex);
-  size_t yPlaneWidth =
-      CVPixelBufferGetWidthOfPlane(imageBuffer, kYPlaneIndex);
-  size_t yPlaneBytesPerRow =
-      CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, kYPlaneIndex);
-  size_t uvPlaneHeight =
-      CVPixelBufferGetHeightOfPlane(imageBuffer, kUVPlaneIndex);
-  size_t uvPlaneBytesPerRow =
-      CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, kUVPlaneIndex);
-  size_t frameSize =
-      yPlaneBytesPerRow * yPlaneHeight + uvPlaneBytesPerRow * uvPlaneHeight;
+  uint8_t* y_plane_address =
+      static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(image_buffer,
+                                                               kYPlaneIndex));
+  size_t y_plane_height =
+      CVPixelBufferGetHeightOfPlane(image_buffer, kYPlaneIndex);
+  size_t y_plane_width =
+      CVPixelBufferGetWidthOfPlane(image_buffer, kYPlaneIndex);
+  size_t y_plane_bytes_per_row =
+      CVPixelBufferGetBytesPerRowOfPlane(image_buffer, kYPlaneIndex);
+  size_t uv_plane_height =
+      CVPixelBufferGetHeightOfPlane(image_buffer, kUVPlaneIndex);
+  size_t uv_plane_bytes_per_row =
+      CVPixelBufferGetBytesPerRowOfPlane(image_buffer, kUVPlaneIndex);
+  size_t frame_size = y_plane_bytes_per_row * y_plane_height +
+                      uv_plane_bytes_per_row * uv_plane_height;

   // Sanity check assumption that planar bytes are contiguous.
-  uint8_t *uvPlaneAddress =
-      (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, kUVPlaneIndex);
-  RTC_DCHECK(
-      uvPlaneAddress == yPlaneAddress + yPlaneHeight * yPlaneBytesPerRow);
+  uint8_t* uv_plane_address =
+      static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(image_buffer,
+                                                               kUVPlaneIndex));
+  RTC_DCHECK(uv_plane_address ==
+             y_plane_address + y_plane_height * y_plane_bytes_per_row);

   // Stuff data into a cricket::CapturedFrame.
-  int64_t currentTime = rtc::TimeNanos();
   cricket::CapturedFrame frame;
-  frame.width = yPlaneWidth;
-  frame.height = yPlaneHeight;
+  frame.width = y_plane_width;
+  frame.height = y_plane_height;
   frame.pixel_width = 1;
   frame.pixel_height = 1;
   frame.fourcc = static_cast<uint32_t>(cricket::FOURCC_NV12);
-  frame.time_stamp = currentTime;
-  frame.data = yPlaneAddress;
-  frame.data_size = frameSize;
+  frame.time_stamp = capture_time;
+  frame.data = y_plane_address;
+  frame.data_size = frame_size;

-  if (_startThread->IsCurrent()) {
-    SignalFrameCaptured(this, &frame);
-  } else {
-    _startThread->Invoke<void>(
-        rtc::Bind(&AVFoundationVideoCapturer::SignalFrameCapturedOnStartThread,
-                  this, &frame));
-  }
-  CVPixelBufferUnlockBaseAddress(imageBuffer, lockFlags);
-}
-
-void AVFoundationVideoCapturer::SignalFrameCapturedOnStartThread(
-    const cricket::CapturedFrame *frame) {
-  RTC_DCHECK(_startThread->IsCurrent());
   // This will call a superclass method that will perform the frame conversion
   // to I420.
-  SignalFrameCaptured(this, frame);
+  SignalFrameCaptured(this, &frame);
+
+  CVPixelBufferUnlockBaseAddress(image_buffer, lock_flags);
+  CVBufferRelease(image_buffer);
 }

 }  // namespace webrtc
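For the 640x480 NV12 default, the frame_size arithmetic in OnFrameMessage works out as follows, assuming unpadded rows (real CVPixelBuffers may pad rows, which is exactly why the code reads CVPixelBufferGetBytesPerRowOfPlane rather than the width):

    // Y plane:  640 bytes/row * 480 rows = 307200 bytes
    // UV plane: 640 bytes/row * 240 rows = 153600 bytes (interleaved CbCr,
    //           2x2 subsampled, so half the Y plane's rows)
    // Total:    307200 + 153600 = 460800 = 640 * 480 * 3 / 2
    size_t frame_size = 640 * 480 + 640 * (480 / 2);  // 460800 bytes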