Chromium Code Reviews

Side by Side Diff: webrtc/api/objc/avfoundationvideocapturer.mm

Issue 1903663002: Build dynamic iOS SDK. (Closed)
Base URL: https://chromium.googlesource.com/external/webrtc.git@master
Patch Set: Fix test gyp. Created 4 years, 7 months ago.
/*
 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/api/objc/avfoundationvideocapturer.h"

#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/thread.h"

#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>

#import "webrtc/base/objc/RTCDispatcher+Private.h"
#import "webrtc/base/objc/RTCLogging.h"

// TODO(tkchin): support other formats.
static NSString *const kDefaultPreset = AVCaptureSessionPreset640x480;
static cricket::VideoFormat const kDefaultFormat =
    cricket::VideoFormat(640,
                         480,
                         cricket::VideoFormat::FpsToInterval(30),
                         cricket::FOURCC_NV12);
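
// Note: cricket::VideoFormat expresses frame rate as a per-frame interval in
// nanoseconds, so FpsToInterval(30) is roughly 1e9 / 30 ≈ 33.3 ms between
// frames. A format for another preset would be built the same way, e.g. this
// sketch for 1280x720 (not wired up in this file):
//
//   cricket::VideoFormat(1280, 720,
//                        cricket::VideoFormat::FpsToInterval(30),
//                        cricket::FOURCC_NV12);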

// This class is used to capture frames using AVFoundation APIs on iOS. It is
// meant to be owned by an instance of AVFoundationVideoCapturer. The reason
// for this is that other WebRTC objects own the cricket::VideoCapturer, which
// is not reference counted. To prevent bad behavior we do not expose this
// class directly.
@interface RTCAVFoundationVideoCapturerInternal : NSObject
    <AVCaptureVideoDataOutputSampleBufferDelegate>

@property(nonatomic, readonly) AVCaptureSession *captureSession;
@property(nonatomic, readonly) BOOL isRunning;
@property(nonatomic, readonly) BOOL canUseBackCamera;
@property(nonatomic, assign) BOOL useBackCamera;  // Defaults to NO.

// We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it
// when we receive frames. This is safe because this object should be owned by
// it.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer;

// Starts and stops the capture session asynchronously. We cannot do this
// synchronously without blocking a WebRTC thread.
- (void)start;
- (void)stop;

@end

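// Usage sketch (illustration only; |cppCapturer| is a hypothetical
// webrtc::AVFoundationVideoCapturer*): the owning C++ object drives this
// class roughly as follows.
//
//   RTCAVFoundationVideoCapturerInternal *internal =
//       [[RTCAVFoundationVideoCapturerInternal alloc]
//           initWithCapturer:cppCapturer];
//   [internal start];
//   // ... frames flow back to the C++ capturer via the sample buffer
//   // delegate ...
//   [internal stop];
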
@implementation RTCAVFoundationVideoCapturerInternal {
  // Keep pointers to inputs for convenience.
  AVCaptureDeviceInput *_frontCameraInput;
  AVCaptureDeviceInput *_backCameraInput;
  AVCaptureVideoDataOutput *_videoDataOutput;
  // The cricket::VideoCapturer that owns this class. Should never be NULL.
  webrtc::AVFoundationVideoCapturer *_capturer;
  // Token for the block-based runtime-error observer. Block-based observers
  // are not removed by -removeObserver:self, so we keep the token and remove
  // it explicitly in dealloc.
  id _runtimeErrorObserver;
  BOOL _orientationHasChanged;
}

@synthesize captureSession = _captureSession;
@synthesize isRunning = _isRunning;
@synthesize useBackCamera = _useBackCamera;

// This is called from the thread that creates the video source, which is
// likely the main thread.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer {
  RTC_DCHECK(capturer);
  if (self = [super init]) {
    _capturer = capturer;
    // Create the capture session and all relevant inputs and outputs. We need
    // to do this in init because the application may want the capture session
    // before we start the capturer for e.g. AVCaptureVideoPreviewLayer. All
    // objects created here are retained until dealloc and never recreated.
    if (![self setupCaptureSession]) {
      return nil;
    }
    NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
    [center addObserver:self
               selector:@selector(deviceOrientationDidChange:)
                   name:UIDeviceOrientationDidChangeNotification
                 object:nil];
    _runtimeErrorObserver =
        [center addObserverForName:AVCaptureSessionRuntimeErrorNotification
                            object:nil
                             queue:nil
                        usingBlock:^(NSNotification *notification) {
          RTCLogError(@"Capture session error: %@", notification.userInfo);
        }];
  }
  return self;
}

- (void)dealloc {
  RTC_DCHECK(!_isRunning);
  NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
  [center removeObserver:_runtimeErrorObserver];
  [center removeObserver:self];
  _capturer = nullptr;
}

- (AVCaptureSession *)captureSession {
  return _captureSession;
}

// Called from any thread (likely main thread).
- (BOOL)canUseBackCamera {
  return _backCameraInput != nil;
}

// Called from any thread (likely main thread).
- (BOOL)useBackCamera {
  @synchronized(self) {
    return _useBackCamera;
  }
}

// Called from any thread (likely main thread).
- (void)setUseBackCamera:(BOOL)useBackCamera {
  if (!self.canUseBackCamera) {
    if (useBackCamera) {
      RTCLogWarning(@"No rear-facing camera exists or it cannot be used; "
                     "not switching.");
    }
    return;
  }
  @synchronized(self) {
    if (_useBackCamera == useBackCamera) {
      return;
    }
    _useBackCamera = useBackCamera;
    [self updateSessionInputForUseBackCamera:useBackCamera];
  }
}
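
// Example (sketch, app-side code; |capturer| is a hypothetical pointer to the
// webrtc::AVFoundationVideoCapturer defined later in this file): switching
// cameras through the C++ wrapper.
//
//   if (capturer->CanUseBackCamera()) {
//     capturer->SetUseBackCamera(true);
//   }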

// Called from WebRTC thread.
- (void)start {
  if (_isRunning) {
    return;
  }
  _isRunning = YES;
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    _orientationHasChanged = NO;
    [self updateOrientation];
    [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
    AVCaptureSession *captureSession = self.captureSession;
    [captureSession startRunning];
  }];
}

// Called from same thread as start.
- (void)stop {
  if (!_isRunning) {
    return;
  }
  _isRunning = NO;
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    [_videoDataOutput setSampleBufferDelegate:nil queue:nullptr];
    [_captureSession stopRunning];
    [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
  }];
}

#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate

- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection {
  NSParameterAssert(captureOutput == _videoDataOutput);
  if (!_isRunning) {
    return;
  }
  _capturer->CaptureSampleBuffer(sampleBuffer);
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
         fromConnection:(AVCaptureConnection *)connection {
  RTCLogError(@"Dropped sample buffer.");
}

#pragma mark - Private

- (BOOL)setupCaptureSession {
  AVCaptureSession *captureSession = [[AVCaptureSession alloc] init];
#if defined(__IPHONE_7_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_7_0
  NSString *version = [[UIDevice currentDevice] systemVersion];
  if ([version integerValue] >= 7) {
    captureSession.usesApplicationAudioSession = NO;
  }
#endif
  if (![captureSession canSetSessionPreset:kDefaultPreset]) {
    RTCLogError(@"Session preset unsupported.");
    return NO;
  }
  captureSession.sessionPreset = kDefaultPreset;

  // Add the output.
  AVCaptureVideoDataOutput *videoDataOutput = [self videoDataOutput];
  if (![captureSession canAddOutput:videoDataOutput]) {
    RTCLogError(@"Video data output unsupported.");
    return NO;
  }
  [captureSession addOutput:videoDataOutput];

  // Get the front and back cameras. If there isn't a front camera
  // give up.
  AVCaptureDeviceInput *frontCameraInput = [self frontCameraInput];
  AVCaptureDeviceInput *backCameraInput = [self backCameraInput];
  if (!frontCameraInput) {
    RTCLogError(@"No front camera for capture session.");
    return NO;
  }

  // Add the inputs.
  if (![captureSession canAddInput:frontCameraInput] ||
      (backCameraInput && ![captureSession canAddInput:backCameraInput])) {
    RTCLogError(@"Session does not support capture inputs.");
    return NO;
  }
  AVCaptureDeviceInput *input = self.useBackCamera ?
      backCameraInput : frontCameraInput;
  [captureSession addInput:input];
  _captureSession = captureSession;
  return YES;
}
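
// Example (sketch, app-side code, not part of this class): because the
// session is created in init, a client can attach a preview layer before
// capture starts; |internal| and |someView| are hypothetical names.
//
//   AVCaptureVideoPreviewLayer *previewLayer =
//       [AVCaptureVideoPreviewLayer layerWithSession:internal.captureSession];
//   previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
//   [someView.layer addSublayer:previewLayer];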

- (AVCaptureVideoDataOutput *)videoDataOutput {
  if (!_videoDataOutput) {
    // Make the capturer output NV12. Ideally we want I420 but that's not
    // currently supported on iPhone / iPad.
    AVCaptureVideoDataOutput *videoDataOutput =
        [[AVCaptureVideoDataOutput alloc] init];
    videoDataOutput.videoSettings = @{
      (NSString *)kCVPixelBufferPixelFormatTypeKey :
        @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
    };
    videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
    dispatch_queue_t queue =
        [RTCDispatcher dispatchQueueForType:RTCDispatcherTypeCaptureSession];
    [videoDataOutput setSampleBufferDelegate:self queue:queue];
    _videoDataOutput = videoDataOutput;
  }
  return _videoDataOutput;
}
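
// Sanity-check sketch (not part of this CL): NV12 support could be verified
// against the output's advertised formats before assigning videoSettings:
//
//   NSNumber *nv12 = @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange);
//   BOOL supported = [videoDataOutput.availableVideoCVPixelFormatTypes
//       containsObject:nv12];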

- (AVCaptureDevice *)videoCaptureDeviceForPosition:
    (AVCaptureDevicePosition)position {
  for (AVCaptureDevice *captureDevice in
       [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
    if (captureDevice.position == position) {
      return captureDevice;
    }
  }
  return nil;
}

- (AVCaptureDeviceInput *)frontCameraInput {
  if (!_frontCameraInput) {
    AVCaptureDevice *frontCameraDevice =
        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionFront];
    if (!frontCameraDevice) {
      RTCLogWarning(@"Failed to find front capture device.");
      return nil;
    }
    NSError *error = nil;
    AVCaptureDeviceInput *frontCameraInput =
        [AVCaptureDeviceInput deviceInputWithDevice:frontCameraDevice
                                              error:&error];
    if (!frontCameraInput) {
      RTCLogError(@"Failed to create front camera input: %@",
                  error.localizedDescription);
      return nil;
    }
    _frontCameraInput = frontCameraInput;
  }
  return _frontCameraInput;
}

- (AVCaptureDeviceInput *)backCameraInput {
  if (!_backCameraInput) {
    AVCaptureDevice *backCameraDevice =
        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionBack];
    if (!backCameraDevice) {
      RTCLogWarning(@"Failed to find back capture device.");
      return nil;
    }
    NSError *error = nil;
    AVCaptureDeviceInput *backCameraInput =
        [AVCaptureDeviceInput deviceInputWithDevice:backCameraDevice
                                              error:&error];
    if (!backCameraInput) {
      RTCLogError(@"Failed to create back camera input: %@",
                  error.localizedDescription);
      return nil;
    }
    _backCameraInput = backCameraInput;
  }
  return _backCameraInput;
}

- (void)deviceOrientationDidChange:(NSNotification *)notification {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    _orientationHasChanged = YES;
    [self updateOrientation];
  }];
}

// Called from capture session queue.
- (void)updateOrientation {
  AVCaptureConnection *connection =
      [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
  if (!connection.supportsVideoOrientation) {
    // TODO(tkchin): set rotation bit on frames.
    return;
  }
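  // The two landscape cases below are intentionally crossed:
  // UIDeviceOrientation describes how the device is held, while
  // AVCaptureVideoOrientation describes the resulting image rotation, and
  // the two enums define "left" and "right" from opposite frames of
  // reference.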
  AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait;
  switch ([UIDevice currentDevice].orientation) {
    case UIDeviceOrientationPortrait:
      orientation = AVCaptureVideoOrientationPortrait;
      break;
    case UIDeviceOrientationPortraitUpsideDown:
      orientation = AVCaptureVideoOrientationPortraitUpsideDown;
      break;
    case UIDeviceOrientationLandscapeLeft:
      orientation = AVCaptureVideoOrientationLandscapeRight;
      break;
    case UIDeviceOrientationLandscapeRight:
      orientation = AVCaptureVideoOrientationLandscapeLeft;
      break;
    case UIDeviceOrientationFaceUp:
    case UIDeviceOrientationFaceDown:
    case UIDeviceOrientationUnknown:
      if (!_orientationHasChanged) {
        connection.videoOrientation = orientation;
      }
      return;
  }
  connection.videoOrientation = orientation;
}

// Update the current session input to match what's stored in _useBackCamera.
- (void)updateSessionInputForUseBackCamera:(BOOL)useBackCamera {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    [_captureSession beginConfiguration];
    AVCaptureDeviceInput *oldInput = _backCameraInput;
    AVCaptureDeviceInput *newInput = _frontCameraInput;
    if (useBackCamera) {
      oldInput = _frontCameraInput;
      newInput = _backCameraInput;
    }
    if (oldInput) {
      // OK to remove this even if it's not attached; removal is then a no-op.
      [_captureSession removeInput:oldInput];
    }
    if (newInput) {
      [_captureSession addInput:newInput];
    }
    [self updateOrientation];
    [_captureSession commitConfiguration];
  }];
}

@end

namespace webrtc {

enum AVFoundationVideoCapturerMessageType : uint32_t {
  kMessageTypeFrame,
};

struct AVFoundationFrame {
  AVFoundationFrame(CVImageBufferRef buffer, int64_t time)
      : image_buffer(buffer), capture_time(time) {}
  CVImageBufferRef image_buffer;
  int64_t capture_time;
};

AVFoundationVideoCapturer::AVFoundationVideoCapturer()
    : _capturer(nil), _startThread(nullptr) {
  // Set our supported formats. This matches kDefaultPreset.
  std::vector<cricket::VideoFormat> supportedFormats;
  supportedFormats.push_back(cricket::VideoFormat(kDefaultFormat));
  SetSupportedFormats(supportedFormats);
  _capturer =
      [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];
}

AVFoundationVideoCapturer::~AVFoundationVideoCapturer() {
  _capturer = nil;
}

cricket::CaptureState AVFoundationVideoCapturer::Start(
    const cricket::VideoFormat& format) {
  if (!_capturer) {
    LOG(LS_ERROR) << "Failed to create AVFoundation capturer.";
    return cricket::CaptureState::CS_FAILED;
  }
  if (_capturer.isRunning) {
    LOG(LS_ERROR) << "The capturer is already running.";
    return cricket::CaptureState::CS_FAILED;
  }
  if (format != kDefaultFormat) {
    LOG(LS_ERROR) << "Unsupported format provided.";
    return cricket::CaptureState::CS_FAILED;
  }

  // Keep track of which thread capture started on. This is the thread that
  // frames need to be sent to.
  RTC_DCHECK(!_startThread);
  _startThread = rtc::Thread::Current();

  SetCaptureFormat(&format);
  // This isn't very accurate because it takes a while for the
  // AVCaptureSession to spin up, and this call returns asynchronously.
  // TODO(tkchin): make this better.
  [_capturer start];
  SetCaptureState(cricket::CaptureState::CS_RUNNING);

  return cricket::CaptureState::CS_STARTING;
}

void AVFoundationVideoCapturer::Stop() {
  [_capturer stop];
  SetCaptureFormat(NULL);
  _startThread = nullptr;
}

bool AVFoundationVideoCapturer::IsRunning() {
  return _capturer.isRunning;
}

AVCaptureSession* AVFoundationVideoCapturer::GetCaptureSession() {
  return _capturer.captureSession;
}

bool AVFoundationVideoCapturer::CanUseBackCamera() const {
  return _capturer.canUseBackCamera;
}

void AVFoundationVideoCapturer::SetUseBackCamera(bool useBackCamera) {
  _capturer.useBackCamera = useBackCamera;
}

bool AVFoundationVideoCapturer::GetUseBackCamera() const {
  return _capturer.useBackCamera;
}

void AVFoundationVideoCapturer::CaptureSampleBuffer(
    CMSampleBufferRef sampleBuffer) {
  if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 ||
      !CMSampleBufferIsValid(sampleBuffer) ||
      !CMSampleBufferDataIsReady(sampleBuffer)) {
    return;
  }

  CVImageBufferRef image_buffer = CMSampleBufferGetImageBuffer(sampleBuffer);
  if (image_buffer == NULL) {
    return;
  }

  // Retain the buffer and post it to the webrtc thread. It will be released
  // after it has successfully been signaled.
  CVBufferRetain(image_buffer);
  AVFoundationFrame frame(image_buffer, rtc::TimeNanos());
  _startThread->Post(this, kMessageTypeFrame,
                     new rtc::TypedMessageData<AVFoundationFrame>(frame));
}

void AVFoundationVideoCapturer::OnMessage(rtc::Message *msg) {
  switch (msg->message_id) {
    case kMessageTypeFrame: {
      rtc::TypedMessageData<AVFoundationFrame>* data =
          static_cast<rtc::TypedMessageData<AVFoundationFrame>*>(msg->pdata);
      const AVFoundationFrame& frame = data->data();
      OnFrameMessage(frame.image_buffer, frame.capture_time);
      delete data;
      break;
    }
  }
}

void AVFoundationVideoCapturer::OnFrameMessage(CVImageBufferRef image_buffer,
                                               int64_t capture_time) {
  RTC_DCHECK(_startThread->IsCurrent());

  // The base address must be locked before frame data can be accessed.
  CVOptionFlags lock_flags = kCVPixelBufferLock_ReadOnly;
  CVReturn ret = CVPixelBufferLockBaseAddress(image_buffer, lock_flags);
  if (ret != kCVReturnSuccess) {
    return;
  }

  static size_t const kYPlaneIndex = 0;
  static size_t const kUVPlaneIndex = 1;
  uint8_t* y_plane_address =
      static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(image_buffer,
                                                               kYPlaneIndex));
  size_t y_plane_height =
      CVPixelBufferGetHeightOfPlane(image_buffer, kYPlaneIndex);
  size_t y_plane_width =
      CVPixelBufferGetWidthOfPlane(image_buffer, kYPlaneIndex);
  size_t y_plane_bytes_per_row =
      CVPixelBufferGetBytesPerRowOfPlane(image_buffer, kYPlaneIndex);
  size_t uv_plane_height =
      CVPixelBufferGetHeightOfPlane(image_buffer, kUVPlaneIndex);
  size_t uv_plane_bytes_per_row =
      CVPixelBufferGetBytesPerRowOfPlane(image_buffer, kUVPlaneIndex);
  size_t frame_size = y_plane_bytes_per_row * y_plane_height +
      uv_plane_bytes_per_row * uv_plane_height;

  // Sanity-check the assumption that the two planes are contiguous in memory.
  uint8_t* uv_plane_address =
      static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(image_buffer,
                                                               kUVPlaneIndex));
  RTC_DCHECK(uv_plane_address ==
             y_plane_address + y_plane_height * y_plane_bytes_per_row);

  // Stuff the data into a cricket::CapturedFrame.
  cricket::CapturedFrame frame;
  frame.width = y_plane_width;
  frame.height = y_plane_height;
  frame.pixel_width = 1;
  frame.pixel_height = 1;
  frame.fourcc = static_cast<uint32_t>(cricket::FOURCC_NV12);
  frame.time_stamp = capture_time;
  frame.data = y_plane_address;
  frame.data_size = frame_size;

  // This will call a superclass method that will perform the frame conversion
  // to I420.
  SignalFrameCaptured(this, &frame);

  CVPixelBufferUnlockBaseAddress(image_buffer, lock_flags);
  CVBufferRelease(image_buffer);
}
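
// Worked example of the frame_size computation above (sketch, assuming a
// tightly packed 640x480 NV12 buffer where bytes_per_row == width):
//   Y plane:  640 * 480 = 307200 bytes
//   UV plane: 640 * 240 = 153600 bytes (interleaved Cb/Cr, half height)
//   frame_size = 460800 bytes, i.e. 1.5 bytes per pixel as expected for
//   4:2:0. Real buffers may pad bytes_per_row beyond the width, which is why
//   the code computes the size from bytes-per-row rather than from width.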

}  // namespace webrtc
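
// Usage sketch (illustration only; this file contains no such code): a
// client owning the capturer could drive it roughly as follows.
//
//   webrtc::AVFoundationVideoCapturer capturer;
//   cricket::VideoFormat format(640, 480,
//                               cricket::VideoFormat::FpsToInterval(30),
//                               cricket::FOURCC_NV12);
//   if (capturer.Start(format) == cricket::CS_STARTING) {
//     // Frames are signaled (converted to I420 by the superclass) on the
//     // thread that called Start().
//   }
//   capturer.Stop();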