Chromium Code Reviews

Diff: webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.mm

Issue 2046863004: Add AVFoundation video capture support to Mac objc SDK (based on iOS) (Closed) Base URL: https://chromium.googlesource.com/external/webrtc.git@master
Patch Set: Put framework links in the right place (created 4 years, 6 months ago)
/*
 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "avfoundationvideocapturer.h"

#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
+#if TARGET_OS_IPHONE
#import <UIKit/UIKit.h>
+#endif

#import "RTCDispatcher+Private.h"
#import "WebRTC/RTCLogging.h"

#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/thread.h"

// TODO(tkchin): support other formats.
static NSString *const kDefaultPreset = AVCaptureSessionPreset640x480;
(...skipping 55 matching lines...)
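The conditional UIKit import above is driven by TARGET_OS_IPHONE, which TargetConditionals.h (pulled in through Foundation) defines for iOS builds. A minimal sketch of the same guard pattern, using a hypothetical helper that is not part of this patch:

#include <TargetConditionals.h>
#import <Foundation/Foundation.h>
#if TARGET_OS_IPHONE
#import <UIKit/UIKit.h>
#endif

// Hypothetical helper: returns the OS version string on either platform.
static NSString *CurrentOSVersionString(void) {
#if TARGET_OS_IPHONE
  // iOS: UIDevice is available because UIKit was imported above.
  return [UIDevice currentDevice].systemVersion;
#else
  // macOS: UIKit is not linked, so fall back to NSProcessInfo.
  return [NSProcessInfo processInfo].operatingSystemVersionString;
#endif
}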
  if (self = [super init]) {
    _capturer = capturer;
    // Create the capture session and all relevant inputs and outputs. We need
    // to do this in init because the application may want the capture session
    // before we start the capturer for e.g. AVCapturePreviewLayer. All objects
    // created here are retained until dealloc and never recreated.
    if (![self setupCaptureSession]) {
      return nil;
    }
    NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
+#if TARGET_OS_IPHONE
    [center addObserver:self
               selector:@selector(deviceOrientationDidChange:)
                   name:UIDeviceOrientationDidChangeNotification
                 object:nil];
    [center addObserver:self
               selector:@selector(handleCaptureSessionInterruption:)
                   name:AVCaptureSessionWasInterruptedNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionInterruptionEnded:)
                   name:AVCaptureSessionInterruptionEndedNotification
                 object:_captureSession];
+#endif
    [center addObserver:self
               selector:@selector(handleCaptureSessionRuntimeError:)
                   name:AVCaptureSessionRuntimeErrorNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionDidStartRunning:)
                   name:AVCaptureSessionDidStartRunningNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionDidStopRunning:)
(...skipping 68 matching lines...)
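The init above registers this object for several notifications; per its comment, everything created there lives until dealloc, which falls in the elided lines. A sketch of the balanced removal that registration implies (illustrative only, not the patch's actual dealloc):

- (void)dealloc {
  // Balance every addObserver: call made in init so no notification is
  // delivered to a deallocated capturer.
  [[NSNotificationCenter defaultCenter] removeObserver:self];
}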
// Called from WebRTC thread.
- (void)start {
  if (self.hasStarted) {
    return;
  }
  self.hasStarted = YES;
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    _orientationHasChanged = NO;
    [self updateOrientation];
+#if TARGET_OS_IPHONE
    [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
+#endif
    AVCaptureSession *captureSession = self.captureSession;
    [captureSession startRunning];
  }];
}

// Called from same thread as start.
- (void)stop {
  if (!self.hasStarted) {
    return;
  }
  self.hasStarted = NO;
  // Due to this async block, it's possible that the ObjC object outlives the
  // C++ one. In order to not invoke functions on the C++ object, we set
  // hasStarted immediately instead of dispatching it async.
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    [_videoDataOutput setSampleBufferDelegate:nil queue:nullptr];
    [_captureSession stopRunning];
+#if TARGET_OS_IPHONE
    [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
+#endif
  }];
}

#pragma mark iOS notifications

+#if TARGET_OS_IPHONE
- (void)deviceOrientationDidChange:(NSNotification *)notification {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    _orientationHasChanged = YES;
    [self updateOrientation];
  }];
}
+#endif

#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate

- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection {
  NSParameterAssert(captureOutput == _videoDataOutput);
  if (!self.hasStarted) {
    return;
  }
(...skipping 33 matching lines...)
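The comment in -stop above explains why hasStarted is cleared synchronously before the async teardown: the Objective-C object can outlive the C++ capturer, and the sample-buffer callback checks the flag so it can drop frames immediately. A small sketch of that ordering, with hypothetical names (the queue and method name are illustrative, not the patch's actual members):

// Hypothetical sketch of the "flip the flag first, tear down later" pattern.
- (void)stopCapture {
  self.hasStarted = NO;  // Visible to the delegate callback right away.
  dispatch_async(_captureQueue, ^{
    // Teardown may run after the C++ capturer is gone; the callback already
    // bails out because hasStarted is NO.
    [_videoDataOutput setSampleBufferDelegate:nil queue:NULL];
    [_captureSession stopRunning];
  });
}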
266 RTCLog(@"Capture session interrupted: %@", reasonString); 276 RTCLog(@"Capture session interrupted: %@", reasonString);
267 // TODO(tkchin): Handle this case. 277 // TODO(tkchin): Handle this case.
268 } 278 }
269 279
270 - (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification { 280 - (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification {
271 RTCLog(@"Capture session interruption ended."); 281 RTCLog(@"Capture session interruption ended.");
272 // TODO(tkchin): Handle this case. 282 // TODO(tkchin): Handle this case.
273 } 283 }
274 284
275 - (void)handleCaptureSessionRuntimeError:(NSNotification *)notification { 285 - (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
276 NSError *error = notification.userInfo[AVCaptureSessionErrorKey]; 286 NSError *error =
287 [notification.userInfo objectForKey: AVCaptureSessionErrorKey];
tkchin_webrtc 2016/06/08 21:35:55 nit: indent, and no space after : it should be 6
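For reference, a sketch of what the flagged lines might look like once the nit is addressed; the comment is truncated above, so the six-space continuation indent is an assumption, while removing the space after the selector colon is explicit:

  // Continuation indented six spaces, no space after the colon (assumed fix).
  NSError *error =
      [notification.userInfo objectForKey:AVCaptureSessionErrorKey];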
277 RTCLogError(@"Capture session runtime error: %@", error.localizedDescription); 288 RTCLogError(@"Capture session runtime error: %@", error.localizedDescription);
278 289
279 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession 290 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
280 block:^{ 291 block:^{
292 #if TARGET_OS_IPHONE
281 if (error.code == AVErrorMediaServicesWereReset) { 293 if (error.code == AVErrorMediaServicesWereReset) {
282 [self handleNonFatalError]; 294 [self handleNonFatalError];
283 } else { 295 } else {
284 [self handleFatalError]; 296 [self handleFatalError];
285 } 297 }
298 #else
299 [self handleFatalError];
300 #endif
286 }]; 301 }];
287 } 302 }
288 303
289 - (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification { 304 - (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification {
290 RTCLog(@"Capture session started."); 305 RTCLog(@"Capture session started.");
291 self.isRunning = YES; 306 self.isRunning = YES;
292 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession 307 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
293 block:^{ 308 block:^{
294 // If we successfully restarted after an unknown error, allow future 309 // If we successfully restarted after an unknown error, allow future
295 // retries on fatal errors. 310 // retries on fatal errors.
(...skipping 99 matching lines...)
       [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
    if (captureDevice.position == position) {
      return captureDevice;
    }
  }
  return nil;
}

- (AVCaptureDeviceInput *)frontCameraInput {
  if (!_frontCameraInput) {
+#if TARGET_OS_IPHONE
    AVCaptureDevice *frontCameraDevice =
        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionFront];
+#else
+    AVCaptureDevice *frontCameraDevice =
+        [AVCaptureDevice defaultDeviceWithMediaType: AVMediaTypeVideo];;
+#endif
    if (!frontCameraDevice) {
      RTCLogWarning(@"Failed to find front capture device.");
      return nil;
    }
    NSError *error = nil;
    AVCaptureDeviceInput *frontCameraInput =
        [AVCaptureDeviceInput deviceInputWithDevice:frontCameraDevice
                                              error:&error];
    if (!frontCameraInput) {
      RTCLogError(@"Failed to create front camera input: %@",
(...skipping 28 matching lines...)
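The #else branch above falls back to AVCaptureDevice's default video device because macOS has no front/back camera positions (as written, that line also carries a stray second semicolon and a space after the selector colon). A self-contained sketch of that macOS path, with a hypothetical helper name:

#import <AVFoundation/AVFoundation.h>

// Hypothetical helper, not part of the patch: builds a capture input from the
// system default video device, the usual macOS substitute for the iOS
// front/back position lookup.
static AVCaptureDeviceInput *DefaultVideoInput(void) {
  AVCaptureDevice *device =
      [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
  if (!device) {
    return nil;  // No camera attached, or capture is unavailable.
  }
  NSError *error = nil;
  AVCaptureDeviceInput *input =
      [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
  if (!input) {
    NSLog(@"Failed to create capture input: %@", error.localizedDescription);
  }
  return input;
}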
}

// Called from capture session queue.
- (void)updateOrientation {
  AVCaptureConnection *connection =
      [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
  if (!connection.supportsVideoOrientation) {
    // TODO(tkchin): set rotation bit on frames.
    return;
  }
  AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait;

tkchin_webrtc 2016/06/08 21:35:55: Technically this shouldn't get called at all, but …

+#if TARGET_OS_IPHONE
  switch ([UIDevice currentDevice].orientation) {
    case UIDeviceOrientationPortrait:
      orientation = AVCaptureVideoOrientationPortrait;
      break;
    case UIDeviceOrientationPortraitUpsideDown:
      orientation = AVCaptureVideoOrientationPortraitUpsideDown;
      break;
    case UIDeviceOrientationLandscapeLeft:
      orientation = AVCaptureVideoOrientationLandscapeRight;
      break;
    case UIDeviceOrientationLandscapeRight:
      orientation = AVCaptureVideoOrientationLandscapeLeft;
      break;
    case UIDeviceOrientationFaceUp:
    case UIDeviceOrientationFaceDown:
    case UIDeviceOrientationUnknown:
      if (!_orientationHasChanged) {
        connection.videoOrientation = orientation;
      }
      return;
  }
+#endif
  connection.videoOrientation = orientation;
}

// Update the current session input to match what's stored in _useBackCamera.
- (void)updateSessionInputForUseBackCamera:(BOOL)useBackCamera {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    [_captureSession beginConfiguration];
    AVCaptureDeviceInput *oldInput = _backCameraInput;
    AVCaptureDeviceInput *newInput = _frontCameraInput;
(...skipping 181 matching lines...)

  // This will call a superclass method that will perform the frame conversion
  // to I420.
  SignalFrameCaptured(this, &frame);

  CVPixelBufferUnlockBaseAddress(image_buffer, lock_flags);
  CVBufferRelease(image_buffer);
}

}  // namespace webrtc
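The frame-delivery code above unlocks and releases the pixel buffer only after SignalFrameCaptured returns. A standalone sketch of that lock/retain discipline (illustrative names; the real code obtains lock_flags when it locks the buffer earlier, outside this hunk):

#include <CoreMedia/CoreMedia.h>
#include <CoreVideo/CoreVideo.h>

// Illustrative only: retain, lock, read, unlock, release, the discipline the
// capture path relies on when consuming a CMSampleBuffer's pixel data.
static void ReadPixelData(CMSampleBufferRef sample_buffer) {
  CVImageBufferRef image_buffer = CMSampleBufferGetImageBuffer(sample_buffer);
  if (!image_buffer) {
    return;
  }
  CVBufferRetain(image_buffer);  // Keep the buffer alive while we use it.
  CVPixelBufferLockBaseAddress(image_buffer, kCVPixelBufferLock_ReadOnly);
  void *base_address = CVPixelBufferGetBaseAddress(image_buffer);
  size_t bytes_per_row = CVPixelBufferGetBytesPerRow(image_buffer);
  size_t height = CVPixelBufferGetHeight(image_buffer);
  // ... consume base_address (bytes_per_row * height bytes) ...
  (void)base_address;
  (void)bytes_per_row;
  (void)height;
  // Unlock with the same flags used to lock, then balance the retain.
  CVPixelBufferUnlockBaseAddress(image_buffer, kCVPixelBufferLock_ReadOnly);
  CVBufferRelease(image_buffer);
}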