| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. | 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| 11 #include "webrtc/api/objc/avfoundationvideocapturer.h" | 11 #include "webrtc/api/objc/avfoundationvideocapturer.h" |
| 12 | 12 |
| 13 #include "webrtc/base/bind.h" | 13 #include "webrtc/base/bind.h" |
| 14 | 14 |
| 15 #import <AVFoundation/AVFoundation.h> | 15 #import <AVFoundation/AVFoundation.h> |
| 16 #import <Foundation/Foundation.h> | 16 #import <Foundation/Foundation.h> |
| 17 #import <UIKit/UIKit.h> | 17 #import <UIKit/UIKit.h> |
| 18 | 18 |
| 19 #import "webrtc/base/objc/RTCDispatcher.h" | 19 #import "webrtc/base/objc/RTCDispatcher.h" |
| 20 #import "webrtc/base/objc/RTCLogging.h" |
| 20 | 21 |
| 21 // TODO(tkchin): support other formats. | 22 // TODO(tkchin): support other formats. |
| 22 static NSString* const kDefaultPreset = AVCaptureSessionPreset640x480; | 23 static NSString* const kDefaultPreset = AVCaptureSessionPreset640x480; |
| 23 static cricket::VideoFormat const kDefaultFormat = | 24 static cricket::VideoFormat const kDefaultFormat = |
| 24 cricket::VideoFormat(640, | 25 cricket::VideoFormat(640, |
| 25 480, | 26 480, |
| 26 cricket::VideoFormat::FpsToInterval(30), | 27 cricket::VideoFormat::FpsToInterval(30), |
| 27 cricket::FOURCC_NV12); | 28 cricket::FOURCC_NV12); |
| 28 | 29 |
| 29 // This class is used to capture frames using AVFoundation APIs on iOS. It is | 30 // This class is used to capture frames using AVFoundation APIs on iOS. It is |
| 30 // meant to be owned by an instance of AVFoundationVideoCapturer. The reason is | 31 // meant to be owned by an instance of AVFoundationVideoCapturer. The reason is |
| 31 // that other webrtc objects own cricket::VideoCapturer, which is not | 32 // that other webrtc objects own cricket::VideoCapturer, which is not |
| 32 // ref counted. To prevent bad behavior we do not expose this class directly. | 33 // ref counted. To prevent bad behavior we do not expose this class directly. |
| 33 @interface RTCAVFoundationVideoCapturerInternal : NSObject | 34 @interface RTCAVFoundationVideoCapturerInternal : NSObject |
| 34 <AVCaptureVideoDataOutputSampleBufferDelegate> | 35 <AVCaptureVideoDataOutputSampleBufferDelegate> |
| 35 | 36 |
| 36 @property(nonatomic, readonly) AVCaptureSession *captureSession; | 37 @property(nonatomic, readonly) AVCaptureSession *captureSession; |
| 37 @property(nonatomic, readonly) BOOL isRunning; | 38 @property(nonatomic, readonly) BOOL isRunning; |
| 39 @property(nonatomic, readonly) BOOL canUseBackCamera; |
| 38 @property(nonatomic, assign) BOOL useBackCamera; // Defaults to NO. | 40 @property(nonatomic, assign) BOOL useBackCamera; // Defaults to NO. |
| 39 | 41 |
| 40 // We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it | 42 // We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it |
| 41 // when we receive frames. This is safe because this object should be owned by | 43 // when we receive frames. This is safe because this object should be owned by |
| 42 // it. | 44 // it. |
| 43 - (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer; | 45 - (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer; |
| 44 - (void)startCaptureAsync; | 46 - (void)startCaptureAsync; |
| 45 - (void)stopCaptureAsync; | 47 - (void)stopCaptureAsync; |
| 46 | 48 |
| 47 @end | 49 @end |
| (...skipping 33 matching lines...) |
| 81 } | 83 } |
| 82 return self; | 84 return self; |
| 83 } | 85 } |
| 84 | 86 |
| 85 - (void)dealloc { | 87 - (void)dealloc { |
| 86 [self stopCaptureAsync]; | 88 [self stopCaptureAsync]; |
| 87 [[NSNotificationCenter defaultCenter] removeObserver:self]; | 89 [[NSNotificationCenter defaultCenter] removeObserver:self]; |
| 88 _capturer = nullptr; | 90 _capturer = nullptr; |
| 89 } | 91 } |
| 90 | 92 |
| 93 - (BOOL)canUseBackCamera { |
| 94 return _backDeviceInput != nil; |
| 95 } |
| 96 |
| 91 - (void)setUseBackCamera:(BOOL)useBackCamera { | 97 - (void)setUseBackCamera:(BOOL)useBackCamera { |
| 92 if (_useBackCamera == useBackCamera) { | 98 if (_useBackCamera == useBackCamera) { |
| 93 return; | 99 return; |
| 94 } | 100 } |
| 101 if (!self.canUseBackCamera) { |
| 102 RTCLog(@"No rear-facing camera exists or it cannot be used;" |
| 103 "not switching."); |
| 104 return; |
| 105 } |
| 95 _useBackCamera = useBackCamera; | 106 _useBackCamera = useBackCamera; |
| 96 [self updateSessionInput]; | 107 [self updateSessionInput]; |
| 97 } | 108 } |
| 98 | 109 |
| 99 - (void)startCaptureAsync { | 110 - (void)startCaptureAsync { |
| 100 if (_isRunning) { | 111 if (_isRunning) { |
| 101 return; | 112 return; |
| 102 } | 113 } |
| 103 _orientationHasChanged = NO; | 114 _orientationHasChanged = NO; |
| 104 [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications]; | 115 [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications]; |
| (...skipping 74 matching lines...) |
| 179 AVCaptureDevice *backCaptureDevice = nil; | 190 AVCaptureDevice *backCaptureDevice = nil; |
| 180 for (AVCaptureDevice *captureDevice in | 191 for (AVCaptureDevice *captureDevice in |
| 181 [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) { | 192 [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) { |
| 182 if (captureDevice.position == AVCaptureDevicePositionBack) { | 193 if (captureDevice.position == AVCaptureDevicePositionBack) { |
| 183 backCaptureDevice = captureDevice; | 194 backCaptureDevice = captureDevice; |
| 184 } | 195 } |
| 185 if (captureDevice.position == AVCaptureDevicePositionFront) { | 196 if (captureDevice.position == AVCaptureDevicePositionFront) { |
| 186 frontCaptureDevice = captureDevice; | 197 frontCaptureDevice = captureDevice; |
| 187 } | 198 } |
| 188 } | 199 } |
| 189 if (!frontCaptureDevice || !backCaptureDevice) { | 200 if (!frontCaptureDevice) { |
| 190 NSLog(@"Failed to get capture devices."); | 201 RTCLog(@"Failed to get front capture device."); |
| 191 return NO; | 202 return NO; |
| 192 } | 203 } |
| 204 if (!backCaptureDevice) { |
| 205 RTCLog(@"Failed to get back capture device"); |
| 206 // Don't return NO here because devices exist (16GB 5th generation iPod |
| 207 // Touch) that don't have a rear-facing camera. |
| 208 } |
| 193 | 209 |
| 194 // Set up the session inputs. | 210 // Set up the session inputs. |
| 195 NSError *error = nil; | 211 NSError *error = nil; |
| 196 _frontDeviceInput = | 212 _frontDeviceInput = |
| 197 [AVCaptureDeviceInput deviceInputWithDevice:frontCaptureDevice | 213 [AVCaptureDeviceInput deviceInputWithDevice:frontCaptureDevice |
| 198 error:&error]; | 214 error:&error]; |
| 199 if (!_frontDeviceInput) { | 215 if (!_frontDeviceInput) { |
| 200 NSLog(@"Failed to get capture device input: %@", | 216 NSLog(@"Failed to get capture device input: %@", |
| 201 error.localizedDescription); | 217 error.localizedDescription); |
| 202 return NO; | 218 return NO; |
| 203 } | 219 } |
| 204 _backDeviceInput = | 220 if (backCaptureDevice) { |
| 205 [AVCaptureDeviceInput deviceInputWithDevice:backCaptureDevice | 221 error = nil; |
| 206 error:&error]; | 222 _backDeviceInput = |
| 207 if (!_backDeviceInput) { | 223 [AVCaptureDeviceInput deviceInputWithDevice:backCaptureDevice |
| 208 NSLog(@"Failed to get capture device input: %@", | 224 error:&error]; |
| 209 error.localizedDescription); | 225 if (error) { |
| 210 return NO; | 226 RTCLog(@"Failed to get capture device input: %@", |
| 227 error.localizedDescription); |
| 228 _backDeviceInput = nil; |
| 229 } |
| 211 } | 230 } |
| 212 | 231 |
| 213 // Add the inputs. | 232 // Add the inputs. |
| 214 if (![_captureSession canAddInput:_frontDeviceInput] || | 233 if (![_captureSession canAddInput:_frontDeviceInput] || |
| 215 ![_captureSession canAddInput:_backDeviceInput]) { | 234 (_backDeviceInput && ![_captureSession canAddInput:_backDeviceInput])) { |
| 216 NSLog(@"Session does not support capture inputs."); | 235 NSLog(@"Session does not support capture inputs."); |
| 217 return NO; | 236 return NO; |
| 218 } | 237 } |
| 219 [self updateSessionInput]; | 238 [self updateSessionInput]; |
| 220 | 239 |
| 221 return YES; | 240 return YES; |
| 222 } | 241 } |
| 223 | 242 |
| 224 - (void)deviceOrientationDidChange:(NSNotification *)notification { | 243 - (void)deviceOrientationDidChange:(NSNotification *)notification { |
| 225 _orientationHasChanged = YES; | 244 _orientationHasChanged = YES; |
| (...skipping 103 matching lines...) |
| 329 } | 348 } |
| 330 | 349 |
| 331 bool AVFoundationVideoCapturer::IsRunning() { | 350 bool AVFoundationVideoCapturer::IsRunning() { |
| 332 return _capturer.isRunning; | 351 return _capturer.isRunning; |
| 333 } | 352 } |
| 334 | 353 |
| 335 AVCaptureSession* AVFoundationVideoCapturer::GetCaptureSession() { | 354 AVCaptureSession* AVFoundationVideoCapturer::GetCaptureSession() { |
| 336 return _capturer.captureSession; | 355 return _capturer.captureSession; |
| 337 } | 356 } |
| 338 | 357 |
| 358 bool AVFoundationVideoCapturer::CanUseBackCamera() const { |
| 359 return _capturer.canUseBackCamera; |
| 360 } |
| 361 |
| 339 void AVFoundationVideoCapturer::SetUseBackCamera(bool useBackCamera) { | 362 void AVFoundationVideoCapturer::SetUseBackCamera(bool useBackCamera) { |
| 340 _capturer.useBackCamera = useBackCamera; | 363 _capturer.useBackCamera = useBackCamera; |
| 341 } | 364 } |
| 342 | 365 |
| 343 bool AVFoundationVideoCapturer::GetUseBackCamera() const { | 366 bool AVFoundationVideoCapturer::GetUseBackCamera() const { |
| 344 return _capturer.useBackCamera; | 367 return _capturer.useBackCamera; |
| 345 } | 368 } |
| 346 | 369 |
| 347 void AVFoundationVideoCapturer::CaptureSampleBuffer( | 370 void AVFoundationVideoCapturer::CaptureSampleBuffer( |
| 348 CMSampleBufferRef sampleBuffer) { | 371 CMSampleBufferRef sampleBuffer) { |
| (...skipping 62 matching lines...) |
| 411 | 434 |
| 412 void AVFoundationVideoCapturer::SignalFrameCapturedOnStartThread( | 435 void AVFoundationVideoCapturer::SignalFrameCapturedOnStartThread( |
| 413 const cricket::CapturedFrame *frame) { | 436 const cricket::CapturedFrame *frame) { |
| 414 RTC_DCHECK(_startThread->IsCurrent()); | 437 RTC_DCHECK(_startThread->IsCurrent()); |
| 415 // This will call a superclass method that will perform the frame conversion | 438 // This will call a superclass method that will perform the frame conversion |
| 416 // to I420. | 439 // to I420. |
| 417 SignalFrameCaptured(this, frame); | 440 SignalFrameCaptured(this, frame); |
| 418 } | 441 } |
| 419 | 442 |
| 420 } // namespace webrtc | 443 } // namespace webrtc |
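The net effect of the patch is that a missing rear-facing camera no longer aborts session setup: the front camera is still configured, and callers can query the new CanUseBackCamera() / canUseBackCamera accessors before requesting a switch. A minimal caller-side sketch of that flow follows; the helper name and the capturer pointer are illustrative assumptions, not part of this change.

#include "webrtc/api/objc/avfoundationvideocapturer.h"

// Illustrative helper (assumed name); `capturer` points to a live
// webrtc::AVFoundationVideoCapturer owned by the usual webrtc plumbing.
void UseBackCameraIfAvailable(webrtc::AVFoundationVideoCapturer* capturer) {
  if (!capturer->CanUseBackCamera()) {
    // No usable rear-facing camera (e.g. a 16GB 5th-generation iPod touch);
    // the internal setter ignores the request, so the front camera stays active.
    return;
  }
  capturer->SetUseBackCamera(true);
}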