OLD | NEW |
1 /* | 1 /* |
2 * libjingle | 2 * libjingle |
3 * Copyright 2015 Google Inc. | 3 * Copyright 2015 Google Inc. |
4 * | 4 * |
5 * Redistribution and use in source and binary forms, with or without | 5 * Redistribution and use in source and binary forms, with or without |
6 * modification, are permitted provided that the following conditions are met: | 6 * modification, are permitted provided that the following conditions are met: |
7 * | 7 * |
8 * 1. Redistributions of source code must retain the above copyright notice, | 8 * 1. Redistributions of source code must retain the above copyright notice, |
9 * this list of conditions and the following disclaimer. | 9 * this list of conditions and the following disclaimer. |
10 * 2. Redistributions in binary form must reproduce the above copyright notice, | 10 * 2. Redistributions in binary form must reproduce the above copyright notice, |
(...skipping 16 matching lines...) |
27 | 27 |
28 #include "talk/app/webrtc/objc/avfoundationvideocapturer.h" | 28 #include "talk/app/webrtc/objc/avfoundationvideocapturer.h" |
29 | 29 |
30 #include "webrtc/base/bind.h" | 30 #include "webrtc/base/bind.h" |
31 | 31 |
32 #import <AVFoundation/AVFoundation.h> | 32 #import <AVFoundation/AVFoundation.h> |
33 #import <Foundation/Foundation.h> | 33 #import <Foundation/Foundation.h> |
34 #import <UIKit/UIKit.h> | 34 #import <UIKit/UIKit.h> |
35 | 35 |
36 #import "webrtc/base/objc/RTCDispatcher.h" | 36 #import "webrtc/base/objc/RTCDispatcher.h" |
| 37 #import "webrtc/base/objc/RTCLogging.h" |
37 | 38 |
38 // TODO(tkchin): support other formats. | 39 // TODO(tkchin): support other formats. |
39 static NSString* const kDefaultPreset = AVCaptureSessionPreset640x480; | 40 static NSString* const kDefaultPreset = AVCaptureSessionPreset640x480; |
40 static cricket::VideoFormat const kDefaultFormat = | 41 static cricket::VideoFormat const kDefaultFormat = |
41 cricket::VideoFormat(640, | 42 cricket::VideoFormat(640, |
42 480, | 43 480, |
43 cricket::VideoFormat::FpsToInterval(30), | 44 cricket::VideoFormat::FpsToInterval(30), |
44 cricket::FOURCC_NV12); | 45 cricket::FOURCC_NV12); |
45 | 46 |
46 // This class is used to capture frames using AVFoundation APIs on iOS. It is | 47 // This class is used to capture frames using AVFoundation APIs on iOS. It is |
47 // meant to be owned by an instance of AVFoundationVideoCapturer. The reason is | 48 // meant to be owned by an instance of AVFoundationVideoCapturer. The reason is |
48 // that other webrtc objects own cricket::VideoCapturer, which is not | 49 // that other webrtc objects own cricket::VideoCapturer, which is not |
49 // ref counted. To prevent bad behavior we do not expose this class directly. | 50 // ref counted. To prevent bad behavior we do not expose this class directly. |
50 @interface RTCAVFoundationVideoCapturerInternal : NSObject | 51 @interface RTCAVFoundationVideoCapturerInternal : NSObject |
51 <AVCaptureVideoDataOutputSampleBufferDelegate> | 52 <AVCaptureVideoDataOutputSampleBufferDelegate> |
52 | 53 |
53 @property(nonatomic, readonly) AVCaptureSession* captureSession; | 54 @property(nonatomic, readonly) AVCaptureSession* captureSession; |
54 @property(nonatomic, readonly) BOOL isRunning; | 55 @property(nonatomic, readonly) BOOL isRunning; |
| 56 @property(nonatomic, readonly) BOOL canUseBackCamera; |
55 @property(nonatomic, assign) BOOL useBackCamera; // Defaults to NO. | 57 @property(nonatomic, assign) BOOL useBackCamera; // Defaults to NO. |
56 | 58 |
57 // We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it | 59 // We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it |
58 // when we receive frames. This is safe because this object should be owned by | 60 // when we receive frames. This is safe because this object should be owned by |
59 // it. | 61 // it. |
60 - (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer*)capturer; | 62 - (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer*)capturer; |
61 - (void)startCaptureAsync; | 63 - (void)startCaptureAsync; |
62 - (void)stopCaptureAsync; | 64 - (void)stopCaptureAsync; |
63 | 65 |
64 @end | 66 @end |
(...skipping 33 matching lines...) |
98 } | 100 } |
99 return self; | 101 return self; |
100 } | 102 } |
101 | 103 |
102 - (void)dealloc { | 104 - (void)dealloc { |
103 [self stopCaptureAsync]; | 105 [self stopCaptureAsync]; |
104 [[NSNotificationCenter defaultCenter] removeObserver:self]; | 106 [[NSNotificationCenter defaultCenter] removeObserver:self]; |
105 _capturer = nullptr; | 107 _capturer = nullptr; |
106 } | 108 } |
107 | 109 |
| 110 - (BOOL)canUseBackCamera { |
| 111 return _backDeviceInput != nil; |
| 112 } |
| 113 |
108 - (void)setUseBackCamera:(BOOL)useBackCamera { | 114 - (void)setUseBackCamera:(BOOL)useBackCamera { |
109 if (_useBackCamera == useBackCamera) { | 115 if (_useBackCamera == useBackCamera) { |
110 return; | 116 return; |
111 } | 117 } |
| 118 if (!self.canUseBackCamera) { |
| 119 RTCLog(@"No rear-facing camera exists or it cannot be used;" |
| 120 " not switching."); |
| 121 return; |
| 122 } |
112 _useBackCamera = useBackCamera; | 123 _useBackCamera = useBackCamera; |
113 [self updateSessionInput]; | 124 [self updateSessionInput]; |
114 } | 125 } |
115 | 126 |
116 - (void)startCaptureAsync { | 127 - (void)startCaptureAsync { |
117 if (_isRunning) { | 128 if (_isRunning) { |
118 return; | 129 return; |
119 } | 130 } |
120 _orientationHasChanged = NO; | 131 _orientationHasChanged = NO; |
121 [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications]; | 132 [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications]; |
(...skipping 74 matching lines...) |
196 AVCaptureDevice* backCaptureDevice = nil; | 207 AVCaptureDevice* backCaptureDevice = nil; |
197 for (AVCaptureDevice* captureDevice in | 208 for (AVCaptureDevice* captureDevice in |
198 [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) { | 209 [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) { |
199 if (captureDevice.position == AVCaptureDevicePositionBack) { | 210 if (captureDevice.position == AVCaptureDevicePositionBack) { |
200 backCaptureDevice = captureDevice; | 211 backCaptureDevice = captureDevice; |
201 } | 212 } |
202 if (captureDevice.position == AVCaptureDevicePositionFront) { | 213 if (captureDevice.position == AVCaptureDevicePositionFront) { |
203 frontCaptureDevice = captureDevice; | 214 frontCaptureDevice = captureDevice; |
204 } | 215 } |
205 } | 216 } |
206 if (!frontCaptureDevice || !backCaptureDevice) { | 217 if (!frontCaptureDevice) { |
207 NSLog(@"Failed to get capture devices."); | 218 RTCLog(@"Failed to get front capture device."); |
208 return NO; | 219 return NO; |
209 } | 220 } |
| 221 if (!backCaptureDevice) { |
| 222 RTCLog(@"Failed to get back capture device."); |
| 223 // Don't return NO here because devices exist (16GB 5th generation iPod |
| 224 // Touch) that don't have a rear-facing camera. |
| 225 } |
210 | 226 |
211 // Set up the session inputs. | 227 // Set up the session inputs. |
212 NSError* error = nil; | 228 NSError* error = nil; |
213 _frontDeviceInput = | 229 _frontDeviceInput = |
214 [AVCaptureDeviceInput deviceInputWithDevice:frontCaptureDevice | 230 [AVCaptureDeviceInput deviceInputWithDevice:frontCaptureDevice |
215 error:&error]; | 231 error:&error]; |
216 if (!_frontDeviceInput) { | 232 if (!_frontDeviceInput) { |
217 NSLog(@"Failed to get capture device input: %@", | 233 NSLog(@"Failed to get capture device input: %@", |
218 error.localizedDescription); | 234 error.localizedDescription); |
219 return NO; | 235 return NO; |
220 } | 236 } |
221 _backDeviceInput = | 237 if (backCaptureDevice) { |
222 [AVCaptureDeviceInput deviceInputWithDevice:backCaptureDevice | 238 error = nil; |
223 error:&error]; | 239 _backDeviceInput = |
224 if (!_backDeviceInput) { | 240 [AVCaptureDeviceInput deviceInputWithDevice:backCaptureDevice |
225 NSLog(@"Failed to get capture device input: %@", | 241 error:&error]; |
226 error.localizedDescription); | 242 if (error) { |
227 return NO; | 243 RTCLog(@"Failed to get capture device input: %@", |
| 244 error.localizedDescription); |
| 245 _backDeviceInput = nil; |
| 246 } |
228 } | 247 } |
229 | 248 |
230 // Add the inputs. | 249 // Add the inputs. |
231 if (![_captureSession canAddInput:_frontDeviceInput] || | 250 if (![_captureSession canAddInput:_frontDeviceInput] || |
232 ![_captureSession canAddInput:_backDeviceInput]) { | 251 (_backDeviceInput && ![_captureSession canAddInput:_backDeviceInput])) { |
233 NSLog(@"Session does not support capture inputs."); | 252 NSLog(@"Session does not support capture inputs."); |
234 return NO; | 253 return NO; |
235 } | 254 } |
236 [self updateSessionInput]; | 255 [self updateSessionInput]; |
237 | 256 |
238 return YES; | 257 return YES; |
239 } | 258 } |
240 | 259 |
241 - (void)deviceOrientationDidChange:(NSNotification*)notification { | 260 - (void)deviceOrientationDidChange:(NSNotification*)notification { |
242 _orientationHasChanged = YES; | 261 _orientationHasChanged = YES; |
(...skipping 103 matching lines...) |
346 } | 365 } |
347 | 366 |
348 bool AVFoundationVideoCapturer::IsRunning() { | 367 bool AVFoundationVideoCapturer::IsRunning() { |
349 return _capturer.isRunning; | 368 return _capturer.isRunning; |
350 } | 369 } |
351 | 370 |
352 AVCaptureSession* AVFoundationVideoCapturer::GetCaptureSession() { | 371 AVCaptureSession* AVFoundationVideoCapturer::GetCaptureSession() { |
353 return _capturer.captureSession; | 372 return _capturer.captureSession; |
354 } | 373 } |
355 | 374 |
| 375 bool AVFoundationVideoCapturer::CanUseBackCamera() const { |
| 376 return _capturer.canUseBackCamera; |
| 377 } |
| 378 |
356 void AVFoundationVideoCapturer::SetUseBackCamera(bool useBackCamera) { | 379 void AVFoundationVideoCapturer::SetUseBackCamera(bool useBackCamera) { |
357 _capturer.useBackCamera = useBackCamera; | 380 _capturer.useBackCamera = useBackCamera; |
358 } | 381 } |
359 | 382 |
360 bool AVFoundationVideoCapturer::GetUseBackCamera() const { | 383 bool AVFoundationVideoCapturer::GetUseBackCamera() const { |
361 return _capturer.useBackCamera; | 384 return _capturer.useBackCamera; |
362 } | 385 } |
363 | 386 |
364 void AVFoundationVideoCapturer::CaptureSampleBuffer( | 387 void AVFoundationVideoCapturer::CaptureSampleBuffer( |
365 CMSampleBufferRef sampleBuffer) { | 388 CMSampleBufferRef sampleBuffer) { |
(...skipping 62 matching lines...) |
428 | 451 |
429 void AVFoundationVideoCapturer::SignalFrameCapturedOnStartThread( | 452 void AVFoundationVideoCapturer::SignalFrameCapturedOnStartThread( |
430 const cricket::CapturedFrame* frame) { | 453 const cricket::CapturedFrame* frame) { |
431 RTC_DCHECK(_startThread->IsCurrent()); | 454 RTC_DCHECK(_startThread->IsCurrent()); |
432 // This will call a superclass method that will perform the frame conversion | 455 // This will call a superclass method that will perform the frame conversion |
433 // to I420. | 456 // to I420. |
434 SignalFrameCaptured(this, frame); | 457 SignalFrameCaptured(this, frame); |
435 } | 458 } |
436 | 459 |
437 } // namespace webrtc | 460 } // namespace webrtc |
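For context on the accessor added in this CL, here is a minimal, hypothetical usage sketch of webrtc::AVFoundationVideoCapturer::CanUseBackCamera() together with the existing SetUseBackCamera()/GetUseBackCamera() calls. The SwitchToBackCameraIfAvailable() helper and the |capturer| pointer are illustrative names only, not part of the CL.

#include "talk/app/webrtc/objc/avfoundationvideocapturer.h"

// Requests the rear-facing camera only when one is usable; returns whether
// the capturer ended up on the back camera.
static bool SwitchToBackCameraIfAvailable(
    webrtc::AVFoundationVideoCapturer* capturer) {
  // Some devices (e.g. the 16GB 5th generation iPod Touch) have no
  // rear-facing camera; CanUseBackCamera() reports that up front, and
  // the setter would otherwise just log and ignore the request.
  if (!capturer->CanUseBackCamera()) {
    return false;
  }
  capturer->SetUseBackCamera(true);
  return capturer->GetUseBackCamera();
}

Checking CanUseBackCamera() before setting the flag mirrors the new guard in -[RTCAVFoundationVideoCapturerInternal setUseBackCamera:], which is backed by whether _backDeviceInput was successfully created.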