OLD | NEW |
1 /* | 1 /* |
2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. | 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
(...skipping 19 matching lines...) |
30 cricket::FOURCC_NV12); | 30 cricket::FOURCC_NV12); |
31 | 31 |
 32 // This class is used to capture frames using AVFoundation APIs on iOS. It is | 32 // This class is used to capture frames using AVFoundation APIs on iOS. It is |
 33 // meant to be owned by an instance of AVFoundationVideoCapturer. The reason is | 33 // meant to be owned by an instance of AVFoundationVideoCapturer. The reason is |
 34 // that other webrtc objects own cricket::VideoCapturer, which is not | 34 // that other webrtc objects own cricket::VideoCapturer, which is not |
35 // ref counted. To prevent bad behavior we do not expose this class directly. | 35 // ref counted. To prevent bad behavior we do not expose this class directly. |
36 @interface RTCAVFoundationVideoCapturerInternal : NSObject | 36 @interface RTCAVFoundationVideoCapturerInternal : NSObject |
37 <AVCaptureVideoDataOutputSampleBufferDelegate> | 37 <AVCaptureVideoDataOutputSampleBufferDelegate> |
38 | 38 |
39 @property(nonatomic, readonly) AVCaptureSession *captureSession; | 39 @property(nonatomic, readonly) AVCaptureSession *captureSession; |
40 @property(nonatomic, readonly) BOOL isRunning; | 40 @property(nonatomic, readonly) dispatch_queue_t frameQueue; |
41 @property(nonatomic, readonly) BOOL canUseBackCamera; | 41 @property(nonatomic, readonly) BOOL canUseBackCamera; |
42 @property(nonatomic, assign) BOOL useBackCamera; // Defaults to NO. | 42 @property(nonatomic, assign) BOOL useBackCamera; // Defaults to NO. |
 | 43 @property(nonatomic, assign) BOOL isRunning; // Whether the capture session is running. |
 | 44 @property(atomic, assign) BOOL hasStarted; // Whether we have an unmatched start. |
43 | 45 |
44 // We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it | 46 // We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it |
45 // when we receive frames. This is safe because this object should be owned by | 47 // when we receive frames. This is safe because this object should be owned by |
46 // it. | 48 // it. |
47 - (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer; | 49 - (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer; |
48 | 50 |
49 // Starts and stops the capture session asynchronously. We cannot do this | 51 // Starts and stops the capture session asynchronously. We cannot do this |
50 // synchronously without blocking a WebRTC thread. | 52 // synchronously without blocking a WebRTC thread. |
51 - (void)start; | 53 - (void)start; |
52 - (void)stop; | 54 - (void)stop; |
53 | 55 |
54 @end | 56 @end |
55 | 57 |
56 @implementation RTCAVFoundationVideoCapturerInternal { | 58 @implementation RTCAVFoundationVideoCapturerInternal { |
57 // Keep pointers to inputs for convenience. | 59 // Keep pointers to inputs for convenience. |
58 AVCaptureDeviceInput *_frontCameraInput; | 60 AVCaptureDeviceInput *_frontCameraInput; |
59 AVCaptureDeviceInput *_backCameraInput; | 61 AVCaptureDeviceInput *_backCameraInput; |
60 AVCaptureVideoDataOutput *_videoDataOutput; | 62 AVCaptureVideoDataOutput *_videoDataOutput; |
61 // The cricket::VideoCapturer that owns this class. Should never be NULL. | 63 // The cricket::VideoCapturer that owns this class. Should never be NULL. |
62 webrtc::AVFoundationVideoCapturer *_capturer; | 64 webrtc::AVFoundationVideoCapturer *_capturer; |
63 BOOL _orientationHasChanged; | 65 BOOL _orientationHasChanged; |
| 66 BOOL _hasRetriedOnFatalError; |
| 67 BOOL _isRunning; |
| 68 BOOL _hasStarted; |
| 69 rtc::CriticalSection _crit; |
64 } | 70 } |
65 | 71 |
66 @synthesize captureSession = _captureSession; | 72 @synthesize captureSession = _captureSession; |
67 @synthesize isRunning = _isRunning; | 73 @synthesize frameQueue = _frameQueue; |
68 @synthesize useBackCamera = _useBackCamera; | 74 @synthesize useBackCamera = _useBackCamera; |
| 75 @synthesize hasStarted = _hasStarted; |
69 | 76 |
70 // This is called from the thread that creates the video source, which is likely | 77 // This is called from the thread that creates the video source, which is likely |
71 // the main thread. | 78 // the main thread. |
72 - (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer { | 79 - (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer { |
73 RTC_DCHECK(capturer); | 80 RTC_DCHECK(capturer); |
74 if (self = [super init]) { | 81 if (self = [super init]) { |
75 _capturer = capturer; | 82 _capturer = capturer; |
76 // Create the capture session and all relevant inputs and outputs. We need | 83 // Create the capture session and all relevant inputs and outputs. We need |
77 // to do this in init because the application may want the capture session | 84 // to do this in init because the application may want the capture session |
 78 // before we start the capturer for e.g. AVCaptureVideoPreviewLayer. All objects | 85 // before we start the capturer for e.g. AVCaptureVideoPreviewLayer. All objects |
79 // created here are retained until dealloc and never recreated. | 86 // created here are retained until dealloc and never recreated. |
80 if (![self setupCaptureSession]) { | 87 if (![self setupCaptureSession]) { |
81 return nil; | 88 return nil; |
82 } | 89 } |
83 NSNotificationCenter *center = [NSNotificationCenter defaultCenter]; | 90 NSNotificationCenter *center = [NSNotificationCenter defaultCenter]; |
84 [center addObserver:self | 91 [center addObserver:self |
85 selector:@selector(deviceOrientationDidChange:) | 92 selector:@selector(deviceOrientationDidChange:) |
86 name:UIDeviceOrientationDidChangeNotification | 93 name:UIDeviceOrientationDidChangeNotification |
87 object:nil]; | 94 object:nil]; |
88 [center addObserverForName:AVCaptureSessionRuntimeErrorNotification | 95 [center addObserver:self |
89 object:nil | 96 selector:@selector(handleCaptureSessionInterruption:) |
90 queue:nil | 97 name:AVCaptureSessionWasInterruptedNotification |
91 usingBlock:^(NSNotification *notification) { | 98 object:_captureSession]; |
92 RTCLogError(@"Capture session error: %@", notification.userInfo); | 99 [center addObserver:self |
93 }]; | 100 selector:@selector(handleCaptureSessionInterruptionEnded:) |
| 101 name:AVCaptureSessionInterruptionEndedNotification |
| 102 object:_captureSession]; |
| 103 [center addObserver:self |
| 104 selector:@selector(handleCaptureSessionRuntimeError:) |
| 105 name:AVCaptureSessionRuntimeErrorNotification |
| 106 object:_captureSession]; |
| 107 [center addObserver:self |
| 108 selector:@selector(handleCaptureSessionDidStartRunning:) |
| 109 name:AVCaptureSessionDidStartRunningNotification |
| 110 object:_captureSession]; |
| 111 [center addObserver:self |
| 112 selector:@selector(handleCaptureSessionDidStopRunning:) |
| 113 name:AVCaptureSessionDidStopRunningNotification |
| 114 object:_captureSession]; |
94 } | 115 } |
95 return self; | 116 return self; |
96 } | 117 } |
97 | 118 |
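The init comment above notes that an application may want the capture session before capture starts, e.g. for on-screen preview. A minimal usage sketch of that scenario, assuming app-side `internalCapturer` and `view` objects (names illustrative, not part of this CL):

    // Hypothetical app code: attach a preview layer to the session
    // created in init, before -start is ever called.
    AVCaptureVideoPreviewLayer *previewLayer = [AVCaptureVideoPreviewLayer
        layerWithSession:internalCapturer.captureSession];
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    previewLayer.frame = view.bounds;
    [view.layer addSublayer:previewLayer];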
98 - (void)dealloc { | 119 - (void)dealloc { |
99 RTC_DCHECK(!_isRunning); | 120 RTC_DCHECK(!self.hasStarted); |
100 [[NSNotificationCenter defaultCenter] removeObserver:self]; | 121 [[NSNotificationCenter defaultCenter] removeObserver:self]; |
101 _capturer = nullptr; | 122 _capturer = nullptr; |
102 } | 123 } |
103 | 124 |
104 - (AVCaptureSession *)captureSession { | 125 - (AVCaptureSession *)captureSession { |
105 return _captureSession; | 126 return _captureSession; |
106 } | 127 } |
107 | 128 |
| 129 - (dispatch_queue_t)frameQueue { |
| 130 if (!_frameQueue) { |
| 131 _frameQueue = |
| 132 dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video", |
| 133 DISPATCH_QUEUE_SERIAL); |
| 134 dispatch_set_target_queue( |
| 135 _frameQueue, |
| 136 dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0)); |
| 137 } |
| 138 return _frameQueue; |
| 139 } |
| 140 |
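A note on the getter above: a serial queue delivers sample buffers one at a time and in order, while retargeting at the high-priority global queue borrows that queue's scheduling priority to keep frame latency low. A generic sketch of the same pattern (queue label illustrative):

    dispatch_queue_t q =
        dispatch_queue_create("org.example.frames", DISPATCH_QUEUE_SERIAL);
    dispatch_set_target_queue(
        q, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
    dispatch_async(q, ^{ /* frame 1 */ });
    dispatch_async(q, ^{ /* frame 2 runs only after frame 1 completes */ });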
108 // Called from any thread (likely main thread). | 141 // Called from any thread (likely main thread). |
109 - (BOOL)canUseBackCamera { | 142 - (BOOL)canUseBackCamera { |
110 return _backCameraInput != nil; | 143 return _backCameraInput != nil; |
111 } | 144 } |
112 | 145 |
113 // Called from any thread (likely main thread). | 146 // Called from any thread (likely main thread). |
114 - (BOOL)useBackCamera { | 147 - (BOOL)useBackCamera { |
115 @synchronized(self) { | 148 @synchronized(self) { |
116 return _useBackCamera; | 149 return _useBackCamera; |
117 } | 150 } |
(...skipping 10 matching lines...) |
128 } | 161 } |
129 @synchronized(self) { | 162 @synchronized(self) { |
130 if (_useBackCamera == useBackCamera) { | 163 if (_useBackCamera == useBackCamera) { |
131 return; | 164 return; |
132 } | 165 } |
133 _useBackCamera = useBackCamera; | 166 _useBackCamera = useBackCamera; |
134 [self updateSessionInputForUseBackCamera:useBackCamera]; | 167 [self updateSessionInputForUseBackCamera:useBackCamera]; |
135 } | 168 } |
136 } | 169 } |
137 | 170 |
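The setter above delegates the actual camera switch to -updateSessionInputForUseBackCamera:, whose body falls outside this hunk. A plausible sketch of that kind of input swap, using AVFoundation's standard begin/commitConfiguration pattern (strictly an assumption about the elided code):

    // Hypothetical sketch; the real method body is elided from this diff.
    [_captureSession beginConfiguration];
    AVCaptureDeviceInput *oldInput =
        useBackCamera ? _frontCameraInput : _backCameraInput;
    AVCaptureDeviceInput *newInput =
        useBackCamera ? _backCameraInput : _frontCameraInput;
    [_captureSession removeInput:oldInput];
    if ([_captureSession canAddInput:newInput]) {
      [_captureSession addInput:newInput];
    }
    [_captureSession commitConfiguration];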
| 171 - (BOOL)isRunning { |
| 172 rtc::CritScope cs(&_crit); |
| 173 return _isRunning; |
| 174 } |
| 175 |
| 176 - (void)setIsRunning:(BOOL)isRunning { |
| 177 rtc::CritScope cs(&_crit); |
| 178 _isRunning = isRunning; |
| 179 } |
| 180 |
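The accessors above use WebRTC's RAII lock: rtc::CritScope acquires the rtc::CriticalSection in its constructor and releases it in its destructor, so every exit path unlocks. A self-contained C++ sketch of the same pattern (header path as of this era of the tree):

    #include "webrtc/base/criticalsection.h"

    class FrameCounter {
     public:
      void Increment() {
        rtc::CritScope cs(&crit_);  // Unlocks automatically at scope exit.
        ++count_;
      }
      int count() {
        rtc::CritScope cs(&crit_);
        return count_;
      }

     private:
      rtc::CriticalSection crit_;
      int count_ = 0;
    };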
138 // Called from WebRTC thread. | 181 // Called from WebRTC thread. |
139 - (void)start { | 182 - (void)start { |
140 if (_isRunning) { | 183 if (self.hasStarted) { |
141 return; | 184 return; |
142 } | 185 } |
143 _isRunning = YES; | 186 self.hasStarted = YES; |
144 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | 187 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession |
145 block:^{ | 188 block:^{ |
146 _orientationHasChanged = NO; | 189 _orientationHasChanged = NO; |
147 [self updateOrientation]; | 190 [self updateOrientation]; |
148 [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications]; | 191 [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications]; |
149 AVCaptureSession *captureSession = self.captureSession; | 192 AVCaptureSession *captureSession = self.captureSession; |
150 [captureSession startRunning]; | 193 [captureSession startRunning]; |
151 }]; | 194 }]; |
152 } | 195 } |
153 | 196 |
154 // Called from same thread as start. | 197 // Called from same thread as start. |
155 - (void)stop { | 198 - (void)stop { |
156 if (!_isRunning) { | 199 if (!self.hasStarted) { |
157 return; | 200 return; |
158 } | 201 } |
159 _isRunning = NO; | 202 self.hasStarted = NO; |
| 203 // Due to this async block, it's possible that the ObjC object outlives the |
| 204 // C++ one. In order to not invoke functions on the C++ object, we set |
| 205 // hasStarted immediately instead of dispatching it async. |
160 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | 206 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession |
161 block:^{ | 207 block:^{ |
162 [_videoDataOutput setSampleBufferDelegate:nil queue:nullptr]; | 208 [_videoDataOutput setSampleBufferDelegate:nil queue:nullptr]; |
163 [_captureSession stopRunning]; | 209 [_captureSession stopRunning]; |
164 [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications]; | 210 [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications]; |
165 }]; | 211 }]; |
166 } | 212 } |
167 | 213 |
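A hypothetical caller sketch for the pair above (names illustrative): both calls return immediately, and the actual running state is reported later through the DidStartRunning/DidStopRunning notification handlers below.

    RTCAVFoundationVideoCapturerInternal *internal =
        [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:capturer];
    [internal start];  // hasStarted flips to YES now; the session starts async.
    // ... later, from the same WebRTC thread:
    [internal stop];   // hasStarted flips to NO before the async teardown runs.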
| 214 #pragma mark iOS notifications |
| 215 |
| 216 - (void)deviceOrientationDidChange:(NSNotification *)notification { |
| 217 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession |
| 218 block:^{ |
| 219 _orientationHasChanged = YES; |
| 220 [self updateOrientation]; |
| 221 }]; |
| 222 } |
| 223 |
168 #pragma mark AVCaptureVideoDataOutputSampleBufferDelegate | 224 #pragma mark AVCaptureVideoDataOutputSampleBufferDelegate |
169 | 225 |
170 - (void)captureOutput:(AVCaptureOutput *)captureOutput | 226 - (void)captureOutput:(AVCaptureOutput *)captureOutput |
171 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer | 227 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer |
172 fromConnection:(AVCaptureConnection *)connection { | 228 fromConnection:(AVCaptureConnection *)connection { |
173 NSParameterAssert(captureOutput == _videoDataOutput); | 229 NSParameterAssert(captureOutput == _videoDataOutput); |
174 if (!_isRunning) { | 230 if (!self.hasStarted) { |
175 return; | 231 return; |
176 } | 232 } |
177 _capturer->CaptureSampleBuffer(sampleBuffer); | 233 _capturer->CaptureSampleBuffer(sampleBuffer); |
178 } | 234 } |
179 | 235 |
180 - (void)captureOutput:(AVCaptureOutput *)captureOutput | 236 - (void)captureOutput:(AVCaptureOutput *)captureOutput |
181 didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer | 237 didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer |
182 fromConnection:(AVCaptureConnection *)connection { | 238 fromConnection:(AVCaptureConnection *)connection { |
183 RTCLogError(@"Dropped sample buffer."); | 239 RTCLogError(@"Dropped sample buffer."); |
184 } | 240 } |
185 | 241 |
| 242 #pragma mark - AVCaptureSession notifications |
| 243 |
| 244 - (void)handleCaptureSessionInterruption:(NSNotification *)notification { |
| 245 NSString *reasonString = nil; |
| 246 #if defined(__IPHONE_9_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_9_0 |
| 247 NSNumber *reason = |
| 248 notification.userInfo[AVCaptureSessionInterruptionReasonKey]; |
| 249 if (reason) { |
| 250 switch (reason.intValue) { |
 | 251 case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground: |
| 252 reasonString = @"VideoDeviceNotAvailableInBackground"; |
| 253 break; |
| 254 case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient: |
| 255 reasonString = @"AudioDeviceInUseByAnotherClient"; |
| 256 break; |
| 257 case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient: |
| 258 reasonString = @"VideoDeviceInUseByAnotherClient"; |
| 259 break; |
 | 260 case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps: |
| 261 reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps"; |
| 262 break; |
| 263 } |
| 264 } |
| 265 #endif |
| 266 RTCLog(@"Capture session interrupted: %@", reasonString); |
| 267 // TODO(tkchin): Handle this case. |
| 268 } |
| 269 |
| 270 - (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification { |
| 271 RTCLog(@"Capture session interruption ended."); |
| 272 // TODO(tkchin): Handle this case. |
| 273 } |
| 274 |
| 275 - (void)handleCaptureSessionRuntimeError:(NSNotification *)notification { |
| 276 NSError *error = notification.userInfo[AVCaptureSessionErrorKey]; |
| 277 RTCLogError(@"Capture session runtime error: %@", error.localizedDescription); |
| 278 |
| 279 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession |
| 280 block:^{ |
| 281 if (error.code == AVErrorMediaServicesWereReset) { |
| 282 [self handleNonFatalError]; |
| 283 } else { |
| 284 [self handleFatalError]; |
| 285 } |
| 286 }]; |
| 287 } |
| 288 |
| 289 - (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification { |
| 290 RTCLog(@"Capture session started."); |
| 291 self.isRunning = YES; |
| 292 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession |
| 293 block:^{ |
| 294 // If we successfully restarted after an unknown error, allow future |
| 295 // retries on fatal errors. |
| 296 _hasRetriedOnFatalError = NO; |
| 297 }]; |
| 298 } |
| 299 |
| 300 - (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification { |
| 301 RTCLog(@"Capture session stopped."); |
| 302 self.isRunning = NO; |
| 303 } |
| 304 |
| 305 - (void)handleFatalError { |
| 306 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession |
| 307 block:^{ |
| 308 if (!_hasRetriedOnFatalError) { |
| 309 RTCLogWarning(@"Attempting to recover from fatal capture error."); |
| 310 [self handleNonFatalError]; |
| 311 _hasRetriedOnFatalError = YES; |
| 312 } else { |
| 313 RTCLogError(@"Previous fatal error recovery failed."); |
| 314 } |
| 315 }]; |
| 316 } |
| 317 |
| 318 - (void)handleNonFatalError { |
| 319 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession |
| 320 block:^{ |
| 321 if (self.hasStarted) { |
| 322 RTCLog(@"Restarting capture session after error."); |
| 323 [self.captureSession startRunning]; |
| 324 } |
| 325 }]; |
| 326 } |
| 327 |
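One hedged way to exercise the recovery paths above in a test is to post a synthetic runtime-error notification at the session the observer was registered for; AVErrorMediaServicesWereReset selects the non-fatal branch. An illustrative sketch, not part of this CL:

    NSError *resetError =
        [NSError errorWithDomain:AVFoundationErrorDomain
                            code:AVErrorMediaServicesWereReset
                        userInfo:nil];
    [[NSNotificationCenter defaultCenter]
        postNotificationName:AVCaptureSessionRuntimeErrorNotification
                      object:internal.captureSession
                    userInfo:@{AVCaptureSessionErrorKey : resetError}];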
186 #pragma mark - Private | 328 #pragma mark - Private |
187 | 329 |
188 - (BOOL)setupCaptureSession { | 330 - (BOOL)setupCaptureSession { |
189 AVCaptureSession *captureSession = [[AVCaptureSession alloc] init]; | 331 AVCaptureSession *captureSession = [[AVCaptureSession alloc] init]; |
190 #if defined(__IPHONE_7_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_7_0 | 332 #if defined(__IPHONE_7_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_7_0 |
191 NSString *version = [[UIDevice currentDevice] systemVersion]; | 333 NSString *version = [[UIDevice currentDevice] systemVersion]; |
192 if ([version integerValue] >= 7) { | 334 if ([version integerValue] >= 7) { |
193 captureSession.usesApplicationAudioSession = NO; | 335 captureSession.usesApplicationAudioSession = NO; |
194 } | 336 } |
195 #endif | 337 #endif |
(...skipping 38 matching lines...) |
234 // Make the capturer output NV12. Ideally we want I420 but that's not | 376 // Make the capturer output NV12. Ideally we want I420 but that's not |
235 // currently supported on iPhone / iPad. | 377 // currently supported on iPhone / iPad. |
236 AVCaptureVideoDataOutput *videoDataOutput = | 378 AVCaptureVideoDataOutput *videoDataOutput = |
237 [[AVCaptureVideoDataOutput alloc] init]; | 379 [[AVCaptureVideoDataOutput alloc] init]; |
239 videoDataOutput.videoSettings = @{ | 381 videoDataOutput.videoSettings = @{ |
240 (NSString *)kCVPixelBufferPixelFormatTypeKey : | 382 (NSString *)kCVPixelBufferPixelFormatTypeKey : |
241 @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) | 383 @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) |
242 }; | 384 }; |
243 videoDataOutput.alwaysDiscardsLateVideoFrames = NO; | 385 videoDataOutput.alwaysDiscardsLateVideoFrames = NO; |
244 dispatch_queue_t queue = | 386 [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue]; |
245 [RTCDispatcher dispatchQueueForType:RTCDispatcherTypeCaptureSession]; | |
246 [videoDataOutput setSampleBufferDelegate:self queue:queue]; | |
247 _videoDataOutput = videoDataOutput; | 387 _videoDataOutput = videoDataOutput; |
248 } | 388 } |
249 return _videoDataOutput; | 389 return _videoDataOutput; |
250 } | 390 } |
251 | 391 |
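The comment above notes the capturer outputs NV12 while I420 is wanted downstream; the superclass conversion mentioned near the end of this file handles that. For reference, a minimal sketch of such a conversion with libyuv (wrapper function and buffer names are illustrative):

    #include "libyuv/convert.h"

    // Convert an NV12 frame (Y plane + interleaved UV plane) to I420.
    // Returns true on success; all pointers refer to caller-owned buffers.
    bool NV12ToI420Frame(const uint8_t* src_y, int src_stride_y,
                         const uint8_t* src_uv, int src_stride_uv,
                         uint8_t* dst_y, int dst_stride_y,
                         uint8_t* dst_u, int dst_stride_u,
                         uint8_t* dst_v, int dst_stride_v,
                         int width, int height) {
      return libyuv::NV12ToI420(src_y, src_stride_y, src_uv, src_stride_uv,
                                dst_y, dst_stride_y, dst_u, dst_stride_u,
                                dst_v, dst_stride_v, width, height) == 0;
    }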
252 - (AVCaptureDevice *)videoCaptureDeviceForPosition: | 392 - (AVCaptureDevice *)videoCaptureDeviceForPosition: |
253 (AVCaptureDevicePosition)position { | 393 (AVCaptureDevicePosition)position { |
254 for (AVCaptureDevice *captureDevice in | 394 for (AVCaptureDevice *captureDevice in |
255 [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) { | 395 [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) { |
256 if (captureDevice.position == position) { | 396 if (captureDevice.position == position) { |
(...skipping 40 matching lines...) |
297 if (!backCameraInput) { | 437 if (!backCameraInput) { |
 298 RTCLogError(@"Failed to create back camera input: %@", | 438 RTCLogError(@"Failed to create back camera input: %@", |
299 error.localizedDescription); | 439 error.localizedDescription); |
300 return nil; | 440 return nil; |
301 } | 441 } |
302 _backCameraInput = backCameraInput; | 442 _backCameraInput = backCameraInput; |
303 } | 443 } |
304 return _backCameraInput; | 444 return _backCameraInput; |
305 } | 445 } |
306 | 446 |
307 - (void)deviceOrientationDidChange:(NSNotification *)notification { | |
308 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | |
309 block:^{ | |
310 _orientationHasChanged = YES; | |
311 [self updateOrientation]; | |
312 }]; | |
313 } | |
314 | |
315 // Called from capture session queue. | 447 // Called from capture session queue. |
316 - (void)updateOrientation { | 448 - (void)updateOrientation { |
317 AVCaptureConnection *connection = | 449 AVCaptureConnection *connection = |
318 [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo]; | 450 [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo]; |
319 if (!connection.supportsVideoOrientation) { | 451 if (!connection.supportsVideoOrientation) { |
320 // TODO(tkchin): set rotation bit on frames. | 452 // TODO(tkchin): set rotation bit on frames. |
321 return; | 453 return; |
322 } | 454 } |
323 AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait; | 455 AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait; |
324 switch ([UIDevice currentDevice].orientation) { | 456 switch ([UIDevice currentDevice].orientation) { |
(...skipping 211 matching lines...) |
536 | 668 |
537 // This will call a superclass method that will perform the frame conversion | 669 // This will call a superclass method that will perform the frame conversion |
538 // to I420. | 670 // to I420. |
539 SignalFrameCaptured(this, &frame); | 671 SignalFrameCaptured(this, &frame); |
540 | 672 |
541 CVPixelBufferUnlockBaseAddress(image_buffer, lock_flags); | 673 CVPixelBufferUnlockBaseAddress(image_buffer, lock_flags); |
542 CVBufferRelease(image_buffer); | 674 CVBufferRelease(image_buffer); |
543 } | 675 } |
544 | 676 |
545 } // namespace webrtc | 677 } // namespace webrtc |
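For completeness, a sketch of how the NV12 planes consumed by a conversion like the one above can be pulled out of the CVPixelBufferRef handled in CaptureSampleBuffer (variable names illustrative; assumes a biplanar buffer):

    CVPixelBufferRef pixel_buffer = CMSampleBufferGetImageBuffer(sample_buffer);
    CVPixelBufferLockBaseAddress(pixel_buffer, kCVPixelBufferLock_ReadOnly);
    const uint8_t* src_y = static_cast<const uint8_t*>(
        CVPixelBufferGetBaseAddressOfPlane(pixel_buffer, 0));
    int stride_y = static_cast<int>(
        CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer, 0));
    const uint8_t* src_uv = static_cast<const uint8_t*>(
        CVPixelBufferGetBaseAddressOfPlane(pixel_buffer, 1));
    int stride_uv = static_cast<int>(
        CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer, 1));
    int width = static_cast<int>(CVPixelBufferGetWidth(pixel_buffer));
    int height = static_cast<int>(CVPixelBufferGetHeight(pixel_buffer));
    // ... convert or copy the planes here ...
    CVPixelBufferUnlockBaseAddress(pixel_buffer, kCVPixelBufferLock_ReadOnly);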