OLD | NEW |
---|---|
1 /* | 1 /* |
2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. | 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
(...skipping 19 matching lines...) Expand all Loading... | |
30 cricket::FOURCC_NV12); | 30 cricket::FOURCC_NV12); |
31 | 31 |
32 // This class is used to capture frames using AVFoundation APIs on iOS. It is meant | 32 // This class is used to capture frames using AVFoundation APIs on iOS. It is meant |
33 // to be owned by an instance of AVFoundationVideoCapturer. The reason for this is | 33 // to be owned by an instance of AVFoundationVideoCapturer. The reason for this is |
34 // because other webrtc objects own cricket::VideoCapturer, which is not | 34 // because other webrtc objects own cricket::VideoCapturer, which is not |
35 // ref counted. To prevent bad behavior we do not expose this class directly. | 35 // ref counted. To prevent bad behavior we do not expose this class directly. |
36 @interface RTCAVFoundationVideoCapturerInternal : NSObject | 36 @interface RTCAVFoundationVideoCapturerInternal : NSObject |
37 <AVCaptureVideoDataOutputSampleBufferDelegate> | 37 <AVCaptureVideoDataOutputSampleBufferDelegate> |
38 | 38 |
39 @property(nonatomic, readonly) AVCaptureSession *captureSession; | 39 @property(nonatomic, readonly) AVCaptureSession *captureSession; |
40 @property(nonatomic, readonly) BOOL isRunning; | 40 @property(nonatomic, readonly) dispatch_queue_t frameQueue; |
41 @property(nonatomic, readonly) BOOL canUseBackCamera; | 41 @property(nonatomic, readonly) BOOL canUseBackCamera; |
42 @property(nonatomic, assign) BOOL useBackCamera; // Defaults to NO. | 42 @property(nonatomic, assign) BOOL useBackCamera; // Defaults to NO. |
43 @property(nonatomic, assign) BOOL isRunning; // Whether the capture session is running. | |
44 @property(atomic, assign) BOOL hasStarted; // Whether we have an unmatched start. | 
43 | 45 |
44 // We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it | 46 // We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it |
45 // when we receive frames. This is safe because this object should be owned by | 47 // when we receive frames. This is safe because this object should be owned by |
46 // it. | 48 // it. |
47 - (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer; | 49 - (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer; |
48 | 50 |
49 // Starts and stops the capture session asynchronously. We cannot do this | 51 // Starts and stops the capture session asynchronously. We cannot do this |
50 // synchronously without blocking a WebRTC thread. | 52 // synchronously without blocking a WebRTC thread. |
51 - (void)start; | 53 - (void)start; |
52 - (void)stop; | 54 - (void)stop; |
53 | 55 |
54 @end | 56 @end |
55 | 57 |
56 @implementation RTCAVFoundationVideoCapturerInternal { | 58 @implementation RTCAVFoundationVideoCapturerInternal { |
57 // Keep pointers to inputs for convenience. | 59 // Keep pointers to inputs for convenience. |
58 AVCaptureDeviceInput *_frontCameraInput; | 60 AVCaptureDeviceInput *_frontCameraInput; |
59 AVCaptureDeviceInput *_backCameraInput; | 61 AVCaptureDeviceInput *_backCameraInput; |
60 AVCaptureVideoDataOutput *_videoDataOutput; | 62 AVCaptureVideoDataOutput *_videoDataOutput; |
61 // The cricket::VideoCapturer that owns this class. Should never be NULL. | 63 // The cricket::VideoCapturer that owns this class. Should never be NULL. |
62 webrtc::AVFoundationVideoCapturer *_capturer; | 64 webrtc::AVFoundationVideoCapturer *_capturer; |
63 BOOL _orientationHasChanged; | 65 BOOL _orientationHasChanged; |
66 BOOL _hasRetriedOnFatalError; | |
67 BOOL _isRunning; | |
68 BOOL _hasStarted; | |
69 rtc::CriticalSection _crit; | |
64 } | 70 } |
65 | 71 |
66 @synthesize captureSession = _captureSession; | 72 @synthesize captureSession = _captureSession; |
67 @synthesize isRunning = _isRunning; | 73 @synthesize frameQueue = _frameQueue; |
68 @synthesize useBackCamera = _useBackCamera; | 74 @synthesize useBackCamera = _useBackCamera; |
75 @synthesize hasStarted = _hasStarted; | |
Chuck
2016/06/03 14:04:39
Do you want to explicitly synthesize isRunning?
tkchin_webrtc
2016/06/03 17:54:12
Nope. Already overriding both get and set.
| |
69 | 76 |
70 // This is called from the thread that creates the video source, which is likely | 77 // This is called from the thread that creates the video source, which is likely |
71 // the main thread. | 78 // the main thread. |
72 - (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer { | 79 - (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer { |
73 RTC_DCHECK(capturer); | 80 RTC_DCHECK(capturer); |
74 if (self = [super init]) { | 81 if (self = [super init]) { |
75 _capturer = capturer; | 82 _capturer = capturer; |
76 // Create the capture session and all relevant inputs and outputs. We need | 83 // Create the capture session and all relevant inputs and outputs. We need |
77 // to do this in init because the application may want the capture session | 84 // to do this in init because the application may want the capture session |
78 // before we start the capturer for e.g. AVCapturePreviewLayer. All objects | 85 // before we start the capturer for e.g. AVCapturePreviewLayer. All objects |
79 // created here are retained until dealloc and never recreated. | 86 // created here are retained until dealloc and never recreated. |
80 if (![self setupCaptureSession]) { | 87 if (![self setupCaptureSession]) { |
81 return nil; | 88 return nil; |
82 } | 89 } |
83 NSNotificationCenter *center = [NSNotificationCenter defaultCenter]; | 90 NSNotificationCenter *center = [NSNotificationCenter defaultCenter]; |
84 [center addObserver:self | 91 [center addObserver:self |
85 selector:@selector(deviceOrientationDidChange:) | 92 selector:@selector(deviceOrientationDidChange:) |
86 name:UIDeviceOrientationDidChangeNotification | 93 name:UIDeviceOrientationDidChangeNotification |
87 object:nil]; | 94 object:nil]; |
88 [center addObserverForName:AVCaptureSessionRuntimeErrorNotification | 95 [center addObserver:self |
89 object:nil | 96 selector:@selector(handleCaptureSessionInterruption:) |
90 queue:nil | 97 name:AVCaptureSessionWasInterruptedNotification |
91 usingBlock:^(NSNotification *notification) { | 98 object:_captureSession]; |
92 RTCLogError(@"Capture session error: %@", notification.userInfo); | 99 [center addObserver:self |
93 }]; | 100 selector:@selector(handleCaptureSessionInterruptionEnded:) |
101 name:AVCaptureSessionInterruptionEndedNotification | |
102 object:_captureSession]; | |
103 [center addObserver:self | |
104 selector:@selector(handleCaptureSessionRuntimeError:) | |
105 name:AVCaptureSessionRuntimeErrorNotification | |
106 object:_captureSession]; | |
107 [center addObserver:self | |
108 selector:@selector(handleCaptureSessionDidStartRunning:) | |
109 name:AVCaptureSessionDidStartRunningNotification | |
110 object:_captureSession]; | |
111 [center addObserver:self | |
112 selector:@selector(handleCaptureSessionDidStopRunning:) | |
113 name:AVCaptureSessionDidStopRunningNotification | |
114 object:_captureSession]; | |
94 } | 115 } |
95 return self; | 116 return self; |
96 } | 117 } |
97 | 118 |
98 - (void)dealloc { | 119 - (void)dealloc { |
99 RTC_DCHECK(!_isRunning); | 120 RTC_DCHECK(!self.isRunning); |
121 RTC_DCHECK(!self.hasStarted); | |
100 [[NSNotificationCenter defaultCenter] removeObserver:self]; | 122 [[NSNotificationCenter defaultCenter] removeObserver:self]; |
101 _capturer = nullptr; | 123 _capturer = nullptr; |
102 } | 124 } |
103 | 125 |
104 - (AVCaptureSession *)captureSession { | 126 - (AVCaptureSession *)captureSession { |
105 return _captureSession; | 127 return _captureSession; |
106 } | 128 } |
107 | 129 |
130 - (dispatch_queue_t)frameQueue { | |
131 if (!_frameQueue) { | |
132 _frameQueue = | |
133 dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video", | |
134 DISPATCH_QUEUE_SERIAL); | |
135 dispatch_set_target_queue( | |
136 _frameQueue, | |
137 dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0)); | |
138 } | |
139 return _frameQueue; | |
140 } | |
141 | |
108 // Called from any thread (likely main thread). | 142 // Called from any thread (likely main thread). |
109 - (BOOL)canUseBackCamera { | 143 - (BOOL)canUseBackCamera { |
110 return _backCameraInput != nil; | 144 return _backCameraInput != nil; |
111 } | 145 } |
112 | 146 |
113 // Called from any thread (likely main thread). | 147 // Called from any thread (likely main thread). |
114 - (BOOL)useBackCamera { | 148 - (BOOL)useBackCamera { |
115 @synchronized(self) { | 149 @synchronized(self) { |
116 return _useBackCamera; | 150 return _useBackCamera; |
117 } | 151 } |
(...skipping 10 matching lines...) Expand all Loading... | |
128 } | 162 } |
129 @synchronized(self) { | 163 @synchronized(self) { |
130 if (_useBackCamera == useBackCamera) { | 164 if (_useBackCamera == useBackCamera) { |
131 return; | 165 return; |
132 } | 166 } |
133 _useBackCamera = useBackCamera; | 167 _useBackCamera = useBackCamera; |
134 [self updateSessionInputForUseBackCamera:useBackCamera]; | 168 [self updateSessionInputForUseBackCamera:useBackCamera]; |
135 } | 169 } |
136 } | 170 } |
137 | 171 |
172 - (BOOL)isRunning { | |
173 rtc::CritScope cs(&_crit); | |
174 return _isRunning; | |
175 } | |
176 | |
177 - (void)setIsRunning:(BOOL)isRunning { | |
178 rtc::CritScope cs(&_crit); | |
179 _isRunning = isRunning; | |
180 } | |
181 | |
138 // Called from WebRTC thread. | 182 // Called from WebRTC thread. |
139 - (void)start { | 183 - (void)start { |
140 if (_isRunning) { | 184 if (self.hasStarted) { |
141 return; | 185 return; |
142 } | 186 } |
143 _isRunning = YES; | 187 self.hasStarted = YES; |
144 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | 188 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession |
145 block:^{ | 189 block:^{ |
146 _orientationHasChanged = NO; | 190 _orientationHasChanged = NO; |
147 [self updateOrientation]; | 191 [self updateOrientation]; |
148 [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications]; | 192 [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications]; |
149 AVCaptureSession *captureSession = self.captureSession; | 193 AVCaptureSession *captureSession = self.captureSession; |
150 [captureSession startRunning]; | 194 [captureSession startRunning]; |
151 }]; | 195 }]; |
152 } | 196 } |
153 | 197 |
154 // Called from same thread as start. | 198 // Called from same thread as start. |
155 - (void)stop { | 199 - (void)stop { |
156 if (!_isRunning) { | 200 if (!self.hasStarted) { |
157 return; | 201 return; |
158 } | 202 } |
159 _isRunning = NO; | 203 self.hasStarted = NO; |
204 // Due to this async block, it's possible that the ObjC object outlives the | |
205 // C++ one. In order to not invoke functions on the C++ object, we set | |
206 // hasStarted immediately instead of dispatching it async. | |
160 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | 207 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession |
161 block:^{ | 208 block:^{ |
162 [_videoDataOutput setSampleBufferDelegate:nil queue:nullptr]; | 209 [_videoDataOutput setSampleBufferDelegate:nil queue:nullptr]; |
163 [_captureSession stopRunning]; | 210 [_captureSession stopRunning]; |
164 [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications]; | 211 [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications]; |
165 }]; | 212 }]; |
166 } | 213 } |
167 | 214 |
215 #pragma mark iOS notifications | |
216 | |
217 - (void)deviceOrientationDidChange:(NSNotification *)notification { | |
218 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | |
219 block:^{ | |
220 _orientationHasChanged = YES; | |
221 [self updateOrientation]; | |
222 }]; | |
223 } | |
224 | |
168 #pragma mark AVCaptureVideoDataOutputSampleBufferDelegate | 225 #pragma mark AVCaptureVideoDataOutputSampleBufferDelegate |
169 | 226 |
170 - (void)captureOutput:(AVCaptureOutput *)captureOutput | 227 - (void)captureOutput:(AVCaptureOutput *)captureOutput |
171 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer | 228 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer |
172 fromConnection:(AVCaptureConnection *)connection { | 229 fromConnection:(AVCaptureConnection *)connection { |
173 NSParameterAssert(captureOutput == _videoDataOutput); | 230 NSParameterAssert(captureOutput == _videoDataOutput); |
174 if (!_isRunning) { | 231 if (!self.hasStarted) { |
175 return; | 232 return; |
176 } | 233 } |
177 _capturer->CaptureSampleBuffer(sampleBuffer); | 234 _capturer->CaptureSampleBuffer(sampleBuffer); |
178 } | 235 } |
179 | 236 |
180 - (void)captureOutput:(AVCaptureOutput *)captureOutput | 237 - (void)captureOutput:(AVCaptureOutput *)captureOutput |
181 didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer | 238 didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer |
182 fromConnection:(AVCaptureConnection *)connection { | 239 fromConnection:(AVCaptureConnection *)connection { |
183 RTCLogError(@"Dropped sample buffer."); | 240 RTCLogError(@"Dropped sample buffer."); |
184 } | 241 } |
185 | 242 |
243 #pragma mark - AVCaptureSession notifications | |
244 | |
245 - (void)handleCaptureSessionInterruption:(NSNotification *)notification { | |
246 NSString *reasonString = nil; | |
247 #if defined(__IPHONE_9_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_9_0 | |
248 NSNumber *reason = | |
249 notification.userInfo[AVCaptureSessionInterruptionReasonKey]; | |
250 if (reason) { | |
251 switch (reason.intValue) { | |
252 case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground: | 
253 reasonString = @"VideoDeviceNotAvailableInBackground"; | |
254 break; | |
255 case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient: | |
256 reasonString = @"AudioDeviceInUseByAnotherClient"; | |
257 break; | |
258 case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient: | |
259 reasonString = @"VideoDeviceInUseByAnotherClient"; | |
260 break; | |
261 case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps: | 
262 reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps"; | |
263 break; | |
264 } | |
265 } | |
266 #endif | |
267 RTCLog(@"Capture session interrupted: %@", reasonString); | |
268 // TODO(tkchin): Handle this case. | |
269 } | |
270 | |
271 - (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification { | |
272 RTCLog(@"Capture session interruption ended."); | |
273 // TODO(tkchin): Handle this case. | |
274 } | |
275 | |
276 - (void)handleCaptureSessionRuntimeError:(NSNotification *)notification { | |
277 NSError *error = notification.userInfo[AVCaptureSessionErrorKey]; | |
278 RTCLogError(@"Capture session runtime error: %@", error.localizedDescription); | |
279 | |
280 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | |
281 block:^{ | |
282 if (error.code == AVErrorMediaServicesWereReset) { | |
283 [self handleNonFatalError]; | |
284 } else { | |
285 [self handleFatalError]; | |
286 } | |
287 }]; | |
288 } | |
289 | |
290 - (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification { | |
291 RTCLog(@"Capture session started."); | |
292 self.isRunning = YES; | |
293 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | |
294 block:^{ | |
295 // If we successfully restarted after an unknown error, allow future | |
296 // retries on fatal errors. | |
297 _hasRetriedOnFatalError = NO; | |
298 }]; | |
299 } | |
300 | |
301 - (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification { | |
302 RTCLog(@"Capture session stopped."); | |
303 self.isRunning = NO; | |
304 } | |
305 | |
306 - (void)handleFatalError { | |
307 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | |
308 block:^{ | |
309 if (!_hasRetriedOnFatalError) { | |
310 RTCLogWarning(@"Attempting to recover from fatal capture error."); | |
311 [self handleNonFatalError]; | |
312 _hasRetriedOnFatalError = YES; | |
313 } else { | |
314 RTCLogError(@"Previous fatal error recovery failed."); | |
315 } | |
316 }]; | |
317 } | |
318 | |
319 - (void)handleNonFatalError { | |
320 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | |
321 block:^{ | |
322 if (self.hasStarted) { | |
323 RTCLog(@"Restarting capture session after error."); | |
324 [self.captureSession startRunning]; | |
325 } | |
326 }]; | |
327 } | |
328 | |
186 #pragma mark - Private | 329 #pragma mark - Private |
187 | 330 |
188 - (BOOL)setupCaptureSession { | 331 - (BOOL)setupCaptureSession { |
189 AVCaptureSession *captureSession = [[AVCaptureSession alloc] init]; | 332 AVCaptureSession *captureSession = [[AVCaptureSession alloc] init]; |
190 #if defined(__IPHONE_7_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_7_0 | 333 #if defined(__IPHONE_7_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_7_0 |
191 NSString *version = [[UIDevice currentDevice] systemVersion]; | 334 NSString *version = [[UIDevice currentDevice] systemVersion]; |
192 if ([version integerValue] >= 7) { | 335 if ([version integerValue] >= 7) { |
193 captureSession.usesApplicationAudioSession = NO; | 336 captureSession.usesApplicationAudioSession = NO; |
194 } | 337 } |
195 #endif | 338 #endif |
(...skipping 38 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
234 // Make the capturer output NV12. Ideally we want I420 but that's not | 377 // Make the capturer output NV12. Ideally we want I420 but that's not |
235 // currently supported on iPhone / iPad. | 378 // currently supported on iPhone / iPad. |
236 AVCaptureVideoDataOutput *videoDataOutput = | 379 AVCaptureVideoDataOutput *videoDataOutput = |
237 [[AVCaptureVideoDataOutput alloc] init]; | 380 [[AVCaptureVideoDataOutput alloc] init]; |
238 videoDataOutput = [[AVCaptureVideoDataOutput alloc] init]; | 381 videoDataOutput = [[AVCaptureVideoDataOutput alloc] init]; |
239 videoDataOutput.videoSettings = @{ | 382 videoDataOutput.videoSettings = @{ |
240 (NSString *)kCVPixelBufferPixelFormatTypeKey : | 383 (NSString *)kCVPixelBufferPixelFormatTypeKey : |
241 @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) | 384 @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) |
242 }; | 385 }; |
243 videoDataOutput.alwaysDiscardsLateVideoFrames = NO; | 386 videoDataOutput.alwaysDiscardsLateVideoFrames = NO; |
244 dispatch_queue_t queue = | 387 [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue]; |
245 [RTCDispatcher dispatchQueueForType:RTCDispatcherTypeCaptureSession]; | |
246 [videoDataOutput setSampleBufferDelegate:self queue:queue]; | |
247 _videoDataOutput = videoDataOutput; | 388 _videoDataOutput = videoDataOutput; |
248 } | 389 } |
249 return _videoDataOutput; | 390 return _videoDataOutput; |
250 } | 391 } |
251 | 392 |
252 - (AVCaptureDevice *)videoCaptureDeviceForPosition: | 393 - (AVCaptureDevice *)videoCaptureDeviceForPosition: |
253 (AVCaptureDevicePosition)position { | 394 (AVCaptureDevicePosition)position { |
254 for (AVCaptureDevice *captureDevice in | 395 for (AVCaptureDevice *captureDevice in |
255 [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) { | 396 [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) { |
256 if (captureDevice.position == position) { | 397 if (captureDevice.position == position) { |
(...skipping 40 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
297 if (!backCameraInput) { | 438 if (!backCameraInput) { |
298 RTCLogError(@"Failed to create back camera input: %@", | 439 RTCLogError(@"Failed to create back camera input: %@", |
299 error.localizedDescription); | 440 error.localizedDescription); |
300 return nil; | 441 return nil; |
301 } | 442 } |
302 _backCameraInput = backCameraInput; | 443 _backCameraInput = backCameraInput; |
303 } | 444 } |
304 return _backCameraInput; | 445 return _backCameraInput; |
305 } | 446 } |
306 | 447 |
307 - (void)deviceOrientationDidChange:(NSNotification *)notification { | |
308 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | |
309 block:^{ | |
310 _orientationHasChanged = YES; | |
311 [self updateOrientation]; | |
312 }]; | |
313 } | |
314 | |
315 // Called from capture session queue. | 448 // Called from capture session queue. |
316 - (void)updateOrientation { | 449 - (void)updateOrientation { |
317 AVCaptureConnection *connection = | 450 AVCaptureConnection *connection = |
318 [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo]; | 451 [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo]; |
319 if (!connection.supportsVideoOrientation) { | 452 if (!connection.supportsVideoOrientation) { |
320 // TODO(tkchin): set rotation bit on frames. | 453 // TODO(tkchin): set rotation bit on frames. |
321 return; | 454 return; |
322 } | 455 } |
323 AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait; | 456 AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait; |
324 switch ([UIDevice currentDevice].orientation) { | 457 switch ([UIDevice currentDevice].orientation) { |
(...skipping 211 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
536 | 669 |
537 // This will call a superclass method that will perform the frame conversion | 670 // This will call a superclass method that will perform the frame conversion |
538 // to I420. | 671 // to I420. |
539 SignalFrameCaptured(this, &frame); | 672 SignalFrameCaptured(this, &frame); |
540 | 673 |
541 CVPixelBufferUnlockBaseAddress(image_buffer, lock_flags); | 674 CVPixelBufferUnlockBaseAddress(image_buffer, lock_flags); |
542 CVBufferRelease(image_buffer); | 675 CVBufferRelease(image_buffer); |
543 } | 676 } |
544 | 677 |
545 } // namespace webrtc | 678 } // namespace webrtc |
OLD | NEW |