OLD | NEW |
---|---|
1 /* | 1 /* |
2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. | 2 * Copyright 2016 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
11 #include "avfoundationvideocapturer.h" | 11 #import "RTCAVFoundationVideoCapturerInternal.h" |
12 | 12 |
13 #import <AVFoundation/AVFoundation.h> | |
14 #import <Foundation/Foundation.h> | 13 #import <Foundation/Foundation.h> |
15 #if TARGET_OS_IPHONE | 14 #if TARGET_OS_IPHONE |
16 #import <UIKit/UIKit.h> | 15 #import <UIKit/UIKit.h> |
17 #endif | 16 #endif |
18 | 17 |
19 #import "RTCDispatcher+Private.h" | 18 #import "RTCDispatcher+Private.h" |
20 #import "WebRTC/RTCLogging.h" | 19 #import "WebRTC/RTCLogging.h" |
21 #if TARGET_OS_IPHONE | |
22 #import "WebRTC/UIDevice+RTCDevice.h" | |
23 #endif | |
24 | 20 |
25 #include "libyuv/rotate.h" | 21 NS_ASSUME_NONNULL_BEGIN |
26 | |
27 #include "webrtc/base/bind.h" | |
28 #include "webrtc/base/checks.h" | |
29 #include "webrtc/base/logging.h" | |
30 #include "webrtc/base/thread.h" | |
31 #include "webrtc/common_video/include/corevideo_frame_buffer.h" | |
32 #include "webrtc/common_video/rotation.h" | |
33 | |
34 // TODO(denicija): add support for higher frame rates. | |
35 // See http://crbug/webrtc/6355 for more info. | |
36 static const int kFramesPerSecond = 30; | |
37 | |
38 static inline BOOL IsMediaSubTypeSupported(FourCharCode mediaSubType) { | |
39 return (mediaSubType == kCVPixelFormatType_420YpCbCr8PlanarFullRange || | |
40 mediaSubType == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange); | |
41 } | |
42 | |
43 static inline BOOL IsFrameRateWithinRange(int fps, AVFrameRateRange *range) { | |
44 return range.minFrameRate <= fps && range.maxFrameRate >= fps; | |
45 } | |
46 | |
47 // Returns filtered array of device formats based on predefined constraints our | |
48 // stack imposes. | |
49 static NSArray<AVCaptureDeviceFormat *> *GetEligibleDeviceFormats( | |
50 const AVCaptureDevice *device, | |
51 int supportedFps) { | |
52 NSMutableArray<AVCaptureDeviceFormat *> *eligibleDeviceFormats = | |
53 [NSMutableArray array]; | |
54 | |
55 for (AVCaptureDeviceFormat *format in device.formats) { | |
56 // Filter out subTypes that we currently don't support in the stack | |
57 FourCharCode mediaSubType = | |
58 CMFormatDescriptionGetMediaSubType(format.formatDescription); | |
59 if (!IsMediaSubTypeSupported(mediaSubType)) { | |
60 continue; | |
61 } | |
62 | |
63 // Filter out frame rate ranges that we currently don't support in the stack | |
64 for (AVFrameRateRange *frameRateRange in format.videoSupportedFrameRateRanges) { | |
65 if (IsFrameRateWithinRange(supportedFps, frameRateRange)) { | |
66 [eligibleDeviceFormats addObject:format]; | |
67 break; | |
68 } | |
69 } | |
70 } | |
71 | |
72 return [eligibleDeviceFormats copy]; | |
73 } | |
74 | |
75 // Mapping from cricket::VideoFormat to AVCaptureDeviceFormat. | |
76 static AVCaptureDeviceFormat *GetDeviceFormatForVideoFormat( | |
77 const AVCaptureDevice *device, | |
78 const cricket::VideoFormat &videoFormat) { | |
79 AVCaptureDeviceFormat *desiredDeviceFormat = nil; | |
80 NSArray<AVCaptureDeviceFormat *> *eligibleFormats = | |
81 GetEligibleDeviceFormats(device, videoFormat.framerate()); | |
82 | |
83 for (AVCaptureDeviceFormat *deviceFormat in eligibleFormats) { | |
84 CMVideoDimensions dimension = | |
85 CMVideoFormatDescriptionGetDimensions(deviceFormat.formatDescription); | |
86 FourCharCode mediaSubType = | |
87 CMFormatDescriptionGetMediaSubType(deviceFormat.formatDescription); | |
88 | |
89 if (videoFormat.width == dimension.width && | |
90 videoFormat.height == dimension.height) { | |
91 if (mediaSubType == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) { | |
92 // This is the preferred format so no need to wait for better option. | |
93 return deviceFormat; | |
94 } else { | |
95 // This is a good candidate, but let's wait for something better. | |
96 desiredDeviceFormat = deviceFormat; | |
97 } | |
98 } | |
99 } | |
100 | |
101 return desiredDeviceFormat; | |
102 } | |
103 | |
104 // Mapping from AVCaptureDeviceFormat to cricket::VideoFormat for given input | |
105 // device. | |
106 static std::set<cricket::VideoFormat> GetSupportedVideoFormatsForDevice( | |
107 AVCaptureDevice *device) { | |
108 std::set<cricket::VideoFormat> supportedFormats; | |
109 | |
110 NSArray<AVCaptureDeviceFormat *> *eligibleFormats = | |
111 GetEligibleDeviceFormats(device, kFramesPerSecond); | |
112 | |
113 for (AVCaptureDeviceFormat *deviceFormat in eligibleFormats) { | |
114 CMVideoDimensions dimension = | |
115 CMVideoFormatDescriptionGetDimensions(deviceFormat.formatDescription); | |
116 cricket::VideoFormat format = cricket::VideoFormat( | |
117 dimension.width, dimension.height, | |
118 cricket::VideoFormat::FpsToInterval(kFramesPerSecond), | |
119 cricket::FOURCC_NV12); | |
120 supportedFormats.insert(format); | |
121 } | |
122 | |
123 return supportedFormats; | |
124 } | |
125 | |
126 // Sets device format for the provided capture device. Returns YES/NO depending on success. | |
127 // TODO(denicija): When this file is split this static method should be reconsidered. | |
128 // Perhaps adding a category on AVCaptureDevice would be better. | |
129 static BOOL SetFormatForCaptureDevice(AVCaptureDevice *device, | |
130 AVCaptureSession *session, | |
131 const cricket::VideoFormat &format) { | |
132 AVCaptureDeviceFormat *deviceFormat = | |
133 GetDeviceFormatForVideoFormat(device, format); | |
134 const int fps = cricket::VideoFormat::IntervalToFps(format.interval); | |
135 | |
136 NSError *error = nil; | |
137 BOOL success = YES; | |
138 [session beginConfiguration]; | |
139 if ([device lockForConfiguration:&error]) { | |
140 @try { | |
141 device.activeFormat = deviceFormat; | |
142 device.activeVideoMinFrameDuration = CMTimeMake(1, fps); | |
143 } @catch (NSException *exception) { | |
144 RTCLogError( | |
145 @"Failed to set active format!\n User info:%@", | |
146 exception.userInfo); | |
147 success = NO; | |
148 } | |
149 | |
150 [device unlockForConfiguration]; | |
151 } else { | |
152 RTCLogError( | |
153 @"Failed to lock device %@. Error: %@", | |
154 device, error.userInfo); | |
155 success = NO; | |
156 } | |
157 [session commitConfiguration]; | |
158 | |
159 return success; | |
160 } | |
161 | |
162 // This class is used to capture frames using AVFoundation APIs on iOS. It is meant | |
163 // to be owned by an instance of AVFoundationVideoCapturer. The reason for this | |
164 // is that other webrtc objects own cricket::VideoCapturer, which is not | |
165 // ref counted. To prevent bad behavior we do not expose this class directly. | |
166 @interface RTCAVFoundationVideoCapturerInternal : NSObject | |
167 <AVCaptureVideoDataOutputSampleBufferDelegate> | |
168 | |
169 @property(nonatomic, readonly) AVCaptureSession *captureSession; | |
170 @property(nonatomic, readonly) dispatch_queue_t frameQueue; | |
171 @property(nonatomic, readonly) BOOL canUseBackCamera; | |
172 @property(nonatomic, assign) BOOL useBackCamera; // Defaults to NO. | |
173 @property(atomic, assign) BOOL isRunning; // Whether the capture session is running. | |
174 @property(atomic, assign) BOOL hasStarted; // Whether we have an unmatched start. | |
175 | |
176 // We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it | |
177 // when we receive frames. This is safe because this object should be owned by | |
178 // it. | |
179 - (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer; | |
180 - (AVCaptureDevice *)getActiveCaptureDevice; | |
181 | |
182 - (nullable AVCaptureDevice *)frontCaptureDevice; | |
183 - (nullable AVCaptureDevice *)backCaptureDevice; | |
184 | |
185 // Starts and stops the capture session asynchronously. We cannot do this | |
186 // synchronously without blocking a WebRTC thread. | |
187 - (void)start; | |
188 - (void)stop; | |
189 | |
190 @end | |
191 | |
192 @implementation RTCAVFoundationVideoCapturerInternal { | 22 @implementation RTCAVFoundationVideoCapturerInternal { |
193 // Keep pointers to inputs for convenience. | 23 // Keep pointers to inputs for convenience. |
194 AVCaptureDeviceInput *_frontCameraInput; | 24 AVCaptureDeviceInput *_frontCameraInput; |
195 AVCaptureDeviceInput *_backCameraInput; | 25 AVCaptureDeviceInput *_backCameraInput; |
196 AVCaptureVideoDataOutput *_videoDataOutput; | 26 AVCaptureVideoDataOutput *_videoDataOutput; |
197 // The cricket::VideoCapturer that owns this class. Should never be NULL. | 27 // The cricket::VideoCapturer that owns this class. Should never be NULL. |
198 webrtc::AVFoundationVideoCapturer *_capturer; | 28 webrtc::AVFoundationVideoCapturer *_capturer; |
199 webrtc::VideoRotation _rotation; | 29 webrtc::VideoRotation _rotation; |
200 BOOL _hasRetriedOnFatalError; | 30 BOOL _hasRetriedOnFatalError; |
201 BOOL _isRunning; | 31 BOOL _isRunning; |
(...skipping 63 matching lines...) |
265 } | 95 } |
266 | 96 |
267 - (AVCaptureSession *)captureSession { | 97 - (AVCaptureSession *)captureSession { |
268 return _captureSession; | 98 return _captureSession; |
269 } | 99 } |
270 | 100 |
271 - (AVCaptureDevice *)getActiveCaptureDevice { | 101 - (AVCaptureDevice *)getActiveCaptureDevice { |
272 return self.useBackCamera ? _backCameraInput.device : _frontCameraInput.device; | 102 return self.useBackCamera ? _backCameraInput.device : _frontCameraInput.device; |
273 } | 103 } |
274 | 104 |
275 - (AVCaptureDevice *)frontCaptureDevice { | 105 - (nullable AVCaptureDevice *)frontCaptureDevice { |
276 return _frontCameraInput.device; | 106 return _frontCameraInput.device; |
277 } | 107 } |
278 | 108 |
279 - (AVCaptureDevice *)backCaptureDevice { | 109 - (nullable AVCaptureDevice *)backCaptureDevice { |
280 return _backCameraInput.device; | 110 return _backCameraInput.device; |
281 } | 111 } |
282 | 112 |
283 - (dispatch_queue_t)frameQueue { | 113 - (dispatch_queue_t)frameQueue { |
284 if (!_frameQueue) { | 114 if (!_frameQueue) { |
285 _frameQueue = | 115 _frameQueue = |
286 dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video", | 116 dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video", DISPATCH_QUEUE_SERIAL); |
287 DISPATCH_QUEUE_SERIAL); | 117 dispatch_set_target_queue(_frameQueue, |
288 dispatch_set_target_queue( | 118 dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0)); |
289 _frameQueue, | |
290 dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0)); | |
291 } | 119 } |
292 return _frameQueue; | 120 return _frameQueue; |
293 } | 121 } |
294 | 122 |
295 // Called from any thread (likely main thread). | 123 // Called from any thread (likely main thread). |
296 - (BOOL)canUseBackCamera { | 124 - (BOOL)canUseBackCamera { |
297 return _backCameraInput != nil; | 125 return _backCameraInput != nil; |
298 } | 126 } |
299 | 127 |
300 // Called from any thread (likely main thread). | 128 // Called from any thread (likely main thread). |
301 - (BOOL)useBackCamera { | 129 - (BOOL)useBackCamera { |
302 @synchronized(self) { | 130 @synchronized(self) { |
303 return _useBackCamera; | 131 return _useBackCamera; |
304 } | 132 } |
305 } | 133 } |
306 | 134 |
307 // Called from any thread (likely main thread). | 135 // Called from any thread (likely main thread). |
308 - (void)setUseBackCamera:(BOOL)useBackCamera { | 136 - (void)setUseBackCamera:(BOOL)useBackCamera { |
309 if (!self.canUseBackCamera) { | 137 if (!self.canUseBackCamera) { |
310 if (useBackCamera) { | 138 if (useBackCamera) { |
311 RTCLogWarning(@"No rear-facing camera exists or it cannot be used;" | 139 RTCLogWarning(@"No rear-facing camera exists or it cannot be used;" |
312 "not switching."); | 140 "not switching."); |
313 } | 141 } |
314 return; | 142 return; |
315 } | 143 } |
316 @synchronized(self) { | 144 @synchronized(self) { |
317 if (_useBackCamera == useBackCamera) { | 145 if (_useBackCamera == useBackCamera) { |
318 return; | 146 return; |
319 } | 147 } |
320 _useBackCamera = useBackCamera; | 148 _useBackCamera = useBackCamera; |
321 [self updateSessionInputForUseBackCamera:useBackCamera]; | 149 [self updateSessionInputForUseBackCamera:useBackCamera]; |
322 } | 150 } |
323 } | 151 } |
324 | 152 |
325 // Called from WebRTC thread. | 153 // Called from WebRTC thread. |
326 - (void)start { | 154 - (void)start { |
327 if (self.hasStarted) { | 155 if (self.hasStarted) { |
328 return; | 156 return; |
329 } | 157 } |
330 self.hasStarted = YES; | 158 self.hasStarted = YES; |
331 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | 159 [RTCDispatcher |
332 block:^{ | 160 dispatchAsyncOnType:RTCDispatcherTypeCaptureSession |
161 block:^{ | |
333 #if TARGET_OS_IPHONE | 162 #if TARGET_OS_IPHONE |
334 // Default to portrait orientation on iPhone. This will be reset in | 163 // Default to portrait orientation on iPhone. This will be reset in |
335 // updateOrientation unless orientation is unknown/faceup/facedown. | 164 // updateOrientation unless orientation is unknown/faceup/facedown. |
336 _rotation = webrtc::kVideoRotation_90; | 165 _rotation = webrtc::kVideoRotation_90; |
337 #else | 166 #else |
338 // No rotation on Mac. | 167 // No rotation on Mac. |
339 _rotation = webrtc::kVideoRotation_0; | 168 _rotation = webrtc::kVideoRotation_0; |
340 #endif | 169 #endif |
341 [self updateOrientation]; | 170 [self updateOrientation]; |
342 #if TARGET_OS_IPHONE | 171 #if TARGET_OS_IPHONE |
343 [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications]; | 172 [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications]; |
344 #endif | 173 #endif |
345 AVCaptureSession *captureSession = self.captureSession; | 174 AVCaptureSession *captureSession = self.captureSession; |
346 [captureSession startRunning]; | 175 [captureSession startRunning]; |
347 }]; | 176 }]; |
348 } | 177 } |
349 | 178 |
350 // Called from same thread as start. | 179 // Called from same thread as start. |
351 - (void)stop { | 180 - (void)stop { |
352 if (!self.hasStarted) { | 181 if (!self.hasStarted) { |
353 return; | 182 return; |
354 } | 183 } |
355 self.hasStarted = NO; | 184 self.hasStarted = NO; |
356 // Due to this async block, it's possible that the ObjC object outlives the | 185 // Due to this async block, it's possible that the ObjC object outlives the |
357 // C++ one. In order to not invoke functions on the C++ object, we set | 186 // C++ one. In order to not invoke functions on the C++ object, we set |
358 // hasStarted immediately instead of dispatching it async. | 187 // hasStarted immediately instead of dispatching it async. |
359 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | 188 [RTCDispatcher |
360 block:^{ | 189 dispatchAsyncOnType:RTCDispatcherTypeCaptureSession |
361 [_videoDataOutput setSampleBufferDelegate:nil queue:nullptr]; | 190 block:^{ |
362 [_captureSession stopRunning]; | 191 [_videoDataOutput setSampleBufferDelegate:nil queue:nullptr]; |
192 [_captureSession stopRunning]; | |
363 #if TARGET_OS_IPHONE | 193 #if TARGET_OS_IPHONE |
364 [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications]; | 194 [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications]; |
365 #endif | 195 #endif |
366 }]; | 196 }]; |
367 } | 197 } |
368 | 198 |
369 #pragma mark iOS notifications | 199 #pragma mark iOS notifications |
370 | 200 |
371 #if TARGET_OS_IPHONE | 201 #if TARGET_OS_IPHONE |
372 - (void)deviceOrientationDidChange:(NSNotification *)notification { | 202 - (void)deviceOrientationDidChange:(NSNotification *)notification { |
373 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | 203 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession |
374 block:^{ | 204 block:^{ |
375 [self updateOrientation]; | 205 [self updateOrientation]; |
376 }]; | 206 }]; |
377 } | 207 } |
378 #endif | 208 #endif |
379 | 209 |
380 #pragma mark AVCaptureVideoDataOutputSampleBufferDelegate | 210 #pragma mark AVCaptureVideoDataOutputSampleBufferDelegate |
381 | 211 |
382 - (void)captureOutput:(AVCaptureOutput *)captureOutput | 212 - (void)captureOutput:(AVCaptureOutput *)captureOutput |
383 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer | 213 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer |
384 fromConnection:(AVCaptureConnection *)connection { | 214 fromConnection:(AVCaptureConnection *)connection { |
385 NSParameterAssert(captureOutput == _videoDataOutput); | 215 NSParameterAssert(captureOutput == _videoDataOutput); |
386 if (!self.hasStarted) { | 216 if (!self.hasStarted) { |
387 return; | 217 return; |
388 } | 218 } |
389 _capturer->CaptureSampleBuffer(sampleBuffer, _rotation); | 219 _capturer->CaptureSampleBuffer(sampleBuffer, _rotation); |
390 } | 220 } |
391 | 221 |
392 - (void)captureOutput:(AVCaptureOutput *)captureOutput | 222 - (void)captureOutput:(AVCaptureOutput *)captureOutput |
393 didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer | 223 didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer |
394 fromConnection:(AVCaptureConnection *)connection { | 224 fromConnection:(AVCaptureConnection *)connection { |
395 RTCLogError(@"Dropped sample buffer."); | 225 RTCLogError(@"Dropped sample buffer."); |
396 } | 226 } |
397 | 227 |
398 #pragma mark - AVCaptureSession notifications | 228 #pragma mark - AVCaptureSession notifications |
399 | 229 |
400 - (void)handleCaptureSessionInterruption:(NSNotification *)notification { | 230 - (void)handleCaptureSessionInterruption:(NSNotification *)notification { |
401 NSString *reasonString = nil; | 231 NSString *reasonString = nil; |
402 #if defined(__IPHONE_9_0) && defined(__IPHONE_OS_VERSION_MAX_ALLOWED) \ | 232 #if defined(__IPHONE_9_0) && defined(__IPHONE_OS_VERSION_MAX_ALLOWED) && \ |
403 && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_9_0 | 233 __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_9_0 |
404 NSNumber *reason = | 234 NSNumber *reason = notification.userInfo[AVCaptureSessionInterruptionReasonKey]; |
405 notification.userInfo[AVCaptureSessionInterruptionReasonKey]; | |
406 if (reason) { | 235 if (reason) { |
407 switch (reason.intValue) { | 236 switch (reason.intValue) { |
408 case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground: | 237 case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground: |
409 reasonString = @"VideoDeviceNotAvailableInBackground"; | 238 reasonString = @"VideoDeviceNotAvailableInBackground"; |
410 break; | 239 break; |
411 case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient: | 240 case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient: |
412 reasonString = @"AudioDeviceInUseByAnotherClient"; | 241 reasonString = @"AudioDeviceInUseByAnotherClient"; |
413 break; | 242 break; |
414 case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient: | 243 case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient: |
415 reasonString = @"VideoDeviceInUseByAnotherClient"; | 244 reasonString = @"VideoDeviceInUseByAnotherClient"; |
416 break; | 245 break; |
417 case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps: | 246 case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps: |
418 reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps"; | 247 reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps"; |
419 break; | 248 break; |
420 } | 249 } |
421 } | 250 } |
422 #endif | 251 #endif |
423 RTCLog(@"Capture session interrupted: %@", reasonString); | 252 RTCLog(@"Capture session interrupted: %@", reasonString); |
424 // TODO(tkchin): Handle this case. | 253 // TODO(tkchin): Handle this case. |
425 } | 254 } |
426 | 255 |
427 - (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification { | 256 - (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification { |
428 RTCLog(@"Capture session interruption ended."); | 257 RTCLog(@"Capture session interruption ended."); |
429 // TODO(tkchin): Handle this case. | 258 // TODO(tkchin): Handle this case. |
430 } | 259 } |
431 | 260 |
432 - (void)handleCaptureSessionRuntimeError:(NSNotification *)notification { | 261 - (void)handleCaptureSessionRuntimeError:(NSNotification *)notification { |
433 NSError *error = | 262 NSError *error = [notification.userInfo objectForKey:AVCaptureSessionErrorKey]; |
434 [notification.userInfo objectForKey:AVCaptureSessionErrorKey]; | |
435 RTCLogError(@"Capture session runtime error: %@", error); | 263 RTCLogError(@"Capture session runtime error: %@", error); |
436 | 264 |
437 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | 265 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession |
438 block:^{ | 266 block:^{ |
439 #if TARGET_OS_IPHONE | 267 #if TARGET_OS_IPHONE |
440 if (error.code == AVErrorMediaServicesWereReset) { | 268 if (error.code == AVErrorMediaServicesWereReset) { |
441 [self handleNonFatalError]; | 269 [self handleNonFatalError]; |
442 } else { | 270 } else { |
443 [self handleFatalError]; | 271 [self handleFatalError]; |
444 } | 272 } |
445 #else | 273 #else |
446 [self handleFatalError]; | 274 [self handleFatalError]; |
447 #endif | 275 #endif |
448 }]; | 276 }]; |
449 } | 277 } |
450 | 278 |
451 - (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification { | 279 - (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification { |
452 RTCLog(@"Capture session started."); | 280 RTCLog(@"Capture session started."); |
453 | 281 |
454 self.isRunning = YES; | 282 self.isRunning = YES; |
455 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | 283 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession |
456 block:^{ | 284 block:^{ |
457 // If we successfully restarted after an unknown error, allow future | 285 // If we successfully restarted after an unknown error, |
458 // retries on fatal errors. | 286 // allow future retries on fatal errors. |
459 _hasRetriedOnFatalError = NO; | 287 _hasRetriedOnFatalError = NO; |
460 }]; | 288 }]; |
461 } | 289 } |
462 | 290 |
463 - (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification { | 291 - (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification { |
464 RTCLog(@"Capture session stopped."); | 292 RTCLog(@"Capture session stopped."); |
465 self.isRunning = NO; | 293 self.isRunning = NO; |
466 } | 294 } |
467 | 295 |
468 - (void)handleFatalError { | 296 - (void)handleFatalError { |
469 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | 297 [RTCDispatcher |
470 block:^{ | 298 dispatchAsyncOnType:RTCDispatcherTypeCaptureSession |
471 if (!_hasRetriedOnFatalError) { | 299 block:^{ |
472 RTCLogWarning(@"Attempting to recover from fatal capture error."); | 300 if (!_hasRetriedOnFatalError) { |
473 [self handleNonFatalError]; | 301 RTCLogWarning(@"Attempting to recover from fatal captu re error."); |
474 _hasRetriedOnFatalError = YES; | 302 [self handleNonFatalError]; |
475 } else { | 303 _hasRetriedOnFatalError = YES; |
476 RTCLogError(@"Previous fatal error recovery failed."); | 304 } else { |
477 } | 305 RTCLogError(@"Previous fatal error recovery failed."); |
478 }]; | 306 } |
307 }]; | |
479 } | 308 } |
480 | 309 |
481 - (void)handleNonFatalError { | 310 - (void)handleNonFatalError { |
482 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | 311 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession |
483 block:^{ | 312 block:^{ |
484 if (self.hasStarted) { | 313 if (self.hasStarted) { |
485 RTCLog(@"Restarting capture session after error."); | 314 RTCLog(@"Restarting capture session after e rror."); |
486 [self.captureSession startRunning]; | 315 [self.captureSession startRunning]; |
487 } | 316 } |
488 }]; | 317 }]; |
489 } | 318 } |
490 | 319 |
491 #if TARGET_OS_IPHONE | 320 #if TARGET_OS_IPHONE |
492 | 321 |
493 #pragma mark - UIApplication notifications | 322 #pragma mark - UIApplication notifications |
494 | 323 |
495 - (void)handleApplicationDidBecomeActive:(NSNotification *)notification { | 324 - (void)handleApplicationDidBecomeActive:(NSNotification *)notification { |
496 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | 325 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession |
497 block:^{ | 326 block:^{ |
498 if (self.hasStarted && !self.captureSession.isRunning) { | 327 if (self.hasStarted && !self.captureSession.isRunning) { |
499 RTCLog(@"Restarting capture session on active."); | 328 RTCLog(@"Restarting capture session on active."); |
500 [self.captureSession startRunning]; | 329 [self.captureSession startRunning]; |
501 } | 330 } |
502 }]; | 331 }]; |
503 } | 332 } |
504 | 333 |
505 #endif // TARGET_OS_IPHONE | 334 #endif // TARGET_OS_IPHONE |
506 | 335 |
507 #pragma mark - Private | 336 #pragma mark - Private |
508 | 337 |
509 - (BOOL)setupCaptureSession { | 338 - (BOOL)setupCaptureSession { |
510 AVCaptureSession *captureSession = [[AVCaptureSession alloc] init]; | 339 AVCaptureSession *captureSession = [[AVCaptureSession alloc] init]; |
511 #if defined(WEBRTC_IOS) | 340 #if defined(WEBRTC_IOS) |
512 captureSession.usesApplicationAudioSession = NO; | 341 captureSession.usesApplicationAudioSession = NO; |
(...skipping 14 matching lines...) |
527 RTCLogError(@"No front camera for capture session."); | 356 RTCLogError(@"No front camera for capture session."); |
528 return NO; | 357 return NO; |
529 } | 358 } |
530 | 359 |
531 // Add the inputs. | 360 // Add the inputs. |
532 if (![captureSession canAddInput:frontCameraInput] || | 361 if (![captureSession canAddInput:frontCameraInput] || |
533 (backCameraInput && ![captureSession canAddInput:backCameraInput])) { | 362 (backCameraInput && ![captureSession canAddInput:backCameraInput])) { |
534 RTCLogError(@"Session does not support capture inputs."); | 363 RTCLogError(@"Session does not support capture inputs."); |
535 return NO; | 364 return NO; |
536 } | 365 } |
537 AVCaptureDeviceInput *input = self.useBackCamera ? | 366 AVCaptureDeviceInput *input = self.useBackCamera ? backCameraInput : frontCameraInput; |
538 backCameraInput : frontCameraInput; | |
539 [captureSession addInput:input]; | 367 [captureSession addInput:input]; |
540 | 368 |
541 _captureSession = captureSession; | 369 _captureSession = captureSession; |
542 return YES; | 370 return YES; |
543 } | 371 } |
544 | 372 |
545 - (AVCaptureVideoDataOutput *)videoDataOutput { | 373 - (AVCaptureVideoDataOutput *)videoDataOutput { |
546 if (!_videoDataOutput) { | 374 if (!_videoDataOutput) { |
547 // Make the capturer output NV12. Ideally we want I420 but that's not | 375 // Make the capturer output NV12. Ideally we want I420 but that's not |
548 // currently supported on iPhone / iPad. | 376 // currently supported on iPhone / iPad. |
549 AVCaptureVideoDataOutput *videoDataOutput = | 377 AVCaptureVideoDataOutput *videoDataOutput = [[AVCaptureVideoDataOutput alloc] init]; |
550 [[AVCaptureVideoDataOutput alloc] init]; | |
551 videoDataOutput.videoSettings = @{ | 378 videoDataOutput.videoSettings = @{ |
552 (NSString *)kCVPixelBufferPixelFormatTypeKey : | 379 (NSString *) |
553 @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) | 380 kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) |
554 }; | 381 }; |
555 videoDataOutput.alwaysDiscardsLateVideoFrames = NO; | 382 videoDataOutput.alwaysDiscardsLateVideoFrames = NO; |
556 [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue]; | 383 [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue]; |
557 _videoDataOutput = videoDataOutput; | 384 _videoDataOutput = videoDataOutput; |
558 } | 385 } |
559 return _videoDataOutput; | 386 return _videoDataOutput; |
560 } | 387 } |
561 | 388 |
562 - (AVCaptureDevice *)videoCaptureDeviceForPosition: | 389 - (AVCaptureDevice *)videoCaptureDeviceForPosition:(AVCaptureDevicePosition)position { |
563 (AVCaptureDevicePosition)position { | 390 for (AVCaptureDevice *captureDevice in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) { |
564 for (AVCaptureDevice *captureDevice in | |
565 [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) { | |
566 if (captureDevice.position == position) { | 391 if (captureDevice.position == position) { |
567 return captureDevice; | 392 return captureDevice; |
568 } | 393 } |
569 } | 394 } |
570 return nil; | 395 return nil; |
571 } | 396 } |
572 | 397 |
573 - (AVCaptureDeviceInput *)frontCameraInput { | 398 - (AVCaptureDeviceInput *)frontCameraInput { |
574 if (!_frontCameraInput) { | 399 if (!_frontCameraInput) { |
575 #if TARGET_OS_IPHONE | 400 #if TARGET_OS_IPHONE |
576 AVCaptureDevice *frontCameraDevice = | 401 AVCaptureDevice *frontCameraDevice = |
577 [self videoCaptureDeviceForPosition:AVCaptureDevicePositionFront]; | 402 [self videoCaptureDeviceForPosition:AVCaptureDevicePositionFront]; |
578 #else | 403 #else |
579 AVCaptureDevice *frontCameraDevice = | 404 AVCaptureDevice *frontCameraDevice = |
580 [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; | 405 [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; |
581 #endif | 406 #endif |
582 if (!frontCameraDevice) { | 407 if (!frontCameraDevice) { |
583 RTCLogWarning(@"Failed to find front capture device."); | 408 RTCLogWarning(@"Failed to find front capture device."); |
584 return nil; | 409 return nil; |
585 } | 410 } |
586 NSError *error = nil; | 411 NSError *error = nil; |
587 AVCaptureDeviceInput *frontCameraInput = | 412 AVCaptureDeviceInput *frontCameraInput = |
588 [AVCaptureDeviceInput deviceInputWithDevice:frontCameraDevice | 413 [AVCaptureDeviceInput deviceInputWithDevice:frontCameraDevice error:&error]; |
589 error:&error]; | |
590 if (!frontCameraInput) { | 414 if (!frontCameraInput) { |
591 RTCLogError(@"Failed to create front camera input: %@", | 415 RTCLogError(@"Failed to create front camera input: %@", error.localizedDes cription); |
592 error.localizedDescription); | |
593 return nil; | 416 return nil; |
594 } | 417 } |
595 _frontCameraInput = frontCameraInput; | 418 _frontCameraInput = frontCameraInput; |
596 } | 419 } |
597 return _frontCameraInput; | 420 return _frontCameraInput; |
598 } | 421 } |
599 | 422 |
600 - (AVCaptureDeviceInput *)backCameraInput { | 423 - (AVCaptureDeviceInput *)backCameraInput { |
601 if (!_backCameraInput) { | 424 if (!_backCameraInput) { |
602 AVCaptureDevice *backCameraDevice = | 425 AVCaptureDevice *backCameraDevice = |
603 [self videoCaptureDeviceForPosition:AVCaptureDevicePositionBack]; | 426 [self videoCaptureDeviceForPosition:AVCaptureDevicePositionBack]; |
604 if (!backCameraDevice) { | 427 if (!backCameraDevice) { |
605 RTCLogWarning(@"Failed to find front capture device."); | 428 RTCLogWarning(@"Failed to find front capture device."); |
606 return nil; | 429 return nil; |
607 } | 430 } |
608 NSError *error = nil; | 431 NSError *error = nil; |
609 AVCaptureDeviceInput *backCameraInput = | 432 AVCaptureDeviceInput *backCameraInput = |
610 [AVCaptureDeviceInput deviceInputWithDevice:backCameraDevice | 433 [AVCaptureDeviceInput deviceInputWithDevice:backCameraDevice error:&error]; |
611 error:&error]; | |
612 if (!backCameraInput) { | 434 if (!backCameraInput) { |
613 RTCLogError(@"Failed to create front camera input: %@", | 435 RTCLogError(@"Failed to create front camera input: %@", error.localizedDes cription); |
614 error.localizedDescription); | |
615 return nil; | 436 return nil; |
616 } | 437 } |
617 _backCameraInput = backCameraInput; | 438 _backCameraInput = backCameraInput; |
618 } | 439 } |
619 return _backCameraInput; | 440 return _backCameraInput; |
620 } | 441 } |
621 | 442 |
622 // Called from capture session queue. | 443 // Called from capture session queue. |
623 - (void)updateOrientation { | 444 - (void)updateOrientation { |
624 #if TARGET_OS_IPHONE | 445 #if TARGET_OS_IPHONE |
625 switch ([UIDevice currentDevice].orientation) { | 446 switch ([UIDevice currentDevice].orientation) { |
626 case UIDeviceOrientationPortrait: | 447 case UIDeviceOrientationPortrait: |
627 _rotation = webrtc::kVideoRotation_90; | 448 _rotation = webrtc::kVideoRotation_90; |
628 break; | 449 break; |
629 case UIDeviceOrientationPortraitUpsideDown: | 450 case UIDeviceOrientationPortraitUpsideDown: |
630 _rotation = webrtc::kVideoRotation_270; | 451 _rotation = webrtc::kVideoRotation_270; |
631 break; | 452 break; |
632 case UIDeviceOrientationLandscapeLeft: | 453 case UIDeviceOrientationLandscapeLeft: |
633 _rotation = _capturer->GetUseBackCamera() ? webrtc::kVideoRotation_0 | 454 _rotation = |
634 : webrtc::kVideoRotation_180; | 455 _capturer->GetUseBackCamera() ? webrtc::kVideoRotation_0 : webrtc::kVideoRotation_180; |
635 break; | 456 break; |
636 case UIDeviceOrientationLandscapeRight: | 457 case UIDeviceOrientationLandscapeRight: |
637 _rotation = _capturer->GetUseBackCamera() ? webrtc::kVideoRotation_180 | 458 _rotation = |
638 : webrtc::kVideoRotation_0; | 459 _capturer->GetUseBackCamera() ? webrtc::kVideoRotation_180 : webrtc::kVideoRotation_0; |
639 break; | 460 break; |
640 case UIDeviceOrientationFaceUp: | 461 case UIDeviceOrientationFaceUp: |
641 case UIDeviceOrientationFaceDown: | 462 case UIDeviceOrientationFaceDown: |
642 case UIDeviceOrientationUnknown: | 463 case UIDeviceOrientationUnknown: |
643 // Ignore. | 464 // Ignore. |
644 break; | 465 break; |
645 } | 466 } |
646 #endif | 467 #endif |
647 } | 468 } |
648 | 469 |
649 // Update the current session input to match what's stored in _useBackCamera. | 470 // Update the current session input to match what's stored in _useBackCamera. |
650 - (void)updateSessionInputForUseBackCamera:(BOOL)useBackCamera { | 471 - (void)updateSessionInputForUseBackCamera:(BOOL)useBackCamera { |
651 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | 472 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession |
652 block:^{ | 473 block:^{ |
653 [_captureSession beginConfiguration]; | 474 [_captureSession beginConfiguration]; |
654 AVCaptureDeviceInput *oldInput = _backCameraInput; | 475 AVCaptureDeviceInput *oldInput = _backCameraInput; |
655 AVCaptureDeviceInput *newInput = _frontCameraInput; | 476 AVCaptureDeviceInput *newInput = _frontCameraInput; |
656 if (useBackCamera) { | 477 if (useBackCamera) { |
657 oldInput = _frontCameraInput; | 478 oldInput = _frontCameraInput; |
658 newInput = _backCameraInput; | 479 newInput = _backCameraInput; |
659 } | 480 } |
660 if (oldInput) { | 481 if (oldInput) { |
661 // Ok to remove this even if it's not attached. Will be no-op. | 482 // Ok to remove this even if it's not attached. Will be no-op. |
662 [_captureSession removeInput:oldInput]; | 483 [_captureSession removeInput:oldInput]; |
663 } | 484 } |
664 if (newInput) { | 485 if (newInput) { |
665 [_captureSession addInput:newInput]; | 486 [_captureSession addInput:newInput]; |
666 } | 487 } |
667 [self updateOrientation]; | 488 [self updateOrientation]; |
668 AVCaptureDevice *newDevice = newInput.device; | 489 AVCaptureDevice *newDevice = newInput.device; |
669 const cricket::VideoFormat *format = _capturer->GetCaptureFormat(); | 490 const cricket::VideoFormat *format = _capturer->GetCaptureFormat(); |
670 SetFormatForCaptureDevice(newDevice, _captureSession, *format); | 491 _capturer->SetFormatForCaptureDevice(newDevice, _captureSession, *format); |
magjed_webrtc 2016/11/14 13:48:52:
If SetFormatForCaptureDevice is a static method, y
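A minimal sketch of the call-site variant the comment seems to point at, assuming SetFormatForCaptureDevice stays a file-static helper as on the old side of this diff; the truncated comment's exact wording is not recoverable, so this is illustrative only, not the form the CL necessarily landed with:

    // Hypothetical call site inside updateSessionInputForUseBackCamera:,
    // invoking the file-static helper directly instead of routing the call
    // through the C++ AVFoundationVideoCapturer object.
    AVCaptureDevice *newDevice = newInput.device;
    const cricket::VideoFormat *format = _capturer->GetCaptureFormat();
    SetFormatForCaptureDevice(newDevice, _captureSession, *format);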
671 [_captureSession commitConfiguration]; | 492 [_captureSession commitConfiguration]; |
672 }]; | 493 }]; |
673 } | 494 } |
674 | 495 |
675 @end | 496 @end |
676 | 497 |
677 namespace webrtc { | 498 NS_ASSUME_NONNULL_END |
678 | |
679 enum AVFoundationVideoCapturerMessageType : uint32_t { | |
680 kMessageTypeFrame, | |
681 }; | |
682 | |
683 AVFoundationVideoCapturer::AVFoundationVideoCapturer() : _capturer(nil) { | |
684 _capturer = | |
685 [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this]; | |
686 | |
687 std::set<cricket::VideoFormat> front_camera_video_formats = | |
688 GetSupportedVideoFormatsForDevice([_capturer frontCaptureDevice]); | |
689 | |
690 std::set<cricket::VideoFormat> back_camera_video_formats = | |
691 GetSupportedVideoFormatsForDevice([_capturer backCaptureDevice]); | |
692 | |
693 std::vector<cricket::VideoFormat> intersection_video_formats; | |
694 if (back_camera_video_formats.empty()) { | |
695 intersection_video_formats.assign(front_camera_video_formats.begin(), | |
696 front_camera_video_formats.end()); | |
697 | |
698 } else if (front_camera_video_formats.empty()) { | |
699 intersection_video_formats.assign(back_camera_video_formats.begin(), | |
700 back_camera_video_formats.end()); | |
701 } else { | |
702 std::set_intersection( | |
703 front_camera_video_formats.begin(), front_camera_video_formats.end(), | |
704 back_camera_video_formats.begin(), back_camera_video_formats.end(), | |
705 std::back_inserter(intersection_video_formats)); | |
706 } | |
707 SetSupportedFormats(intersection_video_formats); | |
708 } | |
709 | |
710 AVFoundationVideoCapturer::~AVFoundationVideoCapturer() { | |
711 _capturer = nil; | |
712 } | |
713 | |
714 cricket::CaptureState AVFoundationVideoCapturer::Start( | |
715 const cricket::VideoFormat& format) { | |
716 if (!_capturer) { | |
717 LOG(LS_ERROR) << "Failed to create AVFoundation capturer."; | |
718 return cricket::CaptureState::CS_FAILED; | |
719 } | |
720 if (_capturer.isRunning) { | |
721 LOG(LS_ERROR) << "The capturer is already running."; | |
722 return cricket::CaptureState::CS_FAILED; | |
723 } | |
724 | |
725 AVCaptureDevice* device = [_capturer getActiveCaptureDevice]; | |
726 AVCaptureSession* session = _capturer.captureSession; | |
727 | |
728 if (!SetFormatForCaptureDevice(device, session, format)) { | |
729 return cricket::CaptureState::CS_FAILED; | |
730 } | |
731 | |
732 SetCaptureFormat(&format); | |
733 // This isn't super accurate because it takes a while for the AVCaptureSession | |
734 // to spin up, and this call returns async. | |
735 // TODO(tkchin): make this better. | |
736 [_capturer start]; | |
737 SetCaptureState(cricket::CaptureState::CS_RUNNING); | |
738 | |
739 return cricket::CaptureState::CS_STARTING; | |
740 } | |
741 | |
742 void AVFoundationVideoCapturer::Stop() { | |
743 [_capturer stop]; | |
744 SetCaptureFormat(NULL); | |
745 } | |
746 | |
747 bool AVFoundationVideoCapturer::IsRunning() { | |
748 return _capturer.isRunning; | |
749 } | |
750 | |
751 AVCaptureSession* AVFoundationVideoCapturer::GetCaptureSession() { | |
752 return _capturer.captureSession; | |
753 } | |
754 | |
755 bool AVFoundationVideoCapturer::CanUseBackCamera() const { | |
756 return _capturer.canUseBackCamera; | |
757 } | |
758 | |
759 void AVFoundationVideoCapturer::SetUseBackCamera(bool useBackCamera) { | |
760 _capturer.useBackCamera = useBackCamera; | |
761 } | |
762 | |
763 bool AVFoundationVideoCapturer::GetUseBackCamera() const { | |
764 return _capturer.useBackCamera; | |
765 } | |
766 | |
767 void AVFoundationVideoCapturer::CaptureSampleBuffer( | |
768 CMSampleBufferRef sample_buffer, VideoRotation rotation) { | |
769 if (CMSampleBufferGetNumSamples(sample_buffer) != 1 || | |
770 !CMSampleBufferIsValid(sample_buffer) || | |
771 !CMSampleBufferDataIsReady(sample_buffer)) { | |
772 return; | |
773 } | |
774 | |
775 CVImageBufferRef image_buffer = CMSampleBufferGetImageBuffer(sample_buffer); | |
776 if (image_buffer == NULL) { | |
777 return; | |
778 } | |
779 | |
780 const int captured_width = CVPixelBufferGetWidth(image_buffer); | |
781 const int captured_height = CVPixelBufferGetHeight(image_buffer); | |
782 | |
783 int adapted_width; | |
784 int adapted_height; | |
785 int crop_width; | |
786 int crop_height; | |
787 int crop_x; | |
788 int crop_y; | |
789 int64_t translated_camera_time_us; | |
790 | |
791 if (!AdaptFrame(captured_width, captured_height, | |
792 rtc::TimeNanos() / rtc::kNumNanosecsPerMicrosec, | |
793 rtc::TimeMicros(), &adapted_width, &adapted_height, | |
794 &crop_width, &crop_height, &crop_x, &crop_y, | |
795 &translated_camera_time_us)) { | |
796 return; | |
797 } | |
798 | |
799 rtc::scoped_refptr<VideoFrameBuffer> buffer = | |
800 new rtc::RefCountedObject<CoreVideoFrameBuffer>( | |
801 image_buffer, | |
802 adapted_width, adapted_height, | |
803 crop_width, crop_height, | |
804 crop_x, crop_y); | |
805 | |
806 // Applying rotation is only supported for legacy reasons and performance is | |
807 // not critical here. | |
808 if (apply_rotation() && rotation != kVideoRotation_0) { | |
809 buffer = buffer->NativeToI420Buffer(); | |
810 rtc::scoped_refptr<I420Buffer> rotated_buffer = | |
811 (rotation == kVideoRotation_180) | |
812 ? I420Buffer::Create(adapted_width, adapted_height) | |
813 : I420Buffer::Create(adapted_height, adapted_width); | |
814 libyuv::I420Rotate( | |
815 buffer->DataY(), buffer->StrideY(), | |
816 buffer->DataU(), buffer->StrideU(), | |
817 buffer->DataV(), buffer->StrideV(), | |
818 rotated_buffer->MutableDataY(), rotated_buffer->StrideY(), | |
819 rotated_buffer->MutableDataU(), rotated_buffer->StrideU(), | |
820 rotated_buffer->MutableDataV(), rotated_buffer->StrideV(), | |
821 buffer->width(), buffer->height(), | |
822 static_cast<libyuv::RotationMode>(rotation)); | |
823 buffer = rotated_buffer; | |
824 } | |
825 | |
826 OnFrame(webrtc::VideoFrame(buffer, rotation, translated_camera_time_us), | |
827 captured_width, captured_height); | |
828 } | |
829 | |
830 } // namespace webrtc | |