OLD | NEW |
---|---|
1 /* | 1 /* |
2 * libjingle | 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. |
3 * Copyright 2015 Google Inc. | |
4 * | 3 * |
5 * Redistribution and use in source and binary forms, with or without | 4 * Use of this source code is governed by a BSD-style license |
6 * modification, are permitted provided that the following conditions are met: | 5 * that can be found in the LICENSE file in the root of the source |
7 * | 6 * tree. An additional intellectual property rights grant can be found |
8 * 1. Redistributions of source code must retain the above copyright notice, | 7 * in the file PATENTS. All contributing project authors may |
9 * this list of conditions and the following disclaimer. | 8 * be found in the AUTHORS file in the root of the source tree. |
10 * 2. Redistributions in binary form must reproduce the above copyright notice, | |
11 * this list of conditions and the following disclaimer in the documentation | |
12 * and/or other materials provided with the distribution. | |
13 * 3. The name of the author may not be used to endorse or promote products | |
14 * derived from this software without specific prior written permission. | |
15 * | |
16 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED | |
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF | |
18 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO | |
19 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | |
20 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, | |
21 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; | |
22 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, | |
23 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR | |
24 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF | |
25 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | |
26 */ | 9 */ |
27 | 10 |
28 #include "talk/app/webrtc/objc/avfoundationvideocapturer.h" | 11 #include "avfoundationvideocapturer.h" |
tkchin_webrtc 2016/01/20 21:51:12: full path
hjon 2016/01/20 22:25:34: Done.
29 | 12 |
30 #include "webrtc/base/bind.h" | 13 #include "webrtc/base/bind.h" |
31 | 14 |
32 #import <AVFoundation/AVFoundation.h> | 15 #import <AVFoundation/AVFoundation.h> |
33 #import <Foundation/Foundation.h> | 16 #import <Foundation/Foundation.h> |
34 #import <UIKit/UIKit.h> | 17 #import <UIKit/UIKit.h> |
35 | 18 |
36 #import "webrtc/base/objc/RTCDispatcher.h" | 19 #import "webrtc/base/objc/RTCDispatcher.h" |
37 | 20 |
38 // TODO(tkchin): support other formats. | 21 // TODO(tkchin): support other formats. |
39 static NSString* const kDefaultPreset = AVCaptureSessionPreset640x480; | 22 static NSString* const kDefaultPreset = AVCaptureSessionPreset640x480; |
40 static cricket::VideoFormat const kDefaultFormat = | 23 static cricket::VideoFormat const kDefaultFormat = |
41 cricket::VideoFormat(640, | 24 cricket::VideoFormat(640, |
42 480, | 25 480, |
43 cricket::VideoFormat::FpsToInterval(30), | 26 cricket::VideoFormat::FpsToInterval(30), |
44 cricket::FOURCC_NV12); | 27 cricket::FOURCC_NV12); |
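A note on the constant above: cricket::VideoFormat stores frame rate as a per-frame interval in nanoseconds, which is what FpsToInterval computes. A minimal sketch of the arithmetic:

    // FpsToInterval(30) == 10^9 / 30 ns, i.e. one frame every ~33.3 ms.
    int64_t interval = cricket::VideoFormat::FpsToInterval(30);  // 33333333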
45 | 28 |
46 // This class is used to capture frames using AVFoundation APIs on iOS. It is meant | 29 // This class is used to capture frames using AVFoundation APIs on iOS. It is meant |
47 // to be owned by an instance of AVFoundationVideoCapturer. The reason for this | 30 // to be owned by an instance of AVFoundationVideoCapturer. The reason for this |
48 // is that other webrtc objects own cricket::VideoCapturer, which is not | 31 // is that other webrtc objects own cricket::VideoCapturer, which is not |
49 // ref counted. To prevent bad behavior we do not expose this class directly. | 32 // ref counted. To prevent bad behavior we do not expose this class directly. |
50 @interface RTCAVFoundationVideoCapturerInternal : NSObject | 33 @interface RTCAVFoundationVideoCapturerInternal : NSObject |
51 <AVCaptureVideoDataOutputSampleBufferDelegate> | 34 <AVCaptureVideoDataOutputSampleBufferDelegate> |
52 | 35 |
53 @property(nonatomic, readonly) AVCaptureSession* captureSession; | 36 @property(nonatomic, readonly) AVCaptureSession *captureSession; |
54 @property(nonatomic, readonly) BOOL isRunning; | 37 @property(nonatomic, readonly) BOOL isRunning; |
55 @property(nonatomic, assign) BOOL useBackCamera; // Defaults to NO. | 38 @property(nonatomic, assign) BOOL useBackCamera; // Defaults to NO. |
56 | 39 |
57 // We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it | 40 // We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it |
58 // when we receive frames. This is safe because this object should be owned by | 41 // when we receive frames. This is safe because this object should be owned by |
59 // it. | 42 // it. |
60 - (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer*)capturer; | 43 - (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer; |
61 - (void)startCaptureAsync; | 44 - (void)startCaptureAsync; |
62 - (void)stopCaptureAsync; | 45 - (void)stopCaptureAsync; |
63 | 46 |
64 @end | 47 @end |
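Given the ownership comment above, the C++ side presumably constructs this helper along these lines (a sketch; the actual AVFoundationVideoCapturer constructor is in a skipped part of this file, and the ivar name is assumed):

    // Sketch, assuming ARC and a strong Objective-C ivar on the C++ capturer:
    _capturer =
        [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];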
65 | 48 |
66 @implementation RTCAVFoundationVideoCapturerInternal { | 49 @implementation RTCAVFoundationVideoCapturerInternal { |
67 // Keep pointers to inputs for convenience. | 50 // Keep pointers to inputs for convenience. |
68 AVCaptureDeviceInput* _frontDeviceInput; | 51 AVCaptureDeviceInput *_frontDeviceInput; |
69 AVCaptureDeviceInput* _backDeviceInput; | 52 AVCaptureDeviceInput *_backDeviceInput; |
70 AVCaptureVideoDataOutput* _videoOutput; | 53 AVCaptureVideoDataOutput *_videoOutput; |
71 // The cricket::VideoCapturer that owns this class. Should never be NULL. | 54 // The cricket::VideoCapturer that owns this class. Should never be NULL. |
72 webrtc::AVFoundationVideoCapturer* _capturer; | 55 webrtc::AVFoundationVideoCapturer *_capturer; |
73 BOOL _orientationHasChanged; | 56 BOOL _orientationHasChanged; |
74 } | 57 } |
75 | 58 |
76 @synthesize captureSession = _captureSession; | 59 @synthesize captureSession = _captureSession; |
77 @synthesize useBackCamera = _useBackCamera; | 60 @synthesize useBackCamera = _useBackCamera; |
78 @synthesize isRunning = _isRunning; | 61 @synthesize isRunning = _isRunning; |
79 | 62 |
80 - (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer*)capturer { | 63 - (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer { |
81 NSParameterAssert(capturer); | 64 NSParameterAssert(capturer); |
82 if (self = [super init]) { | 65 if (self = [super init]) { |
83 _capturer = capturer; | 66 _capturer = capturer; |
84 if (![self setupCaptureSession]) { | 67 if (![self setupCaptureSession]) { |
85 return nil; | 68 return nil; |
86 } | 69 } |
87 NSNotificationCenter* center = [NSNotificationCenter defaultCenter]; | 70 NSNotificationCenter *center = [NSNotificationCenter defaultCenter]; |
88 [center addObserver:self | 71 [center addObserver:self |
89 selector:@selector(deviceOrientationDidChange:) | 72 selector:@selector(deviceOrientationDidChange:) |
90 name:UIDeviceOrientationDidChangeNotification | 73 name:UIDeviceOrientationDidChangeNotification |
91 object:nil]; | 74 object:nil]; |
92 [center addObserverForName:AVCaptureSessionRuntimeErrorNotification | 75 [center addObserverForName:AVCaptureSessionRuntimeErrorNotification |
93 object:nil | 76 object:nil |
94 queue:nil | 77 queue:nil |
95 usingBlock:^(NSNotification* notification) { | 78 usingBlock:^(NSNotification *notification) { |
96 NSLog(@"Capture session error: %@", notification.userInfo); | 79 NSLog(@"Capture session error: %@", notification.userInfo); |
97 }]; | 80 }]; |
98 } | 81 } |
99 return self; | 82 return self; |
100 } | 83 } |
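One subtlety in the init above: removeObserver:self in dealloc does not unregister the block-based observer added with addObserverForName:object:queue:usingBlock:; NSNotificationCenter only removes block observers via the opaque token that call returns. A hedged sketch of the token-keeping pattern (the _runtimeErrorObserver ivar is hypothetical, not in this CL):

    // Keep the returned token...
    _runtimeErrorObserver =
        [center addObserverForName:AVCaptureSessionRuntimeErrorNotification
                            object:nil
                             queue:nil
                        usingBlock:^(NSNotification *notification) {
                          NSLog(@"Capture session error: %@", notification.userInfo);
                        }];
    // ...and remove it explicitly in dealloc:
    [[NSNotificationCenter defaultCenter] removeObserver:_runtimeErrorObserver];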
101 | 84 |
102 - (void)dealloc { | 85 - (void)dealloc { |
103 [self stopCaptureAsync]; | 86 [self stopCaptureAsync]; |
104 [[NSNotificationCenter defaultCenter] removeObserver:self]; | 87 [[NSNotificationCenter defaultCenter] removeObserver:self]; |
105 _capturer = nullptr; | 88 _capturer = nullptr; |
(...skipping 30 matching lines...)
136 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | 119 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession |
137 block:^{ | 120 block:^{ |
138 [session stopRunning]; | 121 [session stopRunning]; |
139 }]; | 122 }]; |
140 [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications]; | 123 [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications]; |
141 _isRunning = NO; | 124 _isRunning = NO; |
142 } | 125 } |
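-[AVCaptureSession startRunning] and stopRunning are synchronous, potentially slow calls, which is why they are dispatched to RTCDispatcher's capture-session queue instead of blocking the caller. The start path (largely in the skipped lines above) presumably mirrors the stop path:

    // Sketch of the corresponding start, assumed from the stop path shown here:
    [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                                 block:^{
                                   [session startRunning];
                                 }];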
143 | 126 |
144 #pragma mark AVCaptureVideoDataOutputSampleBufferDelegate | 127 #pragma mark AVCaptureVideoDataOutputSampleBufferDelegate |
145 | 128 |
146 - (void)captureOutput:(AVCaptureOutput*)captureOutput | 129 - (void)captureOutput:(AVCaptureOutput *)captureOutput |
147 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer | 130 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer |
148 fromConnection:(AVCaptureConnection*)connection { | 131 fromConnection:(AVCaptureConnection *)connection { |
149 NSParameterAssert(captureOutput == _videoOutput); | 132 NSParameterAssert(captureOutput == _videoOutput); |
150 if (!_isRunning) { | 133 if (!_isRunning) { |
151 return; | 134 return; |
152 } | 135 } |
153 _capturer->CaptureSampleBuffer(sampleBuffer); | 136 _capturer->CaptureSampleBuffer(sampleBuffer); |
154 } | 137 } |
155 | 138 |
156 - (void)captureOutput:(AVCaptureOutput*)captureOutput | 139 - (void)captureOutput:(AVCaptureOutput *)captureOutput |
157 didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer | 140 didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer |
158 fromConnection:(AVCaptureConnection*)connection { | 141 fromConnection:(AVCaptureConnection *)connection { |
159 NSLog(@"Dropped sample buffer."); | 142 NSLog(@"Dropped sample buffer."); |
160 } | 143 } |
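These delegate callbacks arrive on the queue handed to setSampleBufferDelegate:queue: (the main queue in this CL). Inside CaptureSampleBuffer, the first step is to unwrap the pixel data; a sketch of that step (the later hunk of this diff picks up with imageBuffer already extracted):

    // A CMSampleBuffer delivered by AVFoundation wraps a CVPixelBuffer.
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (!imageBuffer) {
      return;  // Not a video frame.
    }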
161 | 144 |
162 #pragma mark - Private | 145 #pragma mark - Private |
163 | 146 |
164 - (BOOL)setupCaptureSession { | 147 - (BOOL)setupCaptureSession { |
165 _captureSession = [[AVCaptureSession alloc] init]; | 148 _captureSession = [[AVCaptureSession alloc] init]; |
166 #if defined(__IPHONE_7_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_7_0 | 149 #if defined(__IPHONE_7_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_7_0 |
167 NSString* version = [[UIDevice currentDevice] systemVersion]; | 150 NSString *version = [[UIDevice currentDevice] systemVersion]; |
168 if ([version integerValue] >= 7) { | 151 if ([version integerValue] >= 7) { |
169 _captureSession.usesApplicationAudioSession = NO; | 152 _captureSession.usesApplicationAudioSession = NO; |
170 } | 153 } |
171 #endif | 154 #endif |
172 if (![_captureSession canSetSessionPreset:kDefaultPreset]) { | 155 if (![_captureSession canSetSessionPreset:kDefaultPreset]) { |
173 NSLog(@"Default video capture preset unsupported."); | 156 NSLog(@"Default video capture preset unsupported."); |
174 return NO; | 157 return NO; |
175 } | 158 } |
176 _captureSession.sessionPreset = kDefaultPreset; | 159 _captureSession.sessionPreset = kDefaultPreset; |
177 | 160 |
178 // Make the capturer output NV12. Ideally we want I420 but that's not | 161 // Make the capturer output NV12. Ideally we want I420 but that's not |
179 // currently supported on iPhone / iPad. | 162 // currently supported on iPhone / iPad. |
180 _videoOutput = [[AVCaptureVideoDataOutput alloc] init]; | 163 _videoOutput = [[AVCaptureVideoDataOutput alloc] init]; |
181 _videoOutput.videoSettings = @{ | 164 _videoOutput.videoSettings = @{ |
182 (NSString*)kCVPixelBufferPixelFormatTypeKey : | 165 (NSString *)kCVPixelBufferPixelFormatTypeKey : |
183 @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) | 166 @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) |
184 }; | 167 }; |
185 _videoOutput.alwaysDiscardsLateVideoFrames = NO; | 168 _videoOutput.alwaysDiscardsLateVideoFrames = NO; |
186 [_videoOutput setSampleBufferDelegate:self | 169 [_videoOutput setSampleBufferDelegate:self |
187 queue:dispatch_get_main_queue()]; | 170 queue:dispatch_get_main_queue()]; |
188 if (![_captureSession canAddOutput:_videoOutput]) { | 171 if (![_captureSession canAddOutput:_videoOutput]) { |
189 NSLog(@"Default video capture output unsupported."); | 172 NSLog(@"Default video capture output unsupported."); |
190 return NO; | 173 return NO; |
191 } | 174 } |
192 [_captureSession addOutput:_videoOutput]; | 175 [_captureSession addOutput:_videoOutput]; |
193 | 176 |
194 // Find the capture devices. | 177 // Find the capture devices. |
195 AVCaptureDevice* frontCaptureDevice = nil; | 178 AVCaptureDevice *frontCaptureDevice = nil; |
196 AVCaptureDevice* backCaptureDevice = nil; | 179 AVCaptureDevice *backCaptureDevice = nil; |
197 for (AVCaptureDevice* captureDevice in | 180 for (AVCaptureDevice *captureDevice in |
198 [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) { | 181 [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) { |
199 if (captureDevice.position == AVCaptureDevicePositionBack) { | 182 if (captureDevice.position == AVCaptureDevicePositionBack) { |
200 backCaptureDevice = captureDevice; | 183 backCaptureDevice = captureDevice; |
201 } | 184 } |
202 if (captureDevice.position == AVCaptureDevicePositionFront) { | 185 if (captureDevice.position == AVCaptureDevicePositionFront) { |
203 frontCaptureDevice = captureDevice; | 186 frontCaptureDevice = captureDevice; |
204 } | 187 } |
205 } | 188 } |
206 if (!frontCaptureDevice || !backCaptureDevice) { | 189 if (!frontCaptureDevice || !backCaptureDevice) { |
207 NSLog(@"Failed to get capture devices."); | 190 NSLog(@"Failed to get capture devices."); |
208 return NO; | 191 return NO; |
209 } | 192 } |
210 | 193 |
211 // Set up the session inputs. | 194 // Set up the session inputs. |
212 NSError* error = nil; | 195 NSError *error = nil; |
213 _frontDeviceInput = | 196 _frontDeviceInput = |
214 [AVCaptureDeviceInput deviceInputWithDevice:frontCaptureDevice | 197 [AVCaptureDeviceInput deviceInputWithDevice:frontCaptureDevice |
215 error:&error]; | 198 error:&error]; |
216 if (!_frontDeviceInput) { | 199 if (!_frontDeviceInput) { |
217 NSLog(@"Failed to get capture device input: %@", | 200 NSLog(@"Failed to get capture device input: %@", |
218 error.localizedDescription); | 201 error.localizedDescription); |
219 return NO; | 202 return NO; |
220 } | 203 } |
221 _backDeviceInput = | 204 _backDeviceInput = |
222 [AVCaptureDeviceInput deviceInputWithDevice:backCaptureDevice | 205 [AVCaptureDeviceInput deviceInputWithDevice:backCaptureDevice |
223 error:&error]; | 206 error:&error]; |
224 if (!_backDeviceInput) { | 207 if (!_backDeviceInput) { |
225 NSLog(@"Failed to get capture device input: %@", | 208 NSLog(@"Failed to get capture device input: %@", |
226 error.localizedDescription); | 209 error.localizedDescription); |
227 return NO; | 210 return NO; |
228 } | 211 } |
229 | 212 |
230 // Add the inputs. | 213 // Add the inputs. |
231 if (![_captureSession canAddInput:_frontDeviceInput] || | 214 if (![_captureSession canAddInput:_frontDeviceInput] || |
232 ![_captureSession canAddInput:_backDeviceInput]) { | 215 ![_captureSession canAddInput:_backDeviceInput]) { |
233 NSLog(@"Session does not support capture inputs."); | 216 NSLog(@"Session does not support capture inputs."); |
234 return NO; | 217 return NO; |
235 } | 218 } |
236 [self updateSessionInput]; | 219 [self updateSessionInput]; |
237 | 220 |
238 return YES; | 221 return YES; |
239 } | 222 } |
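The pixel format requested above ties back to kDefaultFormat: kCVPixelFormatType_420YpCbCr8BiPlanarFullRange is CoreVideo's name for biplanar 4:2:0 (one Y plane plus one interleaved CbCr plane), which is exactly NV12. A defensive check a consumer could add (hypothetical, not part of this CL):

    // Verify frames really arrive in the biplanar NV12 layout we requested.
    OSType pixelFormat = CVPixelBufferGetPixelFormatType(imageBuffer);
    RTC_DCHECK(pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange);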
240 | 223 |
241 - (void)deviceOrientationDidChange:(NSNotification*)notification { | 224 - (void)deviceOrientationDidChange:(NSNotification *)notification { |
242 _orientationHasChanged = YES; | 225 _orientationHasChanged = YES; |
243 [self updateOrientation]; | 226 [self updateOrientation]; |
244 } | 227 } |
245 | 228 |
246 - (void)updateOrientation { | 229 - (void)updateOrientation { |
247 AVCaptureConnection* connection = | 230 AVCaptureConnection *connection = |
248 [_videoOutput connectionWithMediaType:AVMediaTypeVideo]; | 231 [_videoOutput connectionWithMediaType:AVMediaTypeVideo]; |
249 if (!connection.supportsVideoOrientation) { | 232 if (!connection.supportsVideoOrientation) { |
250 // TODO(tkchin): set rotation bit on frames. | 233 // TODO(tkchin): set rotation bit on frames. |
251 return; | 234 return; |
252 } | 235 } |
253 AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait; | 236 AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait; |
254 switch ([UIDevice currentDevice].orientation) { | 237 switch ([UIDevice currentDevice].orientation) { |
255 case UIDeviceOrientationPortrait: | 238 case UIDeviceOrientationPortrait: |
256 orientation = AVCaptureVideoOrientationPortrait; | 239 orientation = AVCaptureVideoOrientationPortrait; |
257 break; | 240 break; |
(...skipping 13 matching lines...)
271 connection.videoOrientation = orientation; | 254 connection.videoOrientation = orientation; |
272 } | 255 } |
273 return; | 256 return; |
274 } | 257 } |
275 connection.videoOrientation = orientation; | 258 connection.videoOrientation = orientation; |
276 } | 259 } |
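The elided cases of the switch presumably handle the remaining orientations. As an assumption (the standard mapping, not copied from the skipped lines), note that the two enums invert left and right: a device rotated to LandscapeLeft needs AVCaptureVideoOrientationLandscapeRight, and vice versa:

    // Assumed completion of the switch above; not from this CL.
    case UIDeviceOrientationPortraitUpsideDown:
      orientation = AVCaptureVideoOrientationPortraitUpsideDown;
      break;
    case UIDeviceOrientationLandscapeLeft:
      orientation = AVCaptureVideoOrientationLandscapeRight;
      break;
    case UIDeviceOrientationLandscapeRight:
      orientation = AVCaptureVideoOrientationLandscapeLeft;
      break;
    default:
      break;  // FaceUp / FaceDown / Unknown: keep the previous orientation.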
277 | 260 |
278 - (void)updateSessionInput { | 261 - (void)updateSessionInput { |
279 // Update the current session input to match what's stored in _useBackCamera. | 262 // Update the current session input to match what's stored in _useBackCamera. |
280 [_captureSession beginConfiguration]; | 263 [_captureSession beginConfiguration]; |
281 AVCaptureDeviceInput* oldInput = _backDeviceInput; | 264 AVCaptureDeviceInput *oldInput = _backDeviceInput; |
282 AVCaptureDeviceInput* newInput = _frontDeviceInput; | 265 AVCaptureDeviceInput *newInput = _frontDeviceInput; |
283 if (_useBackCamera) { | 266 if (_useBackCamera) { |
284 oldInput = _frontDeviceInput; | 267 oldInput = _frontDeviceInput; |
285 newInput = _backDeviceInput; | 268 newInput = _backDeviceInput; |
286 } | 269 } |
287 // Ok to remove this even if it's not attached. Will be a no-op. | 270 // Ok to remove this even if it's not attached. Will be a no-op. |
288 [_captureSession removeInput:oldInput]; | 271 [_captureSession removeInput:oldInput]; |
289 [_captureSession addInput:newInput]; | 272 [_captureSession addInput:newInput]; |
290 [self updateOrientation]; | 273 [self updateOrientation]; |
291 [_captureSession commitConfiguration]; | 274 [_captureSession commitConfiguration]; |
292 } | 275 } |
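The useBackCamera setter (declared in the interface, defined in a skipped portion of this file) presumably just records the flag and re-runs this method; a sketch, assumed rather than copied from the CL:

    - (void)setUseBackCamera:(BOOL)useBackCamera {
      if (_useBackCamera == useBackCamera) {
        return;
      }
      _useBackCamera = useBackCamera;
      [self updateSessionInput];
    }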
(...skipping 83 matching lines...)
376 | 359 |
377 // Base address must be unlocked to access frame data. | 360 // Base address must be unlocked to access frame data. |
378 CVOptionFlags lockFlags = kCVPixelBufferLock_ReadOnly; | 361 CVOptionFlags lockFlags = kCVPixelBufferLock_ReadOnly; |
379 CVReturn ret = CVPixelBufferLockBaseAddress(imageBuffer, lockFlags); | 362 CVReturn ret = CVPixelBufferLockBaseAddress(imageBuffer, lockFlags); |
380 if (ret != kCVReturnSuccess) { | 363 if (ret != kCVReturnSuccess) { |
381 return; | 364 return; |
382 } | 365 } |
383 | 366 |
384 static size_t const kYPlaneIndex = 0; | 367 static size_t const kYPlaneIndex = 0; |
385 static size_t const kUVPlaneIndex = 1; | 368 static size_t const kUVPlaneIndex = 1; |
386 uint8_t* yPlaneAddress = | 369 uint8_t *yPlaneAddress = |
387 (uint8_t*)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, kYPlaneIndex); | 370 (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, kYPlaneIndex); |
388 size_t yPlaneHeight = | 371 size_t yPlaneHeight = |
389 CVPixelBufferGetHeightOfPlane(imageBuffer, kYPlaneIndex); | 372 CVPixelBufferGetHeightOfPlane(imageBuffer, kYPlaneIndex); |
390 size_t yPlaneWidth = | 373 size_t yPlaneWidth = |
391 CVPixelBufferGetWidthOfPlane(imageBuffer, kYPlaneIndex); | 374 CVPixelBufferGetWidthOfPlane(imageBuffer, kYPlaneIndex); |
392 size_t yPlaneBytesPerRow = | 375 size_t yPlaneBytesPerRow = |
393 CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, kYPlaneIndex); | 376 CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, kYPlaneIndex); |
394 size_t uvPlaneHeight = | 377 size_t uvPlaneHeight = |
395 CVPixelBufferGetHeightOfPlane(imageBuffer, kUVPlaneIndex); | 378 CVPixelBufferGetHeightOfPlane(imageBuffer, kUVPlaneIndex); |
396 size_t uvPlaneBytesPerRow = | 379 size_t uvPlaneBytesPerRow = |
397 CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, kUVPlaneIndex); | 380 CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, kUVPlaneIndex); |
398 size_t frameSize = | 381 size_t frameSize = |
399 yPlaneBytesPerRow * yPlaneHeight + uvPlaneBytesPerRow * uvPlaneHeight; | 382 yPlaneBytesPerRow * yPlaneHeight + uvPlaneBytesPerRow * uvPlaneHeight; |
400 | 383 |
401 // Sanity check assumption that planar bytes are contiguous. | 384 // Sanity check assumption that planar bytes are contiguous. |
402 uint8_t* uvPlaneAddress = | 385 uint8_t *uvPlaneAddress = |
403 (uint8_t*)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, kUVPlaneIndex); | 386 (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, kUVPlaneIndex); |
404 RTC_DCHECK( | 387 RTC_DCHECK( |
405 uvPlaneAddress == yPlaneAddress + yPlaneHeight * yPlaneBytesPerRow); | 388 uvPlaneAddress == yPlaneAddress + yPlaneHeight * yPlaneBytesPerRow); |
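For the 640x480 default, and assuming tightly packed rows (bytesPerRow may include padding, which is exactly why frameSize is computed from bytesPerRow rather than width), the total works out to width * height * 3/2:

    // 640x480 NV12 with no row padding (an assumption for the arithmetic):
    //   Y plane:    640 * 480 = 307200 bytes
    //   CbCr plane: 640 * 240 = 153600 bytes (interleaved, half height)
    //   total:      460800 bytes == 640 * 480 * 3 / 2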
406 | 389 |
407 // Stuff data into a cricket::CapturedFrame. | 390 // Stuff data into a cricket::CapturedFrame. |
408 int64_t currentTime = rtc::TimeNanos(); | 391 int64_t currentTime = rtc::TimeNanos(); |
409 cricket::CapturedFrame frame; | 392 cricket::CapturedFrame frame; |
410 frame.width = yPlaneWidth; | 393 frame.width = yPlaneWidth; |
411 frame.height = yPlaneHeight; | 394 frame.height = yPlaneHeight; |
412 frame.pixel_width = 1; | 395 frame.pixel_width = 1; |
413 frame.pixel_height = 1; | 396 frame.pixel_height = 1; |
414 frame.fourcc = static_cast<uint32_t>(cricket::FOURCC_NV12); | 397 frame.fourcc = static_cast<uint32_t>(cricket::FOURCC_NV12); |
415 frame.time_stamp = currentTime; | 398 frame.time_stamp = currentTime; |
416 frame.data = yPlaneAddress; | 399 frame.data = yPlaneAddress; |
417 frame.data_size = frameSize; | 400 frame.data_size = frameSize; |
418 | 401 |
419 if (_startThread->IsCurrent()) { | 402 if (_startThread->IsCurrent()) { |
420 SignalFrameCaptured(this, &frame); | 403 SignalFrameCaptured(this, &frame); |
421 } else { | 404 } else { |
422 _startThread->Invoke<void>( | 405 _startThread->Invoke<void>( |
423 rtc::Bind(&AVFoundationVideoCapturer::SignalFrameCapturedOnStartThread, | 406 rtc::Bind(&AVFoundationVideoCapturer::SignalFrameCapturedOnStartThread, |
424 this, &frame)); | 407 this, &frame)); |
425 } | 408 } |
426 CVPixelBufferUnlockBaseAddress(imageBuffer, lockFlags); | 409 CVPixelBufferUnlockBaseAddress(imageBuffer, lockFlags); |
427 } | 410 } |
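Why the synchronous Invoke rather than an asynchronous post: frame is a stack object whose data pointer aims into the still-locked pixel buffer, so this thread must block until SignalFrameCaptured finishes on the start thread before the unlock on the final line. In comment form:

    // frame.data == yPlaneAddress points into the locked CVPixelBuffer, so the
    // cross-thread delivery must complete before CVPixelBufferUnlockBaseAddress
    // runs; an async post would risk the start thread reading a reused buffer.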
428 | 411 |
429 void AVFoundationVideoCapturer::SignalFrameCapturedOnStartThread( | 412 void AVFoundationVideoCapturer::SignalFrameCapturedOnStartThread( |
430 const cricket::CapturedFrame* frame) { | 413 const cricket::CapturedFrame *frame) { |
431 RTC_DCHECK(_startThread->IsCurrent()); | 414 RTC_DCHECK(_startThread->IsCurrent()); |
432 // This will call a superclass method that will perform the frame conversion | 415 // This will call a superclass method that will perform the frame conversion |
433 // to I420. | 416 // to I420. |
434 SignalFrameCaptured(this, frame); | 417 SignalFrameCaptured(this, frame); |
435 } | 418 } |
436 | 419 |
437 } // namespace webrtc | 420 } // namespace webrtc |