Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(17)

Side by Side Diff: webrtc/modules/video_capture/objc/rtc_video_capture_objc.mm

Issue 2381853002: Revert of Unify the macOS and iOS capturer implementations (Closed)
Patch Set: Created 4 years, 2 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
(Empty)
1 /*
2 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
3 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
11 #if !defined(__has_feature) || !__has_feature(objc_arc)
12 #error "This file requires ARC support."
13 #endif
14
15 #import <AVFoundation/AVFoundation.h>
16 #ifdef WEBRTC_IOS
17 #import <UIKit/UIKit.h>
18 #endif
19
20 #import "webrtc/modules/video_capture/objc/device_info_objc.h"
21 #import "webrtc/modules/video_capture/objc/rtc_video_capture_objc.h"
22
23 #include "webrtc/system_wrappers/include/trace.h"
24
25 using namespace webrtc;
26 using namespace webrtc::videocapturemodule;
27
// Private interface. Declared as a class extension (rather than the previous
// named "(hidden)" category) so the compiler verifies the declaration against
// the implementation below. The old declaration listed a stale selector,
// -changeCaptureInputWithName:, returning int, which was never implemented;
// the method that actually exists is -changeCaptureInputByUniqueId: and it
// returns BOOL.
@interface RTCVideoCaptureIosObjC ()
- (BOOL)changeCaptureInputByUniqueId:(NSString*)uniqueId;
@end
31
// Thin Objective-C wrapper around AVCaptureSession that delivers NV12 frames
// to the owning webrtc::videocapturemodule::VideoCaptureIos. Capture start,
// stop and device changes run asynchronously on a global queue; callers are
// serialized via _captureChangingCondition.
@implementation RTCVideoCaptureIosObjC {
  webrtc::videocapturemodule::VideoCaptureIos* _owner;  // Not owned; outlives us.
  webrtc::VideoCaptureCapability _capability;           // Capability of the active capture.
  AVCaptureSession* _captureSession;
  int _captureId;                 // Used only for trace logging.
  BOOL _orientationHasChanged;    // iOS: whether a rotation event was observed.
  AVCaptureConnection* _connection;
  BOOL _captureChanging;          // Guarded by _captureChangingCondition.
  NSCondition* _captureChangingCondition;
}

// Fixed backing-ivar typo: was "_framRotation". The ivar is private, so the
// rename is invisible to callers of the frameRotation property.
@synthesize frameRotation = _frameRotation;

// Designated initializer. Creates the AVCaptureSession, attaches an NV12
// video-data output, and (on iOS) subscribes to error and orientation
// notifications. Returns nil if the session or condition cannot be created.
- (id)initWithOwner:(VideoCaptureIos*)owner captureId:(int)captureId {
  // Standard init pattern: assign the result of [super init]. The previous
  // code used `self == [super init]`, a comparison that never reassigns self
  // and silently misbehaves if super returns a different (or nil) object.
  self = [super init];
  if (self) {
    _owner = owner;
    _captureId = captureId;
    _captureSession = [[AVCaptureSession alloc] init];
#if defined(__IPHONE_7_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_7_0
    // On iOS 7+ keep capture audio separate from the app's audio session.
    NSString* version = [[UIDevice currentDevice] systemVersion];
    if ([version integerValue] >= 7) {
      _captureSession.usesApplicationAudioSession = NO;
    }
#endif
    _captureChanging = NO;
    _captureChangingCondition = [[NSCondition alloc] init];

    if (!_captureSession || !_captureChangingCondition) {
      return nil;
    }

    // Create and configure a new output (frames arrive via the
    // AVCaptureVideoDataOutputSampleBufferDelegate callback below).
    AVCaptureVideoDataOutput* captureOutput =
        [[AVCaptureVideoDataOutput alloc] init];
    NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;

    // NV12 (bi-planar Y + interleaved CbCr), matching kVideoNV12 reported to
    // the owner in the frame callback.
    NSNumber* val = [NSNumber
        numberWithUnsignedInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange];
    NSDictionary* videoSettings =
        [NSDictionary dictionaryWithObject:val forKey:key];
    captureOutput.videoSettings = videoSettings;

    // Add the new output; failure is logged but not fatal here (start will
    // fail later when -currentOutput returns nil).
    if ([_captureSession canAddOutput:captureOutput]) {
      [_captureSession addOutput:captureOutput];
    } else {
      WEBRTC_TRACE(kTraceError, kTraceVideoCapture, _captureId,
                   "%s:%s:%d Could not add output to AVCaptureSession ",
                   __FILE__, __FUNCTION__, __LINE__);
    }

#ifdef WEBRTC_IOS
    [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];

    NSNotificationCenter* notify = [NSNotificationCenter defaultCenter];
    [notify addObserver:self
               selector:@selector(onVideoError:)
                   name:AVCaptureSessionRuntimeErrorNotification
                 object:_captureSession];
    [notify addObserver:self
               selector:@selector(deviceOrientationDidChange:)
                   name:UIDeviceOrientationDidChangeNotification
                 object:nil];
#endif
  }

  return self;
}

// Route sample-buffer callbacks to self on a global concurrent queue.
- (void)directOutputToSelf {
  [[self currentOutput]
      setSampleBufferDelegate:self
                        queue:dispatch_get_global_queue(
                                  DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)];
}

// Detach the sample-buffer delegate so no further frames are delivered.
- (void)directOutputToNil {
  [[self currentOutput] setSampleBufferDelegate:nil queue:NULL];
}

// UIDeviceOrientationDidChangeNotification handler (iOS only).
- (void)deviceOrientationDidChange:(NSNotification*)notification {
  _orientationHasChanged = YES;
  [self setRelativeVideoOrientation];
}

- (void)dealloc {
  [[NSNotificationCenter defaultCenter] removeObserver:self];
}

// Selects the capture device identified by |uniqueId|, replacing the current
// session input if necessary. Returns YES if the device is (already) active.
- (BOOL)setCaptureDeviceByUniqueId:(NSString*)uniqueId {
  [self waitForCaptureChangeToFinish];
  // Check whether the requested camera is already the active input.
  if (_captureSession) {
    NSArray* currentInputs = [NSArray arrayWithArray:[_captureSession inputs]];
    if ([currentInputs count] > 0) {
      AVCaptureDeviceInput* currentInput = [currentInputs objectAtIndex:0];
      // Compare against the device's uniqueID, not its localizedName: the
      // parameter is a unique ID (see -changeCaptureInputByUniqueId:), so the
      // old localizedName comparison could never match correctly and forced a
      // needless input change on every call.
      if ([uniqueId isEqualToString:currentInput.device.uniqueID]) {
        return YES;
      }
    }
  }

  return [self changeCaptureInputByUniqueId:uniqueId];
}

// Validates |capability| against the presets this session supports and kicks
// off capture asynchronously. Returns NO on invalid capability or no session.
- (BOOL)startCaptureWithCapability:(const VideoCaptureCapability&)capability {
  [self waitForCaptureChangeToFinish];
  if (!_captureSession) {
    return NO;
  }

  // Check limits of the resolution/frame rate.
  if (capability.maxFPS < 0 || capability.maxFPS > 60) {
    return NO;
  }

  // Reject resolutions larger than the best preset the device supports.
  if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
    if (capability.width > 1280 || capability.height > 720) {
      return NO;
    }
  } else if ([_captureSession
                 canSetSessionPreset:AVCaptureSessionPreset640x480]) {
    if (capability.width > 640 || capability.height > 480) {
      return NO;
    }
  } else if ([_captureSession
                 canSetSessionPreset:AVCaptureSessionPreset352x288]) {
    if (capability.width > 352 || capability.height > 288) {
      return NO;
    }
  } else if (capability.width < 0 || capability.height < 0) {
    return NO;
  }

  _capability = capability;

  AVCaptureVideoDataOutput* currentOutput = [self currentOutput];
  if (!currentOutput)
    return NO;

  [self directOutputToSelf];

  _orientationHasChanged = NO;
  _captureChanging = YES;
  dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0),
                 ^{
                   [self startCaptureInBackgroundWithOutput:currentOutput];
                 });
  return YES;
}

// The single video-data output attached in init, or nil if adding it failed.
- (AVCaptureVideoDataOutput*)currentOutput {
  return [[_captureSession outputs] firstObject];
}

// Runs on a global queue: configures the session preset for the requested
// capability, wires up the video connection, and starts the session.
- (void)startCaptureInBackgroundWithOutput:
            (AVCaptureVideoDataOutput*)currentOutput {
  // Session-preset constants are plain NSString constants; no defensive copy
  // is needed (the old [NSString stringWithString:] calls were redundant).
  NSString* captureQuality = AVCaptureSessionPresetLow;
  if (_capability.width >= 1280 || _capability.height >= 720) {
    captureQuality = AVCaptureSessionPreset1280x720;
  } else if (_capability.width >= 640 || _capability.height >= 480) {
    captureQuality = AVCaptureSessionPreset640x480;
  } else if (_capability.width >= 352 || _capability.height >= 288) {
    captureQuality = AVCaptureSessionPreset352x288;
  }

  // Begin configuration for the AVCaptureSession.
  [_captureSession beginConfiguration];

  // Picture resolution.
  [_captureSession setSessionPreset:captureQuality];

  _connection = [currentOutput connectionWithMediaType:AVMediaTypeVideo];
  [self setRelativeVideoOrientation];

  // Finished configuring; commit settings to AVCaptureSession.
  [_captureSession commitConfiguration];

  [_captureSession startRunning];
  [self signalCaptureChangeEnd];
}

// Keeps the video connection's orientation in sync with the device. On
// non-iOS builds a fixed landscape orientation is used.
- (void)setRelativeVideoOrientation {
  if (!_connection.supportsVideoOrientation) {
    return;
  }
#ifndef WEBRTC_IOS
  _connection.videoOrientation = AVCaptureVideoOrientationLandscapeRight;
  return;
#else
  switch ([UIDevice currentDevice].orientation) {
    case UIDeviceOrientationPortrait:
      _connection.videoOrientation = AVCaptureVideoOrientationPortrait;
      break;
    case UIDeviceOrientationPortraitUpsideDown:
      _connection.videoOrientation =
          AVCaptureVideoOrientationPortraitUpsideDown;
      break;
    // Device-landscape and capture-landscape are mirrored, hence the swap.
    case UIDeviceOrientationLandscapeLeft:
      _connection.videoOrientation = AVCaptureVideoOrientationLandscapeRight;
      break;
    case UIDeviceOrientationLandscapeRight:
      _connection.videoOrientation = AVCaptureVideoOrientationLandscapeLeft;
      break;
    case UIDeviceOrientationFaceUp:
    case UIDeviceOrientationFaceDown:
    case UIDeviceOrientationUnknown:
      // Flat/unknown gives no usable orientation; default to portrait only
      // until a real rotation event has been observed.
      if (!_orientationHasChanged) {
        _connection.videoOrientation = AVCaptureVideoOrientationPortrait;
      }
      break;
  }
#endif
}

// AVCaptureSessionRuntimeErrorNotification handler.
- (void)onVideoError:(NSNotification*)notification {
  NSLog(@"onVideoError: %@", notification);
  // TODO(sjlee): make the specific error handling with this notification.
  WEBRTC_TRACE(kTraceError, kTraceVideoCapture, _captureId,
               "%s:%s:%d [AVCaptureSession startRunning] error.", __FILE__,
               __FUNCTION__, __LINE__);
}

// Detaches the frame delegate and stops the session asynchronously.
- (BOOL)stopCapture {
#ifdef WEBRTC_IOS
  [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
#endif
  _orientationHasChanged = NO;
  [self waitForCaptureChangeToFinish];
  [self directOutputToNil];

  if (!_captureSession) {
    return NO;
  }

  _captureChanging = YES;
  dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0),
                 ^(void) {
                   [self stopCaptureInBackground];
                 });
  return YES;
}

// Runs on a global queue: stops the session and releases waiters.
- (void)stopCaptureInBackground {
  [_captureSession stopRunning];
  [self signalCaptureChangeEnd];
}

// Replaces the session's current input with the device whose uniqueID is
// |uniqueId|. Returns YES if the new input was added. Note: if the new input
// cannot be created or added, the old input has already been removed and is
// not restored.
- (BOOL)changeCaptureInputByUniqueId:(NSString*)uniqueId {
  [self waitForCaptureChangeToFinish];
  NSArray* currentInputs = [_captureSession inputs];
  // Remove current input.
  if ([currentInputs count] > 0) {
    AVCaptureInput* currentInput =
        (AVCaptureInput*)[currentInputs objectAtIndex:0];

    [_captureSession removeInput:currentInput];
  }

  // Look for an input device with the requested unique ID among the
  // available capture devices.
  int captureDeviceCount = [DeviceInfoIosObjC captureDeviceCount];
  if (captureDeviceCount <= 0) {
    return NO;
  }

  AVCaptureDevice* captureDevice =
      [DeviceInfoIosObjC captureDeviceForUniqueId:uniqueId];

  if (!captureDevice) {
    return NO;
  }

  // Now create a capture-session input out of the AVCaptureDevice.
  NSError* deviceError = nil;
  AVCaptureDeviceInput* newCaptureInput =
      [AVCaptureDeviceInput deviceInputWithDevice:captureDevice
                                            error:&deviceError];

  if (!newCaptureInput) {
    const char* errorMessage = [[deviceError localizedDescription] UTF8String];

    WEBRTC_TRACE(kTraceError, kTraceVideoCapture, _captureId,
                 "%s:%s:%d deviceInputWithDevice error:%s", __FILE__,
                 __FUNCTION__, __LINE__, errorMessage);

    return NO;
  }

  // Try to add our new capture device to the capture session.
  [_captureSession beginConfiguration];

  BOOL addedCaptureInput = NO;
  if ([_captureSession canAddInput:newCaptureInput]) {
    [_captureSession addInput:newCaptureInput];
    addedCaptureInput = YES;
  } else {
    addedCaptureInput = NO;
  }

  [_captureSession commitConfiguration];

  return addedCaptureInput;
}

// AVCaptureVideoDataOutputSampleBufferDelegate: called on the global queue
// for every captured frame; forwards the locked NV12 pixel buffer to the
// owner.
- (void)captureOutput:(AVCaptureOutput*)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection*)connection {
  const int kFlags = 0;
  CVImageBufferRef videoFrame = CMSampleBufferGetImageBuffer(sampleBuffer);

  // The base address must stay locked for the duration of IncomingFrame.
  if (CVPixelBufferLockBaseAddress(videoFrame, kFlags) != kCVReturnSuccess) {
    return;
  }

  const int kYPlaneIndex = 0;
  const int kUVPlaneIndex = 1;

  uint8_t* baseAddress =
      (uint8_t*)CVPixelBufferGetBaseAddressOfPlane(videoFrame, kYPlaneIndex);
  size_t yPlaneBytesPerRow =
      CVPixelBufferGetBytesPerRowOfPlane(videoFrame, kYPlaneIndex);
  size_t yPlaneHeight = CVPixelBufferGetHeightOfPlane(videoFrame, kYPlaneIndex);
  size_t uvPlaneBytesPerRow =
      CVPixelBufferGetBytesPerRowOfPlane(videoFrame, kUVPlaneIndex);
  size_t uvPlaneHeight =
      CVPixelBufferGetHeightOfPlane(videoFrame, kUVPlaneIndex);
  // NOTE(review): passing the Y-plane base with the combined plane size
  // assumes the UV plane is contiguous after the Y plane (and that any row
  // padding is acceptable to the consumer) — confirm against
  // VideoCaptureIos::IncomingFrame.
  size_t frameSize =
      yPlaneBytesPerRow * yPlaneHeight + uvPlaneBytesPerRow * uvPlaneHeight;

  VideoCaptureCapability tempCaptureCapability;
  tempCaptureCapability.width = CVPixelBufferGetWidth(videoFrame);
  tempCaptureCapability.height = CVPixelBufferGetHeight(videoFrame);
  tempCaptureCapability.maxFPS = _capability.maxFPS;
  tempCaptureCapability.rawType = kVideoNV12;

  _owner->IncomingFrame(baseAddress, frameSize, tempCaptureCapability, 0);

  CVPixelBufferUnlockBaseAddress(videoFrame, kFlags);
}

// Marks the async start/stop/change as finished and wakes any waiter.
- (void)signalCaptureChangeEnd {
  [_captureChangingCondition lock];
  _captureChanging = NO;
  [_captureChangingCondition signal];
  [_captureChangingCondition unlock];
}

// Blocks the caller until any in-flight capture change has completed.
- (void)waitForCaptureChangeToFinish {
  [_captureChangingCondition lock];
  while (_captureChanging) {
    [_captureChangingCondition wait];
  }
  [_captureChangingCondition unlock];
}
@end
OLDNEW
« no previous file with comments | « webrtc/modules/video_capture/objc/rtc_video_capture_objc.h ('k') | webrtc/modules/video_capture/objc/video_capture.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698