/*
 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#define DEFAULT_CAPTURE_DEVICE_INDEX 1
#define DEFAULT_FRAME_RATE 30
#define DEFAULT_FRAME_WIDTH 352
#define DEFAULT_FRAME_HEIGHT 288
#define ROTATE_CAPTURED_FRAME 1
#define LOW_QUALITY 1

#import "webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_objc.h"

#include "webrtc/system_wrappers/include/trace.h"

using namespace webrtc;
using namespace videocapturemodule;

@implementation VideoCaptureMacQTKitObjC

- (id)init {
  self = [super init];
  if (self) {
    [self initializeVariables];
  }
  return self;
}

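// Tear down in reverse order of construction: stop the session, close the
// device if it is still open, then release the retained QTKit objects
// (this file uses manual retain/release, not ARC).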
- (void)dealloc {
  if (_captureSession)
    [_captureSession stopRunning];

  if (_captureVideoDeviceInput) {
    if ([[_captureVideoDeviceInput device] isOpen])
      [[_captureVideoDeviceInput device] close];

    [_captureVideoDeviceInput release];
  }

  [_captureDecompressedVideoOutput release];
  [_captureSession release];
  [_captureDevices release];
  [_lock release];

  [super dealloc];
}

#pragma mark Public methods

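// Register (or clear) the owner that receives captured frames via
// IncomingFrame(). _lock keeps this assignment in sync with the capture
// callback below.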
- (void)registerOwner:(VideoCaptureMacQTKit*)owner {
  [_lock lock];
  _owner = owner;
  [_lock unlock];
}

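// Select a capture device by its QTKit unique ID, open it, and add it as an
// input to the capture session.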
- (BOOL)setCaptureDeviceById:(char*)uniqueId {
  if (uniqueId == nil || !strcmp("", uniqueId)) {
    WEBRTC_TRACE(kTraceInfo, kTraceVideoCapture, 0,
                 "Incorrect capture id argument");
    return NO;
  }

  if (!strcmp(uniqueId, _captureDeviceNameUniqueID))
    return YES;

  QTCaptureDevice* captureDevice = nil;  // Stays nil if no device matches.
  for (int index = 0; index < _captureDeviceCount; index++) {
    captureDevice = (QTCaptureDevice*)[_captureDevices objectAtIndex:index];
    char captureDeviceId[1024] = "";
    [[captureDevice uniqueID] getCString:captureDeviceId
                               maxLength:1024
                                encoding:NSUTF8StringEncoding];
    if (strcmp(uniqueId, captureDeviceId) == 0) {
      WEBRTC_TRACE(kTraceInfo, kTraceVideoCapture, 0,
                   "%s:%d Found capture device id %s as index %d",
                   __FUNCTION__, __LINE__, captureDeviceId, index);
      [[captureDevice localizedDisplayName] getCString:_captureDeviceNameUTF8
                                              maxLength:1024
                                               encoding:NSUTF8StringEncoding];
      [[captureDevice uniqueID] getCString:_captureDeviceNameUniqueID
                                 maxLength:1024
                                  encoding:NSUTF8StringEncoding];
      break;
    }
    captureDevice = nil;
  }

  if (!captureDevice)
    return NO;

  NSError* error;
  if (![captureDevice open:&error]) {
    WEBRTC_TRACE(kTraceError, kTraceVideoCapture, 0,
                 "Failed to open capture device: %s", _captureDeviceNameUTF8);
    return NO;
  }

  if (_captureVideoDeviceInput) {
    [_captureVideoDeviceInput release];
  }
  _captureVideoDeviceInput =
      [[QTCaptureDeviceInput alloc] initWithDevice:captureDevice];

  if (![_captureSession addInput:_captureVideoDeviceInput error:&error]) {
    WEBRTC_TRACE(kTraceError, kTraceVideoCapture, 0,
                 "Failed to add input from %s to the capture session",
                 _captureDeviceNameUTF8);
    return NO;
  }

  WEBRTC_TRACE(kTraceInfo, kTraceVideoCapture, 0,
               "%s:%d successfully added capture device: %s", __FUNCTION__,
               __LINE__, _captureDeviceNameUTF8);
  return YES;
}

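// Update the requested resolution and frame rate. The new pixel buffer
// attributes are applied on the main thread via performSelectorOnMainThread:,
// blocking until the change has been made.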
- (void)setCaptureHeight:(int)height width:(int)width frameRate:(int)frameRate {
  _frameWidth = width;
  _frameHeight = height;
  _frameRate = frameRate;

  NSDictionary* captureDictionary =
      [NSDictionary dictionaryWithObjectsAndKeys:
          [NSNumber numberWithDouble:_frameWidth],
          (id)kCVPixelBufferWidthKey,
          [NSNumber numberWithDouble:_frameHeight],
          (id)kCVPixelBufferHeightKey,
          [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB],
          (id)kCVPixelBufferPixelFormatTypeKey,
          nil];
  [_captureDecompressedVideoOutput
      performSelectorOnMainThread:@selector(setPixelBufferAttributes:)
                       withObject:captureDictionary
                    waitUntilDone:YES];
}

- (void)startCapture {
  if (_capturing)
    return;

  [_captureSession startRunning];
  _capturing = YES;
}

- (void)stopCapture {
  if (!_capturing)
    return;

  [_captureSession stopRunning];
  _capturing = NO;
}

#pragma mark Private methods

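// One-time setup: return NO if QTKit's QTCaptureSession class is unavailable
// at runtime, otherwise reset state to the defaults and create the capture
// session and decompressed video output.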
- (BOOL)initializeVariables {
  if (NSClassFromString(@"QTCaptureSession") == nil)
    return NO;

  memset(_captureDeviceNameUTF8, 0, 1024);
  _framesDelivered = 0;
  _framesRendered = 0;
  _captureDeviceCount = 0;
  _capturing = NO;
  _captureInitialized = NO;
  _frameRate = DEFAULT_FRAME_RATE;
  _frameWidth = DEFAULT_FRAME_WIDTH;
  _frameHeight = DEFAULT_FRAME_HEIGHT;
  _lock = [[NSLock alloc] init];
  _captureSession = [[QTCaptureSession alloc] init];
  _captureDecompressedVideoOutput =
      [[QTCaptureDecompressedVideoOutput alloc] init];
  [_captureDecompressedVideoOutput setDelegate:self];

  [self getCaptureDevices];
  if (![self initializeVideoCapture])
    return NO;

  return YES;
}

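// Refresh the cached list of attached video input devices.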
- (void)getCaptureDevices {
  if (_captureDevices)
    [_captureDevices release];

  _captureDevices = [[NSArray alloc] initWithArray:
      [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo]];

  _captureDeviceCount = _captureDevices.count;
}

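// Configure the decompressed video output (32ARGB at the requested size and
// frame interval, dropping late frames) and attach it to the capture session.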
- (BOOL)initializeVideoCapture {
  NSDictionary* captureDictionary =
      [NSDictionary dictionaryWithObjectsAndKeys:
          [NSNumber numberWithDouble:_frameWidth],
          (id)kCVPixelBufferWidthKey,
          [NSNumber numberWithDouble:_frameHeight],
          (id)kCVPixelBufferHeightKey,
          [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB],
          (id)kCVPixelBufferPixelFormatTypeKey,
          nil];

  [_captureDecompressedVideoOutput setPixelBufferAttributes:captureDictionary];
  [_captureDecompressedVideoOutput setAutomaticallyDropsLateVideoFrames:YES];
  [_captureDecompressedVideoOutput
      setMinimumVideoFrameInterval:(NSTimeInterval)1/(float)_frameRate];

  NSError* error;
  if (![_captureSession addOutput:_captureDecompressedVideoOutput error:&error])
    return NO;

  return YES;
}

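// QTCaptureDecompressedVideoOutput delegate: a late frame was dropped.
// Intentionally a no-op; dropped frames are simply discarded.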
- (void)captureOutput:(QTCaptureOutput *)captureOutput
    didDropVideoFrameWithSampleBuffer:(QTSampleBuffer *)sampleBuffer
                       fromConnection:(QTCaptureConnection *)connection {
}

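// QTCaptureDecompressedVideoOutput delegate: a decoded frame is available.
// Lock the pixel buffer, describe it with a VideoCaptureCapability, and hand
// the raw bytes to the owner while holding _lock.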
- (void)captureOutput:(QTCaptureOutput *)captureOutput
      didOutputVideoFrame:(CVImageBufferRef)videoFrame
         withSampleBuffer:(QTSampleBuffer *)sampleBuffer
           fromConnection:(QTCaptureConnection *)connection {

  [_lock lock];
  if (!_owner) {
    [_lock unlock];
    return;
  }

  const int kFlags = 0;
  if (CVPixelBufferLockBaseAddress(videoFrame, kFlags) == kCVReturnSuccess) {
    void *baseAddress = CVPixelBufferGetBaseAddress(videoFrame);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(videoFrame);
    size_t frameHeight = CVPixelBufferGetHeight(videoFrame);
    size_t frameSize = bytesPerRow * frameHeight;

    VideoCaptureCapability tempCaptureCapability;
    tempCaptureCapability.width = _frameWidth;
    tempCaptureCapability.height = _frameHeight;
    tempCaptureCapability.maxFPS = _frameRate;
    // TODO(wu) : Update actual type and not hard-coded value.
    tempCaptureCapability.rawType = kVideoBGRA;

    _owner->IncomingFrame((unsigned char*)baseAddress, frameSize,
                          tempCaptureCapability, 0);
    CVPixelBufferUnlockBaseAddress(videoFrame, kFlags);
  }
  [_lock unlock];
  _framesDelivered++;
  _framesRendered++;
}

@end