| OLD | NEW |
| (Empty) | |
| 1 /* |
| 2 * Copyright 2017 The WebRTC project authors. All Rights Reserved. |
| 3 * |
| 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ |
| 10 |
| 11 #import <OCMock/OCMock.h> |
| 12 |
| 13 #if TARGET_OS_IPHONE |
| 14 #import <UIKit/UIKit.h> |
| 15 #endif |
| 16 |
| 17 #include "webrtc/base/gunit.h" |
| 18 |
| 19 #import <WebRTC/RTCCameraVideoCapturer.h> |
| 20 #import <WebRTC/RTCDispatcher.h> |
| 21 #import <WebRTC/RTCVideoFrame.h> |
| 22 |
#if TARGET_OS_IPHONE
// Helper method. Builds a structurally valid CMSampleBuffer (a CVPixelBuffer
// plus a matching format description) for feeding the capturer in tests.
// NOTE(review): the CGBitmapContext below is not backed by |pixelBuffer|, so
// the drawn image never reaches the sample buffer — the tests only need a
// valid buffer, not its contents. Caller owns the returned buffer and must
// CFRelease it.
CMSampleBufferRef createTestSampleBufferRef() {

  // This image is already in the testing bundle.
  UIImage *image = [UIImage imageNamed:@"Default.png"];
  CGSize size = image.size;
  CGImageRef imageRef = [image CGImage];

  CVPixelBufferRef pixelBuffer = nullptr;
  CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height, kCVPixelFormatType_32ARGB, nil,
                      &pixelBuffer);

  CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
  // 8 bits per component; 8 * width bytes per row is more than the 4 * width
  // minimum for 32-bit ARGB, which is fine since the contents are unused.
  CGContextRef context = CGBitmapContextCreate(nil, size.width, size.height, 8, 8 * size.width,
                                               rgbColorSpace, kCGImageAlphaPremultipliedFirst);

  CGContextDrawImage(
      context, CGRectMake(0, 0, CGImageGetWidth(imageRef), CGImageGetHeight(imageRef)), imageRef);

  CGColorSpaceRelease(rgbColorSpace);
  CGContextRelease(context);

  // We don't really care about the timing.
  CMSampleTimingInfo timing = {kCMTimeInvalid, kCMTimeInvalid, kCMTimeInvalid};
  CMVideoFormatDescriptionRef description = nullptr;
  CMVideoFormatDescriptionCreateForImageBuffer(NULL, pixelBuffer, &description);

  CMSampleBufferRef sampleBuffer = nullptr;
  CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, YES, NULL, NULL, description,
                                     &timing, &sampleBuffer);
  // Balance the Create-rule calls above; the sample buffer retains what it
  // needs. |description| was previously leaked here.
  CFRelease(description);
  CFRelease(pixelBuffer);

  return sampleBuffer;
}
#endif
// Expose the capturer's conformance to
// AVCaptureVideoDataOutputSampleBufferDelegate so tests can call
// -captureOutput:didOutputSampleBuffer:fromConnection: on it directly.
@interface RTCCameraVideoCapturer (Tests)<AVCaptureVideoDataOutputSampleBufferDelegate>
@end
| 63 |
// Test harness driven by the gtest TEST() wrappers at the bottom of this
// file. Deliberately an NSObject, not an XCTestCase: setup/tearDown are
// invoked manually by each TEST().
@interface RTCCameraVideoCapturerTests : NSObject
// OCMock protocol mock of RTCVideoCapturerDelegate; used to expect/reject
// frame-delivery callbacks.
@property(nonatomic, strong) id delegateMock;
// OCMock class mock of AVCaptureDevice (also stubs its class methods).
@property(nonatomic, strong) id deviceMock;
// Object under test.
@property(nonatomic, strong) RTCCameraVideoCapturer *capturer;
@end
| 69 |
@implementation RTCCameraVideoCapturerTests
@synthesize delegateMock = _delegateMock;
@synthesize capturer = _capturer;
@synthesize deviceMock = _deviceMock;

// Manual fixture setup; called explicitly by each TEST() wrapper (this class
// is not an XCTestCase, so there is no automatic -setUp).
- (void)setup {
  self.delegateMock = OCMProtocolMock(@protocol(RTCVideoCapturerDelegate));
  self.capturer = [[RTCCameraVideoCapturer alloc] initWithDelegate:self.delegateMock];
  self.deviceMock = [self createDeviceMock];
}

// Manual fixture teardown; stops all mocks so stubbed class methods do not
// bleed into subsequent tests.
- (void)tearDown {
  [self.delegateMock stopMocking];
  [self.deviceMock stopMocking];
  self.delegateMock = nil;
  self.deviceMock = nil;
  self.capturer = nil;
}

#pragma mark - utils

// Returns a fresh OCMock class mock of AVCaptureDevice.
- (id)createDeviceMock {
  return OCMClassMock([AVCaptureDevice class]);
}

#pragma mark - test cases

// The capturer's session should use input-priority preset (manual format
// control), not touch the application audio session, and have exactly one
// output (the video data output).
- (void)testSetupSession {
  AVCaptureSession *session = self.capturer.captureSession;
  EXPECT_TRUE(session != nil);

#if TARGET_OS_IPHONE
  EXPECT_EQ(session.sessionPreset, AVCaptureSessionPresetInputPriority);
  EXPECT_EQ(session.usesApplicationAudioSession, NO);
#endif
  EXPECT_EQ(session.outputs.count, 1u);
}

// The video output should keep late frames and deliver sample buffers to the
// capturer itself (see the category at the top of this file).
- (void)testSetupSessionOutput {
  AVCaptureVideoDataOutput *videoOutput = self.capturer.captureSession.outputs[0];
  EXPECT_EQ(videoOutput.alwaysDiscardsLateVideoFrames, NO);
  EXPECT_EQ(videoOutput.sampleBufferDelegate, self.capturer);
}

// +supportedFormatsForDevice: should keep only the formats whose pixel format
// the capturer supports (the two 420 formats) and drop the 422 one.
- (void)testSupportedFormatsForDevice {
  // given
  id validFormat1 = OCMClassMock([AVCaptureDeviceFormat class]);
  CMVideoFormatDescriptionRef format;

  // We don't care about width and height so arbitrary 123 and 456 values.
  int width = 123;
  int height = 456;
  CMVideoFormatDescriptionCreate(nil, kCVPixelFormatType_420YpCbCr8PlanarFullRange, width, height,
                                 nil, &format);
  OCMStub([validFormat1 formatDescription]).andReturn(format);

  id validFormat2 = OCMClassMock([AVCaptureDeviceFormat class]);
  // NOTE(review): |format| is reused without releasing the previous
  // description — the three Create-rule descriptions in this test are never
  // CFRelease'd. They must outlive the stubs that return them, but could be
  // released in the cleanup section below; minor leak in test code.
  CMVideoFormatDescriptionCreate(nil, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, width,
                                 height, nil, &format);
  OCMStub([validFormat2 formatDescription]).andReturn(format);

  id invalidFormat = OCMClassMock([AVCaptureDeviceFormat class]);
  CMVideoFormatDescriptionCreate(nil, kCVPixelFormatType_422YpCbCr8_yuvs, width, height, nil,
                                 &format);
  OCMStub([invalidFormat formatDescription]).andReturn(format);

  NSArray *formats = @[ validFormat1, validFormat2, invalidFormat ];
  OCMStub([self.deviceMock formats]).andReturn(formats);

  // when
  NSArray *supportedFormats = [RTCCameraVideoCapturer supportedFormatsForDevice:self.deviceMock];

  // then
  EXPECT_EQ(supportedFormats.count, 2u);
  EXPECT_TRUE([supportedFormats containsObject:validFormat1]);
  EXPECT_TRUE([supportedFormats containsObject:validFormat2]);
  // cleanup
  [validFormat1 stopMocking];
  [validFormat2 stopMocking];
  [invalidFormat stopMocking];
  validFormat1 = nil;
  validFormat2 = nil;
  invalidFormat = nil;
}

// +captureDevices should return only video devices, not audio devices.
- (void)testCaptureDevices {
  OCMStub([self.deviceMock devicesWithMediaType:AVMediaTypeVideo]).andReturn(@[ [NSObject new] ]);
  OCMStub([self.deviceMock devicesWithMediaType:AVMediaTypeAudio]).andReturn(@[ [NSObject new] ]);

  NSArray *captureDevices = [RTCCameraVideoCapturer captureDevices];

  EXPECT_EQ(captureDevices.count, 1u);
}

// A NULL sample buffer must be dropped without invoking the delegate.
- (void)testDelegateCallbackNotCalledWhenInvalidBuffer {
  // given
  CMSampleBufferRef sampleBuffer = nullptr;
  [[self.delegateMock reject] capturer:[OCMArg any] didCaptureVideoFrame:[OCMArg any]];

  // when
  [self.capturer captureOutput:self.capturer.captureSession.outputs[0]
         didOutputSampleBuffer:sampleBuffer
                fromConnection:nil];

  // then
  [self.delegateMock verify];
}


// After a device-orientation-change notification, a delivered frame should
// carry the rotation matching the (mocked) device orientation.
- (void)testDelegateCallbackWithValidBufferAndOrientationUpdate {
#if TARGET_OS_IPHONE
  // given
  UIDevice *currentDeviceMock = OCMClassMock([UIDevice class]);
  // UpsideDown -> RTCVideoRotation_270.
  OCMStub(currentDeviceMock.orientation).andReturn(UIDeviceOrientationPortraitUpsideDown);
  // Route [UIDevice currentDevice] to the orientation mock above.
  id classMock = OCMClassMock([UIDevice class]);
  OCMStub([classMock currentDevice]).andReturn(currentDeviceMock);

  CMSampleBufferRef sampleBuffer = createTestSampleBufferRef();

  // then
  [[self.delegateMock expect] capturer:self.capturer
                  didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTCVideoFrame *expectedFrame) {
                    EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_270);
                    return YES;
                  }]];

  // when
  NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
  [center postNotificationName:UIDeviceOrientationDidChangeNotification object:nil];

  // We need to wait for the dispatch to finish.
  WAIT(0, 1000);

  [self.capturer captureOutput:self.capturer.captureSession.outputs[0]
         didOutputSampleBuffer:sampleBuffer
                fromConnection:nil];

  [self.delegateMock verify];

  // Cast back to id: the variable is typed UIDevice* but holds an OCMock.
  [(id)currentDeviceMock stopMocking];
  currentDeviceMock = nil;
  [classMock stopMocking];
  classMock = nil;
  CFRelease(sampleBuffer);
#endif
}

@end
| 219 |
// gtest entry point: drives the ObjC harness through setup -> test -> teardown.
TEST(RTCCameraVideoCapturerTests, SetupSession) {
  RTCCameraVideoCapturerTests *harness = [[RTCCameraVideoCapturerTests alloc] init];
  [harness setup];
  [harness testSetupSession];
  [harness tearDown];
}
| 226 |
// gtest entry point: drives the ObjC harness through setup -> test -> teardown.
TEST(RTCCameraVideoCapturerTests, SetupSessionOutput) {
  RTCCameraVideoCapturerTests *harness = [[RTCCameraVideoCapturerTests alloc] init];
  [harness setup];
  [harness testSetupSessionOutput];
  [harness tearDown];
}
| 233 |
// gtest entry point: drives the ObjC harness through setup -> test -> teardown.
TEST(RTCCameraVideoCapturerTests, SupportedFormatsForDevice) {
  RTCCameraVideoCapturerTests *harness = [[RTCCameraVideoCapturerTests alloc] init];
  [harness setup];
  [harness testSupportedFormatsForDevice];
  [harness tearDown];
}
| 240 |
// gtest entry point: drives the ObjC harness through setup -> test -> teardown.
TEST(RTCCameraVideoCapturerTests, CaptureDevices) {
  RTCCameraVideoCapturerTests *harness = [[RTCCameraVideoCapturerTests alloc] init];
  [harness setup];
  [harness testCaptureDevices];
  [harness tearDown];
}
| 247 |
// gtest entry point: drives the ObjC harness through setup -> test -> teardown.
TEST(RTCCameraVideoCapturerTests, DelegateCallbackNotCalledWhenInvalidBuffer) {
  RTCCameraVideoCapturerTests *harness = [[RTCCameraVideoCapturerTests alloc] init];
  [harness setup];
  [harness testDelegateCallbackNotCalledWhenInvalidBuffer];
  [harness tearDown];
}
| 254 |
// gtest entry point: drives the ObjC harness through setup -> test -> teardown.
TEST(RTCCameraVideoCapturerTests, DelegateCallbackWithValidBufferAndOrientationUpdate) {
  RTCCameraVideoCapturerTests *harness = [[RTCCameraVideoCapturerTests alloc] init];
  [harness setup];
  [harness testDelegateCallbackWithValidBufferAndOrientationUpdate];
  [harness tearDown];
}
| OLD | NEW |