| Index: webrtc/api/objc/avfoundationvideocapturer.mm
| diff --git a/talk/app/webrtc/objc/avfoundationvideocapturer.mm b/webrtc/api/objc/avfoundationvideocapturer.mm
| similarity index 81%
| copy from talk/app/webrtc/objc/avfoundationvideocapturer.mm
| copy to webrtc/api/objc/avfoundationvideocapturer.mm
| index 0f9dc6825e9135461a6d8d8c5ad713c00c2c01b3..a3f0f44160b2e5c885da1bf6d66a2b9eb142b3d5 100644
| --- a/talk/app/webrtc/objc/avfoundationvideocapturer.mm
| +++ b/webrtc/api/objc/avfoundationvideocapturer.mm
|
| @@ -1,31 +1,14 @@
|
| /*
|
| - * libjingle
|
| - * Copyright 2015 Google Inc.
|
| + * Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
| *
|
| - * Redistribution and use in source and binary forms, with or without
|
| - * modification, are permitted provided that the following conditions are met:
|
| - *
|
| - * 1. Redistributions of source code must retain the above copyright notice,
|
| - * this list of conditions and the following disclaimer.
|
| - * 2. Redistributions in binary form must reproduce the above copyright notice,
|
| - * this list of conditions and the following disclaimer in the documentation
|
| - * and/or other materials provided with the distribution.
|
| - * 3. The name of the author may not be used to endorse or promote products
|
| - * derived from this software without specific prior written permission.
|
| - *
|
| - * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
|
| - * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
|
| - * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
|
| - * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| - * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
| - * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
|
| - * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
|
| - * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
|
| - * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
|
| - * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| + * Use of this source code is governed by a BSD-style license
|
| + * that can be found in the LICENSE file in the root of the source
|
| + * tree. An additional intellectual property rights grant can be found
|
| + * in the file PATENTS. All contributing project authors may
|
| + * be found in the AUTHORS file in the root of the source tree.
|
| */
|
|
|
| -#include "talk/app/webrtc/objc/avfoundationvideocapturer.h"
|
| +#include "webrtc/api/objc/avfoundationvideocapturer.h"
|
|
|
| #include "webrtc/base/bind.h"
|
|
|
| @@ -50,14 +33,14 @@ static cricket::VideoFormat const kDefaultFormat =
|
| @interface RTCAVFoundationVideoCapturerInternal : NSObject
|
| <AVCaptureVideoDataOutputSampleBufferDelegate>
|
|
|
| -@property(nonatomic, readonly) AVCaptureSession* captureSession;
|
| +@property(nonatomic, readonly) AVCaptureSession *captureSession;
|
| @property(nonatomic, readonly) BOOL isRunning;
|
| @property(nonatomic, assign) BOOL useBackCamera; // Defaults to NO.
|
|
|
| // We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it
|
| // when we receive frames. This is safe because this object should be owned by
|
| // it.
|
| -- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer*)capturer;
|
| +- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer;
|
| - (void)startCaptureAsync;
|
| - (void)stopCaptureAsync;
|
|
|
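The comment in the hunk above leans on an ownership contract: the C++ webrtc::AVFoundationVideoCapturer owns this Objective-C helper and outlives it, so the raw back-pointer cannot dangle. A minimal sketch of what that contract looks like on the C++ side, with a hypothetical member name _internalCapturer (none of this code appears in the hunk; it only illustrates the claim):

  // Inside the file's namespace webrtc block. Illustrative only: the owner
  // creates the helper, hands it `this`, and tears it down before the owner
  // itself is destroyed, so the helper's raw _capturer pointer stays valid.
  AVFoundationVideoCapturer::AVFoundationVideoCapturer() {
    _internalCapturer =
        [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];
  }

  AVFoundationVideoCapturer::~AVFoundationVideoCapturer() {
    [_internalCapturer stopCaptureAsync];  // assumption: stop before teardown
    _internalCapturer = nil;               // under ARC this releases the helper
  }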
| @@ -65,11 +48,11 @@ static cricket::VideoFormat const kDefaultFormat =
|
|
|
| @implementation RTCAVFoundationVideoCapturerInternal {
|
| // Keep pointers to inputs for convenience.
|
| - AVCaptureDeviceInput* _frontDeviceInput;
|
| - AVCaptureDeviceInput* _backDeviceInput;
|
| - AVCaptureVideoDataOutput* _videoOutput;
|
| + AVCaptureDeviceInput *_frontDeviceInput;
|
| + AVCaptureDeviceInput *_backDeviceInput;
|
| + AVCaptureVideoDataOutput *_videoOutput;
|
| // The cricket::VideoCapturer that owns this class. Should never be NULL.
|
| - webrtc::AVFoundationVideoCapturer* _capturer;
|
| + webrtc::AVFoundationVideoCapturer *_capturer;
|
| BOOL _orientationHasChanged;
|
| }
|
|
|
| @@ -77,14 +60,14 @@ static cricket::VideoFormat const kDefaultFormat =
|
| @synthesize useBackCamera = _useBackCamera;
|
| @synthesize isRunning = _isRunning;
|
|
|
| -- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer*)capturer {
|
| +- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer {
|
| NSParameterAssert(capturer);
|
| if (self = [super init]) {
|
| _capturer = capturer;
|
| if (![self setupCaptureSession]) {
|
| return nil;
|
| }
|
| - NSNotificationCenter* center = [NSNotificationCenter defaultCenter];
|
| + NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
|
| [center addObserver:self
|
| selector:@selector(deviceOrientationDidChange:)
|
| name:UIDeviceOrientationDidChangeNotification
|
| @@ -92,7 +75,7 @@ static cricket::VideoFormat const kDefaultFormat =
|
| [center addObserverForName:AVCaptureSessionRuntimeErrorNotification
|
| object:nil
|
| queue:nil
|
| - usingBlock:^(NSNotification* notification) {
|
| + usingBlock:^(NSNotification *notification) {
|
| NSLog(@"Capture session error: %@", notification.userInfo);
|
| }];
|
| }
|
| @@ -143,9 +126,9 @@ static cricket::VideoFormat const kDefaultFormat =
|
|
|
| #pragma mark AVCaptureVideoDataOutputSampleBufferDelegate
|
|
|
| -- (void)captureOutput:(AVCaptureOutput*)captureOutput
|
| +- (void)captureOutput:(AVCaptureOutput *)captureOutput
|
| didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
|
| - fromConnection:(AVCaptureConnection*)connection {
|
| + fromConnection:(AVCaptureConnection *)connection {
|
| NSParameterAssert(captureOutput == _videoOutput);
|
| if (!_isRunning) {
|
| return;
|
| @@ -153,9 +136,9 @@ static cricket::VideoFormat const kDefaultFormat =
|
| _capturer->CaptureSampleBuffer(sampleBuffer);
|
| }
|
|
|
| -- (void)captureOutput:(AVCaptureOutput*)captureOutput
|
| +- (void)captureOutput:(AVCaptureOutput *)captureOutput
|
| didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
|
| - fromConnection:(AVCaptureConnection*)connection {
|
| + fromConnection:(AVCaptureConnection *)connection {
|
| NSLog(@"Dropped sample buffer.");
|
| }
|
|
|
| @@ -164,7 +147,7 @@ static cricket::VideoFormat const kDefaultFormat =
|
| - (BOOL)setupCaptureSession {
|
| _captureSession = [[AVCaptureSession alloc] init];
|
| #if defined(__IPHONE_7_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_7_0
|
| - NSString* version = [[UIDevice currentDevice] systemVersion];
|
| + NSString *version = [[UIDevice currentDevice] systemVersion];
|
| if ([version integerValue] >= 7) {
|
| _captureSession.usesApplicationAudioSession = NO;
|
| }
|
| @@ -179,7 +162,7 @@ static cricket::VideoFormat const kDefaultFormat =
|
| // currently supported on iPhone / iPad.
|
| _videoOutput = [[AVCaptureVideoDataOutput alloc] init];
|
| _videoOutput.videoSettings = @{
|
| - (NSString*)kCVPixelBufferPixelFormatTypeKey :
|
| + (NSString *)kCVPixelBufferPixelFormatTypeKey :
|
| @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
|
| };
|
| _videoOutput.alwaysDiscardsLateVideoFrames = NO;
|
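For context on the settings in the hunk above: kCVPixelFormatType_420YpCbCr8BiPlanarFullRange is the bi-planar NV12 layout whose Y and UV planes the capture path copies out later in this file. A rough sketch of how such an output is typically configured and handed a delegate queue; the queue label and the exact attach point are assumptions, not part of this diff, and `self` stands for a class that adopts AVCaptureVideoDataOutputSampleBufferDelegate like the one above:

  AVCaptureVideoDataOutput *videoOutput = [[AVCaptureVideoDataOutput alloc] init];
  videoOutput.videoSettings = @{
    (NSString *)kCVPixelBufferPixelFormatTypeKey :
        @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
  };
  videoOutput.alwaysDiscardsLateVideoFrames = NO;
  // Deliver sample buffers on a dedicated serial queue instead of the main thread.
  dispatch_queue_t captureQueue =
      dispatch_queue_create("org.example.videocapture", DISPATCH_QUEUE_SERIAL);
  [videoOutput setSampleBufferDelegate:self queue:captureQueue];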
| @@ -192,9 +175,9 @@ static cricket::VideoFormat const kDefaultFormat =
|
| [_captureSession addOutput:_videoOutput];
|
|
|
| // Find the capture devices.
|
| - AVCaptureDevice* frontCaptureDevice = nil;
|
| - AVCaptureDevice* backCaptureDevice = nil;
|
| - for (AVCaptureDevice* captureDevice in
|
| + AVCaptureDevice *frontCaptureDevice = nil;
|
| + AVCaptureDevice *backCaptureDevice = nil;
|
| + for (AVCaptureDevice *captureDevice in
|
| [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
|
| if (captureDevice.position == AVCaptureDevicePositionBack) {
|
| backCaptureDevice = captureDevice;
|
| @@ -209,7 +192,7 @@ static cricket::VideoFormat const kDefaultFormat =
|
| }
|
|
|
| // Set up the session inputs.
|
| - NSError* error = nil;
|
| + NSError *error = nil;
|
| _frontDeviceInput =
|
| [AVCaptureDeviceInput deviceInputWithDevice:frontCaptureDevice
|
| error:&error];
|
| @@ -238,13 +221,13 @@ static cricket::VideoFormat const kDefaultFormat =
|
| return YES;
|
| }
|
|
|
| -- (void)deviceOrientationDidChange:(NSNotification*)notification {
|
| +- (void)deviceOrientationDidChange:(NSNotification *)notification {
|
| _orientationHasChanged = YES;
|
| [self updateOrientation];
|
| }
|
|
|
| - (void)updateOrientation {
|
| - AVCaptureConnection* connection =
|
| + AVCaptureConnection *connection =
|
| [_videoOutput connectionWithMediaType:AVMediaTypeVideo];
|
| if (!connection.supportsVideoOrientation) {
|
| // TODO(tkchin): set rotation bit on frames.
|
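The TODO above covers the case where the connection cannot rotate frames itself; the rest of -updateOrientation, which this hunk does not show, typically maps the device orientation onto the connection. A sketch of that standard AVFoundation pattern, not a copy of the elided code:

  AVCaptureConnection *connection =
      [_videoOutput connectionWithMediaType:AVMediaTypeVideo];
  if (connection.supportsVideoOrientation) {
    switch ([UIDevice currentDevice].orientation) {
      case UIDeviceOrientationPortraitUpsideDown:
        connection.videoOrientation = AVCaptureVideoOrientationPortraitUpsideDown;
        break;
      case UIDeviceOrientationLandscapeLeft:
        // Device landscape-left corresponds to camera landscape-right.
        connection.videoOrientation = AVCaptureVideoOrientationLandscapeRight;
        break;
      case UIDeviceOrientationLandscapeRight:
        connection.videoOrientation = AVCaptureVideoOrientationLandscapeLeft;
        break;
      default:
        connection.videoOrientation = AVCaptureVideoOrientationPortrait;
        break;
    }
  }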
| @@ -278,8 +261,8 @@ static cricket::VideoFormat const kDefaultFormat =
|
| - (void)updateSessionInput {
|
| // Update the current session input to match what's stored in _useBackCamera.
|
| [_captureSession beginConfiguration];
|
| - AVCaptureDeviceInput* oldInput = _backDeviceInput;
|
| - AVCaptureDeviceInput* newInput = _frontDeviceInput;
|
| + AVCaptureDeviceInput *oldInput = _backDeviceInput;
|
| + AVCaptureDeviceInput *newInput = _frontDeviceInput;
|
| if (_useBackCamera) {
|
| oldInput = _frontDeviceInput;
|
| newInput = _backDeviceInput;
|
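The hunk above sits inside -updateSessionInput, which swaps the front and back AVCaptureDeviceInput while the session is being reconfigured. The rest of the pattern is the standard beginConfiguration/commitConfiguration pair; a condensed sketch under the assumption that the elided tail of the method follows it, with error handling reduced to a log:

  [_captureSession beginConfiguration];
  AVCaptureDeviceInput *oldInput =
      _useBackCamera ? _frontDeviceInput : _backDeviceInput;
  AVCaptureDeviceInput *newInput =
      _useBackCamera ? _backDeviceInput : _frontDeviceInput;
  if (oldInput) {
    [_captureSession removeInput:oldInput];
  }
  if (newInput && [_captureSession canAddInput:newInput]) {
    [_captureSession addInput:newInput];
  } else {
    NSLog(@"Failed to add capture session input.");
  }
  [_captureSession commitConfiguration];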
| @@ -383,8 +366,8 @@ void AVFoundationVideoCapturer::CaptureSampleBuffer(
|
|
|
| static size_t const kYPlaneIndex = 0;
|
| static size_t const kUVPlaneIndex = 1;
|
| - uint8_t* yPlaneAddress =
|
| - (uint8_t*)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, kYPlaneIndex);
|
| + uint8_t *yPlaneAddress =
|
| + (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, kYPlaneIndex);
|
| size_t yPlaneHeight =
|
| CVPixelBufferGetHeightOfPlane(imageBuffer, kYPlaneIndex);
|
| size_t yPlaneWidth =
|
| @@ -399,8 +382,8 @@ void AVFoundationVideoCapturer::CaptureSampleBuffer(
|
| yPlaneBytesPerRow * yPlaneHeight + uvPlaneBytesPerRow * uvPlaneHeight;
|
|
|
| // Sanity check assumption that planar bytes are contiguous.
|
| - uint8_t* uvPlaneAddress =
|
| - (uint8_t*)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, kUVPlaneIndex);
|
| + uint8_t *uvPlaneAddress =
|
| + (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, kUVPlaneIndex);
|
| RTC_DCHECK(
|
| uvPlaneAddress == yPlaneAddress + yPlaneHeight * yPlaneBytesPerRow);
|
|
|
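The arithmetic above assumes the NV12 buffer's two planes sit back to back: the frame is yPlaneBytesPerRow * yPlaneHeight bytes of Y followed by uvPlaneBytesPerRow * uvPlaneHeight bytes of interleaved UV, so the UV base address must equal the Y base address plus the Y plane's size, which is exactly what the RTC_DCHECK asserts. A standalone sketch of the same check against an arbitrary NV12 CVPixelBufferRef (the variable names are illustrative, and pixelBuffer is assumed to come from CMSampleBufferGetImageBuffer):

  CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
  uint8_t *y = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
  uint8_t *uv = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
  size_t yStride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
  size_t yHeight = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0);
  size_t uvStride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);
  size_t uvHeight = CVPixelBufferGetHeightOfPlane(pixelBuffer, 1);
  // Total bytes the capturer would copy for one frame.
  size_t frameSize = yStride * yHeight + uvStride * uvHeight;
  // Contiguous layout: the UV plane starts immediately after the Y plane,
  // so the whole frame can be treated as one frameSize-byte block.
  bool contiguous = (uv == y + yStride * yHeight);
  CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);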
| @@ -427,7 +410,7 @@ void AVFoundationVideoCapturer::CaptureSampleBuffer(
|
| }
|
|
|
| void AVFoundationVideoCapturer::SignalFrameCapturedOnStartThread(
|
| - const cricket::CapturedFrame* frame) {
|
| + const cricket::CapturedFrame *frame) {
|
| RTC_DCHECK(_startThread->IsCurrent());
|
| // This will call a superclass method that will perform the frame conversion
|
| // to I420.
|
|
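SignalFrameCapturedOnStartThread runs on _startThread, presumably the rtc::Thread on which capture was started, while the AVFoundation delegate callback arrives on a capture queue; that hop is why the file includes webrtc/base/bind.h. A sketch of one way to make the hop with the rtc::Thread and rtc::Bind API of this vintage; the actual call site is not shown in this excerpt, and frame is assumed to be the cricket::CapturedFrame built in CaptureSampleBuffer:

  // On the capture queue: bounce the prepared frame to the start thread.
  // Invoke is synchronous, so the stack-allocated frame stays valid until
  // SignalFrameCapturedOnStartThread has returned.
  _startThread->Invoke<void>(
      rtc::Bind(&AVFoundationVideoCapturer::SignalFrameCapturedOnStartThread,
                this, &frame));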
|