| OLD | NEW |
| 1 /* | 1 /* |
| 2 * libjingle | 2 * libjingle |
| 3 * Copyright 2015 Google Inc. | 3 * Copyright 2015 Google Inc. |
| 4 * | 4 * |
| 5 * Redistribution and use in source and binary forms, with or without | 5 * Redistribution and use in source and binary forms, with or without |
| 6 * modification, are permitted provided that the following conditions are met: | 6 * modification, are permitted provided that the following conditions are met: |
| 7 * | 7 * |
| 8 * 1. Redistributions of source code must retain the above copyright notice, | 8 * 1. Redistributions of source code must retain the above copyright notice, |
| 9 * this list of conditions and the following disclaimer. | 9 * this list of conditions and the following disclaimer. |
| 10 * 2. Redistributions in binary form must reproduce the above copyright notice, | 10 * 2. Redistributions in binary form must reproduce the above copyright notice, |
| (...skipping 15 matching lines...) |
| 26 */ | 26 */ |
| 27 | 27 |
| 28 #include "talk/app/webrtc/objc/avfoundationvideocapturer.h" | 28 #include "talk/app/webrtc/objc/avfoundationvideocapturer.h" |
| 29 | 29 |
| 30 #include "webrtc/base/bind.h" | 30 #include "webrtc/base/bind.h" |
| 31 | 31 |
| 32 #import <AVFoundation/AVFoundation.h> | 32 #import <AVFoundation/AVFoundation.h> |
| 33 #import <Foundation/Foundation.h> | 33 #import <Foundation/Foundation.h> |
| 34 #import <UIKit/UIKit.h> | 34 #import <UIKit/UIKit.h> |
| 35 | 35 |
| 36 #import "webrtc/base/objc/RTCDispatcher.h" |
| 37 |
| 36 // TODO(tkchin): support other formats. | 38 // TODO(tkchin): support other formats. |
| 37 static NSString* const kDefaultPreset = AVCaptureSessionPreset640x480; | 39 static NSString* const kDefaultPreset = AVCaptureSessionPreset640x480; |
| 38 static cricket::VideoFormat const kDefaultFormat = | 40 static cricket::VideoFormat const kDefaultFormat = |
| 39 cricket::VideoFormat(640, | 41 cricket::VideoFormat(640, |
| 40 480, | 42 480, |
| 41 cricket::VideoFormat::FpsToInterval(30), | 43 cricket::VideoFormat::FpsToInterval(30), |
| 42 cricket::FOURCC_NV12); | 44 cricket::FOURCC_NV12); |
| 43 | 45 |
| 44 // This queue is used to start and stop the capturer without blocking the | |
| 45 // calling thread. -[AVCaptureSession startRunning] blocks until the camera is | |
| 46 // running. | |
| 47 static dispatch_queue_t kBackgroundQueue = nil; | |
| 48 | |
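The block removed above follows a common AVFoundation pattern: create one private serial queue and dispatch the blocking -[AVCaptureSession startRunning] call onto it so the calling thread is never stalled. A minimal sketch of that pattern is below; the queue label, variable, and function names are illustrative, not taken from this file.

```objc
// Sketch of the private-queue pattern that kBackgroundQueue implemented.
// gCaptureQueue, the label string, and StartSessionAsync are illustrative.
#import <AVFoundation/AVFoundation.h>

static dispatch_queue_t gCaptureQueue = nil;

static void StartSessionAsync(AVCaptureSession* session) {
  static dispatch_once_t onceToken;
  dispatch_once(&onceToken, ^{
    // One lazily created serial queue keeps start/stop work ordered.
    gCaptureQueue = dispatch_queue_create("com.example.capture-session",
                                          DISPATCH_QUEUE_SERIAL);
  });
  dispatch_async(gCaptureQueue, ^{
    // -startRunning blocks until the camera is live; only this queue waits.
    [session startRunning];
  });
}
```

This change replaces the per-file queue with the shared RTCDispatcher queue used further down.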
| 49 // This class is used to capture frames using AVFoundation APIs on iOS. It is meant | 46 // This class is used to capture frames using AVFoundation APIs on iOS. It is meant |
| 50 // to be owned by an instance of AVFoundationVideoCapturer. The reason for this | 47 // to be owned by an instance of AVFoundationVideoCapturer. The reason for this |
| 51 // is that other webrtc objects own cricket::VideoCapturer, which is not | 48 // is that other webrtc objects own cricket::VideoCapturer, which is not |
| 52 // ref counted. To prevent bad behavior we do not expose this class directly. | 49 // ref counted. To prevent bad behavior we do not expose this class directly. |
| 53 @interface RTCAVFoundationVideoCapturerInternal : NSObject | 50 @interface RTCAVFoundationVideoCapturerInternal : NSObject |
| 54 <AVCaptureVideoDataOutputSampleBufferDelegate> | 51 <AVCaptureVideoDataOutputSampleBufferDelegate> |
| 55 | 52 |
| 56 @property(nonatomic, readonly) AVCaptureSession* captureSession; | 53 @property(nonatomic, readonly) AVCaptureSession* captureSession; |
| 57 @property(nonatomic, readonly) BOOL isRunning; | 54 @property(nonatomic, readonly) BOOL isRunning; |
| 58 @property(nonatomic, assign) BOOL useBackCamera; // Defaults to NO. | 55 @property(nonatomic, assign) BOOL useBackCamera; // Defaults to NO. |
| (...skipping 14 matching lines...) |
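The comment above states the ownership constraint that shapes this class: cricket::VideoCapturer is not reference counted, so the Objective-C helper must have exactly one owner, the AVFoundationVideoCapturer that creates it. A hedged sketch of that shape, with illustrative names rather than the file's actual declarations:

```objc
// Hedged sketch of the ownership direction described above; OwnerSketch and
// _helper are illustrative, not declarations from this file.
#import <Foundation/Foundation.h>

@class RTCAVFoundationVideoCapturerInternal;

namespace webrtc_sketch {

// The C++ owner holds the only strong reference to the Objective-C helper.
// Because the owner itself is not ref counted, exposing the helper elsewhere
// could let it outlive the capturer, hence the helper stays file-private.
class OwnerSketch {
 public:
  // When the owner is destroyed, ARC releases _helper automatically, so the
  // helper cannot outlive the capturer that created it.
  ~OwnerSketch() = default;

 private:
  RTCAVFoundationVideoCapturerInternal* _helper = nil;
};

}  // namespace webrtc_sketch
```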
| 73 AVCaptureVideoDataOutput* _videoOutput; | 70 AVCaptureVideoDataOutput* _videoOutput; |
| 74 // The cricket::VideoCapturer that owns this class. Should never be NULL. | 71 // The cricket::VideoCapturer that owns this class. Should never be NULL. |
| 75 webrtc::AVFoundationVideoCapturer* _capturer; | 72 webrtc::AVFoundationVideoCapturer* _capturer; |
| 76 BOOL _orientationHasChanged; | 73 BOOL _orientationHasChanged; |
| 77 } | 74 } |
| 78 | 75 |
| 79 @synthesize captureSession = _captureSession; | 76 @synthesize captureSession = _captureSession; |
| 80 @synthesize useBackCamera = _useBackCamera; | 77 @synthesize useBackCamera = _useBackCamera; |
| 81 @synthesize isRunning = _isRunning; | 78 @synthesize isRunning = _isRunning; |
| 82 | 79 |
| 83 + (void)initialize { | |
| 84 static dispatch_once_t onceToken; | |
| 85 dispatch_once(&onceToken, ^{ | |
| 86 kBackgroundQueue = dispatch_queue_create( | |
| 87 "com.google.webrtc.RTCAVFoundationCapturerBackground", | |
| 88 DISPATCH_QUEUE_SERIAL); | |
| 89 }); | |
| 90 } | |
| 91 | |
| 92 - (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer*)capturer { | 80 - (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer*)capturer { |
| 93 NSParameterAssert(capturer); | 81 NSParameterAssert(capturer); |
| 94 if (self = [super init]) { | 82 if (self = [super init]) { |
| 95 _capturer = capturer; | 83 _capturer = capturer; |
| 96 if (![self setupCaptureSession]) { | 84 if (![self setupCaptureSession]) { |
| 97 return nil; | 85 return nil; |
| 98 } | 86 } |
| 99 NSNotificationCenter* center = [NSNotificationCenter defaultCenter]; | 87 NSNotificationCenter* center = [NSNotificationCenter defaultCenter]; |
| 100 [center addObserver:self | 88 [center addObserver:self |
| 101 selector:@selector(deviceOrientationDidChange:) | 89 selector:@selector(deviceOrientationDidChange:) |
| (...skipping 23 matching lines...) |
| 125 [self updateSessionInput]; | 113 [self updateSessionInput]; |
| 126 } | 114 } |
| 127 | 115 |
| 128 - (void)startCaptureAsync { | 116 - (void)startCaptureAsync { |
| 129 if (_isRunning) { | 117 if (_isRunning) { |
| 130 return; | 118 return; |
| 131 } | 119 } |
| 132 _orientationHasChanged = NO; | 120 _orientationHasChanged = NO; |
| 133 [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications]; | 121 [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications]; |
| 134 AVCaptureSession* session = _captureSession; | 122 AVCaptureSession* session = _captureSession; |
| 135 dispatch_async(kBackgroundQueue, ^{ | 123 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession |
| 124 block:^{ |
| 136 [session startRunning]; | 125 [session startRunning]; |
| 137 }); | 126 }]; |
| 138 _isRunning = YES; | 127 _isRunning = YES; |
| 139 } | 128 } |
| 140 | 129 |
| 141 - (void)stopCaptureAsync { | 130 - (void)stopCaptureAsync { |
| 142 if (!_isRunning) { | 131 if (!_isRunning) { |
| 143 return; | 132 return; |
| 144 } | 133 } |
| 145 [_videoOutput setSampleBufferDelegate:nil queue:nullptr]; | 134 [_videoOutput setSampleBufferDelegate:nil queue:nullptr]; |
| 146 AVCaptureSession* session = _captureSession; | 135 AVCaptureSession* session = _captureSession; |
| 147 dispatch_async(kBackgroundQueue, ^{ | 136 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession |
| 137 block:^{ |
| 148 [session stopRunning]; | 138 [session stopRunning]; |
| 149 }); | 139 }]; |
| 150 [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications]; | 140 [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications]; |
| 151 _isRunning = NO; | 141 _isRunning = NO; |
| 152 } | 142 } |
| 153 | 143 |
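Both start and stop now funnel the blocking AVCaptureSession calls through the shared RTCDispatcher queue for RTCDispatcherTypeCaptureSession instead of a queue private to this file. A hedged usage sketch of that pattern follows; the wrapper function is illustrative, while the +dispatchAsyncOnType:block: call and the dispatcher type are exactly as they appear in the diff.

```objc
// Usage sketch of the dispatcher pattern adopted above; SetSessionRunning is
// an illustrative helper, the RTCDispatcher call mirrors the diff.
#import <AVFoundation/AVFoundation.h>
#import "webrtc/base/objc/RTCDispatcher.h"

static void SetSessionRunning(AVCaptureSession* session, BOOL running) {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    // startRunning/stopRunning block until the camera changes state, so they
    // run on the shared capture-session queue, not the calling thread.
    if (running) {
      [session startRunning];
    } else {
      [session stopRunning];
    }
  }];
}
```

Because every call site shares the same named dispatcher queue, starts and stops issued from different files are serialized against each other, which a queue private to this file could not guarantee.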
| 154 #pragma mark AVCaptureVideoDataOutputSampleBufferDelegate | 144 #pragma mark AVCaptureVideoDataOutputSampleBufferDelegate |
| 155 | 145 |
| 156 - (void)captureOutput:(AVCaptureOutput*)captureOutput | 146 - (void)captureOutput:(AVCaptureOutput*)captureOutput |
| 157 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer | 147 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer |
| 158 fromConnection:(AVCaptureConnection*)connection { | 148 fromConnection:(AVCaptureConnection*)connection { |
| 159 NSParameterAssert(captureOutput == _videoOutput); | 149 NSParameterAssert(captureOutput == _videoOutput); |
| (...skipping 278 matching lines...) |
| 438 | 428 |
| 439 void AVFoundationVideoCapturer::SignalFrameCapturedOnStartThread( | 429 void AVFoundationVideoCapturer::SignalFrameCapturedOnStartThread( |
| 440 const cricket::CapturedFrame* frame) { | 430 const cricket::CapturedFrame* frame) { |
| 441 RTC_DCHECK(_startThread->IsCurrent()); | 431 RTC_DCHECK(_startThread->IsCurrent()); |
| 442 // This will call a superclass method that will perform the frame conversion | 432 // This will call a superclass method that will perform the frame conversion |
| 443 // to I420. | 433 // to I420. |
| 444 SignalFrameCaptured(this, frame); | 434 SignalFrameCaptured(this, frame); |
| 445 } | 435 } |
| 446 | 436 |
| 447 } // namespace webrtc | 437 } // namespace webrtc |
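The RTC_DCHECK in SignalFrameCapturedOnStartThread implies that callers on the AVFoundation delegate thread marshal each frame over to the thread that started capture before signaling it; the conversion to I420 then happens in the superclass handler. Below is a hedged sketch of that marshaling pattern, built on rtc::Bind from the webrtc/base/bind.h include at the top of the file; the class and method names are illustrative, not the file's actual capture path.

```cpp
// Hedged sketch of hopping from the capture thread to the start thread before
// signaling a frame. FrameForwarder is illustrative; only rtc::Bind and
// rtc::Thread::Invoke are assumed from the WebRTC base library.
#include "webrtc/base/bind.h"
#include "webrtc/base/thread.h"

namespace cricket {
struct CapturedFrame;  // Forward declaration; only pointers are used here.
}  // namespace cricket

namespace sketch {

class FrameForwarder {
 public:
  explicit FrameForwarder(rtc::Thread* start_thread)
      : start_thread_(start_thread) {}

  // Called from the AVFoundation delegate thread with a captured frame.
  void ForwardFrame(const cricket::CapturedFrame* frame) {
    if (start_thread_->IsCurrent()) {
      SignalOnStartThread(frame);
      return;
    }
    // rtc::Bind packages the member call; Invoke<void> runs it synchronously
    // on the start thread, so the frame pointer stays valid for the call.
    start_thread_->Invoke<void>(
        rtc::Bind(&FrameForwarder::SignalOnStartThread, this, frame));
  }

 private:
  void SignalOnStartThread(const cricket::CapturedFrame* frame) {
    // The real code signals SignalFrameCaptured here, which converts to I420.
  }

  rtc::Thread* start_thread_;
};

}  // namespace sketch
```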