Index: talk/app/webrtc/objc/avfoundationvideocapturer.mm
diff --git a/talk/app/webrtc/objc/avfoundationvideocapturer.mm b/talk/app/webrtc/objc/avfoundationvideocapturer.mm
index e1b0f88fb6ee590b405af970fefbef4db921bd11..0f9dc6825e9135461a6d8d8c5ad713c00c2c01b3 100644
--- a/talk/app/webrtc/objc/avfoundationvideocapturer.mm
+++ b/talk/app/webrtc/objc/avfoundationvideocapturer.mm
@@ -33,6 +33,8 @@
 #import <Foundation/Foundation.h>
 #import <UIKit/UIKit.h>
 
+#import "webrtc/base/objc/RTCDispatcher.h"
+
 // TODO(tkchin): support other formats.
 static NSString* const kDefaultPreset = AVCaptureSessionPreset640x480;
 static cricket::VideoFormat const kDefaultFormat =
@@ -41,11 +43,6 @@ static cricket::VideoFormat const kDefaultFormat =
                          cricket::VideoFormat::FpsToInterval(30),
                          cricket::FOURCC_NV12);
 
-// This queue is used to start and stop the capturer without blocking the
-// calling thread. -[AVCaptureSession startRunning] blocks until the camera is
-// running.
-static dispatch_queue_t kBackgroundQueue = nil;
-
 // This class used to capture frames using AVFoundation APIs on iOS. It is meant
 // to be owned by an instance of AVFoundationVideoCapturer. The reason for this
 // because other webrtc objects own cricket::VideoCapturer, which is not
@@ -80,15 +77,6 @@ static dispatch_queue_t kBackgroundQueue = nil;
 @synthesize useBackCamera = _useBackCamera;
 @synthesize isRunning = _isRunning;
 
-+ (void)initialize {
-  static dispatch_once_t onceToken;
-  dispatch_once(&onceToken, ^{
-    kBackgroundQueue = dispatch_queue_create(
-        "com.google.webrtc.RTCAVFoundationCapturerBackground",
-        DISPATCH_QUEUE_SERIAL);
-  });
-}
-
 - (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer*)capturer {
   NSParameterAssert(capturer);
   if (self = [super init]) {
@@ -132,9 +120,10 @@ static dispatch_queue_t kBackgroundQueue = nil;
   _orientationHasChanged = NO;
   [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
   AVCaptureSession* session = _captureSession;
-  dispatch_async(kBackgroundQueue, ^{
+  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+                               block:^{
     [session startRunning];
-  });
+  }];
   _isRunning = YES;
 }
 
@@ -144,9 +133,10 @@ static dispatch_queue_t kBackgroundQueue = nil;
   }
   [_videoOutput setSampleBufferDelegate:nil queue:nullptr];
   AVCaptureSession* session = _captureSession;
-  dispatch_async(kBackgroundQueue, ^{
+  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+                               block:^{
     [session stopRunning];
-  });
+  }];
   [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
   _isRunning = NO;
 }
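
Note: webrtc/base/objc/RTCDispatcher.h is not part of this patch. As a rough illustration of why the capturer can drop its private kBackgroundQueue, the sketch below shows one way a dispatcher exposing the +dispatchAsyncOnType:block: call used above could back RTCDispatcherTypeCaptureSession with a shared serial queue. Only the method name and that enum value come from the diff; the enum layout, queue label, and +queueForType: helper are assumptions for illustration, not the actual RTCDispatcher implementation.

// Illustrative sketch only -- assumed shape of an RTCDispatcher-style helper.
// RTCDispatcherTypeCaptureSession and +dispatchAsyncOnType:block: match the
// calls in the diff above; everything else here is hypothetical.
#import <Foundation/Foundation.h>

typedef NS_ENUM(NSInteger, RTCDispatcherQueueType) {
  RTCDispatcherTypeMain,
  RTCDispatcherTypeCaptureSession,
};

@interface RTCDispatcher : NSObject
+ (void)dispatchAsyncOnType:(RTCDispatcherQueueType)dispatchType
                      block:(dispatch_block_t)block;
@end

@implementation RTCDispatcher

// A single serial queue for capture-session work keeps
// -[AVCaptureSession startRunning]/-stopRunning off the calling thread and
// ordered, which is what the removed per-class kBackgroundQueue provided.
+ (dispatch_queue_t)queueForType:(RTCDispatcherQueueType)dispatchType {
  static dispatch_queue_t captureSessionQueue = nil;
  static dispatch_once_t onceToken;
  dispatch_once(&onceToken, ^{
    captureSessionQueue = dispatch_queue_create(
        "org.webrtc.RTCDispatcherCaptureSession",  // hypothetical label
        DISPATCH_QUEUE_SERIAL);
  });
  if (dispatchType == RTCDispatcherTypeCaptureSession) {
    return captureSessionQueue;
  }
  return dispatch_get_main_queue();
}

+ (void)dispatchAsyncOnType:(RTCDispatcherQueueType)dispatchType
                      block:(dispatch_block_t)block {
  dispatch_async([self queueForType:dispatchType], block);
}

@end

Under this shape, every capturer instance funnels session start/stop onto one shared serial queue instead of each class creating and owning its own background queue.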