Index: webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCCameraVideoCapturer.m
diff --git a/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCCameraVideoCapturer.m b/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCCameraVideoCapturer.m
index a2290c2458405a1d39458f64eae820abfdc14bc9..0b58297a0fbbba9751eae8c5ca5a3377c325e1fd 100644
--- a/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCCameraVideoCapturer.m
+++ b/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCCameraVideoCapturer.m
@@ -19,6 +19,7 @@
#endif
#import "RTCDispatcher+Private.h"
+#import "RTCImageHelper.h"
const int64_t kNanosecondsPerSecond = 1000000000;
@@ -35,11 +36,13 @@ static inline BOOL IsMediaSubTypeSupported(FourCharCode mediaSubType) {
AVCaptureVideoDataOutput *_videoDataOutput;
AVCaptureSession *_captureSession;
AVCaptureDevice *_currentDevice;
- RTCVideoRotation _rotation;
BOOL _hasRetriedOnFatalError;
BOOL _isRunning;
// Will the session be running once all asynchronous operations have been completed?
BOOL _willBeRunning;
+#if TARGET_OS_IPHONE
+ UIDeviceOrientation _orientation;
+#endif
}
@synthesize frameQueue = _frameQueue;
@@ -56,6 +59,7 @@ static inline BOOL IsMediaSubTypeSupported(FourCharCode mediaSubType) {
}
NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
#if TARGET_OS_IPHONE
+ _orientation = UIDeviceOrientationPortrait;
[center addObserver:self
selector:@selector(deviceOrientationDidChange:)
name:UIDeviceOrientationDidChangeNotification
- (void)startCaptureWithDevice:(AVCaptureDevice *)device
format:(AVCaptureDeviceFormat *)format
fps:(NSInteger)fps {
- _willBeRunning = true;
+ _willBeRunning = YES;
[RTCDispatcher
dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
@@ -141,12 +145,12 @@ static inline BOOL IsMediaSubTypeSupported(FourCharCode mediaSubType) {
[_captureSession startRunning];
[self updateDeviceCaptureFormat:format fps:fps];
[_currentDevice unlockForConfiguration];
- _isRunning = true;
+ _isRunning = YES;
}];
}
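As context for the API exercised by this hunk, a typical client call sequence might look like the sketch below; the delegate, device, format, and fps values are placeholder assumptions for illustration, not taken from this CL.

    // Sketch: start capture on a camera with one of its supported formats, then stop later.
    // `videoSource` stands in for any object conforming to RTCVideoCapturerDelegate.
    RTCCameraVideoCapturer *capturer =
        [[RTCCameraVideoCapturer alloc] initWithDelegate:videoSource];
    AVCaptureDevice *device = [RTCCameraVideoCapturer captureDevices].firstObject;
    AVCaptureDeviceFormat *format =
        [RTCCameraVideoCapturer supportedFormatsForDevice:device].lastObject;
    [capturer startCaptureWithDevice:device format:format fps:30];
    // ...
    [capturer stopCapture];  // Tears down asynchronously on the capture-session queue.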
- (void)stopCapture {
- _willBeRunning = false;
+ _willBeRunning = NO;
[RTCDispatcher
dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
@@ -160,7 +164,7 @@ static inline BOOL IsMediaSubTypeSupported(FourCharCode mediaSubType) {
#if TARGET_OS_IPHONE
[[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
#endif
- _isRunning = false;
+ _isRunning = NO;
}];
}
@@ -192,11 +196,48 @@ static inline BOOL IsMediaSubTypeSupported(FourCharCode mediaSubType) {
return;
}
+#if TARGET_OS_IPHONE
+ // Default to portrait orientation on iPhone.
+ RTCVideoRotation rotation = RTCVideoRotation_90;
+ // Check here which camera this frame came from, to avoid any race conditions.
+ AVCaptureDeviceInput *deviceInput =
+ (AVCaptureDeviceInput *)((AVCaptureInputPort *)connection.inputPorts.firstObject).input;
+ BOOL usingFrontCamera = deviceInput.device.position == AVCaptureDevicePositionFront;
+ // Check the image's EXIF for the camera the frame actually came from, since the frame could have
+ // been delayed because we set alwaysDiscardsLateVideoFrames to NO.
+ AVCaptureDevicePosition cameraPosition = [RTCImageHelper cameraFromSampleBuffer:sampleBuffer];
tkchin_webrtc 2017/07/24 21:30:37:
nit: name the method more appropriately for the re
jtt_webrtc 2017/07/24 22:29:51:
Done.
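The helper under discussion, -[RTCImageHelper cameraFromSampleBuffer:], is not part of this hunk, so its final name is not visible here. As context only, a minimal sketch of what such an EXIF-based position lookup could look like; the function name DevicePositionForSampleBuffer and the lens-model substring heuristic are assumptions for illustration, not taken from this CL.

    #import <AVFoundation/AVFoundation.h>
    #import <CoreMedia/CoreMedia.h>

    // Sketch: derive the capture device position from the sample buffer's EXIF attachment.
    // The EXIF dictionary travels with each buffer, so it reflects the camera a late-delivered
    // frame actually came from.
    static AVCaptureDevicePosition DevicePositionForSampleBuffer(CMSampleBufferRef sampleBuffer) {
      CFTypeRef exifAttachment = CMGetAttachment(sampleBuffer, CFSTR("{Exif}"), NULL);
      if (!exifAttachment || CFGetTypeID(exifAttachment) != CFDictionaryGetTypeID()) {
        return AVCaptureDevicePositionUnspecified;
      }
      NSDictionary *exif = (__bridge NSDictionary *)exifAttachment;
      // EXIF "LensModel" strings on iPhone typically look like "iPhone 7 front camera 2.87mm f/2.2".
      NSString *lensModel = exif[@"LensModel"];
      if ([lensModel containsString:@"front"]) {
        return AVCaptureDevicePositionFront;
      }
      if ([lensModel containsString:@"back"]) {
        return AVCaptureDevicePositionBack;
      }
      return AVCaptureDevicePositionUnspecified;
    }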
+ if (cameraPosition != AVCaptureDevicePositionUnspecified) {
+ usingFrontCamera = cameraPosition == AVCaptureDevicePositionFront;
+ }
+ switch (_orientation) {
tkchin_webrtc 2017/07/24 21:30:37:
orientation is set from the capture session queue,
jtt_webrtc 2017/07/24 22:29:51:
Acknowledged.
+ case UIDeviceOrientationPortrait:
+ rotation = RTCVideoRotation_90;
+ break;
+ case UIDeviceOrientationPortraitUpsideDown:
+ rotation = RTCVideoRotation_270;
+ break;
+ case UIDeviceOrientationLandscapeLeft:
+ rotation = usingFrontCamera ? RTCVideoRotation_180 : RTCVideoRotation_0;
+ break;
+ case UIDeviceOrientationLandscapeRight:
+ rotation = usingFrontCamera ? RTCVideoRotation_0 : RTCVideoRotation_180;
+ break;
+ case UIDeviceOrientationFaceUp:
+ case UIDeviceOrientationFaceDown:
+ case UIDeviceOrientationUnknown:
+ // Ignore.
+ break;
+ }
+#else
+ // No rotation on Mac.
+ RTCVideoRotation rotation = RTCVideoRotation_0;
+#endif
+
RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer];
int64_t timeStampNs = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) *
- kNanosecondsPerSecond;
+ kNanosecondsPerSecond;
RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer
- rotation:_rotation
+ rotation:rotation
timeStampNs:timeStampNs];
[self.delegate capturer:self didCaptureVideoFrame:videoFrame];
}
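Regarding the tkchin_webrtc comment above: _orientation is written in updateOrientation (next hunk) on the RTCDispatcher capture-session queue, while captureOutput:didOutputSampleBuffer:fromConnection: reads it on the video data output's callback queue, so the read crosses queues. As context, a rough sketch of the write path, assuming the orientation-notification handler already in this file (registered above) dispatches onto the capture-session queue:

    // Sketch (assumed shape of the existing handler for UIDeviceOrientationDidChangeNotification):
    // the write to _orientation happens on the capture-session queue...
    - (void)deviceOrientationDidChange:(NSNotification *)notification {
      [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                                   block:^{
                                     [self updateOrientation];  // Writes _orientation.
                                   }];
    }
    // ...while the per-frame read of _orientation in captureOutput: happens on the
    // AVCaptureVideoDataOutput sampleBufferCallbackQueue, i.e. a different queue.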
@@ -399,26 +440,7 @@ static inline BOOL IsMediaSubTypeSupported(FourCharCode mediaSubType) {
NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession],
@"updateOrientation must be called on the capture queue.");
#if TARGET_OS_IPHONE
- BOOL usingFrontCamera = _currentDevice.position == AVCaptureDevicePositionFront;
- switch ([UIDevice currentDevice].orientation) {
- case UIDeviceOrientationPortrait:
- _rotation = RTCVideoRotation_90;
- break;
- case UIDeviceOrientationPortraitUpsideDown:
- _rotation = RTCVideoRotation_270;
- break;
- case UIDeviceOrientationLandscapeLeft:
- _rotation = usingFrontCamera ? RTCVideoRotation_180 : RTCVideoRotation_0;
- break;
- case UIDeviceOrientationLandscapeRight:
- _rotation = usingFrontCamera ? RTCVideoRotation_0 : RTCVideoRotation_180;
- break;
- case UIDeviceOrientationFaceUp:
- case UIDeviceOrientationFaceDown:
- case UIDeviceOrientationUnknown:
- // Ignore.
- break;
- }
+ _orientation = [UIDevice currentDevice].orientation;
#endif
}