| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright 2017 The WebRTC project authors. All Rights Reserved. | 2 * Copyright 2017 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| 11 #import <Foundation/Foundation.h> | 11 #import <Foundation/Foundation.h> |
| 12 | 12 |
| 13 #import "WebRTC/RTCCameraVideoCapturer.h" | 13 #import "WebRTC/RTCCameraVideoCapturer.h" |
| 14 #import "WebRTC/RTCLogging.h" | 14 #import "WebRTC/RTCLogging.h" |
| 15 #import "WebRTC/RTCVideoFrameBuffer.h" | 15 #import "WebRTC/RTCVideoFrameBuffer.h" |
| 16 | 16 |
| 17 #if TARGET_OS_IPHONE | 17 #if TARGET_OS_IPHONE |
| 18 #import "WebRTC/UIDevice+RTCDevice.h" | 18 #import "WebRTC/UIDevice+RTCDevice.h" |
| 19 #endif | 19 #endif |
| 20 | 20 |
| 21 #import "AVCaptureSession+Device.h" | 21 #import "AVCaptureSession+DevicePosition.h" |
| 22 #import "RTCDispatcher+Private.h" | 22 #import "RTCDispatcher+Private.h" |
| 23 | 23 |
| 24 const int64_t kNanosecondsPerSecond = 1000000000; | 24 const int64_t kNanosecondsPerSecond = 1000000000; |
| 25 | 25 |
| 26 static inline BOOL IsMediaSubTypeSupported(FourCharCode mediaSubType) { | 26 static inline BOOL IsMediaSubTypeSupported(FourCharCode mediaSubType) { |
| 27 return (mediaSubType == kCVPixelFormatType_420YpCbCr8PlanarFullRange || | 27 return (mediaSubType == kCVPixelFormatType_420YpCbCr8PlanarFullRange || |
| 28 mediaSubType == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange); | 28 mediaSubType == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange); |
| 29 } | 29 } |
| 30 | 30 |
| 31 @interface RTCCameraVideoCapturer ()<AVCaptureVideoDataOutputSampleBufferDelegate> | 31 @interface RTCCameraVideoCapturer ()<AVCaptureVideoDataOutputSampleBufferDelegate> |
| (...skipping 159 matching lines...) |
| 191 } | 191 } |
| 192 | 192 |
| 193 CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); | 193 CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); |
| 194 if (pixelBuffer == nil) { | 194 if (pixelBuffer == nil) { |
| 195 return; | 195 return; |
| 196 } | 196 } |
| 197 | 197 |
| 198 #if TARGET_OS_IPHONE | 198 #if TARGET_OS_IPHONE |
| 199 // Default to portrait orientation on iPhone. | 199 // Default to portrait orientation on iPhone. |
| 200 RTCVideoRotation rotation = RTCVideoRotation_90; | 200 RTCVideoRotation rotation = RTCVideoRotation_90; |
| 201 // Check here, which camera this frame is from, to avoid any race conditions. | 201 BOOL usingFrontCamera = NO; |
| 202 AVCaptureDeviceInput *deviceInput = | 202 // Check the image's EXIF for the camera the image came from as the image could have been |
| 203 (AVCaptureDeviceInput *)((AVCaptureInputPort *)connection.inputPorts.firstObject).input; | |
| 204 BOOL usingFrontCamera = deviceInput.device.position == AVCaptureDevicePositionFront; | |
| 205 // Check the image's EXIF for the actual camera the image came as the image could have been | |
| 206 // delayed as we set alwaysDiscardsLateVideoFrames to NO. | 203 // delayed as we set alwaysDiscardsLateVideoFrames to NO. |
| 207 AVCaptureDevicePosition cameraPosition = | 204 AVCaptureDevicePosition cameraPosition = |
| 208 [AVCaptureSession devicePositionForSampleBuffer:sampleBuffer]; | 205 [AVCaptureSession devicePositionForSampleBuffer:sampleBuffer]; |
| 209 if (cameraPosition != AVCaptureDevicePositionUnspecified) { | 206 if (cameraPosition != AVCaptureDevicePositionUnspecified) { |
| 210 usingFrontCamera = cameraPosition == AVCaptureDevicePositionFront; | 207 usingFrontCamera = AVCaptureDevicePositionFront == cameraPosition; |
| | 208 } else { |
| | 209 AVCaptureDeviceInput *deviceInput = |
| | 210 (AVCaptureDeviceInput *)((AVCaptureInputPort *)connection.inputPorts.firstObject).input; |
| | 211 usingFrontCamera = AVCaptureDevicePositionFront == deviceInput.device.position; |
| 211 } | 212 } |
| 212 switch (_orientation) { | 213 switch (_orientation) { |
| 213 case UIDeviceOrientationPortrait: | 214 case UIDeviceOrientationPortrait: |
| 214 rotation = RTCVideoRotation_90; | 215 rotation = RTCVideoRotation_90; |
| 215 break; | 216 break; |
| 216 case UIDeviceOrientationPortraitUpsideDown: | 217 case UIDeviceOrientationPortraitUpsideDown: |
| 217 rotation = RTCVideoRotation_270; | 218 rotation = RTCVideoRotation_270; |
| 218 break; | 219 break; |
| 219 case UIDeviceOrientationLandscapeLeft: | 220 case UIDeviceOrientationLandscapeLeft: |
| 220 rotation = usingFrontCamera ? RTCVideoRotation_180 : RTCVideoRotation_0; | 221 rotation = usingFrontCamera ? RTCVideoRotation_180 : RTCVideoRotation_0; |
| (...skipping 217 matching lines...) |
| 438 | 439 |
| 439 - (void)updateOrientation { | 440 - (void)updateOrientation { |
| 440 NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession], | 441 NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession], |
| 441 @"updateOrientation must be called on the capture queue."); | 442 @"updateOrientation must be called on the capture queue."); |
| 442 #if TARGET_OS_IPHONE | 443 #if TARGET_OS_IPHONE |
| 443 _orientation = [UIDevice currentDevice].orientation; | 444 _orientation = [UIDevice currentDevice].orientation; |
| 444 #endif | 445 #endif |
| 445 } | 446 } |
| 446 | 447 |
| 447 @end | 448 @end |
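
Note on the captureOutput change above: the new code prefers the camera position recorded alongside the sample buffer (via the AVCaptureSession+DevicePosition category this CL imports) and only falls back to inspecting the connection's device input when that position is unspecified, because with alwaysDiscardsLateVideoFrames set to NO a delayed buffer can arrive after the session has already switched cameras. A minimal sketch of that fallback order, assuming the category from this CL is available; the helper name is hypothetical and not part of the patch:

  #import <AVFoundation/AVFoundation.h>
  #import "AVCaptureSession+DevicePosition.h"

  // Sketch only: mirrors the fallback order in the new -captureOutput:... code.
  static BOOL IsFrontCameraSampleBuffer(CMSampleBufferRef sampleBuffer,
                                        AVCaptureConnection *connection) {
    // Prefer the position recorded with the buffer itself; a late buffer may
    // predate a camera switch.
    AVCaptureDevicePosition position =
        [AVCaptureSession devicePositionForSampleBuffer:sampleBuffer];
    if (position != AVCaptureDevicePositionUnspecified) {
      return position == AVCaptureDevicePositionFront;
    }
    // Fall back to the device currently feeding this connection.
    AVCaptureDeviceInput *deviceInput =
        (AVCaptureDeviceInput *)((AVCaptureInputPort *)connection.inputPorts.firstObject).input;
    return deviceInput.device.position == AVCaptureDevicePositionFront;
  }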
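
Note on the IsMediaSubTypeSupported helper shown above: it gates which pixel formats the capturer accepts. As an illustration only (not part of this patch), one way it could be used to filter a capture device's formats with the standard CoreMedia accessor; the helper name is hypothetical:

  #import <AVFoundation/AVFoundation.h>
  #import <CoreMedia/CoreMedia.h>

  // Illustration only: keep just the device formats whose pixel format the
  // capturer supports, using IsMediaSubTypeSupported as defined above.
  static NSArray<AVCaptureDeviceFormat *> *SupportedFormats(AVCaptureDevice *device) {
    NSMutableArray<AVCaptureDeviceFormat *> *result = [NSMutableArray array];
    for (AVCaptureDeviceFormat *format in device.formats) {
      FourCharCode mediaSubType =
          CMFormatDescriptionGetMediaSubType(format.formatDescription);
      if (IsMediaSubTypeSupported(mediaSubType)) {
        [result addObject:format];
      }
    }
    return result;
  }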