| OLD | NEW | 
|    1 /* |    1 /* | 
|    2  *  Copyright 2015 The WebRTC project authors. All Rights Reserved. |    2  *  Copyright 2015 The WebRTC project authors. All Rights Reserved. | 
|    3  * |    3  * | 
|    4  *  Use of this source code is governed by a BSD-style license |    4  *  Use of this source code is governed by a BSD-style license | 
|    5  *  that can be found in the LICENSE file in the root of the source |    5  *  that can be found in the LICENSE file in the root of the source | 
|    6  *  tree. An additional intellectual property rights grant can be found |    6  *  tree. An additional intellectual property rights grant can be found | 
|    7  *  in the file PATENTS.  All contributing project authors may |    7  *  in the file PATENTS.  All contributing project authors may | 
|    8  *  be found in the AUTHORS file in the root of the source tree. |    8  *  be found in the AUTHORS file in the root of the source tree. | 
|    9  */ |    9  */ | 
|   10  |   10  | 
|   11 #include "avfoundationvideocapturer.h" |   11 #include "avfoundationvideocapturer.h" | 
|   12  |   12  | 
|   13 #import <AVFoundation/AVFoundation.h> |   13 #import <AVFoundation/AVFoundation.h> | 
|   14 #import <Foundation/Foundation.h> |   14 #import <Foundation/Foundation.h> | 
|   15 #if TARGET_OS_IPHONE |   15 #if TARGET_OS_IPHONE | 
|   16 #import <UIKit/UIKit.h> |   16 #import <UIKit/UIKit.h> | 
|   17 #endif |   17 #endif | 
|   18  |   18  | 
|   19 #import "RTCDispatcher+Private.h" |   19 #import "RTCDispatcher+Private.h" | 
|   20 #import "WebRTC/RTCLogging.h" |   20 #import "WebRTC/RTCLogging.h" | 
|   21 #if TARGET_OS_IPHONE |   21 #if TARGET_OS_IPHONE | 
|   22 #import "WebRTC/UIDevice+RTCDevice.h" |   22 #import "WebRTC/UIDevice+RTCDevice.h" | 
|   23 #endif |   23 #endif | 
|   24  |   24  | 
 |   25 #include "libyuv/rotate.h" | 
 |   26  | 
|   25 #include "webrtc/base/bind.h" |   27 #include "webrtc/base/bind.h" | 
|   26 #include "webrtc/base/checks.h" |   28 #include "webrtc/base/checks.h" | 
|   27 #include "webrtc/base/thread.h" |   29 #include "webrtc/base/thread.h" | 
|   28 #include "webrtc/common_video/include/corevideo_frame_buffer.h" |   30 #include "webrtc/common_video/include/corevideo_frame_buffer.h" | 
 |   31 #include "webrtc/common_video/rotation.h" | 
|   29  |   32  | 
|   30 struct AVCaptureSessionPresetResolution { |   33 struct AVCaptureSessionPresetResolution { | 
|   31   NSString *sessionPreset; |   34   NSString *sessionPreset; | 
|   32   int width; |   35   int width; | 
|   33   int height; |   36   int height; | 
|   34 }; |   37 }; | 
|   35  |   38  | 
|   36 #if TARGET_OS_IPHONE |   39 #if TARGET_OS_IPHONE | 
|   37 static const AVCaptureSessionPresetResolution kAvailablePresets[] = { |   40 static const AVCaptureSessionPresetResolution kAvailablePresets[] = { | 
|   38   { AVCaptureSessionPreset352x288, 352, 288}, |   41   { AVCaptureSessionPreset352x288, 352, 288}, | 
| (...skipping 52 matching lines...) | 
|   91  |   94  | 
|   92 @end |   95 @end | 
|   93  |   96  | 
|   94 @implementation RTCAVFoundationVideoCapturerInternal { |   97 @implementation RTCAVFoundationVideoCapturerInternal { | 
|   95   // Keep pointers to inputs for convenience. |   98   // Keep pointers to inputs for convenience. | 
|   96   AVCaptureDeviceInput *_frontCameraInput; |   99   AVCaptureDeviceInput *_frontCameraInput; | 
|   97   AVCaptureDeviceInput *_backCameraInput; |  100   AVCaptureDeviceInput *_backCameraInput; | 
|   98   AVCaptureVideoDataOutput *_videoDataOutput; |  101   AVCaptureVideoDataOutput *_videoDataOutput; | 
|   99   // The cricket::VideoCapturer that owns this class. Should never be NULL. |  102   // The cricket::VideoCapturer that owns this class. Should never be NULL. | 
|  100   webrtc::AVFoundationVideoCapturer *_capturer; |  103   webrtc::AVFoundationVideoCapturer *_capturer; | 
|  101   BOOL _orientationHasChanged; |  104   webrtc::VideoRotation _rotation; | 
|  102   BOOL _hasRetriedOnFatalError; |  105   BOOL _hasRetriedOnFatalError; | 
|  103   BOOL _isRunning; |  106   BOOL _isRunning; | 
|  104   BOOL _hasStarted; |  107   BOOL _hasStarted; | 
|  105   rtc::CriticalSection _crit; |  108   rtc::CriticalSection _crit; | 
|  106 } |  109 } | 
|  107  |  110  | 
|  108 @synthesize captureSession = _captureSession; |  111 @synthesize captureSession = _captureSession; | 
|  109 @synthesize frameQueue = _frameQueue; |  112 @synthesize frameQueue = _frameQueue; | 
|  110 @synthesize useBackCamera = _useBackCamera; |  113 @synthesize useBackCamera = _useBackCamera; | 
|  111 @synthesize hasStarted = _hasStarted; |  114 @synthesize hasStarted = _hasStarted; | 
| (...skipping 109 matching lines...) | 
|  221 } |  224 } | 
|  222  |  225  | 
|  223 // Called from WebRTC thread. |  226 // Called from WebRTC thread. | 
|  224 - (void)start { |  227 - (void)start { | 
|  225   if (self.hasStarted) { |  228   if (self.hasStarted) { | 
|  226     return; |  229     return; | 
|  227   } |  230   } | 
|  228   self.hasStarted = YES; |  231   self.hasStarted = YES; | 
|  229   [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession |  232   [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | 
|  230                                block:^{ |  233                                block:^{ | 
|  231     _orientationHasChanged = NO; |  234 #if TARGET_OS_IPHONE | 
 |  235     // Default to portrait orientation on iPhone. This will be reset in | 
 |  236     // updateOrientation unless orientation is unknown/faceup/facedown. | 
 |  237     _rotation = webrtc::kVideoRotation_90; | 
 |  238 #else | 
 |  239     // No rotation on Mac. | 
 |  240     _rotation = webrtc::kVideoRotation_0; | 
 |  241 #endif | 
|  232     [self updateOrientation]; |  242     [self updateOrientation]; | 
|  233 #if TARGET_OS_IPHONE |  243 #if TARGET_OS_IPHONE | 
|  234     [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications]; |  244     [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications]; | 
|  235 #endif |  245 #endif | 
|  236     AVCaptureSession *captureSession = self.captureSession; |  246     AVCaptureSession *captureSession = self.captureSession; | 
|  237     [captureSession startRunning]; |  247     [captureSession startRunning]; | 
|  238   }]; |  248   }]; | 
|  239 } |  249 } | 
|  240  |  250  | 
|  241 // Called from same thread as start. |  251 // Called from same thread as start. | 
| (...skipping 14 matching lines...) | 
|  256 #endif |  266 #endif | 
|  257   }]; |  267   }]; | 
|  258 } |  268 } | 
|  259  |  269  | 
|  260 #pragma mark iOS notifications |  270 #pragma mark iOS notifications | 
|  261  |  271  | 
|  262 #if TARGET_OS_IPHONE |  272 #if TARGET_OS_IPHONE | 
|  263 - (void)deviceOrientationDidChange:(NSNotification *)notification { |  273 - (void)deviceOrientationDidChange:(NSNotification *)notification { | 
|  264   [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession |  274   [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | 
|  265                                block:^{ |  275                                block:^{ | 
|  266     _orientationHasChanged = YES; |  | 
|  267     [self updateOrientation]; |  276     [self updateOrientation]; | 
|  268   }]; |  277   }]; | 
|  269 } |  278 } | 
|  270 #endif |  279 #endif | 
|  271  |  280  | 
|  272 #pragma mark AVCaptureVideoDataOutputSampleBufferDelegate |  281 #pragma mark AVCaptureVideoDataOutputSampleBufferDelegate | 
|  273  |  282  | 
|  274 - (void)captureOutput:(AVCaptureOutput *)captureOutput |  283 - (void)captureOutput:(AVCaptureOutput *)captureOutput | 
|  275     didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer |  284     didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer | 
|  276            fromConnection:(AVCaptureConnection *)connection { |  285            fromConnection:(AVCaptureConnection *)connection { | 
|  277   NSParameterAssert(captureOutput == _videoDataOutput); |  286   NSParameterAssert(captureOutput == _videoDataOutput); | 
|  278   if (!self.hasStarted) { |  287   if (!self.hasStarted) { | 
|  279     return; |  288     return; | 
|  280   } |  289   } | 
|  281   _capturer->CaptureSampleBuffer(sampleBuffer); |  290   _capturer->CaptureSampleBuffer(sampleBuffer, _rotation); | 
|  282 } |  291 } | 
|  283  |  292  | 
|  284 - (void)captureOutput:(AVCaptureOutput *)captureOutput |  293 - (void)captureOutput:(AVCaptureOutput *)captureOutput | 
|  285     didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer |  294     didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer | 
|  286          fromConnection:(AVCaptureConnection *)connection { |  295          fromConnection:(AVCaptureConnection *)connection { | 
|  287   RTCLogError(@"Dropped sample buffer."); |  296   RTCLogError(@"Dropped sample buffer."); | 
|  288 } |  297 } | 
|  289  |  298  | 
|  290 #pragma mark - AVCaptureSession notifications |  299 #pragma mark - AVCaptureSession notifications | 
|  291  |  300  | 
| (...skipping 211 matching lines...) | 
|  503   if (![device lockForConfiguration:&error]) { |  512   if (![device lockForConfiguration:&error]) { | 
|  504     RTCLogError(@"Failed to lock device for configuration. Error: %@", error.localizedDescription); |  513     RTCLogError(@"Failed to lock device for configuration. Error: %@", error.localizedDescription); | 
|  505     return; |  514     return; | 
|  506   } |  515   } | 
|  507   device.activeVideoMinFrameDuration = minFrameDuration; |  516   device.activeVideoMinFrameDuration = minFrameDuration; | 
|  508   [device unlockForConfiguration]; |  517   [device unlockForConfiguration]; | 
|  509 } |  518 } | 
|  510  |  519  | 
|  511 // Called from capture session queue. |  520 // Called from capture session queue. | 
|  512 - (void)updateOrientation { |  521 - (void)updateOrientation { | 
|  513   AVCaptureConnection *connection = |  | 
|  514       [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo]; |  | 
|  515   if (!connection.supportsVideoOrientation) { |  | 
|  516     // TODO(tkchin): set rotation bit on frames. |  | 
|  517     return; |  | 
|  518   } |  | 
|  519 #if TARGET_OS_IPHONE |  522 #if TARGET_OS_IPHONE | 
|  520   AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait; |  | 
|  521   switch ([UIDevice currentDevice].orientation) { |  523   switch ([UIDevice currentDevice].orientation) { | 
|  522     case UIDeviceOrientationPortrait: |  524     case UIDeviceOrientationPortrait: | 
|  523       orientation = AVCaptureVideoOrientationPortrait; |  525       _rotation = webrtc::kVideoRotation_90; | 
|  524       break; |  526       break; | 
|  525     case UIDeviceOrientationPortraitUpsideDown: |  527     case UIDeviceOrientationPortraitUpsideDown: | 
|  526       orientation = AVCaptureVideoOrientationPortraitUpsideDown; |  528       _rotation = webrtc::kVideoRotation_270; | 
|  527       break; |  529       break; | 
|  528     case UIDeviceOrientationLandscapeLeft: |  530     case UIDeviceOrientationLandscapeLeft: | 
|  529       orientation = AVCaptureVideoOrientationLandscapeRight; |  531       _rotation = webrtc::kVideoRotation_180; | 
|  530       break; |  532       break; | 
|  531     case UIDeviceOrientationLandscapeRight: |  533     case UIDeviceOrientationLandscapeRight: | 
|  532       orientation = AVCaptureVideoOrientationLandscapeLeft; |  534       _rotation = webrtc::kVideoRotation_0; | 
|  533       break; |  535       break; | 
|  534     case UIDeviceOrientationFaceUp: |  536     case UIDeviceOrientationFaceUp: | 
|  535     case UIDeviceOrientationFaceDown: |  537     case UIDeviceOrientationFaceDown: | 
|  536     case UIDeviceOrientationUnknown: |  538     case UIDeviceOrientationUnknown: | 
|  537       if (!_orientationHasChanged) { |  539       // Ignore. | 
|  538         connection.videoOrientation = orientation; |  540       break; | 
|  539       } |  | 
|  540       return; |  | 
|  541   } |  541   } | 
|  542   connection.videoOrientation = orientation; |  | 
|  543 #endif |  542 #endif | 
|  544 } |  543 } | 
|  545  |  544  | 
|  546 // Update the current session input to match what's stored in _useBackCamera. |  545 // Update the current session input to match what's stored in _useBackCamera. | 
|  547 - (void)updateSessionInputForUseBackCamera:(BOOL)useBackCamera { |  546 - (void)updateSessionInputForUseBackCamera:(BOOL)useBackCamera { | 
|  548   [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession |  547   [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | 
|  549                                block:^{ |  548                                block:^{ | 
|  550     [_captureSession beginConfiguration]; |  549     [_captureSession beginConfiguration]; | 
|  551     AVCaptureDeviceInput *oldInput = _backCameraInput; |  550     AVCaptureDeviceInput *oldInput = _backCameraInput; | 
|  552     AVCaptureDeviceInput *newInput = _frontCameraInput; |  551     AVCaptureDeviceInput *newInput = _frontCameraInput; | 
| (...skipping 18 matching lines...) | 
|  571  |  570  | 
|  572 @end |  571 @end | 
|  573  |  572  | 
|  574 namespace webrtc { |  573 namespace webrtc { | 
|  575  |  574  | 
|  576 enum AVFoundationVideoCapturerMessageType : uint32_t { |  575 enum AVFoundationVideoCapturerMessageType : uint32_t { | 
|  577   kMessageTypeFrame, |  576   kMessageTypeFrame, | 
|  578 }; |  577 }; | 
|  579  |  578  | 
|  580 struct AVFoundationFrame { |  579 struct AVFoundationFrame { | 
|  581   AVFoundationFrame(CVImageBufferRef buffer, int64_t time) |  580   AVFoundationFrame(CVImageBufferRef buffer, | 
|  582     : image_buffer(buffer), capture_time(time) {} |  581                     webrtc::VideoRotation rotation, | 
 |  582                     int64_t time) | 
 |  583       : image_buffer(buffer), rotation(rotation), capture_time(time) {} | 
|  583   CVImageBufferRef image_buffer; |  584   CVImageBufferRef image_buffer; | 
 |  585   webrtc::VideoRotation rotation; | 
|  584   int64_t capture_time; |  586   int64_t capture_time; | 
|  585 }; |  587 }; | 
|  586  |  588  | 
|  587 AVFoundationVideoCapturer::AVFoundationVideoCapturer() |  589 AVFoundationVideoCapturer::AVFoundationVideoCapturer() | 
|  588     : _capturer(nil), _startThread(nullptr) { |  590     : _capturer(nil), _startThread(nullptr) { | 
|  589   // Set our supported formats. This matches kAvailablePresets. |  591   // Set our supported formats. This matches kAvailablePresets. | 
|  590   _capturer = |  592   _capturer = | 
|  591       [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this]; |  593       [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this]; | 
|  592  |  594  | 
|  593   std::vector<cricket::VideoFormat> supported_formats; |  595   std::vector<cricket::VideoFormat> supported_formats; | 
| (...skipping 87 matching lines...) | 
|  681  |  683  | 
|  682 void AVFoundationVideoCapturer::SetUseBackCamera(bool useBackCamera) { |  684 void AVFoundationVideoCapturer::SetUseBackCamera(bool useBackCamera) { | 
|  683   _capturer.useBackCamera = useBackCamera; |  685   _capturer.useBackCamera = useBackCamera; | 
|  684 } |  686 } | 
|  685  |  687  | 
|  686 bool AVFoundationVideoCapturer::GetUseBackCamera() const { |  688 bool AVFoundationVideoCapturer::GetUseBackCamera() const { | 
|  687   return _capturer.useBackCamera; |  689   return _capturer.useBackCamera; | 
|  688 } |  690 } | 
|  689  |  691  | 
|  690 void AVFoundationVideoCapturer::CaptureSampleBuffer( |  692 void AVFoundationVideoCapturer::CaptureSampleBuffer( | 
|  691     CMSampleBufferRef sampleBuffer) { |  693     CMSampleBufferRef sample_buffer, webrtc::VideoRotation rotation) { | 
|  692   if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || |  694   if (CMSampleBufferGetNumSamples(sample_buffer) != 1 || | 
|  693       !CMSampleBufferIsValid(sampleBuffer) || |  695       !CMSampleBufferIsValid(sample_buffer) || | 
|  694       !CMSampleBufferDataIsReady(sampleBuffer)) { |  696       !CMSampleBufferDataIsReady(sample_buffer)) { | 
|  695     return; |  697     return; | 
|  696   } |  698   } | 
|  697  |  699  | 
|  698   CVImageBufferRef image_buffer = CMSampleBufferGetImageBuffer(sampleBuffer); |  700   CVImageBufferRef image_buffer = CMSampleBufferGetImageBuffer(sample_buffer); | 
|  699   if (image_buffer == NULL) { |  701   if (image_buffer == NULL) { | 
|  700     return; |  702     return; | 
|  701   } |  703   } | 
|  702  |  704  | 
|  703   // Retain the buffer and post it to the webrtc thread. It will be released |  705   // Retain the buffer and post it to the webrtc thread. It will be released | 
|  704   // after it has successfully been signaled. |  706   // after it has successfully been signaled. | 
|  705   CVBufferRetain(image_buffer); |  707   CVBufferRetain(image_buffer); | 
|  706   AVFoundationFrame frame(image_buffer, rtc::TimeNanos()); |  708   AVFoundationFrame frame(image_buffer, rotation, rtc::TimeNanos()); | 
|  707   _startThread->Post(RTC_FROM_HERE, this, kMessageTypeFrame, |  709   _startThread->Post(RTC_FROM_HERE, this, kMessageTypeFrame, | 
|  708                      new rtc::TypedMessageData<AVFoundationFrame>(frame)); |  710                      new rtc::TypedMessageData<AVFoundationFrame>(frame)); | 
|  709 } |  711 } | 
|  710  |  712  | 
|  711 void AVFoundationVideoCapturer::OnMessage(rtc::Message *msg) { |  713 void AVFoundationVideoCapturer::OnMessage(rtc::Message *msg) { | 
|  712   switch (msg->message_id) { |  714   switch (msg->message_id) { | 
|  713     case kMessageTypeFrame: { |  715     case kMessageTypeFrame: { | 
|  714       rtc::TypedMessageData<AVFoundationFrame>* data = |  716       rtc::TypedMessageData<AVFoundationFrame>* data = | 
|  715         static_cast<rtc::TypedMessageData<AVFoundationFrame>*>(msg->pdata); |  717         static_cast<rtc::TypedMessageData<AVFoundationFrame>*>(msg->pdata); | 
|  716       const AVFoundationFrame& frame = data->data(); |  718       const AVFoundationFrame& frame = data->data(); | 
|  717       OnFrameMessage(frame.image_buffer, frame.capture_time); |  719       OnFrameMessage(frame.image_buffer, frame.rotation, frame.capture_time); | 
|  718       delete data; |  720       delete data; | 
|  719       break; |  721       break; | 
|  720     } |  722     } | 
|  721   } |  723   } | 
|  722 } |  724 } | 
|  723  |  725  | 
|  724 void AVFoundationVideoCapturer::OnFrameMessage(CVImageBufferRef image_buffer, |  726 void AVFoundationVideoCapturer::OnFrameMessage(CVImageBufferRef image_buffer, | 
 |  727                                                webrtc::VideoRotation rotation, | 
|  725                                                int64_t capture_time_ns) { |  728                                                int64_t capture_time_ns) { | 
|  726   RTC_DCHECK(_startThread->IsCurrent()); |  729   RTC_DCHECK(_startThread->IsCurrent()); | 
|  727  |  730  | 
|  728   rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer = |  731   rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer = | 
|  729       new rtc::RefCountedObject<webrtc::CoreVideoFrameBuffer>(image_buffer); |  732       new rtc::RefCountedObject<webrtc::CoreVideoFrameBuffer>(image_buffer); | 
|  730  |  733  | 
|  731   const int captured_width = buffer->width(); |  734   const int captured_width = buffer->width(); | 
|  732   const int captured_height = buffer->height(); |  735   const int captured_height = buffer->height(); | 
|  733  |  736  | 
|  734   int adapted_width; |  737   int adapted_width; | 
|  735   int adapted_height; |  738   int adapted_height; | 
|  736   int crop_width; |  739   int crop_width; | 
|  737   int crop_height; |  740   int crop_height; | 
|  738   int crop_x; |  741   int crop_x; | 
|  739   int crop_y; |  742   int crop_y; | 
|  740   int64_t translated_camera_time_us; |  743   int64_t translated_camera_time_us; | 
|  741  |  744  | 
|  742   if (!AdaptFrame(captured_width, captured_height, |  745   if (!AdaptFrame(captured_width, captured_height, | 
|  743                   capture_time_ns / rtc::kNumNanosecsPerMicrosec, |  746                   capture_time_ns / rtc::kNumNanosecsPerMicrosec, | 
|  744                   rtc::TimeMicros(), &adapted_width, &adapted_height, |  747                   rtc::TimeMicros(), &adapted_width, &adapted_height, | 
|  745                   &crop_width, &crop_height, &crop_x, &crop_y, |  748                   &crop_width, &crop_height, &crop_x, &crop_y, | 
|  746                   &translated_camera_time_us)) { |  749                   &translated_camera_time_us)) { | 
|  747     CVBufferRelease(image_buffer); |  750     CVBufferRelease(image_buffer); | 
|  748     return; |  751     return; | 
|  749   } |  752   } | 
|  750  |  753  | 
|  751   if (adapted_width != captured_width || crop_width != captured_width || |  754   if (adapted_width != captured_width || crop_width != captured_width || | 
|  752       adapted_height != captured_height || crop_height != captured_height) { |  755       adapted_height != captured_height || crop_height != captured_height || | 
 |  756       (apply_rotation() && rotation != webrtc::kVideoRotation_0)) { | 
|  753     // TODO(magjed): Avoid converting to I420. |  757     // TODO(magjed): Avoid converting to I420. | 
|  754     rtc::scoped_refptr<webrtc::I420Buffer> scaled_buffer( |  758     rtc::scoped_refptr<webrtc::I420Buffer> scaled_buffer( | 
|  755         _buffer_pool.CreateBuffer(adapted_width, adapted_height)); |  759         _buffer_pool.CreateBuffer(adapted_width, adapted_height)); | 
|  756     scaled_buffer->CropAndScaleFrom(buffer->NativeToI420Buffer(), crop_x, |  760     scaled_buffer->CropAndScaleFrom(buffer->NativeToI420Buffer(), crop_x, | 
|  757                                     crop_y, crop_width, crop_height); |  761                                     crop_y, crop_width, crop_height); | 
|  758     buffer = scaled_buffer; |  762     if (!apply_rotation() || rotation == webrtc::kVideoRotation_0) { | 
 |  763       buffer = scaled_buffer; | 
 |  764     } else { | 
 |  765       // Applying rotation is only supported for legacy reasons and performance | 
 |  766       // is not critical here. | 
 |  767       rtc::scoped_refptr<webrtc::I420Buffer> rotated_buffer( | 
 |  768           (rotation == webrtc::kVideoRotation_180) | 
 |  769               ? I420Buffer::Create(adapted_width, adapted_height) | 
 |  770               : I420Buffer::Create(adapted_height, adapted_width)); | 
 |  771       libyuv::I420Rotate( | 
 |  772           scaled_buffer->DataY(), scaled_buffer->StrideY(), | 
 |  773           scaled_buffer->DataU(), scaled_buffer->StrideU(), | 
 |  774           scaled_buffer->DataV(), scaled_buffer->StrideV(), | 
 |  775           rotated_buffer->MutableDataY(), rotated_buffer->StrideY(), | 
 |  776           rotated_buffer->MutableDataU(), rotated_buffer->StrideU(), | 
 |  777           rotated_buffer->MutableDataV(), rotated_buffer->StrideV(), | 
 |  778           crop_width, crop_height, | 
 |  779           static_cast<libyuv::RotationMode>(rotation)); | 
 |  780       buffer = rotated_buffer; | 
 |  781     } | 
|  759   } |  782   } | 
|  760  |  783  | 
|  761   OnFrame(cricket::WebRtcVideoFrame(buffer, webrtc::kVideoRotation_0, |  784   OnFrame(cricket::WebRtcVideoFrame(buffer, rotation, | 
|  762                                     translated_camera_time_us, 0), |  785                                     translated_camera_time_us, 0), | 
|  763           captured_width, captured_height); |  786           captured_width, captured_height); | 
|  764  |  787  | 
|  765   CVBufferRelease(image_buffer); |  788   CVBufferRelease(image_buffer); | 
|  766 } |  789 } | 
|  767  |  790  | 
|  768 }  // namespace webrtc |  791 }  // namespace webrtc | 