Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 /* | 1 /* |
| 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. | 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| 11 #include "avfoundationvideocapturer.h" | 11 #include "avfoundationvideocapturer.h" |
| 12 | 12 |
| 13 #import <AVFoundation/AVFoundation.h> | 13 #import <AVFoundation/AVFoundation.h> |
| 14 #import <Foundation/Foundation.h> | 14 #import <Foundation/Foundation.h> |
| 15 #if TARGET_OS_IPHONE | 15 #if TARGET_OS_IPHONE |
| 16 #import <UIKit/UIKit.h> | 16 #import <UIKit/UIKit.h> |
| 17 #endif | 17 #endif |
| 18 | 18 |
| 19 #import "RTCDispatcher+Private.h" | 19 #import "RTCDispatcher+Private.h" |
| 20 #import "WebRTC/RTCLogging.h" | 20 #import "WebRTC/RTCLogging.h" |
| 21 #if TARGET_OS_IPHONE | 21 #if TARGET_OS_IPHONE |
| 22 #import "WebRTC/UIDevice+RTCDevice.h" | 22 #import "WebRTC/UIDevice+RTCDevice.h" |
| 23 #endif | 23 #endif |
| 24 | 24 |
| | 25 #include "libyuv/rotate.h" |
| | 26 |
| 25 #include "webrtc/base/bind.h" | 27 #include "webrtc/base/bind.h" |
| 26 #include "webrtc/base/checks.h" | 28 #include "webrtc/base/checks.h" |
| 27 #include "webrtc/base/thread.h" | 29 #include "webrtc/base/thread.h" |
| 28 #include "webrtc/common_video/include/corevideo_frame_buffer.h" | 30 #include "webrtc/common_video/include/corevideo_frame_buffer.h" |
| | 31 #include "webrtc/common_video/rotation.h" |
| 29 | 32 |
| 30 struct AVCaptureSessionPresetResolution { | 33 struct AVCaptureSessionPresetResolution { |
| 31 NSString *sessionPreset; | 34 NSString *sessionPreset; |
| 32 int width; | 35 int width; |
| 33 int height; | 36 int height; |
| 34 }; | 37 }; |
| 35 | 38 |
| 36 #if TARGET_OS_IPHONE | 39 #if TARGET_OS_IPHONE |
| 37 static const AVCaptureSessionPresetResolution kAvailablePresets[] = { | 40 static const AVCaptureSessionPresetResolution kAvailablePresets[] = { |
| 38 { AVCaptureSessionPreset352x288, 352, 288}, | 41 { AVCaptureSessionPreset352x288, 352, 288}, |
| (...skipping 52 matching lines...) | |
| 91 | 94 |
| 92 @end | 95 @end |
| 93 | 96 |
| 94 @implementation RTCAVFoundationVideoCapturerInternal { | 97 @implementation RTCAVFoundationVideoCapturerInternal { |
| 95 // Keep pointers to inputs for convenience. | 98 // Keep pointers to inputs for convenience. |
| 96 AVCaptureDeviceInput *_frontCameraInput; | 99 AVCaptureDeviceInput *_frontCameraInput; |
| 97 AVCaptureDeviceInput *_backCameraInput; | 100 AVCaptureDeviceInput *_backCameraInput; |
| 98 AVCaptureVideoDataOutput *_videoDataOutput; | 101 AVCaptureVideoDataOutput *_videoDataOutput; |
| 99 // The cricket::VideoCapturer that owns this class. Should never be NULL. | 102 // The cricket::VideoCapturer that owns this class. Should never be NULL. |
| 100 webrtc::AVFoundationVideoCapturer *_capturer; | 103 webrtc::AVFoundationVideoCapturer *_capturer; |
| 101 BOOL _orientationHasChanged; | 104 webrtc::VideoRotation _rotation; |
| 102 BOOL _hasRetriedOnFatalError; | 105 BOOL _hasRetriedOnFatalError; |
| 103 BOOL _isRunning; | 106 BOOL _isRunning; |
| 104 BOOL _hasStarted; | 107 BOOL _hasStarted; |
| 105 rtc::CriticalSection _crit; | 108 rtc::CriticalSection _crit; |
| 106 } | 109 } |
| 107 | 110 |
| 108 @synthesize captureSession = _captureSession; | 111 @synthesize captureSession = _captureSession; |
| 109 @synthesize frameQueue = _frameQueue; | 112 @synthesize frameQueue = _frameQueue; |
| 110 @synthesize useBackCamera = _useBackCamera; | 113 @synthesize useBackCamera = _useBackCamera; |
| 111 @synthesize hasStarted = _hasStarted; | 114 @synthesize hasStarted = _hasStarted; |
| (...skipping 109 matching lines...) | |
| 221 } | 224 } |
| 222 | 225 |
| 223 // Called from WebRTC thread. | 226 // Called from WebRTC thread. |
| 224 - (void)start { | 227 - (void)start { |
| 225 if (self.hasStarted) { | 228 if (self.hasStarted) { |
| 226 return; | 229 return; |
| 227 } | 230 } |
| 228 self.hasStarted = YES; | 231 self.hasStarted = YES; |
| 229 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | 232 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession |
| 230 block:^{ | 233 block:^{ |
| 231 _orientationHasChanged = NO; | 234 _rotation = webrtc::kVideoRotation_0; |
| | tkchin_webrtc 2016/08/23 17:33:19: Can you add comment that this sets 0 rotation is o… |
| | tkchin_webrtc 2016/08/23 17:37:25: To clarify - what happens to an AVCapturePreviewLa… |
| | magjed_webrtc 2016/08/24 11:08:36: Nothing happens when the device is set faceup/face… |
| 232 [self updateOrientation]; | 235 [self updateOrientation]; |
| 233 #if TARGET_OS_IPHONE | 236 #if TARGET_OS_IPHONE |
| 234 [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications]; | 237 [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications]; |
| 235 #endif | 238 #endif |
| 236 AVCaptureSession *captureSession = self.captureSession; | 239 AVCaptureSession *captureSession = self.captureSession; |
| 237 [captureSession startRunning]; | 240 [captureSession startRunning]; |
| 238 }]; | 241 }]; |
| 239 } | 242 } |
| 240 | 243 |
| 241 // Called from same thread as start. | 244 // Called from same thread as start. |
| (...skipping 14 matching lines...) | |
| 256 #endif | 259 #endif |
| 257 }]; | 260 }]; |
| 258 } | 261 } |
| 259 | 262 |
| 260 #pragma mark iOS notifications | 263 #pragma mark iOS notifications |
| 261 | 264 |
| 262 #if TARGET_OS_IPHONE | 265 #if TARGET_OS_IPHONE |
| 263 - (void)deviceOrientationDidChange:(NSNotification *)notification { | 266 - (void)deviceOrientationDidChange:(NSNotification *)notification { |
| 264 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | 267 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession |
| 265 block:^{ | 268 block:^{ |
| 266 _orientationHasChanged = YES; | |
| 267 [self updateOrientation]; | 269 [self updateOrientation]; |
| 268 }]; | 270 }]; |
| 269 } | 271 } |
| 270 #endif | 272 #endif |
| 271 | 273 |
| 272 #pragma mark AVCaptureVideoDataOutputSampleBufferDelegate | 274 #pragma mark AVCaptureVideoDataOutputSampleBufferDelegate |
| 273 | 275 |
| 274 - (void)captureOutput:(AVCaptureOutput *)captureOutput | 276 - (void)captureOutput:(AVCaptureOutput *)captureOutput |
| 275 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer | 277 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer |
| 276 fromConnection:(AVCaptureConnection *)connection { | 278 fromConnection:(AVCaptureConnection *)connection { |
| 277 NSParameterAssert(captureOutput == _videoDataOutput); | 279 NSParameterAssert(captureOutput == _videoDataOutput); |
| 278 if (!self.hasStarted) { | 280 if (!self.hasStarted) { |
| 279 return; | 281 return; |
| 280 } | 282 } |
| 281 _capturer->CaptureSampleBuffer(sampleBuffer); | 283 _capturer->CaptureSampleBuffer(sampleBuffer, _rotation); |
| 282 } | 284 } |
| 283 | 285 |
| 284 - (void)captureOutput:(AVCaptureOutput *)captureOutput | 286 - (void)captureOutput:(AVCaptureOutput *)captureOutput |
| 285 didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer | 287 didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer |
| 286 fromConnection:(AVCaptureConnection *)connection { | 288 fromConnection:(AVCaptureConnection *)connection { |
| 287 RTCLogError(@"Dropped sample buffer."); | 289 RTCLogError(@"Dropped sample buffer."); |
| 288 } | 290 } |
| 289 | 291 |
| 290 #pragma mark - AVCaptureSession notifications | 292 #pragma mark - AVCaptureSession notifications |
| 291 | 293 |
| (...skipping 211 matching lines...) | |
| 503 if (![device lockForConfiguration:&error]) { | 505 if (![device lockForConfiguration:&error]) { |
| 504 RTCLogError(@"Failed to lock device for configuration. Error: %@", error.localizedDescription); | 506 RTCLogError(@"Failed to lock device for configuration. Error: %@", error.localizedDescription); |
| 505 return; | 507 return; |
| 506 } | 508 } |
| 507 device.activeVideoMinFrameDuration = minFrameDuration; | 509 device.activeVideoMinFrameDuration = minFrameDuration; |
| 508 [device unlockForConfiguration]; | 510 [device unlockForConfiguration]; |
| 509 } | 511 } |
| 510 | 512 |
| 511 // Called from capture session queue. | 513 // Called from capture session queue. |
| 512 - (void)updateOrientation { | 514 - (void)updateOrientation { |
| 513 AVCaptureConnection *connection = | |
| 514 [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo]; | |
| 515 if (!connection.supportsVideoOrientation) { | |
| 516 // TODO(tkchin): set rotation bit on frames. | |
| 517 return; | |
| 518 } | |
| 519 #if TARGET_OS_IPHONE | 515 #if TARGET_OS_IPHONE |
| 520 AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait; | |
| 521 switch ([UIDevice currentDevice].orientation) { | 516 switch ([UIDevice currentDevice].orientation) { |
| 522 case UIDeviceOrientationPortrait: | 517 case UIDeviceOrientationPortrait: |
| 523 orientation = AVCaptureVideoOrientationPortrait; | 518 _rotation = webrtc::kVideoRotation_90; |
| 524 break; | 519 break; |
| 525 case UIDeviceOrientationPortraitUpsideDown: | 520 case UIDeviceOrientationPortraitUpsideDown: |
| 526 orientation = AVCaptureVideoOrientationPortraitUpsideDown; | 521 _rotation = webrtc::kVideoRotation_270; |
| 527 break; | 522 break; |
| 528 case UIDeviceOrientationLandscapeLeft: | 523 case UIDeviceOrientationLandscapeLeft: |
| 529 orientation = AVCaptureVideoOrientationLandscapeRight; | 524 _rotation = webrtc::kVideoRotation_180; |
| 530 break; | 525 break; |
| 531 case UIDeviceOrientationLandscapeRight: | 526 case UIDeviceOrientationLandscapeRight: |
| 532 orientation = AVCaptureVideoOrientationLandscapeLeft; | 527 _rotation = webrtc::kVideoRotation_0; |
| 533 break; | 528 break; |
| 534 case UIDeviceOrientationFaceUp: | 529 case UIDeviceOrientationFaceUp: |
| 535 case UIDeviceOrientationFaceDown: | 530 case UIDeviceOrientationFaceDown: |
| 536 case UIDeviceOrientationUnknown: | 531 case UIDeviceOrientationUnknown: |
| 537 if (!_orientationHasChanged) { | 532 // Ignore. |
| 538 connection.videoOrientation = orientation; | 533 break; |
| 539 } | |
| 540 return; | |
| 541 } | 534 } |
| 542 connection.videoOrientation = orientation; | |
| 543 #endif | 535 #endif |
| 544 } | 536 } |
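
For reference, the new `updateOrientation` switch amounts to the mapping below. This is a minimal sketch for illustration only, not code from this CL; the helper name is made up, and it follows the same convention as the patch: FaceUp, FaceDown and Unknown leave the previously computed rotation untouched.

```objc
// Illustration only (hypothetical helper, not part of this CL): the device
// orientation to frame rotation mapping performed by updateOrientation.
#import <UIKit/UIKit.h>
#include "webrtc/common_video/rotation.h"

static webrtc::VideoRotation RotationForDeviceOrientation(
    UIDeviceOrientation orientation, webrtc::VideoRotation current) {
  switch (orientation) {
    case UIDeviceOrientationPortrait:
      return webrtc::kVideoRotation_90;
    case UIDeviceOrientationPortraitUpsideDown:
      return webrtc::kVideoRotation_270;
    case UIDeviceOrientationLandscapeLeft:
      return webrtc::kVideoRotation_180;
    case UIDeviceOrientationLandscapeRight:
      return webrtc::kVideoRotation_0;
    default:
      // FaceUp, FaceDown and Unknown: keep the last known rotation.
      return current;
  }
}
```

With this change the capture connection's `videoOrientation` is left alone; the rotation travels as per-frame metadata instead, which is why the old `supportsVideoOrientation` check could be removed.
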
| 545 | 537 |
| 546 // Update the current session input to match what's stored in _useBackCamera. | 538 // Update the current session input to match what's stored in _useBackCamera. |
| 547 - (void)updateSessionInputForUseBackCamera:(BOOL)useBackCamera { | 539 - (void)updateSessionInputForUseBackCamera:(BOOL)useBackCamera { |
| 548 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession | 540 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession |
| 549 block:^{ | 541 block:^{ |
| 550 [_captureSession beginConfiguration]; | 542 [_captureSession beginConfiguration]; |
| 551 AVCaptureDeviceInput *oldInput = _backCameraInput; | 543 AVCaptureDeviceInput *oldInput = _backCameraInput; |
| 552 AVCaptureDeviceInput *newInput = _frontCameraInput; | 544 AVCaptureDeviceInput *newInput = _frontCameraInput; |
| (...skipping 18 matching lines...) Expand all Loading... | |
| 571 | 563 |
| 572 @end | 564 @end |
| 573 | 565 |
| 574 namespace webrtc { | 566 namespace webrtc { |
| 575 | 567 |
| 576 enum AVFoundationVideoCapturerMessageType : uint32_t { | 568 enum AVFoundationVideoCapturerMessageType : uint32_t { |
| 577 kMessageTypeFrame, | 569 kMessageTypeFrame, |
| 578 }; | 570 }; |
| 579 | 571 |
| 580 struct AVFoundationFrame { | 572 struct AVFoundationFrame { |
| 581 AVFoundationFrame(CVImageBufferRef buffer, int64_t time) | 573 AVFoundationFrame(CVImageBufferRef buffer, |
| 582 : image_buffer(buffer), capture_time(time) {} | 574 webrtc::VideoRotation rotation, |
| | 575 int64_t time) |
| | 576 : image_buffer(buffer), rotation(rotation), capture_time(time) {} |
| 583 CVImageBufferRef image_buffer; | 577 CVImageBufferRef image_buffer; |
| | 578 webrtc::VideoRotation rotation; |
| 584 int64_t capture_time; | 579 int64_t capture_time; |
| 585 }; | 580 }; |
| 586 | 581 |
| 587 AVFoundationVideoCapturer::AVFoundationVideoCapturer() | 582 AVFoundationVideoCapturer::AVFoundationVideoCapturer() |
| 588 : _capturer(nil), _startThread(nullptr) { | 583 : _capturer(nil), _startThread(nullptr) { |
| 589 // Set our supported formats. This matches kAvailablePresets. | 584 // Set our supported formats. This matches kAvailablePresets. |
| 590 _capturer = | 585 _capturer = |
| 591 [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this]; | 586 [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this]; |
| 592 | 587 |
| 593 std::vector<cricket::VideoFormat> supported_formats; | 588 std::vector<cricket::VideoFormat> supported_formats; |
| (...skipping 87 matching lines...) | |
| 681 | 676 |
| 682 void AVFoundationVideoCapturer::SetUseBackCamera(bool useBackCamera) { | 677 void AVFoundationVideoCapturer::SetUseBackCamera(bool useBackCamera) { |
| 683 _capturer.useBackCamera = useBackCamera; | 678 _capturer.useBackCamera = useBackCamera; |
| 684 } | 679 } |
| 685 | 680 |
| 686 bool AVFoundationVideoCapturer::GetUseBackCamera() const { | 681 bool AVFoundationVideoCapturer::GetUseBackCamera() const { |
| 687 return _capturer.useBackCamera; | 682 return _capturer.useBackCamera; |
| 688 } | 683 } |
| 689 | 684 |
| 690 void AVFoundationVideoCapturer::CaptureSampleBuffer( | 685 void AVFoundationVideoCapturer::CaptureSampleBuffer( |
| 691 CMSampleBufferRef sampleBuffer) { | 686 CMSampleBufferRef sampleBuffer, webrtc::VideoRotation rotation) { |
| 692 if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || | 687 if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || |
| 693 !CMSampleBufferIsValid(sampleBuffer) || | 688 !CMSampleBufferIsValid(sampleBuffer) || |
| 694 !CMSampleBufferDataIsReady(sampleBuffer)) { | 689 !CMSampleBufferDataIsReady(sampleBuffer)) { |
| 695 return; | 690 return; |
| 696 } | 691 } |
| 697 | 692 |
| 698 CVImageBufferRef image_buffer = CMSampleBufferGetImageBuffer(sampleBuffer); | 693 CVImageBufferRef image_buffer = CMSampleBufferGetImageBuffer(sampleBuffer); |
| 699 if (image_buffer == NULL) { | 694 if (image_buffer == NULL) { |
| 700 return; | 695 return; |
| 701 } | 696 } |
| 702 | 697 |
| 703 // Retain the buffer and post it to the webrtc thread. It will be released | 698 // Retain the buffer and post it to the webrtc thread. It will be released |
| 704 // after it has successfully been signaled. | 699 // after it has successfully been signaled. |
| 705 CVBufferRetain(image_buffer); | 700 CVBufferRetain(image_buffer); |
| 706 AVFoundationFrame frame(image_buffer, rtc::TimeNanos()); | 701 AVFoundationFrame frame(image_buffer, rotation, rtc::TimeNanos()); |
| 707 _startThread->Post(RTC_FROM_HERE, this, kMessageTypeFrame, | 702 _startThread->Post(RTC_FROM_HERE, this, kMessageTypeFrame, |
| 708 new rtc::TypedMessageData<AVFoundationFrame>(frame)); | 703 new rtc::TypedMessageData<AVFoundationFrame>(frame)); |
| 709 } | 704 } |
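
The pattern in `CaptureSampleBuffer` is a retain/post/release handoff: the pixel buffer is retained on the capture queue, travels by value inside an `AVFoundationFrame` (now carrying the rotation as well), and is released on the WebRTC thread once `OnFrameMessage` has delivered it. A rough sketch, with the free function and its parameters invented purely for illustration:

```objc
// Sketch only: the cross-thread handoff used by CaptureSampleBuffer.
// PostFrame and its parameters are hypothetical; the real code lives in
// AVFoundationVideoCapturer and posts to the thread that called Start().
void PostFrame(rtc::Thread* webrtc_thread, rtc::MessageHandler* handler,
               CVImageBufferRef image_buffer, webrtc::VideoRotation rotation) {
  // Keep the buffer alive across the thread hop; the matching
  // CVBufferRelease() happens in OnFrameMessage() after delivery.
  CVBufferRetain(image_buffer);
  webrtc::AVFoundationFrame frame(image_buffer, rotation, rtc::TimeNanos());
  webrtc_thread->Post(RTC_FROM_HERE, handler, webrtc::kMessageTypeFrame,
                      new rtc::TypedMessageData<webrtc::AVFoundationFrame>(frame));
}
```
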
| 710 | 705 |
| 711 void AVFoundationVideoCapturer::OnMessage(rtc::Message *msg) { | 706 void AVFoundationVideoCapturer::OnMessage(rtc::Message *msg) { |
| 712 switch (msg->message_id) { | 707 switch (msg->message_id) { |
| 713 case kMessageTypeFrame: { | 708 case kMessageTypeFrame: { |
| 714 rtc::TypedMessageData<AVFoundationFrame>* data = | 709 rtc::TypedMessageData<AVFoundationFrame>* data = |
| 715 static_cast<rtc::TypedMessageData<AVFoundationFrame>*>(msg->pdata); | 710 static_cast<rtc::TypedMessageData<AVFoundationFrame>*>(msg->pdata); |
| 716 const AVFoundationFrame& frame = data->data(); | 711 const AVFoundationFrame& frame = data->data(); |
| 717 OnFrameMessage(frame.image_buffer, frame.capture_time); | 712 OnFrameMessage(frame.image_buffer, frame.rotation, frame.capture_time); |
| 718 delete data; | 713 delete data; |
| 719 break; | 714 break; |
| 720 } | 715 } |
| 721 } | 716 } |
| 722 } | 717 } |
| 723 | 718 |
| 724 void AVFoundationVideoCapturer::OnFrameMessage(CVImageBufferRef image_buffer, | 719 void AVFoundationVideoCapturer::OnFrameMessage(CVImageBufferRef image_buffer, |
| | 720 webrtc::VideoRotation rotation, |
| 725 int64_t capture_time_ns) { | 721 int64_t capture_time_ns) { |
| 726 RTC_DCHECK(_startThread->IsCurrent()); | 722 RTC_DCHECK(_startThread->IsCurrent()); |
| 727 | 723 |
| 728 rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer = | 724 rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer = |
| 729 new rtc::RefCountedObject<webrtc::CoreVideoFrameBuffer>(image_buffer); | 725 new rtc::RefCountedObject<webrtc::CoreVideoFrameBuffer>(image_buffer); |
| 730 | 726 |
| 731 const int captured_width = buffer->width(); | 727 const int captured_width = buffer->width(); |
| 732 const int captured_height = buffer->height(); | 728 const int captured_height = buffer->height(); |
| 733 | 729 |
| 734 int adapted_width; | 730 int adapted_width; |
| 735 int adapted_height; | 731 int adapted_height; |
| 736 int crop_width; | 732 int crop_width; |
| 737 int crop_height; | 733 int crop_height; |
| 738 int crop_x; | 734 int crop_x; |
| 739 int crop_y; | 735 int crop_y; |
| 740 int64_t translated_camera_time_us; | 736 int64_t translated_camera_time_us; |
| 741 | 737 |
| 742 if (!AdaptFrame(captured_width, captured_height, | 738 if (!AdaptFrame(captured_width, captured_height, |
| 743 capture_time_ns / rtc::kNumNanosecsPerMicrosec, | 739 capture_time_ns / rtc::kNumNanosecsPerMicrosec, |
| 744 rtc::TimeMicros(), &adapted_width, &adapted_height, | 740 rtc::TimeMicros(), &adapted_width, &adapted_height, |
| 745 &crop_width, &crop_height, &crop_x, &crop_y, | 741 &crop_width, &crop_height, &crop_x, &crop_y, |
| 746 &translated_camera_time_us)) { | 742 &translated_camera_time_us)) { |
| 747 CVBufferRelease(image_buffer); | 743 CVBufferRelease(image_buffer); |
| 748 return; | 744 return; |
| 749 } | 745 } |
| 750 | 746 |
| 751 if (adapted_width != captured_width || crop_width != captured_width || | 747 if (adapted_width != captured_width || crop_width != captured_width || |
| 752 adapted_height != captured_height || crop_height != captured_height) { | 748 adapted_height != captured_height || crop_height != captured_height || |
| | 749 (apply_rotation() && rotation != webrtc::kVideoRotation_0)) { |
| 753 // TODO(magjed): Avoid converting to I420. | 750 // TODO(magjed): Avoid converting to I420. |
| 754 rtc::scoped_refptr<webrtc::I420Buffer> scaled_buffer( | 751 rtc::scoped_refptr<webrtc::I420Buffer> scaled_buffer( |
| 755 _buffer_pool.CreateBuffer(adapted_width, adapted_height)); | 752 _buffer_pool.CreateBuffer(adapted_width, adapted_height)); |
| 756 scaled_buffer->CropAndScaleFrom(buffer->NativeToI420Buffer(), crop_x, | 753 scaled_buffer->CropAndScaleFrom(buffer->NativeToI420Buffer(), crop_x, |
| 757 crop_y, crop_width, crop_height); | 754 crop_y, crop_width, crop_height); |
| 758 buffer = scaled_buffer; | 755 if (!apply_rotation() || rotation == webrtc::kVideoRotation_0) { |
| | 756 buffer = scaled_buffer; |
| | 757 } else { |
| | 758 // Applying rotation is only supported for legacy reasons and performance |
| | 759 // is not critical here. |
| | 760 buffer = (rotation == webrtc::kVideoRotation_180) |
| | 761 ? I420Buffer::Create(adapted_width, adapted_height) |
| | 762 : I420Buffer::Create(adapted_height, adapted_width); |
| | 763 libyuv::I420Rotate(scaled_buffer->DataY(), scaled_buffer->StrideY(), |
| | 764 scaled_buffer->DataU(), scaled_buffer->StrideU(), |
| | 765 scaled_buffer->DataV(), scaled_buffer->StrideV(), |
| | 766 buffer->MutableDataY(), buffer->StrideY(), |
| | 767 buffer->MutableDataU(), buffer->StrideU(), |
| | 768 buffer->MutableDataV(), buffer->StrideV(), |
| | 769 crop_width, crop_height, |
| | 770 static_cast<libyuv::RotationMode>(rotation)); |
| | 771 } |
| 759 } | 772 } |
| 760 | 773 |
| 761 OnFrame(cricket::WebRtcVideoFrame(buffer, webrtc::kVideoRotation_0, | 774 OnFrame(cricket::WebRtcVideoFrame(buffer, rotation, |
| 762 translated_camera_time_us, 0), | 775 translated_camera_time_us, 0), |
| 763 captured_width, captured_height); | 776 captured_width, captured_height); |
| 764 | 777 |
| 765 CVBufferRelease(image_buffer); | 778 CVBufferRelease(image_buffer); |
| 766 } | 779 } |
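
The new rotation path only runs when `apply_rotation()` is set; otherwise the rotation is simply attached to the frame for the receiver to handle. When it does run, the destination buffer swaps width and height for 90/270 degree rotations before `libyuv::I420Rotate` copies the planes. A standalone sketch of that step, assuming a hypothetical `RotateI420` helper and the usual I420Buffer accessors:

```objc
// Sketch, not from this CL: rotate an I420 buffer with libyuv the way
// OnFrameMessage now does. The header path for I420Buffer is assumed.
#include "libyuv/rotate.h"
#include "webrtc/common_video/include/video_frame_buffer.h"  // I420Buffer (path assumed)
#include "webrtc/common_video/rotation.h"

rtc::scoped_refptr<webrtc::I420Buffer> RotateI420(
    const rtc::scoped_refptr<webrtc::I420Buffer>& src,
    webrtc::VideoRotation rotation) {
  // 90/270 degree rotations swap the output dimensions.
  const bool swap = rotation == webrtc::kVideoRotation_90 ||
                    rotation == webrtc::kVideoRotation_270;
  rtc::scoped_refptr<webrtc::I420Buffer> dst =
      swap ? webrtc::I420Buffer::Create(src->height(), src->width())
           : webrtc::I420Buffer::Create(src->width(), src->height());
  // webrtc::VideoRotation values (0/90/180/270) match libyuv::RotationMode,
  // which is why the static_cast in the CL is safe.
  libyuv::I420Rotate(src->DataY(), src->StrideY(),
                     src->DataU(), src->StrideU(),
                     src->DataV(), src->StrideV(),
                     dst->MutableDataY(), dst->StrideY(),
                     dst->MutableDataU(), dst->StrideU(),
                     dst->MutableDataV(), dst->StrideV(),
                     src->width(), src->height(),
                     static_cast<libyuv::RotationMode>(rotation));
  return dst;
}
```
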
| 767 | 780 |
| 768 } // namespace webrtc | 781 } // namespace webrtc |