Index: webrtc/modules/video_capture/video_capture_impl.cc
diff --git a/webrtc/modules/video_capture/video_capture_impl.cc b/webrtc/modules/video_capture/video_capture_impl.cc
index ead2b1568a52e9a1dec8d8d346710a2fa6b71b25..94ca02b27ae2387a59cabc975ca5c901a9392cd9 100644
--- a/webrtc/modules/video_capture/video_capture_impl.cc
+++ b/webrtc/modules/video_capture/video_capture_impl.cc
@@ -33,9 +33,8 @@ rtc::scoped_refptr<VideoCaptureModule> VideoCaptureImpl::Create(
   return implementation;
 }
 
-const char* VideoCaptureImpl::CurrentDeviceName() const
-{
-    return _deviceUniqueId;
+const char* VideoCaptureImpl::CurrentDeviceName() const {
+  return _deviceUniqueId;
 }
 
 // static
@@ -96,13 +95,12 @@ VideoCaptureImpl::VideoCaptureImpl()
   memset(_incomingFrameTimesNanos, 0, sizeof(_incomingFrameTimesNanos));
 }
 
-VideoCaptureImpl::~VideoCaptureImpl()
-{
-    DeRegisterCaptureDataCallback();
-    delete &_apiCs;
+VideoCaptureImpl::~VideoCaptureImpl() {
+  DeRegisterCaptureDataCallback();
+  delete &_apiCs;
 
-    if (_deviceUniqueId)
-        delete[] _deviceUniqueId;
+  if (_deviceUniqueId)
+    delete[] _deviceUniqueId;
 }
 
 void VideoCaptureImpl::RegisterCaptureDataCallback(
@@ -125,74 +123,68 @@ int32_t VideoCaptureImpl::DeliverCapturedFrame(VideoFrame& captureFrame) {
   return 0;
 }
 
-int32_t VideoCaptureImpl::IncomingFrame(
-    uint8_t* videoFrame,
-    size_t videoFrameLength,
-    const VideoCaptureCapability& frameInfo,
-    int64_t captureTime/*=0*/)
-{
-    CriticalSectionScoped cs(&_apiCs);
+int32_t VideoCaptureImpl::IncomingFrame(uint8_t* videoFrame,
+                                        size_t videoFrameLength,
+                                        const VideoCaptureCapability& frameInfo,
+                                        int64_t captureTime /*=0*/) {
+  CriticalSectionScoped cs(&_apiCs);
 
-    const int32_t width = frameInfo.width;
-    const int32_t height = frameInfo.height;
+  const int32_t width = frameInfo.width;
+  const int32_t height = frameInfo.height;
 
-    TRACE_EVENT1("webrtc", "VC::IncomingFrame", "capture_time", captureTime);
+  TRACE_EVENT1("webrtc", "VC::IncomingFrame", "capture_time", captureTime);
 
-    // Not encoded, convert to I420.
-    const VideoType commonVideoType =
-        RawVideoTypeToCommonVideoVideoType(frameInfo.rawType);
+  // Not encoded, convert to I420.
+  const VideoType commonVideoType =
+      RawVideoTypeToCommonVideoVideoType(frameInfo.rawType);
 
-    if (frameInfo.rawType != kVideoMJPEG &&
-        CalcBufferSize(commonVideoType, width,
-                       abs(height)) != videoFrameLength)
-    {
-        LOG(LS_ERROR) << "Wrong incoming frame length.";
-        return -1;
-    }
+  if (frameInfo.rawType != kVideoMJPEG &&
+      CalcBufferSize(commonVideoType, width, abs(height)) != videoFrameLength) {
+    LOG(LS_ERROR) << "Wrong incoming frame length.";
+    return -1;
+  }
 
-    int stride_y = width;
-    int stride_uv = (width + 1) / 2;
-    int target_width = width;
-    int target_height = height;
-
-    // SetApplyRotation doesn't take any lock. Make a local copy here.
-    bool apply_rotation = apply_rotation_;
-
-    if (apply_rotation) {
-      // Rotating resolution when for 90/270 degree rotations.
-      if (_rotateFrame == kVideoRotation_90 ||
-          _rotateFrame == kVideoRotation_270) {
-        target_width = abs(height);
-        target_height = width;
-      }
-    }
+  int stride_y = width;
+  int stride_uv = (width + 1) / 2;
+  int target_width = width;
+  int target_height = height;
+
+  // SetApplyRotation doesn't take any lock. Make a local copy here.
+  bool apply_rotation = apply_rotation_;
 
-    // Setting absolute height (in case it was negative).
-    // In Windows, the image starts bottom left, instead of top left.
-    // Setting a negative source height, inverts the image (within LibYuv).
-
-    // TODO(nisse): Use a pool?
-    rtc::scoped_refptr<I420Buffer> buffer = I420Buffer::Create(
-        target_width, abs(target_height), stride_y, stride_uv, stride_uv);
-    const int conversionResult = ConvertToI420(
-        commonVideoType, videoFrame, 0, 0,  // No cropping
-        width, height, videoFrameLength,
-        apply_rotation ? _rotateFrame : kVideoRotation_0, buffer.get());
-    if (conversionResult < 0)
-    {
-      LOG(LS_ERROR) << "Failed to convert capture frame from type "
-                    << frameInfo.rawType << "to I420.";
-      return -1;
+  if (apply_rotation) {
+    // Rotating resolution when for 90/270 degree rotations.
+    if (_rotateFrame == kVideoRotation_90 ||
+        _rotateFrame == kVideoRotation_270) {
+      target_width = abs(height);
+      target_height = width;
     }
+  }
 
-    VideoFrame captureFrame(
-        buffer, 0, rtc::TimeMillis(),
-        !apply_rotation ? _rotateFrame : kVideoRotation_0);
-    captureFrame.set_ntp_time_ms(captureTime);
+  // Setting absolute height (in case it was negative).
+  // In Windows, the image starts bottom left, instead of top left.
+  // Setting a negative source height, inverts the image (within LibYuv).
+
+  // TODO(nisse): Use a pool?
+  rtc::scoped_refptr<I420Buffer> buffer = I420Buffer::Create(
+      target_width, abs(target_height), stride_y, stride_uv, stride_uv);
+  const int conversionResult = ConvertToI420(
+      commonVideoType, videoFrame, 0, 0,  // No cropping
+      width, height, videoFrameLength,
+      apply_rotation ? _rotateFrame : kVideoRotation_0, buffer.get());
+  if (conversionResult < 0) {
+    LOG(LS_ERROR) << "Failed to convert capture frame from type "
+                  << frameInfo.rawType << "to I420.";
+    return -1;
+  }
+
+  VideoFrame captureFrame(buffer, 0, rtc::TimeMillis(),
+                          !apply_rotation ? _rotateFrame : kVideoRotation_0);
+  captureFrame.set_ntp_time_ms(captureTime);
 
-    DeliverCapturedFrame(captureFrame);
+  DeliverCapturedFrame(captureFrame);
 
-    return 0;
+  return 0;
 }
 
 int32_t VideoCaptureImpl::SetCaptureRotation(VideoRotation rotation) {
@@ -209,52 +201,40 @@ bool VideoCaptureImpl::SetApplyRotation(bool enable) {
   return true;
 }
 
-void VideoCaptureImpl::UpdateFrameCount()
-{
-    if (_incomingFrameTimesNanos[0] / rtc::kNumNanosecsPerMicrosec == 0)
-    {
-        // first no shift
-    }
-    else
-    {
-        // shift
-        for (int i = (kFrameRateCountHistorySize - 2); i >= 0; i--)
-        {
-            _incomingFrameTimesNanos[i + 1] = _incomingFrameTimesNanos[i];
-        }
+void VideoCaptureImpl::UpdateFrameCount() {
+  if (_incomingFrameTimesNanos[0] / rtc::kNumNanosecsPerMicrosec == 0) {
+    // first no shift
+  } else {
+    // shift
+    for (int i = (kFrameRateCountHistorySize - 2); i >= 0; i--) {
+      _incomingFrameTimesNanos[i + 1] = _incomingFrameTimesNanos[i];
     }
-    _incomingFrameTimesNanos[0] = rtc::TimeNanos();
+  }
+  _incomingFrameTimesNanos[0] = rtc::TimeNanos();
 }
 
-uint32_t VideoCaptureImpl::CalculateFrameRate(int64_t now_ns)
-{
-    int32_t num = 0;
-    int32_t nrOfFrames = 0;
-    for (num = 1; num < (kFrameRateCountHistorySize - 1); num++)
-    {
-        if (_incomingFrameTimesNanos[num] <= 0 ||
-            (now_ns - _incomingFrameTimesNanos[num]) /
-                    rtc::kNumNanosecsPerMillisec >
-                kFrameRateHistoryWindowMs) // don't use data older than 2sec
-        {
-            break;
-        }
-        else
-        {
-            nrOfFrames++;
-        }
+uint32_t VideoCaptureImpl::CalculateFrameRate(int64_t now_ns) {
+  int32_t num = 0;
+  int32_t nrOfFrames = 0;
+  for (num = 1; num < (kFrameRateCountHistorySize - 1); num++) {
+    if (_incomingFrameTimesNanos[num] <= 0 ||
+        (now_ns - _incomingFrameTimesNanos[num]) /
+                rtc::kNumNanosecsPerMillisec >
+            kFrameRateHistoryWindowMs) {  // don't use data older than 2sec
+      break;
+    } else {
+      nrOfFrames++;
     }
-    if (num > 1)
-    {
-        int64_t diff = (now_ns - _incomingFrameTimesNanos[num - 1]) /
-                       rtc::kNumNanosecsPerMillisec;
-        if (diff > 0)
-        {
-            return uint32_t((nrOfFrames * 1000.0f / diff) + 0.5f);
-        }
+  }
+  if (num > 1) {
+    int64_t diff = (now_ns - _incomingFrameTimesNanos[num - 1]) /
+                   rtc::kNumNanosecsPerMillisec;
+    if (diff > 0) {
+      return uint32_t((nrOfFrames * 1000.0f / diff) + 0.5f);
    }
+  }
 
-    return nrOfFrames;
+  return nrOfFrames;
 }
 }  // namespace videocapturemodule
 }  // namespace webrtc