| Index: webrtc/media/base/videocapturer_unittest.cc
|
| diff --git a/webrtc/media/base/videocapturer_unittest.cc b/webrtc/media/base/videocapturer_unittest.cc
|
| index 25230b5118a80517ff861d5bb473019f58528332..73f99af8b707e75d3eae6d7f4652ed7cc084d017 100644
|
| --- a/webrtc/media/base/videocapturer_unittest.cc
|
| +++ b/webrtc/media/base/videocapturer_unittest.cc
|
| @@ -15,6 +15,7 @@
|
|
|
| #include "webrtc/base/gunit.h"
|
| #include "webrtc/base/logging.h"
|
| +#include "webrtc/base/random.h"
|
| #include "webrtc/base/thread.h"
|
| #include "webrtc/media/base/fakevideocapturer.h"
|
| #include "webrtc/media/base/fakevideorenderer.h"
|
| @@ -781,3 +782,90 @@ TEST_F(VideoCapturerTest, BlacklistAllFormats) {
|
| ASSERT_EQ(1u, capturer_->GetSupportedFormats()->size());
|
| EXPECT_EQ(vga_format.height, capturer_->GetSupportedFormats()->at(0).height);
|
| }
|
| +
|
| +TEST_F(VideoCapturerTest, AttenuateTimestampJitter) {
|
| + const int kWidth = 800;
|
| + const int kHeight = 400;
|
| +
|
| + const double rel_freq_error = 0.001;
|
| + const int64_t epoch = 10000;
|
| + const int64_t jitter_us = 5000;
|
| + const int64_t interval_us = 33333; // 30 FPS
|
| + const int64_t interval_error_us = interval_us * rel_freq_error;
|
| + const int nframes = 200;
|
| +
|
| + const int64_t system_start_us = rtc::TimeMicros();
|
| + webrtc::Random random(17);
|
| +
|
| + for (int i = 0; i < nframes; i++) {
|
| + // Camera time subject to drift.
|
| + int64_t camera_time_us = epoch + i * (interval_us + interval_error_us);
|
| + int64_t system_time_us = system_start_us + i * interval_us;
|
| + // And system time readings are subject to jitter.
|
| + int64_t system_measured_us = system_time_us + random.Rand(jitter_us);
|
| +
|
| + int out_width;
|
| + int out_height;
|
| + int crop_width;
|
| + int crop_height;
|
| + int crop_x;
|
| + int crop_y;
|
| + int64_t translated_time_us;
|
| +
|
| + EXPECT_TRUE(capturer_->AdaptFrame(kWidth, kHeight,
|
| + camera_time_us, system_measured_us,
|
| + &out_width, &out_height,
|
| + &crop_width, &crop_height,
|
| + &crop_x, &crop_y, &translated_time_us));
|
| +
|
| + // The relative frequency error contributes to the expected error a
|
| + // term equal to the frequency error times the difference between the
|
| + // current time and the average of earlier sample times. This
|
| + // expression is accurate as long as we do plain averaging (i.e., for
|
| + // the first 100 frames), after which the difference converges
|
| + // exponentially to its limit of interval_us * (window_size - 1).
|
| + int64_t expected_error_us = jitter_us / 2 +
|
| + rel_freq_error * i * interval_us / 2;
|
| +
|
| + if (i == 0) {
|
| + EXPECT_EQ(translated_time_us, system_measured_us);
|
| + } else {
|
| + EXPECT_NEAR(translated_time_us, system_time_us + expected_error_us,
|
| + 2.0 * jitter_us / sqrt(std::max(i, 100)));
|
| + }
|
| + }
|
| +}
|
| +
|
| +TEST_F(VideoCapturerTest, TimestampTranslationBypass) {
|
| + const int kWidth = 800;
|
| + const int kHeight = 400;
|
| +
|
| + const int64_t jitter_us = 10000;
|
| + const int64_t interval_us = 33333; // 30 FPS
|
| + const int nframes = 50;
|
| +
|
| + const int64_t system_start_us = rtc::TimeMicros();
|
| + webrtc::Random random(17);
|
| +
|
| + for (int i = 0; i < nframes; i++) {
|
| + // Camera time and system time coincide, but the system time is
|
| + // read up to jitter_us (10 ms) later.
|
| + int64_t camera_time_us = system_start_us + i * interval_us;
|
| + int64_t system_measured_us = camera_time_us + random.Rand(jitter_us);
|
| +
|
| + int out_width;
|
| + int out_height;
|
| + int crop_width;
|
| + int crop_height;
|
| + int crop_x;
|
| + int crop_y;
|
| + int64_t translated_time_us;
|
| +
|
| + EXPECT_TRUE(capturer_->AdaptFrame(kWidth, kHeight,
|
| + camera_time_us, system_measured_us,
|
| + &out_width, &out_height,
|
| + &crop_width, &crop_height,
|
| + &crop_x, &crop_y, &translated_time_us));
|
| + EXPECT_EQ(camera_time_us, translated_time_us);
|
| + }
|
| +}
|
|
|
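For reference, the filtering behavior the first test expects can be summarized by a small sketch. This is an illustration only, not the actual cricket::VideoCapturer / timestamp-aligner code; the class and member names below (OffsetFilterSketch, filtered_offset_us_) are made up for the example, and the real implementation also contains pass-through/clipping logic (exercised by TimestampTranslationBypass) that is not modeled here. The sketch assumes a plain running average of the camera-to-system clock offset for the first 100 frames, then an exponential moving average with the same window, which is what the comment in AttenuateTimestampJitter describes.

#include <algorithm>
#include <cstdint>

// Sketch of an offset filter: track the offset between the system clock
// and the camera clock, and use the filtered offset to translate camera
// timestamps into the system clock domain.
class OffsetFilterSketch {
 public:
  int64_t Translate(int64_t camera_time_us, int64_t system_time_us) {
    const int64_t offset_us = system_time_us - camera_time_us;
    if (num_samples_ == 0) {
      // First frame: no history, so the translated time equals the
      // measured system time, as the i == 0 branch of the test expects.
      filtered_offset_us_ = offset_us;
    } else {
      // Running mean for the first 100 samples; once the divisor is
      // capped at 100 this becomes an exponential moving average.
      const int64_t window = std::min<int64_t>(num_samples_ + 1, 100);
      filtered_offset_us_ += (offset_us - filtered_offset_us_) / window;
    }
    ++num_samples_;
    return camera_time_us + filtered_offset_us_;
  }

 private:
  int64_t num_samples_ = 0;
  int64_t filtered_offset_us_ = 0;
};

Under this model the translated time lags the true system time by jitter_us / 2 (the mean of the uniform jitter) plus a drift term equal to rel_freq_error times the gap between the current time and the average of the sampled times, which is where expected_error_us comes from; with the test's constants that drift term grows to roughly 0.001 * 199 * 33333 / 2, i.e. about 3.3 ms, by the last frame.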