Chromium Code Reviews

Unified Diff: webrtc/video/vie_encoder_unittest.cc

Issue 2716643002: Add framerate to VideoSinkWants and ability to signal on overuse (Closed)
Patch Set: comments Created 3 years, 10 months ago
Index: webrtc/video/vie_encoder_unittest.cc
diff --git a/webrtc/video/vie_encoder_unittest.cc b/webrtc/video/vie_encoder_unittest.cc
index 320f664840456f8ea1a7b2415f1d19f035a68f13..d8dc841f009d121ea3a5e0118c8b92ea218df449 100644
--- a/webrtc/video/vie_encoder_unittest.cc
+++ b/webrtc/video/vie_encoder_unittest.cc
@@ -12,6 +12,7 @@
#include <utility>
#include "webrtc/api/video/i420_buffer.h"
+#include "webrtc/base/fakeclock.h"
#include "webrtc/base/logging.h"
#include "webrtc/media/base/videoadapter.h"
#include "webrtc/modules/video_coding/utility/default_video_bitrate_allocator.h"
@@ -33,7 +34,9 @@ const int kMinPixelsPerFrame = 320 * 180;
#else
const int kMinPixelsPerFrame = 120 * 90;
#endif
-}
+const int kMinFramerateFps = 2;
+const int64_t kFrameTimeoutMs = 100;
+} // namespace
namespace webrtc {
@@ -133,16 +136,17 @@ class AdaptingFrameForwarder : public test::FrameForwarder {
int cropped_height = 0;
int out_width = 0;
int out_height = 0;
- if (adaption_enabled() &&
- adapter_.AdaptFrameResolution(video_frame.width(), video_frame.height(),
- video_frame.timestamp_us() * 1000,
- &cropped_width, &cropped_height,
- &out_width, &out_height)) {
- VideoFrame adapted_frame(
- new rtc::RefCountedObject<TestBuffer>(nullptr, out_width, out_height),
- 99, 99, kVideoRotation_0);
- adapted_frame.set_ntp_time_ms(video_frame.ntp_time_ms());
- test::FrameForwarder::IncomingCapturedFrame(adapted_frame);
+ if (adaption_enabled()) {
+ if (adapter_.AdaptFrameResolution(
+ video_frame.width(), video_frame.height(),
+ video_frame.timestamp_us() * 1000, &cropped_width,
+ &cropped_height, &out_width, &out_height)) {
+ VideoFrame adapted_frame(new rtc::RefCountedObject<TestBuffer>(
+ nullptr, out_width, out_height),
+ 99, 99, kVideoRotation_0);
+ adapted_frame.set_ntp_time_ms(video_frame.ntp_time_ms());
+ test::FrameForwarder::IncomingCapturedFrame(adapted_frame);
+ }
} else {
test::FrameForwarder::IncomingCapturedFrame(video_frame);
}
@@ -151,8 +155,9 @@ class AdaptingFrameForwarder : public test::FrameForwarder {
void AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
const rtc::VideoSinkWants& wants) override {
rtc::CritScope cs(&crit_);
- adapter_.OnResolutionRequest(wants.target_pixel_count,
- wants.max_pixel_count);
+ adapter_.OnResolutionFramerateRequest(wants.target_pixel_count,
+ wants.max_pixel_count,
+ wants.max_framerate_fps_);
test::FrameForwarder::AddOrUpdateSink(sink, wants);
}
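
A minimal sketch (not part of this patch) of how a sink could use the new framerate field that this hunk forwards into the adapter; the helper name and the 15 fps value are illustrative, and the field spelling max_framerate_fps_ is taken from this patch set:

  // Sketch only: a sink asking its source for at most 15 fps. The source is
  // expected to forward the cap to its VideoAdapter, much like the test
  // forwarder above does via OnResolutionFramerateRequest().
  void RequestFramerateCap(rtc::VideoSourceInterface<webrtc::VideoFrame>* source,
                           rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) {
    rtc::VideoSinkWants wants;
    wants.max_framerate_fps_ = 15;  // Field added by this CL.
    source->AddOrUpdateSink(sink, wants);
  }
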
@@ -231,6 +236,7 @@ class ViEEncoderTest : public ::testing::Test {
new rtc::RefCountedObject<TestBuffer>(nullptr, width, height), 99, 99,
kVideoRotation_0);
frame.set_ntp_time_ms(ntp_time_ms);
+ frame.set_timestamp_us(ntp_time_ms * 1000);
return frame;
}
@@ -319,9 +325,14 @@ class ViEEncoderTest : public ::testing::Test {
void WaitForEncodedFrame(uint32_t expected_width,
uint32_t expected_height) {
+ EXPECT_TRUE(encoded_frame_event_.Wait(kDefaultTimeoutMs));
+ CheckLastFrameSizeMatches(expected_width, expected_height);
+ }
+
+ void CheckLastFrameSizeMatches(uint32_t expected_width,
+ uint32_t expected_height) {
uint32_t width = 0;
uint32_t height = 0;
- EXPECT_TRUE(encoded_frame_event_.Wait(kDefaultTimeoutMs));
{
rtc::CritScope lock(&crit_);
width = last_width_;
@@ -333,6 +344,10 @@ class ViEEncoderTest : public ::testing::Test {
void ExpectDroppedFrame() { EXPECT_FALSE(encoded_frame_event_.Wait(20)); }
+ bool WaitForFrame(int64_t timeout_ms) {
+ return encoded_frame_event_.Wait(timeout_ms);
+ }
+
void SetExpectNoFrames() {
rtc::CritScope lock(&crit_);
expect_frames_ = false;
@@ -1225,7 +1240,7 @@ TEST_F(ViEEncoderTest, AdaptsResolutionOnOveruse) {
CreateFrame(2, kFrameWidth, kFrameHeight));
sink_.WaitForEncodedFrame((kFrameWidth * 3) / 4, (kFrameHeight * 3) / 4);
- // Trigger CPU normal use, return to original resoluton;
+ // Trigger CPU normal use, return to original resolution;
vie_encoder_->TriggerCpuNormalUsage();
video_source_.IncomingCapturedFrame(
CreateFrame(3, kFrameWidth, kFrameHeight));
@@ -1233,4 +1248,159 @@ TEST_F(ViEEncoderTest, AdaptsResolutionOnOveruse) {
vie_encoder_->Stop();
}
+
+TEST_F(ViEEncoderTest, AdaptsFrameOnOveruseWithMaintainResolution) {
+ const int kDefaultFramerateFps = 30;
+ const int kFrameIntervalMs = rtc::kNumMillisecsPerSec / kDefaultFramerateFps;
+ const int kFrameWidth = 1280;
+ const int kFrameHeight = 720;
+ rtc::ScopedFakeClock fake_clock;
+
+ vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
+ vie_encoder_->SetSource(
+ &video_source_,
+ VideoSendStream::DegradationPreference::kMaintainResolution);
+ video_source_.set_adaptation_enabled(true);
+
+ fake_clock.SetTimeMicros(kFrameIntervalMs * 1000);
+ int64_t timestamp_ms = kFrameIntervalMs;
+
+ video_source_.IncomingCapturedFrame(
+ CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
+ sink_.WaitForEncodedFrame(timestamp_ms);
+
+ // Try to trigger overuse. No fps estimate available => no effect.
+ vie_encoder_->TriggerCpuOveruse();
+
+ // Insert frames for one second to get a stable estimate.
+ for (int i = 0; i < kDefaultFramerateFps; ++i) {
+ timestamp_ms += kFrameIntervalMs;
+ fake_clock.AdvanceTimeMicros(kFrameIntervalMs * 1000);
+ video_source_.IncomingCapturedFrame(
+ CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
+ sink_.WaitForEncodedFrame(timestamp_ms);
+ }
+
+ // Trigger CPU overuse, framerate should be reduced to 2/3 of the original.
+ vie_encoder_->TriggerCpuOveruse();
+ int num_frames_dropped = 0;
+ for (int i = 0; i < kDefaultFramerateFps; ++i) {
+ timestamp_ms += kFrameIntervalMs;
+ fake_clock.AdvanceTimeMicros(kFrameIntervalMs * 1000);
+ video_source_.IncomingCapturedFrame(
+ CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
+ if (!sink_.WaitForFrame(kFrameTimeoutMs)) {
+ ++num_frames_dropped;
+ } else {
+ sink_.CheckLastFrameSizeMatches(kFrameWidth, kFrameHeight);
+ }
+ }
+
+ // TODO(sprang): Track down the rounding errors that throw the input fps
+ // estimate off and make the frame dropping slightly too aggressive, so the
+ // error margin here can be tightened.
+ const int kErrorMargin = 5;
+ EXPECT_NEAR(num_frames_dropped,
+ kDefaultFramerateFps - (kDefaultFramerateFps * 2 / 3),
+ kErrorMargin);
+
+ // Trigger CPU overuse again, framerate should be reduced to 4/9 of the
+ // original (a second 2/3 step).
+ vie_encoder_->TriggerCpuOveruse();
+ num_frames_dropped = 0;
+ for (int i = 0; i < kDefaultFramerateFps; ++i) {
+ timestamp_ms += kFrameIntervalMs;
+ fake_clock.AdvanceTimeMicros(kFrameIntervalMs * 1000);
+ video_source_.IncomingCapturedFrame(
+ CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
+ if (!sink_.WaitForFrame(kFrameTimeoutMs)) {
+ ++num_frames_dropped;
+ } else {
+ sink_.CheckLastFrameSizeMatches(kFrameWidth, kFrameHeight);
+ }
+ }
+ EXPECT_NEAR(num_frames_dropped,
+ kDefaultFramerateFps - (kDefaultFramerateFps * 4 / 9),
+ kErrorMargin);
+
+ // Go back up one step.
+ vie_encoder_->TriggerCpuNormalUsage();
+ num_frames_dropped = 0;
+ for (int i = 0; i < kDefaultFramerateFps; ++i) {
+ timestamp_ms += kFrameIntervalMs;
+ fake_clock.AdvanceTimeMicros(kFrameIntervalMs * 1000);
+ video_source_.IncomingCapturedFrame(
+ CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
+ if (!sink_.WaitForFrame(kFrameTimeoutMs)) {
+ ++num_frames_dropped;
+ } else {
+ sink_.CheckLastFrameSizeMatches(kFrameWidth, kFrameHeight);
+ }
+ }
+ EXPECT_NEAR(num_frames_dropped,
+ kDefaultFramerateFps - (kDefaultFramerateFps * 2 / 3),
+ kErrorMargin);
+
+ // Go back up to original mode.
+ vie_encoder_->TriggerCpuNormalUsage();
+ num_frames_dropped = 0;
+ for (int i = 0; i < kDefaultFramerateFps; ++i) {
+ timestamp_ms += kFrameIntervalMs;
+ fake_clock.AdvanceTimeMicros(kFrameIntervalMs * 1000);
+ video_source_.IncomingCapturedFrame(
+ CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
+ if (!sink_.WaitForFrame(kFrameTimeoutMs)) {
+ ++num_frames_dropped;
+ } else {
+ sink_.CheckLastFrameSizeMatches(kFrameWidth, kFrameHeight);
+ }
+ }
+ EXPECT_NEAR(num_frames_dropped, 0, kErrorMargin);
+
+ vie_encoder_->Stop();
+}
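
To make the expected drop counts above concrete: at 30 input fps, one overuse step should leave roughly 2/3 of the frames, about 20 encoded and 10 dropped per second; a second step leaves roughly 4/9, about 13 encoded and 17 dropped; stepping back up restores roughly 20 encoded and finally all 30. The ±5 frame kErrorMargin absorbs the input fps estimate drift noted in the TODO above.
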
+
+TEST_F(ViEEncoderTest, DoesntAdaptDownPastMinFramerate) {
+ const int kFramerateFps = 5;
+ const int kFrameIntervalMs = rtc::kNumMillisecsPerSec / kFramerateFps;
+ const int kMinFpsFrameInterval = rtc::kNumMillisecsPerSec / kMinFramerateFps;
+ const int kFrameWidth = 1280;
+ const int kFrameHeight = 720;
+
+ rtc::ScopedFakeClock fake_clock;
+ vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
+ vie_encoder_->SetSource(
+ &video_source_,
+ VideoSendStream::DegradationPreference::kMaintainResolution);
+ video_source_.set_adaptation_enabled(true);
+
+ fake_clock.SetTimeMicros(kFrameIntervalMs * 1000);
+ int64_t timestamp_ms = kFrameIntervalMs;
+
+ // Trigger overuse as much as we can.
+ for (int i = 0; i < ViEEncoder::kMaxCpuDowngrades; ++i) {
+ // Insert frames to get a new fps estimate...
+ for (int j = 0; j < kFramerateFps; ++j) {
+ video_source_.IncomingCapturedFrame(
+ CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
+ timestamp_ms += kFrameIntervalMs;
+ fake_clock.AdvanceTimeMicros(kFrameIntervalMs * 1000);
+ }
+ // ...and then try to adapt again.
+ vie_encoder_->TriggerCpuOveruse();
+ }
+
+ // Drain any frames still in the pipeline.
+ sink_.WaitForFrame(kDefaultTimeoutMs);
+
+ // Insert frames at min fps, all should go through.
+ for (int i = 0; i < 10; ++i) {
+ timestamp_ms += kMinFpsFrameInterval;
+ fake_clock.AdvanceTimeMicros(kMinFpsFrameInterval * 1000);
+ video_source_.IncomingCapturedFrame(
+ CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
+ sink_.WaitForEncodedFrame(timestamp_ms);
+ }
+
+ vie_encoder_->Stop();
+}
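
A rough sketch of the floor the last test relies on, assuming the same 2/3 reduction step as the previous test and the kMinFramerateFps constant defined at the top of this file; the exact step size and rounding inside ViEEncoder and VideoAdapter are not verified here:

  // Illustration only: repeated 2/3 reductions from 5 fps bottom out at the
  // kMinFramerateFps floor (2 fps), so frames sent at 2 fps still get through.
  int fps = 5;
  for (int i = 0; i < ViEEncoder::kMaxCpuDowngrades; ++i) {
    fps = std::max(kMinFramerateFps, fps * 2 / 3);  // 5 -> 3 -> 2 -> 2 ...
  }
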
} // namespace webrtc
