OLD | NEW |
1 /* | 1 /* |
2 * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
11 #include <limits> | 11 #include <limits> |
12 #include <utility> | 12 #include <utility> |
13 | 13 |
14 #include "webrtc/api/video/i420_buffer.h" | 14 #include "webrtc/api/video/i420_buffer.h" |
| 15 #include "webrtc/base/fakeclock.h" |
15 #include "webrtc/base/logging.h" | 16 #include "webrtc/base/logging.h" |
16 #include "webrtc/media/base/videoadapter.h" | 17 #include "webrtc/media/base/videoadapter.h" |
17 #include "webrtc/modules/video_coding/utility/default_video_bitrate_allocator.h" | 18 #include "webrtc/modules/video_coding/utility/default_video_bitrate_allocator.h" |
18 #include "webrtc/system_wrappers/include/metrics_default.h" | 19 #include "webrtc/system_wrappers/include/metrics_default.h" |
19 #include "webrtc/system_wrappers/include/sleep.h" | 20 #include "webrtc/system_wrappers/include/sleep.h" |
20 #include "webrtc/test/encoder_settings.h" | 21 #include "webrtc/test/encoder_settings.h" |
21 #include "webrtc/test/fake_encoder.h" | 22 #include "webrtc/test/fake_encoder.h" |
22 #include "webrtc/test/frame_generator.h" | 23 #include "webrtc/test/frame_generator.h" |
23 #include "webrtc/test/gmock.h" | 24 #include "webrtc/test/gmock.h" |
24 #include "webrtc/test/gtest.h" | 25 #include "webrtc/test/gtest.h" |
25 #include "webrtc/video/send_statistics_proxy.h" | 26 #include "webrtc/video/send_statistics_proxy.h" |
26 #include "webrtc/video/vie_encoder.h" | 27 #include "webrtc/video/vie_encoder.h" |
27 | 28 |
28 namespace { | 29 namespace { |
29 #if defined(WEBRTC_ANDROID) | 30 #if defined(WEBRTC_ANDROID) |
30 // TODO(kthelgason): Lower this limit when better testing | 31 // TODO(kthelgason): Lower this limit when better testing |
31 // on MediaCodec and fallback implementations are in place. | 32 // on MediaCodec and fallback implementations are in place. |
32 const int kMinPixelsPerFrame = 320 * 180; | 33 const int kMinPixelsPerFrame = 320 * 180; |
33 #else | 34 #else |
34 const int kMinPixelsPerFrame = 120 * 90; | 35 const int kMinPixelsPerFrame = 120 * 90; |
35 #endif | 36 #endif |
36 } | 37 const int kMinFramerateFps = 2; |
| 38 const int64_t kFrameTimeoutMs = 100; |
| 39 } // namespace |
37 | 40 |
38 namespace webrtc { | 41 namespace webrtc { |
39 | 42 |
40 using DegredationPreference = VideoSendStream::DegradationPreference; | 43 using DegredationPreference = VideoSendStream::DegradationPreference; |
41 using ScaleReason = AdaptationObserverInterface::AdaptReason; | 44 using ScaleReason = AdaptationObserverInterface::AdaptReason; |
42 using ::testing::_; | 45 using ::testing::_; |
43 using ::testing::Return; | 46 using ::testing::Return; |
44 | 47 |
45 namespace { | 48 namespace { |
46 const size_t kMaxPayloadLength = 1440; | 49 const size_t kMaxPayloadLength = 1440; |
(...skipping 79 matching lines...)
126 bool adaption_enabled() { | 129 bool adaption_enabled() { |
127 rtc::CritScope cs(&crit_); | 130 rtc::CritScope cs(&crit_); |
128 return adaptation_enabled_; | 131 return adaptation_enabled_; |
129 } | 132 } |
130 | 133 |
131 void IncomingCapturedFrame(const VideoFrame& video_frame) override { | 134 void IncomingCapturedFrame(const VideoFrame& video_frame) override { |
132 int cropped_width = 0; | 135 int cropped_width = 0; |
133 int cropped_height = 0; | 136 int cropped_height = 0; |
134 int out_width = 0; | 137 int out_width = 0; |
135 int out_height = 0; | 138 int out_height = 0; |
136 if (adaption_enabled() && | 139 if (adaption_enabled()) { |
137 adapter_.AdaptFrameResolution(video_frame.width(), video_frame.height(), | 140 if (adapter_.AdaptFrameResolution( |
138 video_frame.timestamp_us() * 1000, | 141 video_frame.width(), video_frame.height(), |
139 &cropped_width, &cropped_height, | 142 video_frame.timestamp_us() * 1000, &cropped_width, |
140 &out_width, &out_height)) { | 143 &cropped_height, &out_width, &out_height)) { |
141 VideoFrame adapted_frame( | 144 VideoFrame adapted_frame(new rtc::RefCountedObject<TestBuffer>( |
142 new rtc::RefCountedObject<TestBuffer>(nullptr, out_width, out_height), | 145 nullptr, out_width, out_height), |
143 99, 99, kVideoRotation_0); | 146 99, 99, kVideoRotation_0); |
144 adapted_frame.set_ntp_time_ms(video_frame.ntp_time_ms()); | 147 adapted_frame.set_ntp_time_ms(video_frame.ntp_time_ms()); |
145 test::FrameForwarder::IncomingCapturedFrame(adapted_frame); | 148 test::FrameForwarder::IncomingCapturedFrame(adapted_frame); |
| 149 } |
146 } else { | 150 } else { |
147 test::FrameForwarder::IncomingCapturedFrame(video_frame); | 151 test::FrameForwarder::IncomingCapturedFrame(video_frame); |
148 } | 152 } |
149 } | 153 } |
150 | 154 |
151 void AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink, | 155 void AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink, |
152 const rtc::VideoSinkWants& wants) override { | 156 const rtc::VideoSinkWants& wants) override { |
153 rtc::CritScope cs(&crit_); | 157 rtc::CritScope cs(&crit_); |
154 adapter_.OnResolutionRequest(wants.target_pixel_count, | 158 adapter_.OnResolutionFramerateRequest(wants.target_pixel_count, |
155 wants.max_pixel_count); | 159 wants.max_pixel_count, |
| 160 wants.max_framerate_fps_); |
156 test::FrameForwarder::AddOrUpdateSink(sink, wants); | 161 test::FrameForwarder::AddOrUpdateSink(sink, wants); |
157 } | 162 } |
158 | 163 |
159 cricket::VideoAdapter adapter_; | 164 cricket::VideoAdapter adapter_; |
160 bool adaptation_enabled_ GUARDED_BY(crit_); | 165 bool adaptation_enabled_ GUARDED_BY(crit_); |
161 }; | 166 }; |
162 } // namespace | 167 } // namespace |
163 | 168 |
164 class ViEEncoderTest : public ::testing::Test { | 169 class ViEEncoderTest : public ::testing::Test { |
165 public: | 170 public: |
(...skipping 58 matching lines...)
224 99, 99, kVideoRotation_0); | 229 99, 99, kVideoRotation_0); |
225 frame.set_ntp_time_ms(ntp_time_ms); | 230 frame.set_ntp_time_ms(ntp_time_ms); |
226 return frame; | 231 return frame; |
227 } | 232 } |
228 | 233 |
229 VideoFrame CreateFrame(int64_t ntp_time_ms, int width, int height) const { | 234 VideoFrame CreateFrame(int64_t ntp_time_ms, int width, int height) const { |
230 VideoFrame frame( | 235 VideoFrame frame( |
231 new rtc::RefCountedObject<TestBuffer>(nullptr, width, height), 99, 99, | 236 new rtc::RefCountedObject<TestBuffer>(nullptr, width, height), 99, 99, |
232 kVideoRotation_0); | 237 kVideoRotation_0); |
233 frame.set_ntp_time_ms(ntp_time_ms); | 238 frame.set_ntp_time_ms(ntp_time_ms); |
| 239 frame.set_timestamp_us(ntp_time_ms * 1000); |
234 return frame; | 240 return frame; |
235 } | 241 } |
236 | 242 |
237 class TestEncoder : public test::FakeEncoder { | 243 class TestEncoder : public test::FakeEncoder { |
238 public: | 244 public: |
239 TestEncoder() | 245 TestEncoder() |
240 : FakeEncoder(Clock::GetRealTimeClock()), | 246 : FakeEncoder(Clock::GetRealTimeClock()), |
241 continue_encode_event_(false, false) {} | 247 continue_encode_event_(false, false) {} |
242 | 248 |
243 VideoCodec codec_config() { | 249 VideoCodec codec_config() { |
(...skipping 68 matching lines...)
312 EXPECT_TRUE(encoded_frame_event_.Wait(kDefaultTimeoutMs)); | 318 EXPECT_TRUE(encoded_frame_event_.Wait(kDefaultTimeoutMs)); |
313 { | 319 { |
314 rtc::CritScope lock(&crit_); | 320 rtc::CritScope lock(&crit_); |
315 timestamp = last_timestamp_; | 321 timestamp = last_timestamp_; |
316 } | 322 } |
317 test_encoder_->CheckLastTimeStampsMatch(expected_ntp_time, timestamp); | 323 test_encoder_->CheckLastTimeStampsMatch(expected_ntp_time, timestamp); |
318 } | 324 } |
319 | 325 |
320 void WaitForEncodedFrame(uint32_t expected_width, | 326 void WaitForEncodedFrame(uint32_t expected_width, |
321 uint32_t expected_height) { | 327 uint32_t expected_height) { |
| 328 EXPECT_TRUE(encoded_frame_event_.Wait(kDefaultTimeoutMs)); |
| 329 CheckLastFrameSizeMatches(expected_width, expected_height); |
| 330 } |
| 331 |
| 332 void CheckLastFrameSizeMatches(uint32_t expected_width, |
| 333 uint32_t expected_height) { |
322 uint32_t width = 0; | 334 uint32_t width = 0; |
323 uint32_t height = 0; | 335 uint32_t height = 0; |
324 EXPECT_TRUE(encoded_frame_event_.Wait(kDefaultTimeoutMs)); | |
325 { | 336 { |
326 rtc::CritScope lock(&crit_); | 337 rtc::CritScope lock(&crit_); |
327 width = last_width_; | 338 width = last_width_; |
328 height = last_height_; | 339 height = last_height_; |
329 } | 340 } |
330 EXPECT_EQ(expected_height, height); | 341 EXPECT_EQ(expected_height, height); |
331 EXPECT_EQ(expected_width, width); | 342 EXPECT_EQ(expected_width, width); |
332 } | 343 } |
333 | 344 |
334 void ExpectDroppedFrame() { EXPECT_FALSE(encoded_frame_event_.Wait(20)); } | 345 void ExpectDroppedFrame() { EXPECT_FALSE(encoded_frame_event_.Wait(20)); } |
335 | 346 |
| 347 bool WaitForFrame(int64_t timeout_ms) { |
| 348 return encoded_frame_event_.Wait(timeout_ms); |
| 349 } |
| 350 |
336 void SetExpectNoFrames() { | 351 void SetExpectNoFrames() { |
337 rtc::CritScope lock(&crit_); | 352 rtc::CritScope lock(&crit_); |
338 expect_frames_ = false; | 353 expect_frames_ = false; |
339 } | 354 } |
340 | 355 |
341 int number_of_reconfigurations() { | 356 int number_of_reconfigurations() { |
342 rtc::CritScope lock(&crit_); | 357 rtc::CritScope lock(&crit_); |
343 return number_of_reconfigurations_; | 358 return number_of_reconfigurations_; |
344 } | 359 } |
345 | 360 |
(...skipping 276 matching lines...)
622 vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0); | 637 vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0); |
623 | 638 |
624 EXPECT_FALSE(video_source_.sink_wants().target_pixel_count); | 639 EXPECT_FALSE(video_source_.sink_wants().target_pixel_count); |
625 EXPECT_FALSE(video_source_.sink_wants().max_pixel_count); | 640 EXPECT_FALSE(video_source_.sink_wants().max_pixel_count); |
626 | 641 |
627 int frame_width = 1280; | 642 int frame_width = 1280; |
628 int frame_height = 720; | 643 int frame_height = 720; |
629 | 644 |
630 // Trigger CPU overuse kMaxCpuDowngrades times. Every time, ViEEncoder should | 645 // Trigger CPU overuse kMaxCpuResolutionDowngrades times. Every time, ViEEncoder should |
631 // request lower resolution. | 646 // request lower resolution. |
632 for (int i = 1; i <= ViEEncoder::kMaxCpuDowngrades; ++i) { | 647 for (int i = 1; i <= ViEEncoder::kMaxCpuResolutionDowngrades; ++i) { |
633 video_source_.IncomingCapturedFrame( | 648 video_source_.IncomingCapturedFrame( |
634 CreateFrame(i, frame_width, frame_height)); | 649 CreateFrame(i, frame_width, frame_height)); |
635 sink_.WaitForEncodedFrame(i); | 650 sink_.WaitForEncodedFrame(i); |
636 | 651 |
637 vie_encoder_->TriggerCpuOveruse(); | 652 vie_encoder_->TriggerCpuOveruse(); |
638 | 653 |
639 EXPECT_FALSE(video_source_.sink_wants().target_pixel_count); | 654 EXPECT_FALSE(video_source_.sink_wants().target_pixel_count); |
640 EXPECT_LT(video_source_.sink_wants().max_pixel_count.value_or( | 655 EXPECT_LT(video_source_.sink_wants().max_pixel_count.value_or( |
641 std::numeric_limits<int>::max()), | 656 std::numeric_limits<int>::max()), |
642 frame_width * frame_height); | 657 frame_width * frame_height); |
643 | 658 |
644 frame_width /= 2; | 659 frame_width /= 2; |
645 frame_height /= 2; | 660 frame_height /= 2; |
646 } | 661 } |
647 | 662 |
648 // Trigger CPU overuse one more time. This should not trigger a request for | 663 // Trigger CPU overuse one more time. This should not trigger a request for |
649 // lower resolution. | 664 // lower resolution. |
650 rtc::VideoSinkWants current_wants = video_source_.sink_wants(); | 665 rtc::VideoSinkWants current_wants = video_source_.sink_wants(); |
651 video_source_.IncomingCapturedFrame(CreateFrame( | 666 video_source_.IncomingCapturedFrame(CreateFrame( |
652 ViEEncoder::kMaxCpuDowngrades + 1, frame_width, frame_height)); | 667 ViEEncoder::kMaxCpuResolutionDowngrades + 1, frame_width, frame_height)); |
653 sink_.WaitForEncodedFrame(ViEEncoder::kMaxCpuDowngrades + 1); | 668 sink_.WaitForEncodedFrame(ViEEncoder::kMaxCpuResolutionDowngrades + 1); |
654 vie_encoder_->TriggerCpuOveruse(); | 669 vie_encoder_->TriggerCpuOveruse(); |
655 EXPECT_EQ(video_source_.sink_wants().target_pixel_count, | 670 EXPECT_EQ(video_source_.sink_wants().target_pixel_count, |
656 current_wants.target_pixel_count); | 671 current_wants.target_pixel_count); |
657 EXPECT_EQ(video_source_.sink_wants().max_pixel_count, | 672 EXPECT_EQ(video_source_.sink_wants().max_pixel_count, |
658 current_wants.max_pixel_count); | 673 current_wants.max_pixel_count); |
659 | 674 |
660 // Trigger CPU normal use. | 675 // Trigger CPU normal use. |
661 vie_encoder_->TriggerCpuNormalUsage(); | 676 vie_encoder_->TriggerCpuNormalUsage(); |
662 EXPECT_EQ(frame_width * frame_height * 5 / 3, | 677 EXPECT_EQ(frame_width * frame_height * 5 / 3, |
663 video_source_.sink_wants().target_pixel_count.value_or(0)); | 678 video_source_.sink_wants().target_pixel_count.value_or(0)); |
(...skipping 563 matching lines...)
1227 video_source_.IncomingCapturedFrame( | 1242 video_source_.IncomingCapturedFrame( |
1228 CreateFrame(1, kFrameWidth, kFrameHeight)); | 1243 CreateFrame(1, kFrameWidth, kFrameHeight)); |
1229 sink_.WaitForEncodedFrame(kFrameWidth, kFrameHeight); | 1244 sink_.WaitForEncodedFrame(kFrameWidth, kFrameHeight); |
1230 | 1245 |
1231 // Trigger CPU overuse, downscale by 3/4. | 1246 // Trigger CPU overuse, downscale by 3/4. |
1232 vie_encoder_->TriggerCpuOveruse(); | 1247 vie_encoder_->TriggerCpuOveruse(); |
1233 video_source_.IncomingCapturedFrame( | 1248 video_source_.IncomingCapturedFrame( |
1234 CreateFrame(2, kFrameWidth, kFrameHeight)); | 1249 CreateFrame(2, kFrameWidth, kFrameHeight)); |
1235 sink_.WaitForEncodedFrame((kFrameWidth * 3) / 4, (kFrameHeight * 3) / 4); | 1250 sink_.WaitForEncodedFrame((kFrameWidth * 3) / 4, (kFrameHeight * 3) / 4); |
1236 | 1251 |
1237 // Trigger CPU normal use, return to original resoluton; | 1252 // Trigger CPU normal use, return to original resolution. |
1238 vie_encoder_->TriggerCpuNormalUsage(); | 1253 vie_encoder_->TriggerCpuNormalUsage(); |
1239 video_source_.IncomingCapturedFrame( | 1254 video_source_.IncomingCapturedFrame( |
1240 CreateFrame(3, kFrameWidth, kFrameHeight)); | 1255 CreateFrame(3, kFrameWidth, kFrameHeight)); |
1241 sink_.WaitForEncodedFrame(kFrameWidth, kFrameHeight); | 1256 sink_.WaitForEncodedFrame(kFrameWidth, kFrameHeight); |
1242 | 1257 |
1243 vie_encoder_->Stop(); | 1258 vie_encoder_->Stop(); |
1244 } | 1259 } |
| 1260 |
| 1261 TEST_F(ViEEncoderTest, AdaptsFrameOnOveruseWithMaintainResolution) { |
| 1262 const int kDefaultFramerateFps = 30; |
| 1263 const int kFrameIntervalMs = rtc::kNumMillisecsPerSec / kDefaultFramerateFps; |
| 1264 const int kFrameWidth = 1280; |
| 1265 const int kFrameHeight = 720; |
| 1266 rtc::ScopedFakeClock fake_clock; |
| 1267 |
| 1268 vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0); |
| 1269 vie_encoder_->SetSource( |
| 1270 &video_source_, |
| 1271 VideoSendStream::DegradationPreference::kMaintainResolution); |
| 1272 video_source_.set_adaptation_enabled(true); |
| 1273 |
| 1274 fake_clock.SetTimeMicros(kFrameIntervalMs * 1000); |
| 1275 int64_t timestamp_ms = kFrameIntervalMs; |
| 1276 |
| 1277 video_source_.IncomingCapturedFrame( |
| 1278 CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight)); |
| 1279 sink_.WaitForEncodedFrame(timestamp_ms); |
| 1280 |
| 1281 // Try to trigger overuse. No fps estimate available => no effect. |
| 1282 vie_encoder_->TriggerCpuOveruse(); |
| 1283 |
| 1284 // Insert frames for one second to get a stable estimate. |
| 1285 for (int i = 0; i < kDefaultFramerateFps; ++i) { |
| 1286 timestamp_ms += kFrameIntervalMs; |
| 1287 fake_clock.AdvanceTimeMicros(kFrameIntervalMs * 1000); |
| 1288 video_source_.IncomingCapturedFrame( |
| 1289 CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight)); |
| 1290 sink_.WaitForEncodedFrame(timestamp_ms); |
| 1291 } |
| 1292 |
| 1293 // Trigger CPU overuse, reduce framerate to 2/3. |
| 1294 vie_encoder_->TriggerCpuOveruse(); |
| 1295 int num_frames_dropped = 0; |
| 1296 for (int i = 0; i < kDefaultFramerateFps; ++i) { |
| 1297 timestamp_ms += kFrameIntervalMs; |
| 1298 fake_clock.AdvanceTimeMicros(kFrameIntervalMs * 1000); |
| 1299 video_source_.IncomingCapturedFrame( |
| 1300 CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight)); |
| 1301 if (!sink_.WaitForFrame(kFrameTimeoutMs)) { |
| 1302 ++num_frames_dropped; |
| 1303 } else { |
| 1304 sink_.CheckLastFrameSizeMatches(kFrameWidth, kFrameHeight); |
| 1305 } |
| 1306 } |
| 1307 |
| 1308 // TODO(sprang): Find out why rounding errors or other issues cause the |
| 1309 // margin here to be a little larger than we'd like (the input fps estimate |
| 1310 // is off) and the frame dropping to be a little too aggressive. |
| 1311 const int kErrorMargin = 5; |
| 1312 EXPECT_NEAR(num_frames_dropped, |
| 1313 kDefaultFramerateFps - (kDefaultFramerateFps * 2 / 3), |
| 1314 kErrorMargin); |
| 1315 |
| 1316 // Trigger CPU overuse, reduce framerate to 2/3 again. |
| 1317 vie_encoder_->TriggerCpuOveruse(); |
| 1318 num_frames_dropped = 0; |
| 1319 for (int i = 0; i < kDefaultFramerateFps; ++i) { |
| 1320 timestamp_ms += kFrameIntervalMs; |
| 1321 fake_clock.AdvanceTimeMicros(kFrameIntervalMs * 1000); |
| 1322 video_source_.IncomingCapturedFrame( |
| 1323 CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight)); |
| 1324 if (!sink_.WaitForFrame(kFrameTimeoutMs)) { |
| 1325 ++num_frames_dropped; |
| 1326 } else { |
| 1327 sink_.CheckLastFrameSizeMatches(kFrameWidth, kFrameHeight); |
| 1328 } |
| 1329 } |
| 1330 EXPECT_NEAR(num_frames_dropped, |
| 1331 kDefaultFramerateFps - (kDefaultFramerateFps * 4 / 9), |
| 1332 kErrorMargin); |
| 1333 |
| 1334 // Go back up one step. |
| 1335 vie_encoder_->TriggerCpuNormalUsage(); |
| 1336 num_frames_dropped = 0; |
| 1337 for (int i = 0; i < kDefaultFramerateFps; ++i) { |
| 1338 timestamp_ms += kFrameIntervalMs; |
| 1339 fake_clock.AdvanceTimeMicros(kFrameIntervalMs * 1000); |
| 1340 video_source_.IncomingCapturedFrame( |
| 1341 CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight)); |
| 1342 if (!sink_.WaitForFrame(kFrameTimeoutMs)) { |
| 1343 ++num_frames_dropped; |
| 1344 } else { |
| 1345 sink_.CheckLastFrameSizeMatches(kFrameWidth, kFrameHeight); |
| 1346 } |
| 1347 } |
| 1348 EXPECT_NEAR(num_frames_dropped, |
| 1349 kDefaultFramerateFps - (kDefaultFramerateFps * 2 / 3), |
| 1350 kErrorMargin); |
| 1351 |
| 1352 // Go back up to original mode. |
| 1353 vie_encoder_->TriggerCpuNormalUsage(); |
| 1354 num_frames_dropped = 0; |
| 1355 for (int i = 0; i < kDefaultFramerateFps; ++i) { |
| 1356 timestamp_ms += kFrameIntervalMs; |
| 1357 fake_clock.AdvanceTimeMicros(kFrameIntervalMs * 1000); |
| 1358 video_source_.IncomingCapturedFrame( |
| 1359 CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight)); |
| 1360 if (!sink_.WaitForFrame(kFrameTimeoutMs)) { |
| 1361 ++num_frames_dropped; |
| 1362 } else { |
| 1363 sink_.CheckLastFrameSizeMatches(kFrameWidth, kFrameHeight); |
| 1364 } |
| 1365 } |
| 1366 EXPECT_NEAR(num_frames_dropped, 0, kErrorMargin); |
| 1367 |
| 1368 vie_encoder_->Stop(); |
| 1369 } |
| 1370 |
| 1371 TEST_F(ViEEncoderTest, DoesntAdaptDownPastMinFramerate) { |
| 1372 const int kFramerateFps = 5; |
| 1373 const int kFrameIntervalMs = rtc::kNumMillisecsPerSec / kFramerateFps; |
| 1374 const int kMinFpsFrameInterval = rtc::kNumMillisecsPerSec / kMinFramerateFps; |
| 1375 const int kFrameWidth = 1280; |
| 1376 const int kFrameHeight = 720; |
| 1377 |
| 1378 rtc::ScopedFakeClock fake_clock; |
| 1379 vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0); |
| 1380 vie_encoder_->SetSource( |
| 1381 &video_source_, |
| 1382 VideoSendStream::DegradationPreference::kMaintainResolution); |
| 1383 video_source_.set_adaptation_enabled(true); |
| 1384 |
| 1385 fake_clock.SetTimeMicros(kFrameIntervalMs * 1000); |
| 1386 int64_t timestamp_ms = kFrameIntervalMs; |
| 1387 |
| 1388 // Trigger overuse as much as we can. |
| 1389 for (int i = 0; i < ViEEncoder::kMaxCpuResolutionDowngrades; ++i) { |
| 1390 // Insert frames to get a new fps estimate... |
| 1391 for (int j = 0; j < kFramerateFps; ++j) { |
| 1392 video_source_.IncomingCapturedFrame( |
| 1393 CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight)); |
| 1394 timestamp_ms += kFrameIntervalMs; |
| 1395 fake_clock.AdvanceTimeMicros(kFrameIntervalMs * 1000); |
| 1396 } |
| 1397 // ...and then try to adapt again. |
| 1398 vie_encoder_->TriggerCpuOveruse(); |
| 1399 } |
| 1400 |
| 1401 // Drain any frame in the pipeline. |
| 1402 sink_.WaitForFrame(kDefaultTimeoutMs); |
| 1403 |
| 1404 // Insert frames at min fps, all should go through. |
| 1405 for (int i = 0; i < 10; ++i) { |
| 1406 timestamp_ms += kMinFpsFrameInterval; |
| 1407 fake_clock.AdvanceTimeMicros(kMinFpsFrameInterval * 1000); |
| 1408 video_source_.IncomingCapturedFrame( |
| 1409 CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight)); |
| 1410 sink_.WaitForEncodedFrame(timestamp_ms); |
| 1411 } |
| 1412 |
| 1413 vie_encoder_->Stop(); |
| 1414 } |
1245 } // namespace webrtc | 1415 } // namespace webrtc |