Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 /* | 1 /* |
| 2 * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| 11 #include <limits> | 11 #include <limits> |
| 12 #include <utility> | 12 #include <utility> |
| 13 | 13 |
| 14 #include "webrtc/api/video/i420_buffer.h" | 14 #include "webrtc/api/video/i420_buffer.h" |
| 15 #include "webrtc/base/fakeclock.h" | |
| 15 #include "webrtc/base/logging.h" | 16 #include "webrtc/base/logging.h" |
| 16 #include "webrtc/media/base/videoadapter.h" | 17 #include "webrtc/media/base/videoadapter.h" |
| 17 #include "webrtc/modules/video_coding/utility/default_video_bitrate_allocator.h" | 18 #include "webrtc/modules/video_coding/utility/default_video_bitrate_allocator.h" |
| 18 #include "webrtc/system_wrappers/include/metrics_default.h" | 19 #include "webrtc/system_wrappers/include/metrics_default.h" |
| 19 #include "webrtc/system_wrappers/include/sleep.h" | 20 #include "webrtc/system_wrappers/include/sleep.h" |
| 20 #include "webrtc/test/encoder_settings.h" | 21 #include "webrtc/test/encoder_settings.h" |
| 21 #include "webrtc/test/fake_encoder.h" | 22 #include "webrtc/test/fake_encoder.h" |
| 22 #include "webrtc/test/frame_generator.h" | 23 #include "webrtc/test/frame_generator.h" |
| 23 #include "webrtc/test/gmock.h" | 24 #include "webrtc/test/gmock.h" |
| 24 #include "webrtc/test/gtest.h" | 25 #include "webrtc/test/gtest.h" |
| 25 #include "webrtc/video/send_statistics_proxy.h" | 26 #include "webrtc/video/send_statistics_proxy.h" |
| 26 #include "webrtc/video/vie_encoder.h" | 27 #include "webrtc/video/vie_encoder.h" |
| 27 | 28 |
| 28 namespace { | 29 namespace { |
| 29 #if defined(WEBRTC_ANDROID) | 30 #if defined(WEBRTC_ANDROID) |
| 30 // TODO(kthelgason): Lower this limit when better testing | 31 // TODO(kthelgason): Lower this limit when better testing |
| 31 // on MediaCodec and fallback implementations are in place. | 32 // on MediaCodec and fallback implementations are in place. |
| 32 const int kMinPixelsPerFrame = 320 * 180; | 33 const int kMinPixelsPerFrame = 320 * 180; |
| 33 #else | 34 #else |
| 34 const int kMinPixelsPerFrame = 120 * 90; | 35 const int kMinPixelsPerFrame = 120 * 90; |
| 35 #endif | 36 #endif |
| 36 } | 37 const int kMinFramerateFps = 2; |
| 38 const int64_t kFrameTimeoutMs = 100; | |
| 39 } // namespace | |
| 37 | 40 |
| 38 namespace webrtc { | 41 namespace webrtc { |
| 39 | 42 |
| 40 using DegredationPreference = VideoSendStream::DegradationPreference; | 43 using DegredationPreference = VideoSendStream::DegradationPreference; |
| 41 using ScaleReason = AdaptationObserverInterface::AdaptReason; | 44 using ScaleReason = AdaptationObserverInterface::AdaptReason; |
| 42 using ::testing::_; | 45 using ::testing::_; |
| 43 using ::testing::Return; | 46 using ::testing::Return; |
| 44 | 47 |
| 45 namespace { | 48 namespace { |
| 46 const size_t kMaxPayloadLength = 1440; | 49 const size_t kMaxPayloadLength = 1440; |
| (...skipping 79 matching lines...) | |
| 126 bool adaption_enabled() { | 129 bool adaption_enabled() { |
| 127 rtc::CritScope cs(&crit_); | 130 rtc::CritScope cs(&crit_); |
| 128 return adaptation_enabled_; | 131 return adaptation_enabled_; |
| 129 } | 132 } |
| 130 | 133 |
| 131 void IncomingCapturedFrame(const VideoFrame& video_frame) override { | 134 void IncomingCapturedFrame(const VideoFrame& video_frame) override { |
| 132 int cropped_width = 0; | 135 int cropped_width = 0; |
| 133 int cropped_height = 0; | 136 int cropped_height = 0; |
| 134 int out_width = 0; | 137 int out_width = 0; |
| 135 int out_height = 0; | 138 int out_height = 0; |
| 136 if (adaption_enabled() && | 139 if (adaption_enabled()) { |
| 137 adapter_.AdaptFrameResolution(video_frame.width(), video_frame.height(), | 140 if (adapter_.AdaptFrameResolution( |
| 138 video_frame.timestamp_us() * 1000, | 141 video_frame.width(), video_frame.height(), |
| 139 &cropped_width, &cropped_height, | 142 video_frame.timestamp_us() * 1000, &cropped_width, |
| 140 &out_width, &out_height)) { | 143 &cropped_height, &out_width, &out_height)) { |
| 141 VideoFrame adapted_frame( | 144 VideoFrame adapted_frame(new rtc::RefCountedObject<TestBuffer>( |
| 142 new rtc::RefCountedObject<TestBuffer>(nullptr, out_width, out_height), | 145 nullptr, out_width, out_height), |
| 143 99, 99, kVideoRotation_0); | 146 99, 99, kVideoRotation_0); |
| 144 adapted_frame.set_ntp_time_ms(video_frame.ntp_time_ms()); | 147 adapted_frame.set_ntp_time_ms(video_frame.ntp_time_ms()); |
| 145 test::FrameForwarder::IncomingCapturedFrame(adapted_frame); | 148 test::FrameForwarder::IncomingCapturedFrame(adapted_frame); |
| 149 } | |
| 146 } else { | 150 } else { |
| 147 test::FrameForwarder::IncomingCapturedFrame(video_frame); | 151 test::FrameForwarder::IncomingCapturedFrame(video_frame); |
| 148 } | 152 } |
| 149 } | 153 } |
| 150 | 154 |
| 151 void AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink, | 155 void AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink, |
| 152 const rtc::VideoSinkWants& wants) override { | 156 const rtc::VideoSinkWants& wants) override { |
| 153 rtc::CritScope cs(&crit_); | 157 rtc::CritScope cs(&crit_); |
| 154 adapter_.OnResolutionRequest(wants.target_pixel_count, | 158 adapter_.OnResolutionFramerateRequest(wants.target_pixel_count, |
| 155 wants.max_pixel_count); | 159 wants.max_pixel_count, |
| 160 wants.max_framerate_fps_); | |
| 156 test::FrameForwarder::AddOrUpdateSink(sink, wants); | 161 test::FrameForwarder::AddOrUpdateSink(sink, wants); |
| 157 } | 162 } |
| 158 | 163 |
| 159 cricket::VideoAdapter adapter_; | 164 cricket::VideoAdapter adapter_; |
| 160 bool adaptation_enabled_ GUARDED_BY(crit_); | 165 bool adaptation_enabled_ GUARDED_BY(crit_); |
| 161 }; | 166 }; |
| 162 } // namespace | 167 } // namespace |
| 163 | 168 |
| 164 class ViEEncoderTest : public ::testing::Test { | 169 class ViEEncoderTest : public ::testing::Test { |
| 165 public: | 170 public: |
| (...skipping 58 matching lines...) | |
| 224 99, 99, kVideoRotation_0); | 229 99, 99, kVideoRotation_0); |
| 225 frame.set_ntp_time_ms(ntp_time_ms); | 230 frame.set_ntp_time_ms(ntp_time_ms); |
| 226 return frame; | 231 return frame; |
| 227 } | 232 } |
| 228 | 233 |
| 229 VideoFrame CreateFrame(int64_t ntp_time_ms, int width, int height) const { | 234 VideoFrame CreateFrame(int64_t ntp_time_ms, int width, int height) const { |
| 230 VideoFrame frame( | 235 VideoFrame frame( |
| 231 new rtc::RefCountedObject<TestBuffer>(nullptr, width, height), 99, 99, | 236 new rtc::RefCountedObject<TestBuffer>(nullptr, width, height), 99, 99, |
| 232 kVideoRotation_0); | 237 kVideoRotation_0); |
| 233 frame.set_ntp_time_ms(ntp_time_ms); | 238 frame.set_ntp_time_ms(ntp_time_ms); |
| 239 frame.set_timestamp_us(ntp_time_ms * 1000); | |
| 234 return frame; | 240 return frame; |
| 235 } | 241 } |
| 236 | 242 |
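Note on the new set_timestamp_us() call in the CreateFrame() helper above: the framerate-adaptation tests added further down drive an rtc::ScopedFakeClock in lockstep with these timestamps, and the encoder's input-fps estimate is derived from the frame timestamps. The following is a minimal standalone sketch of that timestamp bookkeeping, not WebRTC code; EstimateFps is a hypothetical helper that only illustrates why the timestamps must advance with the clock.

// Standalone sketch (hypothetical, not WebRTC code): frames whose timestamps
// advance together with the fake clock yield a sane input-fps estimate;
// timestamps that stand still would make any such estimate collapse.
#include <cstdint>
#include <cstdio>
#include <vector>

int EstimateFps(const std::vector<int64_t>& timestamps_us) {
  // Count frames whose timestamps fall within the last second.
  if (timestamps_us.empty())
    return 0;
  const int64_t window_start_us = timestamps_us.back() - 1000000;
  int frames_in_window = 0;
  for (int64_t t : timestamps_us) {
    if (t > window_start_us)
      ++frames_in_window;
  }
  return frames_in_window;
}

int main() {
  const int kFramerateFps = 30;
  const int64_t kFrameIntervalUs = 1000000 / kFramerateFps;
  std::vector<int64_t> timestamps_us;
  int64_t now_us = 0;
  for (int i = 0; i < kFramerateFps; ++i) {
    now_us += kFrameIntervalUs;       // Mirrors fake_clock.AdvanceTimeMicros().
    timestamps_us.push_back(now_us);  // Mirrors frame.set_timestamp_us(...).
  }
  std::printf("estimated fps: %d\n", EstimateFps(timestamps_us));  // ~30
  return 0;
}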
| 237 class TestEncoder : public test::FakeEncoder { | 243 class TestEncoder : public test::FakeEncoder { |
| 238 public: | 244 public: |
| 239 TestEncoder() | 245 TestEncoder() |
| 240 : FakeEncoder(Clock::GetRealTimeClock()), | 246 : FakeEncoder(Clock::GetRealTimeClock()), |
| 241 continue_encode_event_(false, false) {} | 247 continue_encode_event_(false, false) {} |
| 242 | 248 |
| 243 VideoCodec codec_config() { | 249 VideoCodec codec_config() { |
| (...skipping 68 matching lines...) | |
| 312 EXPECT_TRUE(encoded_frame_event_.Wait(kDefaultTimeoutMs)); | 318 EXPECT_TRUE(encoded_frame_event_.Wait(kDefaultTimeoutMs)); |
| 313 { | 319 { |
| 314 rtc::CritScope lock(&crit_); | 320 rtc::CritScope lock(&crit_); |
| 315 timestamp = last_timestamp_; | 321 timestamp = last_timestamp_; |
| 316 } | 322 } |
| 317 test_encoder_->CheckLastTimeStampsMatch(expected_ntp_time, timestamp); | 323 test_encoder_->CheckLastTimeStampsMatch(expected_ntp_time, timestamp); |
| 318 } | 324 } |
| 319 | 325 |
| 320 void WaitForEncodedFrame(uint32_t expected_width, | 326 void WaitForEncodedFrame(uint32_t expected_width, |
| 321 uint32_t expected_height) { | 327 uint32_t expected_height) { |
| 328 EXPECT_TRUE(encoded_frame_event_.Wait(kDefaultTimeoutMs)); | |
| 329 CheckLastFrameSizeMathces(expected_width, expected_height); | |
| 330 } | |
| 331 | |
| 332 void CheckLastFrameSizeMathces(uint32_t expected_width, | |
| 333 uint32_t expected_height) { | |
| 322 uint32_t width = 0; | 334 uint32_t width = 0; |
| 323 uint32_t height = 0; | 335 uint32_t height = 0; |
| 324 EXPECT_TRUE(encoded_frame_event_.Wait(kDefaultTimeoutMs)); | |
| 325 { | 336 { |
| 326 rtc::CritScope lock(&crit_); | 337 rtc::CritScope lock(&crit_); |
| 327 width = last_width_; | 338 width = last_width_; |
| 328 height = last_height_; | 339 height = last_height_; |
| 329 } | 340 } |
| 330 EXPECT_EQ(expected_height, height); | 341 EXPECT_EQ(expected_height, height); |
| 331 EXPECT_EQ(expected_width, width); | 342 EXPECT_EQ(expected_width, width); |
| 332 } | 343 } |
| 333 | 344 |
| 334 void ExpectDroppedFrame() { EXPECT_FALSE(encoded_frame_event_.Wait(20)); } | 345 void ExpectDroppedFrame() { EXPECT_FALSE(encoded_frame_event_.Wait(20)); } |
| 335 | 346 |
| 347 bool WaitForFrame(int64_t timeout_ms) { | |
| 348 return encoded_frame_event_.Wait(timeout_ms); | |
| 349 } | |
| 350 | |
| 336 void SetExpectNoFrames() { | 351 void SetExpectNoFrames() { |
| 337 rtc::CritScope lock(&crit_); | 352 rtc::CritScope lock(&crit_); |
| 338 expect_frames_ = false; | 353 expect_frames_ = false; |
| 339 } | 354 } |
| 340 | 355 |
| 341 int number_of_reconfigurations() { | 356 int number_of_reconfigurations() { |
| 342 rtc::CritScope lock(&crit_); | 357 rtc::CritScope lock(&crit_); |
| 343 return number_of_reconfigurations_; | 358 return number_of_reconfigurations_; |
| 344 } | 359 } |
| 345 | 360 |
| (...skipping 872 matching lines...) | |
| 1218 video_source_.IncomingCapturedFrame( | 1233 video_source_.IncomingCapturedFrame( |
| 1219 CreateFrame(1, kFrameWidth, kFrameHeight)); | 1234 CreateFrame(1, kFrameWidth, kFrameHeight)); |
| 1220 sink_.WaitForEncodedFrame(kFrameWidth, kFrameHeight); | 1235 sink_.WaitForEncodedFrame(kFrameWidth, kFrameHeight); |
| 1221 | 1236 |
| 1222 // Trigger CPU overuse, downscale by 3/4. | 1237 // Trigger CPU overuse, downscale by 3/4. |
| 1223 vie_encoder_->TriggerCpuOveruse(); | 1238 vie_encoder_->TriggerCpuOveruse(); |
| 1224 video_source_.IncomingCapturedFrame( | 1239 video_source_.IncomingCapturedFrame( |
| 1225 CreateFrame(2, kFrameWidth, kFrameHeight)); | 1240 CreateFrame(2, kFrameWidth, kFrameHeight)); |
| 1226 sink_.WaitForEncodedFrame((kFrameWidth * 3) / 4, (kFrameHeight * 3) / 4); | 1241 sink_.WaitForEncodedFrame((kFrameWidth * 3) / 4, (kFrameHeight * 3) / 4); |
| 1227 | 1242 |
| 1228 // Trigger CPU normal use, return to original resoluton; | 1243 // Trigger CPU normal use, return to original resolution; |
| 1229 vie_encoder_->TriggerCpuNormalUsage(); | 1244 vie_encoder_->TriggerCpuNormalUsage(); |
| 1230 video_source_.IncomingCapturedFrame( | 1245 video_source_.IncomingCapturedFrame( |
| 1231 CreateFrame(3, kFrameWidth, kFrameHeight)); | 1246 CreateFrame(3, kFrameWidth, kFrameHeight)); |
| 1232 sink_.WaitForEncodedFrame(kFrameWidth, kFrameHeight); | 1247 sink_.WaitForEncodedFrame(kFrameWidth, kFrameHeight); |
| 1233 | 1248 |
| 1234 vie_encoder_->Stop(); | 1249 vie_encoder_->Stop(); |
| 1235 } | 1250 } |
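The test above expects a single overuse signal to scale the output down by 3/4 in each dimension, bounded below by the kMinPixelsPerFrame constant defined at the top of this file. Below is a rough standalone sketch of that arithmetic only, assuming a fixed 3/4 step per downgrade; the real cricket::VideoAdapter chooses its own step factors, so this is illustrative, not the adapter's actual logic.

// Standalone sketch (not cricket::VideoAdapter): repeated 3/4 downscale steps
// with the non-Android kMinPixelsPerFrame floor from this file.
#include <cstdio>

int main() {
  const int kMinPixelsPerFrame = 120 * 90;  // Non-Android value from this file.
  int width = 1280;
  int height = 720;
  for (int step = 1; step <= 10; ++step) {
    const int next_width = (width * 3) / 4;
    const int next_height = (height * 3) / 4;
    if (next_width * next_height < kMinPixelsPerFrame)
      break;  // Would drop below the allowed minimum; stop adapting down.
    width = next_width;
    height = next_height;
    std::printf("after step %d: %dx%d\n", step, width, height);
  }
  // First step: 1280x720 -> 960x540, matching
  // sink_.WaitForEncodedFrame((kFrameWidth * 3) / 4, (kFrameHeight * 3) / 4).
  return 0;
}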
| 1251 | |
| 1252 TEST_F(ViEEncoderTest, AdaptsFrameOnOveruseWithMaintainResolution) { | |
| 1253 const int kDefaultFramerateFps = 30; | |
| 1254 const int kFrameIntervalMs = rtc::kNumMillisecsPerSec / kDefaultFramerateFps; | |
| 1255 const int kFrameWidth = 1280; | |
| 1256 const int kFrameHeight = 720; | |
| 1257 rtc::ScopedFakeClock fake_clock; | |
| 1258 | |
| 1259 vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0); | |
| 1260 vie_encoder_->SetSource( | |
| 1261 &video_source_, | |
| 1262 VideoSendStream::DegradationPreference::kMaintainResolution); | |
| 1263 video_source_.set_adaptation_enabled(true); | |
| 1264 | |
| 1265 fake_clock.SetTimeMicros(kFrameIntervalMs * 1000); | |
| 1266 int64_t timestamp_ms = kFrameIntervalMs; | |
| 1267 | |
| 1268 video_source_.IncomingCapturedFrame( | |
| 1269 CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight)); | |
| 1270 sink_.WaitForEncodedFrame(timestamp_ms); | |
| 1271 | |
| 1272 // Try to trigger overuse. No fps estimate available => no effect. | |
| 1273 vie_encoder_->TriggerCpuOveruse(); | |
| 1274 | |
| 1275 // Insert frames for one second to get a stable estimate. | |
| 1276 for (int i = 0; i < kDefaultFramerateFps; ++i) { | |
| 1277 timestamp_ms += kFrameIntervalMs; | |
| 1278 fake_clock.AdvanceTimeMicros(kFrameIntervalMs * 1000); | |
| 1279 video_source_.IncomingCapturedFrame( | |
| 1280 CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight)); | |
| 1281 sink_.WaitForEncodedFrame(timestamp_ms); | |
| 1282 } | |
| 1283 | |
| 1284 // Trigger CPU overuse, reduce framerate by 2/3. | |
| 1285 vie_encoder_->TriggerCpuOveruse(); | |
| 1286 int num_frames_dropped = 0; | |
| 1287 for (int i = 0; i < kDefaultFramerateFps; ++i) { | |
| 1288 timestamp_ms += kFrameIntervalMs; | |
| 1289 fake_clock.AdvanceTimeMicros(kFrameIntervalMs * 1000); | |
| 1290 video_source_.IncomingCapturedFrame( | |
| 1291 CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight)); | |
| 1292 if (!sink_.WaitForFrame(kFrameTimeoutMs)) { | |
| 1293 ++num_frames_dropped; | |
| 1294 } else { | |
| 1295 sink_.CheckLastFrameSizeMathces(kFrameWidth, kFrameHeight); | |
| 1296 } | |
| 1297 } | |
| 1298 | |
| 1299 // TODO(sprang): Find where there's roundign errors or stuff causing the | |

magjed_webrtc 2017/02/28 14:21:55:
spelling nit: rounding
Also, this comment is a lit

sprang_webrtc 2017/02/28 15:15:30:
Done. And yes :(

| 1300 // margin here to be a little larger than we'd like (input fps estimate is | |
| 1301 // off) and the frame dropping is a little too aggressive. | |
| 1302 const int kErrorMargin = 5; | |
| 1303 EXPECT_NEAR(num_frames_dropped, | |
| 1304 kDefaultFramerateFps - (kDefaultFramerateFps * 2 / 3), | |
| 1305 kErrorMargin); | |
| 1306 | |
| 1307 // Trigger CPU overuse, reduce framerate by 2/3 again. | |
| 1308 vie_encoder_->TriggerCpuOveruse(); | |
| 1309 num_frames_dropped = 0; | |
| 1310 for (int i = 0; i < kDefaultFramerateFps; ++i) { | |
| 1311 timestamp_ms += kFrameIntervalMs; | |
| 1312 fake_clock.AdvanceTimeMicros(kFrameIntervalMs * 1000); | |
| 1313 video_source_.IncomingCapturedFrame( | |
| 1314 CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight)); | |
| 1315 if (!sink_.WaitForFrame(kFrameTimeoutMs)) { | |
| 1316 ++num_frames_dropped; | |
| 1317 } else { | |
| 1318 sink_.CheckLastFrameSizeMathces(kFrameWidth, kFrameHeight); | |
| 1319 } | |
| 1320 } | |
| 1321 EXPECT_NEAR(num_frames_dropped, | |
| 1322 kDefaultFramerateFps - (kDefaultFramerateFps * 4 / 9), | |
| 1323 kErrorMargin); | |
| 1324 | |
| 1325 // Go back up one step. | |
| 1326 vie_encoder_->TriggerCpuNormalUsage(); | |
| 1327 num_frames_dropped = 0; | |
| 1328 for (int i = 0; i < kDefaultFramerateFps; ++i) { | |
| 1329 timestamp_ms += kFrameIntervalMs; | |
| 1330 fake_clock.AdvanceTimeMicros(kFrameIntervalMs * 1000); | |
| 1331 video_source_.IncomingCapturedFrame( | |
| 1332 CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight)); | |
| 1333 if (!sink_.WaitForFrame(kFrameTimeoutMs)) { | |
| 1334 ++num_frames_dropped; | |
| 1335 } else { | |
| 1336 sink_.CheckLastFrameSizeMathces(kFrameWidth, kFrameHeight); | |
| 1337 } | |
| 1338 } | |
| 1339 EXPECT_NEAR(num_frames_dropped, | |
| 1340 kDefaultFramerateFps - (kDefaultFramerateFps * 2 / 3), | |
| 1341 kErrorMargin); | |
| 1342 | |
| 1343 // Go back up to original mode. | |
| 1344 vie_encoder_->TriggerCpuNormalUsage(); | |
| 1345 num_frames_dropped = 0; | |
| 1346 for (int i = 0; i < kDefaultFramerateFps; ++i) { | |
| 1347 timestamp_ms += kFrameIntervalMs; | |
| 1348 fake_clock.AdvanceTimeMicros(kFrameIntervalMs * 1000); | |
| 1349 video_source_.IncomingCapturedFrame( | |
| 1350 CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight)); | |
| 1351 if (!sink_.WaitForFrame(kFrameTimeoutMs)) { | |
| 1352 ++num_frames_dropped; | |
| 1353 } else { | |
| 1354 sink_.CheckLastFrameSizeMathces(kFrameWidth, kFrameHeight); | |
| 1355 } | |
| 1356 } | |
| 1357 EXPECT_NEAR(num_frames_dropped, 0, kErrorMargin); | |
| 1358 | |
| 1359 vie_encoder_->Stop(); | |
| 1360 } | |
| 1361 | |
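The EXPECT_NEAR checks in the test above encode a simple drop-rate calculation: each overuse step cuts the framerate to 2/3 of its previous value, so of 30 input frames roughly 10 should be dropped after one step and roughly 17 after two (keeping 4/9 of the frames), within kErrorMargin. A standalone sketch of that integer arithmetic, matching the expressions in the expectations:

// Standalone sketch: expected dropped-frame counts per one-second burst.
#include <cstdio>

int main() {
  const int kDefaultFramerateFps = 30;
  const int drops_one_step =
      kDefaultFramerateFps - (kDefaultFramerateFps * 2 / 3);  // 30 - 20 = 10
  const int drops_two_steps =
      kDefaultFramerateFps - (kDefaultFramerateFps * 4 / 9);  // 30 - 13 = 17
  std::printf("expected drops: %d then %d (+/- kErrorMargin)\n",
              drops_one_step, drops_two_steps);
  return 0;
}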
| 1362 TEST_F(ViEEncoderTest, DoesntAdaptDownPastMinFramerate) { | |
| 1363 const int kFramerateFps = 5; | |
| 1364 const int kFrameIntervalMs = rtc::kNumMillisecsPerSec / kFramerateFps; | |
| 1365 const int kMinFpsFrameInterval = rtc::kNumMillisecsPerSec / kMinFramerateFps; | |
| 1366 const int kFrameWidth = 1280; | |
| 1367 const int kFrameHeight = 720; | |
| 1368 | |
| 1369 rtc::ScopedFakeClock fake_clock; | |
| 1370 vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0); | |
| 1371 vie_encoder_->SetSource( | |
| 1372 &video_source_, | |
| 1373 VideoSendStream::DegradationPreference::kMaintainResolution); | |
| 1374 video_source_.set_adaptation_enabled(true); | |
| 1375 | |
| 1376 fake_clock.SetTimeMicros(kFrameIntervalMs * 1000); | |
| 1377 int64_t timestamp_ms = kFrameIntervalMs; | |
| 1378 | |
| 1379 // Trigger overuse as much as we can. | |
| 1380 for (int i = 0; i < ViEEncoder::kMaxCpuDowngrades; ++i) { | |
| 1381 // Insert frames to get a new fps estimate... | |
| 1382 for (int j = 0; j < kFramerateFps; ++j) { | |
| 1383 video_source_.IncomingCapturedFrame( | |
| 1384 CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight)); | |
| 1385 timestamp_ms += kFrameIntervalMs; | |
| 1386 fake_clock.AdvanceTimeMicros(kFrameIntervalMs * 1000); | |
| 1387 } | |
| 1388 // ...and then try to adapt again. | |
| 1389 vie_encoder_->TriggerCpuOveruse(); | |
| 1390 } | |
| 1391 | |
| 1392 // Drain any frame in the pipeline. | |
| 1393 sink_.WaitForFrame(kDefaultTimeoutMs); | |
| 1394 | |
| 1395 // Insert frames at min fps, all should go through. | |
| 1396 for (int i = 0; i < 10; ++i) { | |
| 1397 timestamp_ms += kMinFpsFrameInterval; | |
| 1398 fake_clock.AdvanceTimeMicros(kMinFpsFrameInterval * 1000); | |
| 1399 video_source_.IncomingCapturedFrame( | |
| 1400 CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight)); | |
| 1401 sink_.WaitForEncodedFrame(timestamp_ms); | |
| 1402 } | |
| 1403 | |
| 1404 vie_encoder_->Stop(); | |
| 1405 } | |
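This last test relies on repeated overuse signals never pushing the requested framerate below kMinFramerateFps, so frames inserted at that rate all get encoded. A standalone sketch of that clamping follows; it assumes the same 2/3 per-step reduction described in the previous test's comments, and the kMaxCpuDowngrades value here is a placeholder (the real constant lives in ViEEncoder), so treat it as an illustration of the floor only.

// Standalone sketch (assumed 2/3 step, placeholder downgrade count): the
// requested framerate bottoms out at kMinFramerateFps.
#include <algorithm>
#include <cstdio>

int main() {
  const int kMinFramerateFps = 2;   // From the top of this file.
  const int kMaxCpuDowngrades = 8;  // Placeholder for ViEEncoder::kMaxCpuDowngrades.
  int requested_fps = 5;            // kFramerateFps in the test above.
  for (int i = 0; i < kMaxCpuDowngrades; ++i) {
    requested_fps = std::max(kMinFramerateFps, requested_fps * 2 / 3);
  }
  // With the floor applied, requests never go below 2 fps, so frames inserted
  // at kMinFramerateFps should all pass through.
  std::printf("requested fps after downgrades: %d\n", requested_fps);
  return 0;
}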
| 1236 } // namespace webrtc | 1406 } // namespace webrtc |