| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 #include <stdio.h> | 10 #include <stdio.h> |
| 11 | 11 |
| 12 #include <deque> | |
| 13 #include <map> | |
| 14 | |
| 15 #include "testing/gtest/include/gtest/gtest.h" | 12 #include "testing/gtest/include/gtest/gtest.h" |
| 16 | 13 #include "webrtc/video/video_quality_test.h" |
| 17 #include "webrtc/base/format_macros.h" | |
| 18 #include "webrtc/base/scoped_ptr.h" | |
| 19 #include "webrtc/base/thread_annotations.h" | |
| 20 #include "webrtc/call.h" | |
| 21 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h" | |
| 22 #include "webrtc/frame_callback.h" | |
| 23 #include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h" | |
| 24 #include "webrtc/system_wrappers/interface/clock.h" | |
| 25 #include "webrtc/system_wrappers/interface/cpu_info.h" | |
| 26 #include "webrtc/system_wrappers/interface/critical_section_wrapper.h" | |
| 27 #include "webrtc/system_wrappers/interface/event_wrapper.h" | |
| 28 #include "webrtc/system_wrappers/interface/sleep.h" | |
| 29 #include "webrtc/test/encoder_settings.h" | |
| 30 #include "webrtc/test/fake_encoder.h" | |
| 31 #include "webrtc/test/frame_generator.h" | |
| 32 #include "webrtc/test/frame_generator_capturer.h" | |
| 33 #include "webrtc/test/statistics.h" | |
| 34 #include "webrtc/test/testsupport/fileutils.h" | |
| 35 #include "webrtc/typedefs.h" | |
| 36 #include "webrtc/video/full_stack.h" | |
| 37 | 14 |
| 38 namespace webrtc { | 15 namespace webrtc { |
| 39 | 16 |
| 40 static const int kFullStackTestDurationSecs = 60; | 17 static const int kFullStackTestDurationSecs = 60; |
| 41 static const int kSendStatsPollingIntervalMs = 1000; | |
| 42 | 18 |
| 43 class VideoAnalyzer : public PacketReceiver, | 19 class FullStackTest : public VideoQualityTest { |
| 44 public newapi::Transport, | |
| 45 public VideoRenderer, | |
| 46 public VideoCaptureInput, | |
| 47 public EncodedFrameObserver { | |
| 48 public: | 20 public: |
| 49 VideoAnalyzer(VideoCaptureInput* input, | 21 void RunTest(const VideoQualityTest::Params ¶ms) { |
| 50 Transport* transport, | 22 RunWithAnalyzer(params); |
| 51 const char* test_label, | |
| 52 double avg_psnr_threshold, | |
| 53 double avg_ssim_threshold, | |
| 54 int duration_frames, | |
| 55 const std::string& graph_data_output_filename) | |
| 56 : input_(input), | |
| 57 transport_(transport), | |
| 58 receiver_(nullptr), | |
| 59 send_stream_(nullptr), | |
| 60 test_label_(test_label), | |
| 61 graph_data_output_filename_(graph_data_output_filename), | |
| 62 frames_to_process_(duration_frames), | |
| 63 frames_recorded_(0), | |
| 64 frames_processed_(0), | |
| 65 dropped_frames_(0), | |
| 66 last_render_time_(0), | |
| 67 rtp_timestamp_delta_(0), | |
| 68 avg_psnr_threshold_(avg_psnr_threshold), | |
| 69 avg_ssim_threshold_(avg_ssim_threshold), | |
| 70 comparison_available_event_(EventWrapper::Create()), | |
| 71 done_(EventWrapper::Create()) { | |
| 72 // Create thread pool for CPU-expensive PSNR/SSIM calculations. | |
| 73 | |
| 74 // Try to use about as many threads as cores, but leave kMinCoresLeft alone, | |
| 75 // so that we don't accidentally starve "real" worker threads (codec etc). | |
| 76 // Also, don't allocate more than kMaxComparisonThreads, even if there are | |
| 77 // spare cores. | |
| 78 | |
| 79 uint32_t num_cores = CpuInfo::DetectNumberOfCores(); | |
| 80 RTC_DCHECK_GE(num_cores, 1u); | |
| 81 static const uint32_t kMinCoresLeft = 4; | |
| 82 static const uint32_t kMaxComparisonThreads = 8; | |
| 83 | |
| 84 if (num_cores <= kMinCoresLeft) { | |
| 85 num_cores = 1; | |
| 86 } else { | |
| 87 num_cores -= kMinCoresLeft; | |
| 88 num_cores = std::min(num_cores, kMaxComparisonThreads); | |
| 89 } | |
| 90 | |
| 91 for (uint32_t i = 0; i < num_cores; ++i) { | |
| 92 rtc::scoped_ptr<ThreadWrapper> thread = | |
| 93 ThreadWrapper::CreateThread(&FrameComparisonThread, this, "Analyzer"); | |
| 94 EXPECT_TRUE(thread->Start()); | |
| 95 comparison_thread_pool_.push_back(thread.release()); | |
| 96 } | |
| 97 | |
| 98 stats_polling_thread_ = | |
| 99 ThreadWrapper::CreateThread(&PollStatsThread, this, "StatsPoller"); | |
| 100 EXPECT_TRUE(stats_polling_thread_->Start()); | |
| 101 } | 23 } |
| 102 | |
| 103 ~VideoAnalyzer() { | |
| 104 for (ThreadWrapper* thread : comparison_thread_pool_) { | |
| 105 EXPECT_TRUE(thread->Stop()); | |
| 106 delete thread; | |
| 107 } | |
| 108 } | |
| 109 | |
| 110 virtual void SetReceiver(PacketReceiver* receiver) { receiver_ = receiver; } | |
| 111 | |
| 112 DeliveryStatus DeliverPacket(MediaType media_type, | |
| 113 const uint8_t* packet, | |
| 114 size_t length, | |
| 115 const PacketTime& packet_time) override { | |
| 116 rtc::scoped_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create()); | |
| 117 RTPHeader header; | |
| 118 parser->Parse(packet, length, &header); | |
| 119 { | |
| 120 rtc::CritScope lock(&crit_); | |
| 121 recv_times_[header.timestamp - rtp_timestamp_delta_] = | |
| 122 Clock::GetRealTimeClock()->CurrentNtpInMilliseconds(); | |
| 123 } | |
| 124 | |
| 125 return receiver_->DeliverPacket(media_type, packet, length, packet_time); | |
| 126 } | |
| 127 | |
| 128 void IncomingCapturedFrame(const VideoFrame& video_frame) override { | |
| 129 VideoFrame copy = video_frame; | |
| 130 copy.set_timestamp(copy.ntp_time_ms() * 90); | |
| 131 | |
| 132 { | |
| 133 rtc::CritScope lock(&crit_); | |
| 134 if (first_send_frame_.IsZeroSize() && rtp_timestamp_delta_ == 0) | |
| 135 first_send_frame_ = copy; | |
| 136 | |
| 137 frames_.push_back(copy); | |
| 138 } | |
| 139 | |
| 140 input_->IncomingCapturedFrame(video_frame); | |
| 141 } | |
| 142 | |
| 143 bool SendRtp(const uint8_t* packet, size_t length) override { | |
| 144 rtc::scoped_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create()); | |
| 145 RTPHeader header; | |
| 146 parser->Parse(packet, length, &header); | |
| 147 | |
| 148 { | |
| 149 rtc::CritScope lock(&crit_); | |
| 150 if (rtp_timestamp_delta_ == 0) { | |
| 151 rtp_timestamp_delta_ = | |
| 152 header.timestamp - first_send_frame_.timestamp(); | |
| 153 first_send_frame_.Reset(); | |
| 154 } | |
| 155 uint32_t timestamp = header.timestamp - rtp_timestamp_delta_; | |
| 156 send_times_[timestamp] = | |
| 157 Clock::GetRealTimeClock()->CurrentNtpInMilliseconds(); | |
| 158 encoded_frame_sizes_[timestamp] += | |
| 159 length - (header.headerLength + header.paddingLength); | |
| 160 } | |
| 161 | |
| 162 return transport_->SendRtp(packet, length); | |
| 163 } | |
| 164 | |
| 165 bool SendRtcp(const uint8_t* packet, size_t length) override { | |
| 166 return transport_->SendRtcp(packet, length); | |
| 167 } | |
| 168 | |
| 169 void EncodedFrameCallback(const EncodedFrame& frame) override { | |
| 170 rtc::CritScope lock(&comparison_lock_); | |
| 171 if (frames_recorded_ < frames_to_process_) | |
| 172 encoded_frame_size_.AddSample(frame.length_); | |
| 173 } | |
| 174 | |
| 175 void RenderFrame(const VideoFrame& video_frame, | |
| 176 int time_to_render_ms) override { | |
| 177 int64_t render_time_ms = | |
| 178 Clock::GetRealTimeClock()->CurrentNtpInMilliseconds(); | |
| 179 uint32_t send_timestamp = video_frame.timestamp() - rtp_timestamp_delta_; | |
| 180 | |
| 181 rtc::CritScope lock(&crit_); | |
| 182 | |
| 183 while (frames_.front().timestamp() < send_timestamp) { | |
| 184 AddFrameComparison(frames_.front(), last_rendered_frame_, true, | |
| 185 render_time_ms); | |
| 186 frames_.pop_front(); | |
| 187 } | |
| 188 | |
| 189 VideoFrame reference_frame = frames_.front(); | |
| 190 frames_.pop_front(); | |
| 191 assert(!reference_frame.IsZeroSize()); | |
| 192 EXPECT_EQ(reference_frame.timestamp(), send_timestamp); | |
| 193 assert(reference_frame.timestamp() == send_timestamp); | |
| 194 | |
| 195 AddFrameComparison(reference_frame, video_frame, false, render_time_ms); | |
| 196 | |
| 197 last_rendered_frame_ = video_frame; | |
| 198 } | |
| 199 | |
| 200 bool IsTextureSupported() const override { return false; } | |
| 201 | |
| 202 void Wait() { | |
| 203 // Frame comparisons can be very expensive. Wait for test to be done, but | |
| 204 // at time-out check if frames_processed is going up. If so, give it more | |
| 205 // time, otherwise fail. Hopefully this will reduce test flakiness. | |
| 206 | |
| 207 int last_frames_processed = -1; | |
| 208 EventTypeWrapper eventType; | |
| 209 int iteration = 0; | |
| 210 while ((eventType = done_->Wait(FullStackTest::kDefaultTimeoutMs)) != | |
| 211 kEventSignaled) { | |
| 212 int frames_processed; | |
| 213 { | |
| 214 rtc::CritScope crit(&comparison_lock_); | |
| 215 frames_processed = frames_processed_; | |
| 216 } | |
| 217 | |
| 218 // Print some output so test infrastructure won't think we've crashed. | |
| 219 const char* kKeepAliveMessages[3] = { | |
| 220 "Uh, I'm-I'm not quite dead, sir.", | |
| 221 "Uh, I-I think uh, I could pull through, sir.", | |
| 222 "Actually, I think I'm all right to come with you--"}; | |
| 223 printf("- %s\n", kKeepAliveMessages[iteration++ % 3]); | |
| 224 | |
| 225 if (last_frames_processed == -1) { | |
| 226 last_frames_processed = frames_processed; | |
| 227 continue; | |
| 228 } | |
| 229 ASSERT_GT(frames_processed, last_frames_processed) | |
| 230 << "Analyzer stalled while waiting for test to finish."; | |
| 231 last_frames_processed = frames_processed; | |
| 232 } | |
| 233 | |
| 234 if (iteration > 0) | |
| 235 printf("- Farewell, sweet Concorde!\n"); | |
| 236 | |
| 237 // Signal stats polling thread if that is still waiting and stop it now, | |
| 238 // since it uses the send_stream_ reference that might be reclaimed after | |
| 239 // returning from this method. | |
| 240 done_->Set(); | |
| 241 EXPECT_TRUE(stats_polling_thread_->Stop()); | |
| 242 } | |
| 243 | |
| 244 VideoCaptureInput* input_; | |
| 245 Transport* transport_; | |
| 246 PacketReceiver* receiver_; | |
| 247 VideoSendStream* send_stream_; | |
| 248 | |
| 249 private: | |
| 250 struct FrameComparison { | |
| 251 FrameComparison() | |
| 252 : dropped(false), | |
| 253 send_time_ms(0), | |
| 254 recv_time_ms(0), | |
| 255 render_time_ms(0), | |
| 256 encoded_frame_size(0) {} | |
| 257 | |
| 258 FrameComparison(const VideoFrame& reference, | |
| 259 const VideoFrame& render, | |
| 260 bool dropped, | |
| 261 int64_t send_time_ms, | |
| 262 int64_t recv_time_ms, | |
| 263 int64_t render_time_ms, | |
| 264 size_t encoded_frame_size) | |
| 265 : reference(reference), | |
| 266 render(render), | |
| 267 dropped(dropped), | |
| 268 send_time_ms(send_time_ms), | |
| 269 recv_time_ms(recv_time_ms), | |
| 270 render_time_ms(render_time_ms), | |
| 271 encoded_frame_size(encoded_frame_size) {} | |
| 272 | |
| 273 VideoFrame reference; | |
| 274 VideoFrame render; | |
| 275 bool dropped; | |
| 276 int64_t send_time_ms; | |
| 277 int64_t recv_time_ms; | |
| 278 int64_t render_time_ms; | |
| 279 size_t encoded_frame_size; | |
| 280 }; | |
| 281 | |
| 282 struct Sample { | |
| 283 Sample(double dropped, | |
| 284 double input_time_ms, | |
| 285 double send_time_ms, | |
| 286 double recv_time_ms, | |
| 287 double encoded_frame_size, | |
| 288 double psnr, | |
| 289 double ssim, | |
| 290 double render_time_ms) | |
| 291 : dropped(dropped), | |
| 292 input_time_ms(input_time_ms), | |
| 293 send_time_ms(send_time_ms), | |
| 294 recv_time_ms(recv_time_ms), | |
| 295 encoded_frame_size(encoded_frame_size), | |
| 296 psnr(psnr), | |
| 297 ssim(ssim), | |
| 298 render_time_ms(render_time_ms) {} | |
| 299 | |
| 300 double dropped; | |
| 301 double input_time_ms; | |
| 302 double send_time_ms; | |
| 303 double recv_time_ms; | |
| 304 double encoded_frame_size; | |
| 305 double psnr; | |
| 306 double ssim; | |
| 307 double render_time_ms; | |
| 308 }; | |
| 309 | |
| 310 void AddFrameComparison(const VideoFrame& reference, | |
| 311 const VideoFrame& render, | |
| 312 bool dropped, | |
| 313 int64_t render_time_ms) | |
| 314 EXCLUSIVE_LOCKS_REQUIRED(crit_) { | |
| 315 int64_t send_time_ms = send_times_[reference.timestamp()]; | |
| 316 send_times_.erase(reference.timestamp()); | |
| 317 int64_t recv_time_ms = recv_times_[reference.timestamp()]; | |
| 318 recv_times_.erase(reference.timestamp()); | |
| 319 | |
| 320 size_t encoded_size = encoded_frame_sizes_[reference.timestamp()]; | |
| 321 encoded_frame_sizes_.erase(reference.timestamp()); | |
| 322 | |
| 323 VideoFrame reference_copy; | |
| 324 VideoFrame render_copy; | |
| 325 reference_copy.CopyFrame(reference); | |
| 326 render_copy.CopyFrame(render); | |
| 327 | |
| 328 rtc::CritScope crit(&comparison_lock_); | |
| 329 comparisons_.push_back(FrameComparison(reference_copy, render_copy, dropped, | |
| 330 send_time_ms, recv_time_ms, | |
| 331 render_time_ms, encoded_size)); | |
| 332 comparison_available_event_->Set(); | |
| 333 } | |
| 334 | |
| 335 static bool PollStatsThread(void* obj) { | |
| 336 return static_cast<VideoAnalyzer*>(obj)->PollStats(); | |
| 337 } | |
| 338 | |
| 339 bool PollStats() { | |
| 340 switch (done_->Wait(kSendStatsPollingIntervalMs)) { | |
| 341 case kEventSignaled: | |
| 342 case kEventError: | |
| 343 done_->Set(); // Make sure main thread is also signaled. | |
| 344 return false; | |
| 345 case kEventTimeout: | |
| 346 break; | |
| 347 default: | |
| 348 RTC_NOTREACHED(); | |
| 349 } | |
| 350 | |
| 351 VideoSendStream::Stats stats = send_stream_->GetStats(); | |
| 352 | |
| 353 rtc::CritScope crit(&comparison_lock_); | |
| 354 encode_frame_rate_.AddSample(stats.encode_frame_rate); | |
| 355 encode_time_ms.AddSample(stats.avg_encode_time_ms); | |
| 356 encode_usage_percent.AddSample(stats.encode_usage_percent); | |
| 357 media_bitrate_bps.AddSample(stats.media_bitrate_bps); | |
| 358 | |
| 359 return true; | |
| 360 } | |
| 361 | |
| 362 static bool FrameComparisonThread(void* obj) { | |
| 363 return static_cast<VideoAnalyzer*>(obj)->CompareFrames(); | |
| 364 } | |
| 365 | |
| 366 bool CompareFrames() { | |
| 367 if (AllFramesRecorded()) | |
| 368 return false; | |
| 369 | |
| 370 VideoFrame reference; | |
| 371 VideoFrame render; | |
| 372 FrameComparison comparison; | |
| 373 | |
| 374 if (!PopComparison(&comparison)) { | |
| 375 // Wait until new comparison task is available, or test is done. | |
| 376 // If done, wake up remaining threads waiting. | |
| 377 comparison_available_event_->Wait(1000); | |
| 378 if (AllFramesRecorded()) { | |
| 379 comparison_available_event_->Set(); | |
| 380 return false; | |
| 381 } | |
| 382 return true; // Try again. | |
| 383 } | |
| 384 | |
| 385 PerformFrameComparison(comparison); | |
| 386 | |
| 387 if (FrameProcessed()) { | |
| 388 PrintResults(); | |
| 389 if (!graph_data_output_filename_.empty()) | |
| 390 PrintSamplesToFile(); | |
| 391 done_->Set(); | |
| 392 comparison_available_event_->Set(); | |
| 393 return false; | |
| 394 } | |
| 395 | |
| 396 return true; | |
| 397 } | |
| 398 | |
| 399 bool PopComparison(FrameComparison* comparison) { | |
| 400 rtc::CritScope crit(&comparison_lock_); | |
| 401 // If AllFramesRecorded() is true, it means we have already popped | |
| 402 // frames_to_process_ frames from comparisons_, so there is no more work | |
| 403 // for this thread to be done. frames_processed_ might still be lower if | |
| 404 // all comparisons are not done, but those frames are currently being | |
| 405 // worked on by other threads. | |
| 406 if (comparisons_.empty() || AllFramesRecorded()) | |
| 407 return false; | |
| 408 | |
| 409 *comparison = comparisons_.front(); | |
| 410 comparisons_.pop_front(); | |
| 411 | |
| 412 FrameRecorded(); | |
| 413 return true; | |
| 414 } | |
| 415 | |
| 416 // Increment counter for number of frames received for comparison. | |
| 417 void FrameRecorded() { | |
| 418 rtc::CritScope crit(&comparison_lock_); | |
| 419 ++frames_recorded_; | |
| 420 } | |
| 421 | |
| 422 // Returns true if all frames to be compared have been taken from the queue. | |
| 423 bool AllFramesRecorded() { | |
| 424 rtc::CritScope crit(&comparison_lock_); | |
| 425 assert(frames_recorded_ <= frames_to_process_); | |
| 426 return frames_recorded_ == frames_to_process_; | |
| 427 } | |
| 428 | |
| 429 // Increase count of number of frames processed. Returns true if this was the | |
| 430 // last frame to be processed. | |
| 431 bool FrameProcessed() { | |
| 432 rtc::CritScope crit(&comparison_lock_); | |
| 433 ++frames_processed_; | |
| 434 assert(frames_processed_ <= frames_to_process_); | |
| 435 return frames_processed_ == frames_to_process_; | |
| 436 } | |
| 437 | |
| 438 void PrintResults() { | |
| 439 rtc::CritScope crit(&comparison_lock_); | |
| 440 PrintResult("psnr", psnr_, " dB"); | |
| 441 PrintResult("ssim", ssim_, ""); | |
| 442 PrintResult("sender_time", sender_time_, " ms"); | |
| 443 printf("RESULT dropped_frames: %s = %d frames\n", test_label_, | |
| 444 dropped_frames_); | |
| 445 PrintResult("receiver_time", receiver_time_, " ms"); | |
| 446 PrintResult("total_delay_incl_network", end_to_end_, " ms"); | |
| 447 PrintResult("time_between_rendered_frames", rendered_delta_, " ms"); | |
| 448 PrintResult("encoded_frame_size", encoded_frame_size_, " bytes"); | |
| 449 PrintResult("encode_frame_rate", encode_frame_rate_, " fps"); | |
| 450 PrintResult("encode_time", encode_time_ms, " ms"); | |
| 451 PrintResult("encode_usage_percent", encode_usage_percent, " percent"); | |
| 452 PrintResult("media_bitrate", media_bitrate_bps, " bps"); | |
| 453 | |
| 454 EXPECT_GT(psnr_.Mean(), avg_psnr_threshold_); | |
| 455 EXPECT_GT(ssim_.Mean(), avg_ssim_threshold_); | |
| 456 } | |
| 457 | |
| 458 void PerformFrameComparison(const FrameComparison& comparison) { | |
| 459 // Perform expensive psnr and ssim calculations while not holding lock. | |
| 460 double psnr = I420PSNR(&comparison.reference, &comparison.render); | |
| 461 double ssim = I420SSIM(&comparison.reference, &comparison.render); | |
| 462 | |
| 463 int64_t input_time_ms = comparison.reference.ntp_time_ms(); | |
| 464 | |
| 465 rtc::CritScope crit(&comparison_lock_); | |
| 466 if (!graph_data_output_filename_.empty()) { | |
| 467 samples_.push_back(Sample( | |
| 468 comparison.dropped, input_time_ms, comparison.send_time_ms, | |
| 469 comparison.recv_time_ms, comparison.encoded_frame_size, psnr, ssim, | |
| 470 comparison.render_time_ms)); | |
| 471 } | |
| 472 psnr_.AddSample(psnr); | |
| 473 ssim_.AddSample(ssim); | |
| 474 | |
| 475 if (comparison.dropped) { | |
| 476 ++dropped_frames_; | |
| 477 return; | |
| 478 } | |
| 479 if (last_render_time_ != 0) | |
| 480 rendered_delta_.AddSample(comparison.render_time_ms - last_render_time_); | |
| 481 last_render_time_ = comparison.render_time_ms; | |
| 482 | |
| 483 sender_time_.AddSample(comparison.send_time_ms - input_time_ms); | |
| 484 receiver_time_.AddSample(comparison.render_time_ms - | |
| 485 comparison.recv_time_ms); | |
| 486 end_to_end_.AddSample(comparison.render_time_ms - input_time_ms); | |
| 487 encoded_frame_size_.AddSample(comparison.encoded_frame_size); | |
| 488 } | |
| 489 | |
| 490 void PrintResult(const char* result_type, | |
| 491 test::Statistics stats, | |
| 492 const char* unit) { | |
| 493 printf("RESULT %s: %s = {%f, %f}%s\n", | |
| 494 result_type, | |
| 495 test_label_, | |
| 496 stats.Mean(), | |
| 497 stats.StandardDeviation(), | |
| 498 unit); | |
| 499 } | |
| 500 | |
| 501 void PrintSamplesToFile(void) { | |
| 502 FILE* out = fopen(graph_data_output_filename_.c_str(), "w"); | |
| 503 RTC_CHECK(out != nullptr) << "Couldn't open file: " | |
| 504 << graph_data_output_filename_; | |
| 505 | |
| 506 rtc::CritScope crit(&comparison_lock_); | |
| 507 std::sort(samples_.begin(), samples_.end(), | |
| 508 [](const Sample& A, const Sample& B) | |
| 509 -> bool { return A.input_time_ms < B.input_time_ms; }); | |
| 510 | |
| 511 fprintf(out, "%s\n", test_label_); | |
| 512 fprintf(out, "%" PRIuS "\n", samples_.size()); | |
| 513 fprintf(out, | |
| 514 "dropped " | |
| 515 "input_time_ms " | |
| 516 "send_time_ms " | |
| 517 "recv_time_ms " | |
| 518 "encoded_frame_size " | |
| 519 "psnr " | |
| 520 "ssim " | |
| 521 "render_time_ms\n"); | |
| 522 for (const Sample& sample : samples_) { | |
| 523 fprintf(out, "%lf %lf %lf %lf %lf %lf %lf %lf\n", sample.dropped, | |
| 524 sample.input_time_ms, sample.send_time_ms, sample.recv_time_ms, | |
| 525 sample.encoded_frame_size, sample.psnr, sample.ssim, | |
| 526 sample.render_time_ms); | |
| 527 } | |
| 528 fclose(out); | |
| 529 } | |
| 530 | |
| 531 const char* const test_label_; | |
| 532 std::string graph_data_output_filename_; | |
| 533 std::vector<Sample> samples_ GUARDED_BY(comparison_lock_); | |
| 534 test::Statistics sender_time_ GUARDED_BY(comparison_lock_); | |
| 535 test::Statistics receiver_time_ GUARDED_BY(comparison_lock_); | |
| 536 test::Statistics psnr_ GUARDED_BY(comparison_lock_); | |
| 537 test::Statistics ssim_ GUARDED_BY(comparison_lock_); | |
| 538 test::Statistics end_to_end_ GUARDED_BY(comparison_lock_); | |
| 539 test::Statistics rendered_delta_ GUARDED_BY(comparison_lock_); | |
| 540 test::Statistics encoded_frame_size_ GUARDED_BY(comparison_lock_); | |
| 541 test::Statistics encode_frame_rate_ GUARDED_BY(comparison_lock_); | |
| 542 test::Statistics encode_time_ms GUARDED_BY(comparison_lock_); | |
| 543 test::Statistics encode_usage_percent GUARDED_BY(comparison_lock_); | |
| 544 test::Statistics media_bitrate_bps GUARDED_BY(comparison_lock_); | |
| 545 | |
| 546 const int frames_to_process_; | |
| 547 int frames_recorded_; | |
| 548 int frames_processed_; | |
| 549 int dropped_frames_; | |
| 550 int64_t last_render_time_; | |
| 551 uint32_t rtp_timestamp_delta_; | |
| 552 | |
| 553 rtc::CriticalSection crit_; | |
| 554 std::deque<VideoFrame> frames_ GUARDED_BY(crit_); | |
| 555 std::deque<VideoSendStream::Stats> send_stats_ GUARDED_BY(crit_); | |
| 556 VideoFrame last_rendered_frame_ GUARDED_BY(crit_); | |
| 557 std::map<uint32_t, int64_t> send_times_ GUARDED_BY(crit_); | |
| 558 std::map<uint32_t, int64_t> recv_times_ GUARDED_BY(crit_); | |
| 559 std::map<uint32_t, size_t> encoded_frame_sizes_ GUARDED_BY(crit_); | |
| 560 VideoFrame first_send_frame_ GUARDED_BY(crit_); | |
| 561 const double avg_psnr_threshold_; | |
| 562 const double avg_ssim_threshold_; | |
| 563 | |
| 564 rtc::CriticalSection comparison_lock_; | |
| 565 std::vector<ThreadWrapper*> comparison_thread_pool_; | |
| 566 rtc::scoped_ptr<ThreadWrapper> stats_polling_thread_; | |
| 567 const rtc::scoped_ptr<EventWrapper> comparison_available_event_; | |
| 568 std::deque<FrameComparison> comparisons_ GUARDED_BY(comparison_lock_); | |
| 569 const rtc::scoped_ptr<EventWrapper> done_; | |
| 570 }; | 24 }; |
| 571 | 25 |
| 572 void FullStackTest::RunTest(const FullStackTestParams& params) { | |
| 573 // TODO(ivica): Add num_temporal_layers as a param. | |
| 574 unsigned char num_temporal_layers = | |
| 575 params.graph_data_output_filename.empty() ? 2 : 1; | |
| 576 | |
| 577 test::DirectTransport send_transport(params.link); | |
| 578 test::DirectTransport recv_transport(params.link); | |
| 579 VideoAnalyzer analyzer(nullptr, &send_transport, params.test_label, | |
| 580 params.avg_psnr_threshold, params.avg_ssim_threshold, | |
| 581 params.test_durations_secs * params.clip.fps, | |
| 582 params.graph_data_output_filename); | |
| 583 | |
| 584 CreateCalls(Call::Config(), Call::Config()); | |
| 585 | |
| 586 analyzer.SetReceiver(receiver_call_->Receiver()); | |
| 587 send_transport.SetReceiver(&analyzer); | |
| 588 recv_transport.SetReceiver(sender_call_->Receiver()); | |
| 589 | |
| 590 CreateSendConfig(1, &analyzer); | |
| 591 | |
| 592 rtc::scoped_ptr<VideoEncoder> encoder; | |
| 593 if (params.codec == "VP8") { | |
| 594 encoder = | |
| 595 rtc::scoped_ptr<VideoEncoder>(VideoEncoder::Create(VideoEncoder::kVp8)); | |
| 596 send_config_.encoder_settings.encoder = encoder.get(); | |
| 597 send_config_.encoder_settings.payload_name = "VP8"; | |
| 598 } else if (params.codec == "VP9") { | |
| 599 encoder = | |
| 600 rtc::scoped_ptr<VideoEncoder>(VideoEncoder::Create(VideoEncoder::kVp9)); | |
| 601 send_config_.encoder_settings.encoder = encoder.get(); | |
| 602 send_config_.encoder_settings.payload_name = "VP9"; | |
| 603 } else { | |
| 604 RTC_NOTREACHED() << "Codec not supported!"; | |
| 605 return; | |
| 606 } | |
| 607 send_config_.encoder_settings.payload_type = 124; | |
| 608 | |
| 609 send_config_.rtp.nack.rtp_history_ms = kNackRtpHistoryMs; | |
| 610 send_config_.rtp.rtx.ssrcs.push_back(kSendRtxSsrcs[0]); | |
| 611 send_config_.rtp.rtx.payload_type = kSendRtxPayloadType; | |
| 612 | |
| 613 VideoStream* stream = &encoder_config_.streams[0]; | |
| 614 stream->width = params.clip.width; | |
| 615 stream->height = params.clip.height; | |
| 616 stream->min_bitrate_bps = params.min_bitrate_bps; | |
| 617 stream->target_bitrate_bps = params.target_bitrate_bps; | |
| 618 stream->max_bitrate_bps = params.max_bitrate_bps; | |
| 619 stream->max_framerate = params.clip.fps; | |
| 620 | |
| 621 VideoCodecVP8 vp8_settings; | |
| 622 VideoCodecVP9 vp9_settings; | |
| 623 if (params.mode == ContentMode::kScreensharingStaticImage || | |
| 624 params.mode == ContentMode::kScreensharingScrollingImage) { | |
| 625 encoder_config_.content_type = VideoEncoderConfig::ContentType::kScreen; | |
| 626 encoder_config_.min_transmit_bitrate_bps = 400 * 1000; | |
| 627 if (params.codec == "VP8") { | |
| 628 vp8_settings = VideoEncoder::GetDefaultVp8Settings(); | |
| 629 vp8_settings.denoisingOn = false; | |
| 630 vp8_settings.frameDroppingOn = false; | |
| 631 vp8_settings.numberOfTemporalLayers = num_temporal_layers; | |
| 632 encoder_config_.encoder_specific_settings = &vp8_settings; | |
| 633 } else if (params.codec == "VP9") { | |
| 634 vp9_settings = VideoEncoder::GetDefaultVp9Settings(); | |
| 635 vp9_settings.denoisingOn = false; | |
| 636 vp9_settings.frameDroppingOn = false; | |
| 637 vp9_settings.numberOfTemporalLayers = num_temporal_layers; | |
| 638 encoder_config_.encoder_specific_settings = &vp9_settings; | |
| 639 } | |
| 640 | |
| 641 stream->temporal_layer_thresholds_bps.clear(); | |
| 642 if (num_temporal_layers > 1) { | |
| 643 stream->temporal_layer_thresholds_bps.push_back( | |
| 644 stream->target_bitrate_bps); | |
| 645 } | |
| 646 } | |
| 647 | |
| 648 CreateMatchingReceiveConfigs(&recv_transport); | |
| 649 receive_configs_[0].renderer = &analyzer; | |
| 650 receive_configs_[0].rtp.nack.rtp_history_ms = kNackRtpHistoryMs; | |
| 651 receive_configs_[0].rtp.rtx[kSendRtxPayloadType].ssrc = kSendRtxSsrcs[0]; | |
| 652 receive_configs_[0].rtp.rtx[kSendRtxPayloadType].payload_type = | |
| 653 kSendRtxPayloadType; | |
| 654 | |
| 655 for (auto& config : receive_configs_) | |
| 656 config.pre_decode_callback = &analyzer; | |
| 657 CreateStreams(); | |
| 658 analyzer.input_ = send_stream_->Input(); | |
| 659 analyzer.send_stream_ = send_stream_; | |
| 660 | |
| 661 std::vector<std::string> slides; | |
| 662 slides.push_back(test::ResourcePath("web_screenshot_1850_1110", "yuv")); | |
| 663 slides.push_back(test::ResourcePath("presentation_1850_1110", "yuv")); | |
| 664 slides.push_back(test::ResourcePath("photo_1850_1110", "yuv")); | |
| 665 slides.push_back(test::ResourcePath("difficult_photo_1850_1110", "yuv")); | |
| 666 size_t kSlidesWidth = 1850; | |
| 667 size_t kSlidesHeight = 1110; | |
| 668 | |
| 669 Clock* clock = Clock::GetRealTimeClock(); | |
| 670 rtc::scoped_ptr<test::FrameGenerator> frame_generator; | |
| 671 | |
| 672 switch (params.mode) { | |
| 673 case ContentMode::kRealTimeVideo: | |
| 674 frame_generator.reset(test::FrameGenerator::CreateFromYuvFile( | |
| 675 std::vector<std::string>(1, | |
| 676 test::ResourcePath(params.clip.name, "yuv")), | |
| 677 params.clip.width, params.clip.height, 1)); | |
| 678 break; | |
| 679 case ContentMode::kScreensharingScrollingImage: | |
| 680 frame_generator.reset( | |
| 681 test::FrameGenerator::CreateScrollingInputFromYuvFiles( | |
| 682 clock, slides, kSlidesWidth, kSlidesHeight, params.clip.width, | |
| 683 params.clip.height, 2000, | |
| 684 8000)); // Scroll for 2 seconds, then pause for 8. | |
| 685 break; | |
| 686 case ContentMode::kScreensharingStaticImage: | |
| 687 frame_generator.reset(test::FrameGenerator::CreateFromYuvFile( | |
| 688 slides, kSlidesWidth, kSlidesHeight, | |
| 689 10 * params.clip.fps)); // Cycle image every 10 seconds. | |
| 690 break; | |
| 691 } | |
| 692 | |
| 693 ASSERT_TRUE(frame_generator.get() != nullptr); | |
| 694 frame_generator_capturer_.reset(new test::FrameGeneratorCapturer( | |
| 695 clock, &analyzer, frame_generator.release(), params.clip.fps)); | |
| 696 ASSERT_TRUE(frame_generator_capturer_->Init()); | |
| 697 | |
| 698 Start(); | |
| 699 | |
| 700 analyzer.Wait(); | |
| 701 | |
| 702 send_transport.StopSending(); | |
| 703 recv_transport.StopSending(); | |
| 704 | |
| 705 Stop(); | |
| 706 | |
| 707 DestroyStreams(); | |
| 708 } | |
| 709 | 26 |
| 710 TEST_F(FullStackTest, ParisQcifWithoutPacketLoss) { | 27 TEST_F(FullStackTest, ParisQcifWithoutPacketLoss) { |
| 711 FullStackTestParams paris_qcif = {"net_delay_0_0_plr_0", | 28 VideoQualityTest::Params paris_qcif = { |
| 712 {"paris_qcif", 176, 144, 30}, | 29 {176, 144, 30, 300000, 300000, 300000, "VP8", 1}, |
| 713 ContentMode::kRealTimeVideo, | 30 {"paris_qcif"}, |
| 714 300000, | 31 {}, |
| 715 300000, | 32 {"net_delay_0_0_plr_0", 36.0, 0.96, kFullStackTestDurationSecs}}; |
| 716 300000, | |
| 717 36.0, | |
| 718 0.96, | |
| 719 kFullStackTestDurationSecs, | |
| 720 "VP8"}; | |
| 721 RunTest(paris_qcif); | 33 RunTest(paris_qcif); |
| 722 } | 34 } |
| 723 | 35 |
| 724 TEST_F(FullStackTest, ForemanCifWithoutPacketLoss) { | 36 TEST_F(FullStackTest, ForemanCifWithoutPacketLoss) { |
| 725 // TODO(pbos): Decide on psnr/ssim thresholds for foreman_cif. | 37 // TODO(pbos): Decide on psnr/ssim thresholds for foreman_cif. |
| 726 FullStackTestParams foreman_cif = {"foreman_cif_net_delay_0_0_plr_0", | 38 VideoQualityTest::Params foreman_cif = { |
| 727 {"foreman_cif", 352, 288, 30}, | 39 {352, 288, 30, 700000, 700000, 700000, "VP8", 1}, |
| 728 ContentMode::kRealTimeVideo, | 40 {"foreman_cif"}, |
| 729 700000, | 41 {}, |
| 730 700000, | 42 {"foreman_cif_net_delay_0_0_plr_0", 0.0, 0.0, kFullStackTestDurationSecs} |
| 731 700000, | 43 }; |
| 732 0.0, | |
| 733 0.0, | |
| 734 kFullStackTestDurationSecs, | |
| 735 "VP8"}; | |
| 736 RunTest(foreman_cif); | 44 RunTest(foreman_cif); |
| 737 } | 45 } |
| 738 | 46 |
| 739 TEST_F(FullStackTest, ForemanCifPlr5) { | 47 TEST_F(FullStackTest, ForemanCifPlr5) { |
| 740 FullStackTestParams foreman_cif = {"foreman_cif_delay_50_0_plr_5", | 48 VideoQualityTest::Params foreman_cif = { |
| 741 {"foreman_cif", 352, 288, 30}, | 49 {352, 288, 30, 30000, 500000, 2000000, "VP8", 1}, |
| 742 ContentMode::kRealTimeVideo, | 50 {"foreman_cif"}, |
| 743 30000, | 51 {}, |
| 744 500000, | 52 {"foreman_cif_delay_50_0_plr_5", 0.0, 0.0, kFullStackTestDurationSecs}}; |
| 745 2000000, | 53 foreman_cif.pipe.loss_percent = 5; |
| 746 0.0, | 54 foreman_cif.pipe.queue_delay_ms = 50; |
| 747 0.0, | |
| 748 kFullStackTestDurationSecs, | |
| 749 "VP8"}; | |
| 750 foreman_cif.link.loss_percent = 5; | |
| 751 foreman_cif.link.queue_delay_ms = 50; | |
| 752 RunTest(foreman_cif); | 55 RunTest(foreman_cif); |
| 753 } | 56 } |
| 754 | 57 |
| 755 TEST_F(FullStackTest, ForemanCif500kbps) { | 58 TEST_F(FullStackTest, ForemanCif500kbps) { |
| 756 FullStackTestParams foreman_cif = {"foreman_cif_500kbps", | 59 VideoQualityTest::Params foreman_cif = { |
| 757 {"foreman_cif", 352, 288, 30}, | 60 {352, 288, 30, 30000, 500000, 2000000, "VP8", 1}, |
| 758 ContentMode::kRealTimeVideo, | 61 {"foreman_cif"}, |
| 759 30000, | 62 {}, |
| 760 500000, | 63 {"foreman_cif_500kbps", 0.0, 0.0, kFullStackTestDurationSecs}}; |
| 761 2000000, | 64 foreman_cif.pipe.queue_length_packets = 0; |
| 762 0.0, | 65 foreman_cif.pipe.queue_delay_ms = 0; |
| 763 0.0, | 66 foreman_cif.pipe.link_capacity_kbps = 500; |
| 764 kFullStackTestDurationSecs, | |
| 765 "VP8"}; | |
| 766 foreman_cif.link.queue_length_packets = 0; | |
| 767 foreman_cif.link.queue_delay_ms = 0; | |
| 768 foreman_cif.link.link_capacity_kbps = 500; | |
| 769 RunTest(foreman_cif); | 67 RunTest(foreman_cif); |
| 770 } | 68 } |
| 771 | 69 |
| 772 TEST_F(FullStackTest, ForemanCif500kbpsLimitedQueue) { | 70 TEST_F(FullStackTest, ForemanCif500kbpsLimitedQueue) { |
| 773 FullStackTestParams foreman_cif = {"foreman_cif_500kbps_32pkts_queue", | 71 VideoQualityTest::Params foreman_cif = { |
| 774 {"foreman_cif", 352, 288, 30}, | 72 {352, 288, 30, 30000, 500000, 2000000, "VP8", 1}, |
| 775 ContentMode::kRealTimeVideo, | 73 {"foreman_cif"}, |
| 776 30000, | 74 {}, |
| 777 500000, | 75 {"foreman_cif_500kbps_32pkts_queue", 0.0, 0.0, kFullStackTestDurationSecs} |
| 778 2000000, | 76 }; |
| 779 0.0, | 77 foreman_cif.pipe.queue_length_packets = 32; |
| 780 0.0, | 78 foreman_cif.pipe.queue_delay_ms = 0; |
| 781 kFullStackTestDurationSecs, | 79 foreman_cif.pipe.link_capacity_kbps = 500; |
| 782 "VP8"}; | |
| 783 foreman_cif.link.queue_length_packets = 32; | |
| 784 foreman_cif.link.queue_delay_ms = 0; | |
| 785 foreman_cif.link.link_capacity_kbps = 500; | |
| 786 RunTest(foreman_cif); | 80 RunTest(foreman_cif); |
| 787 } | 81 } |
| 788 | 82 |
| 789 TEST_F(FullStackTest, ForemanCif500kbps100ms) { | 83 TEST_F(FullStackTest, ForemanCif500kbps100ms) { |
| 790 FullStackTestParams foreman_cif = {"foreman_cif_500kbps_100ms", | 84 VideoQualityTest::Params foreman_cif = { |
| 791 {"foreman_cif", 352, 288, 30}, | 85 {352, 288, 30, 30000, 500000, 2000000, "VP8", 1}, |
| 792 ContentMode::kRealTimeVideo, | 86 {"foreman_cif"}, |
| 793 30000, | 87 {}, |
| 794 500000, | 88 {"foreman_cif_500kbps_100ms", 0.0, 0.0, kFullStackTestDurationSecs}}; |
| 795 2000000, | 89 foreman_cif.pipe.queue_length_packets = 0; |
| 796 0.0, | 90 foreman_cif.pipe.queue_delay_ms = 100; |
| 797 0.0, | 91 foreman_cif.pipe.link_capacity_kbps = 500; |
| 798 kFullStackTestDurationSecs, | |
| 799 "VP8"}; | |
| 800 foreman_cif.link.queue_length_packets = 0; | |
| 801 foreman_cif.link.queue_delay_ms = 100; | |
| 802 foreman_cif.link.link_capacity_kbps = 500; | |
| 803 RunTest(foreman_cif); | 92 RunTest(foreman_cif); |
| 804 } | 93 } |
| 805 | 94 |
| 806 TEST_F(FullStackTest, ForemanCif500kbps100msLimitedQueue) { | 95 TEST_F(FullStackTest, ForemanCif500kbps100msLimitedQueue) { |
| 807 FullStackTestParams foreman_cif = {"foreman_cif_500kbps_100ms_32pkts_queue", | 96 VideoQualityTest::Params foreman_cif = { |
| 808 {"foreman_cif", 352, 288, 30}, | 97 {352, 288, 30, 30000, 500000, 2000000, "VP8", 1}, |
| 809 ContentMode::kRealTimeVideo, | 98 {"foreman_cif"}, |
| 810 30000, | 99 {}, |
| 811 500000, | 100 {"foreman_cif_500kbps_100ms_32pkts_queue", 0.0, 0.0, |
| 812 2000000, | 101 kFullStackTestDurationSecs}}; |
| 813 0.0, | 102 foreman_cif.pipe.queue_length_packets = 32; |
| 814 0.0, | 103 foreman_cif.pipe.queue_delay_ms = 100; |
| 815 kFullStackTestDurationSecs, | 104 foreman_cif.pipe.link_capacity_kbps = 500; |
| 816 "VP8"}; | |
| 817 foreman_cif.link.queue_length_packets = 32; | |
| 818 foreman_cif.link.queue_delay_ms = 100; | |
| 819 foreman_cif.link.link_capacity_kbps = 500; | |
| 820 RunTest(foreman_cif); | 105 RunTest(foreman_cif); |
| 821 } | 106 } |
| 822 | 107 |
| 823 TEST_F(FullStackTest, ForemanCif1000kbps100msLimitedQueue) { | 108 TEST_F(FullStackTest, ForemanCif1000kbps100msLimitedQueue) { |
| 824 FullStackTestParams foreman_cif = {"foreman_cif_1000kbps_100ms_32pkts_queue", | 109 VideoQualityTest::Params foreman_cif = { |
| 825 {"foreman_cif", 352, 288, 30}, | 110 {352, 288, 30, 30000, 500000, 2000000, "VP8", 1}, |
| 826 ContentMode::kRealTimeVideo, | 111 {"foreman_cif"}, |
| 827 30000, | 112 {}, |
| 828 2000000, | 113 {"foreman_cif_1000kbps_100ms_32pkts_queue", 0.0, 0.0, |
| 829 2000000, | 114 kFullStackTestDurationSecs}}; |
| 830 0.0, | 115 foreman_cif.pipe.queue_length_packets = 32; |
| 831 0.0, | 116 foreman_cif.pipe.queue_delay_ms = 100; |
| 832 kFullStackTestDurationSecs, | 117 foreman_cif.pipe.link_capacity_kbps = 1000; |
| 833 "VP8"}; | |
| 834 foreman_cif.link.queue_length_packets = 32; | |
| 835 foreman_cif.link.queue_delay_ms = 100; | |
| 836 foreman_cif.link.link_capacity_kbps = 1000; | |
| 837 RunTest(foreman_cif); | 118 RunTest(foreman_cif); |
| 838 } | 119 } |
| 839 | 120 |
| 840 // Temporarily disabled on Android due to low test timeouts. | 121 // Temporarily disabled on Android due to low test timeouts. |
| 841 // https://code.google.com/p/chromium/issues/detail?id=513170 | 122 // https://code.google.com/p/chromium/issues/detail?id=513170 |
| 842 #include "webrtc/test/testsupport/gtest_disable.h" | 123 #include "webrtc/test/testsupport/gtest_disable.h" |
| 843 TEST_F(FullStackTest, DISABLED_ON_ANDROID(ScreenshareSlidesVP8_2TL)) { | 124 TEST_F(FullStackTest, DISABLED_ON_ANDROID(ScreenshareSlidesVP8_2TL)) { |
| 844 FullStackTestParams screenshare_params = { | 125 VideoQualityTest::Params screenshare = { |
| 845 "screenshare_slides", | 126 {1850, 1110, 5, 50000, 200000, 2000000, "VP8", 2, 400000}, |
| 846 {"screenshare_slides", 1850, 1110, 5}, | 127 {}, // Video-specific. |
| 847 ContentMode::kScreensharingStaticImage, | 128 {true, 10}, // Screenshare-specific. |
| 848 50000, | 129 {"screenshare_slides", 0.0, 0.0, kFullStackTestDurationSecs}}; |
| 849 200000, | 130 RunTest(screenshare); |
| 850 2000000, | |
| 851 0.0, | |
| 852 0.0, | |
| 853 kFullStackTestDurationSecs, | |
| 854 "VP8"}; | |
| 855 RunTest(screenshare_params); | |
| 856 } | 131 } |
| 857 | 132 |
| 858 TEST_F(FullStackTest, DISABLED_ON_ANDROID(ScreenshareSlidesVP8_2TL_Scroll)) { | 133 TEST_F(FullStackTest, DISABLED_ON_ANDROID(ScreenshareSlidesVP8_2TL_Scroll)) { |
| 859 FullStackTestParams screenshare_params = { | 134 VideoQualityTest::Params config = { |
| 860 "screenshare_slides_scrolling", | 135 {1850, 1110 / 2, 5, 50000, 200000, 2000000, "VP8", 2, 400000}, |
| 861 // Crop height by two, scrolling vertically only. | 136 {}, |
| 862 {"screenshare_slides_scrolling", 1850, 1110 / 2, 5}, | 137 {true, 10, 2}, |
| 863 ContentMode::kScreensharingScrollingImage, | 138 {"screenshare_slides_scrolling", 0.0, 0.0, kFullStackTestDurationSecs}}; |
| 864 50000, | 139 RunTest(config); |
| 865 200000, | |
| 866 2000000, | |
| 867 0.0, | |
| 868 0.0, | |
| 869 kFullStackTestDurationSecs, | |
| 870 "VP8"}; | |
| 871 RunTest(screenshare_params); | |
| 872 } | 140 } |
| 873 | 141 |
| 874 // Disabled on Android along with VP8 screenshare above. | 142 // Disabled on Android along with VP8 screenshare above. |
| 875 TEST_F(FullStackTest, DISABLED_ON_ANDROID(ScreenshareSlidesVP9_2TL)) { | 143 TEST_F(FullStackTest, DISABLED_ON_ANDROID(ScreenshareSlidesVP9_2TL)) { |
| 876 FullStackTestParams screenshare_params = { | 144 VideoQualityTest::Params screenshare = { |
| 877 "screenshare_slides_vp9_2tl", | 145 {1850, 1110, 5, 50000, 200000, 2000000, "VP9", 2, 400000}, |
| 878 {"screenshare_slides", 1850, 1110, 5}, | 146 {}, |
| 879 ContentMode::kScreensharingStaticImage, | 147 {true, 10}, |
| 880 50000, | 148 {"screenshare_slides_vp9_2tl", 0.0, 0.0, kFullStackTestDurationSecs}}; |
| 881 200000, | 149 RunTest(screenshare); |
| 882 2000000, | |
| 883 0.0, | |
| 884 0.0, | |
| 885 kFullStackTestDurationSecs, | |
| 886 "VP9"}; | |
| 887 RunTest(screenshare_params); | |
| 888 } | 150 } |
| 889 } // namespace webrtc | 151 } // namespace webrtc |
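
Reviewer note: the new tests above configure everything through positional brace initializers on `VideoQualityTest::Params`, which is hard to read at a glance in the diff. The sketch below annotates one of them (the `ForemanCifPlr5` case) with the field layout implied by how the old `FullStackTestParams` values were carried over; the member names and struct grouping are assumptions inferred from this diff, not quoted from `webrtc/video/video_quality_test.h`, so treat it as an illustrative mapping rather than the authoritative definition.

```cpp
#include "webrtc/video/video_quality_test.h"

namespace webrtc {

// Member names and grouping below are assumptions inferred from this diff;
// consult video_quality_test.h for the authoritative struct definition.
VideoQualityTest::Params MakeForemanCifPlr5Params() {
  static const int kFullStackTestDurationSecs = 60;
  VideoQualityTest::Params params = {
      // Common stream settings: width, height, fps, min/target/max bitrate
      // (bps), codec name, number of temporal layers.
      {352, 288, 30, 30000, 500000, 2000000, "VP8", 1},
      // Video-specific: clip name, resolved via test::ResourcePath(name, "yuv").
      {"foreman_cif"},
      // Screenshare-specific: left empty for real-time video content.
      {},
      // Analyzer: test label, avg PSNR threshold, avg SSIM threshold,
      // test duration in seconds.
      {"foreman_cif_delay_50_0_plr_5", 0.0, 0.0, kFullStackTestDurationSecs}};
  // Network conditions are no longer part of the positional initializer; they
  // are set afterwards on the pipe member (presumably a FakeNetworkPipe::Config),
  // exactly as the new tests in the diff do.
  params.pipe.loss_percent = 5;
  params.pipe.queue_delay_ms = 50;
  return params;
}

}  // namespace webrtc
```

This mirrors the overall shape of the change: the per-test `VideoAnalyzer` and stream setup in the old file move into `VideoQualityTest::RunWithAnalyzer()`, and each test shrinks to building a `Params` value plus any link/pipe overrides.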