| Index: webrtc/video/video_quality_test.cc
| diff --git a/webrtc/video/full_stack.cc b/webrtc/video/video_quality_test.cc
| similarity index 60%
| copy from webrtc/video/full_stack.cc
| copy to webrtc/video/video_quality_test.cc
| index 3fb1db66a5eb740c71009470984adb8a1c686cdc..d440858155a9b4ebdc4b829454e1f5ec53392e54 100644
| --- a/webrtc/video/full_stack.cc
| +++ b/webrtc/video/video_quality_test.cc
| @@ -1,5 +1,5 @@
| /*
| - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
| + * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
| *
| * Use of this source code is governed by a BSD-style license
| * that can be found in the LICENSE file in the root of the source
| @@ -9,36 +9,32 @@
| */
| #include <stdio.h>
|
| +#include <algorithm>
| #include <deque>
| #include <map>
| +#include <vector>
|
| #include "testing/gtest/include/gtest/gtest.h"
|
| +#include "webrtc/base/checks.h"
| #include "webrtc/base/format_macros.h"
| #include "webrtc/base/scoped_ptr.h"
| -#include "webrtc/base/thread_annotations.h"
| #include "webrtc/call.h"
| #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
| -#include "webrtc/frame_callback.h"
| #include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
| -#include "webrtc/system_wrappers/interface/clock.h"
| #include "webrtc/system_wrappers/interface/cpu_info.h"
| -#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
| -#include "webrtc/system_wrappers/interface/event_wrapper.h"
| -#include "webrtc/system_wrappers/interface/sleep.h"
| -#include "webrtc/test/encoder_settings.h"
| -#include "webrtc/test/fake_encoder.h"
| -#include "webrtc/test/frame_generator.h"
| -#include "webrtc/test/frame_generator_capturer.h"
| +#include "webrtc/test/layer_filtering_transport.h"
| +#include "webrtc/test/run_loop.h"
| #include "webrtc/test/statistics.h"
| #include "webrtc/test/testsupport/fileutils.h"
| -#include "webrtc/typedefs.h"
| -#include "webrtc/video/full_stack.h"
| +#include "webrtc/test/video_renderer.h"
| +#include "webrtc/video/video_quality_test.h"
|
| namespace webrtc {
|
| -static const int kFullStackTestDurationSecs = 60;
| static const int kSendStatsPollingIntervalMs = 1000;
| +static const int kPayloadTypeVP8 = 123;
| +static const int kPayloadTypeVP9 = 124;
|
| class VideoAnalyzer : public PacketReceiver,
| public newapi::Transport,
| @@ -48,17 +44,17 @@ class VideoAnalyzer : public PacketReceiver,
| public:
| VideoAnalyzer(VideoCaptureInput* input,
| Transport* transport,
| - const char* test_label,
| + const std::string& test_label,
| double avg_psnr_threshold,
| double avg_ssim_threshold,
| int duration_frames,
| - const std::string& graph_data_output_filename)
| + FILE* graph_data_output_file)
| : input_(input),
| transport_(transport),
| receiver_(nullptr),
| send_stream_(nullptr),
| test_label_(test_label),
| - graph_data_output_filename_(graph_data_output_filename),
| + graph_data_output_file_(graph_data_output_file),
| frames_to_process_(duration_frames),
| frames_recorded_(0),
| frames_processed_(0),
| @@ -148,8 +144,7 @@ class VideoAnalyzer : public PacketReceiver,
| {
| rtc::CritScope lock(&crit_);
| if (rtp_timestamp_delta_ == 0) {
| - rtp_timestamp_delta_ =
| - header.timestamp - first_send_frame_.timestamp();
| + rtp_timestamp_delta_ = header.timestamp - first_send_frame_.timestamp();
| first_send_frame_.Reset();
| }
| uint32_t timestamp = header.timestamp - rtp_timestamp_delta_;
| @@ -207,7 +202,7 @@ class VideoAnalyzer : public PacketReceiver,
| int last_frames_processed = -1;
| EventTypeWrapper eventType;
| int iteration = 0;
| - while ((eventType = done_->Wait(FullStackTest::kDefaultTimeoutMs)) !=
| + while ((eventType = done_->Wait(VideoQualityTest::kDefaultTimeoutMs)) !=
| kEventSignaled) {
| int frames_processed;
| {
| @@ -386,7 +381,7 @@ class VideoAnalyzer : public PacketReceiver,
|
| if (FrameProcessed()) {
| PrintResults();
| - if (!graph_data_output_filename_.empty())
| + if (graph_data_output_file_)
| PrintSamplesToFile();
| done_->Set();
| comparison_available_event_->Set();
| @@ -440,7 +435,7 @@ class VideoAnalyzer : public PacketReceiver,
| PrintResult("psnr", psnr_, " dB");
| PrintResult("ssim", ssim_, "");
| PrintResult("sender_time", sender_time_, " ms");
| - printf("RESULT dropped_frames: %s = %d frames\n", test_label_,
| + printf("RESULT dropped_frames: %s = %d frames\n", test_label_.c_str(),
| dropped_frames_);
| PrintResult("receiver_time", receiver_time_, " ms");
| PrintResult("total_delay_incl_network", end_to_end_, " ms");
| @@ -463,11 +458,11 @@ class VideoAnalyzer : public PacketReceiver,
| int64_t input_time_ms = comparison.reference.ntp_time_ms();
|
| rtc::CritScope crit(&comparison_lock_);
| - if (!graph_data_output_filename_.empty()) {
| - samples_.push_back(Sample(
| - comparison.dropped, input_time_ms, comparison.send_time_ms,
| - comparison.recv_time_ms, comparison.encoded_frame_size, psnr, ssim,
| - comparison.render_time_ms));
| + if (graph_data_output_file_) {
| + samples_.push_back(
| + Sample(comparison.dropped, input_time_ms, comparison.send_time_ms,
| + comparison.recv_time_ms, comparison.encoded_frame_size, psnr,
| + ssim, comparison.render_time_ms));
| }
| psnr_.AddSample(psnr);
| ssim_.AddSample(ssim);
| @@ -492,23 +487,21 @@ class VideoAnalyzer : public PacketReceiver,
| const char* unit) {
| printf("RESULT %s: %s = {%f, %f}%s\n",
| result_type,
| - test_label_,
| + test_label_.c_str(),
| stats.Mean(),
| stats.StandardDeviation(),
| unit);
| }
|
| void PrintSamplesToFile(void) {
| - FILE* out = fopen(graph_data_output_filename_.c_str(), "w");
| - RTC_CHECK(out != nullptr) << "Couldn't open file: "
| - << graph_data_output_filename_;
| -
| + FILE* out = graph_data_output_file_;
| rtc::CritScope crit(&comparison_lock_);
| std::sort(samples_.begin(), samples_.end(),
| - [](const Sample& A, const Sample& B)
| - -> bool { return A.input_time_ms < B.input_time_ms; });
| + [](const Sample& A, const Sample& B) -> bool {
| + return A.input_time_ms < B.input_time_ms;
| + });
|
| - fprintf(out, "%s\n", test_label_);
| + fprintf(out, "%s\n", test_label_.c_str());
| fprintf(out, "%" PRIuS "\n", samples_.size());
| fprintf(out,
| "dropped "
| @@ -525,11 +518,10 @@ class VideoAnalyzer : public PacketReceiver,
| sample.encoded_frame_size, sample.psnr, sample.ssim,
| sample.render_time_ms);
| }
| - fclose(out);
| }
|
| - const char* const test_label_;
| - std::string graph_data_output_filename_;
| + const std::string test_label_;
| + FILE* const graph_data_output_file_;
| std::vector<Sample> samples_ GUARDED_BY(comparison_lock_);
| test::Statistics sender_time_ GUARDED_BY(comparison_lock_);
| test::Statistics receiver_time_ GUARDED_BY(comparison_lock_);
| @@ -552,7 +544,6 @@ class VideoAnalyzer : public PacketReceiver,
|
| rtc::CriticalSection crit_;
| std::deque<VideoFrame> frames_ GUARDED_BY(crit_);
| - std::deque<VideoSendStream::Stats> send_stats_ GUARDED_BY(crit_);
| VideoFrame last_rendered_frame_ GUARDED_BY(crit_);
| std::map<uint32_t, int64_t> send_times_ GUARDED_BY(crit_);
| std::map<uint32_t, int64_t> recv_times_ GUARDED_BY(crit_);
| @@ -569,131 +560,190 @@ class VideoAnalyzer : public PacketReceiver,
| const rtc::scoped_ptr<EventWrapper> done_;
| };
|
| -void FullStackTest::RunTest(const FullStackTestParams& params) {
| - // TODO(ivica): Add num_temporal_layers as a param.
| - unsigned char num_temporal_layers =
| - params.graph_data_output_filename.empty() ? 2 : 1;
| +VideoQualityTest::VideoQualityTest() : clock_(Clock::GetRealTimeClock()) {}
|
| - test::DirectTransport send_transport(params.link);
| - test::DirectTransport recv_transport(params.link);
| - VideoAnalyzer analyzer(nullptr, &send_transport, params.test_label,
| - params.avg_psnr_threshold, params.avg_ssim_threshold,
| - params.test_durations_secs * params.clip.fps,
| - params.graph_data_output_filename);
| +void VideoQualityTest::ValidateParams(const Params& params) {
| + RTC_CHECK_GE(params.common.max_bitrate_bps, params.common.target_bitrate_bps);
| + RTC_CHECK_GE(params.common.target_bitrate_bps, params.common.min_bitrate_bps);
| + RTC_CHECK_LT(params.common.tl_discard_threshold,
| + params.common.num_temporal_layers);
| +}
|
| - CreateCalls(Call::Config(), Call::Config());
| +void VideoQualityTest::TestBody() {}
|
| - analyzer.SetReceiver(receiver_call_->Receiver());
| - send_transport.SetReceiver(&analyzer);
| - recv_transport.SetReceiver(sender_call_->Receiver());
| +void VideoQualityTest::SetupFullStack(const Params& params,
| + newapi::Transport* send_transport,
| + newapi::Transport* recv_transport) {
| + if (params.logs)
| + trace_to_stderr_.reset(new test::TraceToStderr);
| +
| + CreateSendConfig(1, send_transport);
|
| - CreateSendConfig(1, &analyzer);
| -
| - rtc::scoped_ptr<VideoEncoder> encoder;
| - if (params.codec == "VP8") {
| - encoder =
| - rtc::scoped_ptr<VideoEncoder>(VideoEncoder::Create(VideoEncoder::kVp8));
| - send_config_.encoder_settings.encoder = encoder.get();
| - send_config_.encoder_settings.payload_name = "VP8";
| - } else if (params.codec == "VP9") {
| - encoder =
| - rtc::scoped_ptr<VideoEncoder>(VideoEncoder::Create(VideoEncoder::kVp9));
| - send_config_.encoder_settings.encoder = encoder.get();
| - send_config_.encoder_settings.payload_name = "VP9";
| + int payload_type;
| + if (params.common.codec == "VP8") {
| + encoder_.reset(VideoEncoder::Create(VideoEncoder::kVp8));
| + payload_type = kPayloadTypeVP8;
| + } else if (params.common.codec == "VP9") {
| + encoder_.reset(VideoEncoder::Create(VideoEncoder::kVp9));
| + payload_type = kPayloadTypeVP9;
| } else {
| RTC_NOTREACHED() << "Codec not supported!";
| return;
| }
| - send_config_.encoder_settings.payload_type = 124;
| + send_config_.encoder_settings.encoder = encoder_.get();
| + send_config_.encoder_settings.payload_name = params.common.codec;
| + send_config_.encoder_settings.payload_type = payload_type;
|
| send_config_.rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
| send_config_.rtp.rtx.ssrcs.push_back(kSendRtxSsrcs[0]);
| send_config_.rtp.rtx.payload_type = kSendRtxPayloadType;
|
| + // Automatically fill out streams[0] with params.
| VideoStream* stream = &encoder_config_.streams[0];
| - stream->width = params.clip.width;
| - stream->height = params.clip.height;
| - stream->min_bitrate_bps = params.min_bitrate_bps;
| - stream->target_bitrate_bps = params.target_bitrate_bps;
| - stream->max_bitrate_bps = params.max_bitrate_bps;
| - stream->max_framerate = params.clip.fps;
| -
| - VideoCodecVP8 vp8_settings;
| - VideoCodecVP9 vp9_settings;
| - if (params.mode == ContentMode::kScreensharingStaticImage ||
| - params.mode == ContentMode::kScreensharingScrollingImage) {
| - encoder_config_.content_type = VideoEncoderConfig::ContentType::kScreen;
| - encoder_config_.min_transmit_bitrate_bps = 400 * 1000;
| - if (params.codec == "VP8") {
| - vp8_settings = VideoEncoder::GetDefaultVp8Settings();
| - vp8_settings.denoisingOn = false;
| - vp8_settings.frameDroppingOn = false;
| - vp8_settings.numberOfTemporalLayers = num_temporal_layers;
| - encoder_config_.encoder_specific_settings = &vp8_settings;
| - } else if (params.codec == "VP9") {
| - vp9_settings = VideoEncoder::GetDefaultVp9Settings();
| - vp9_settings.denoisingOn = false;
| - vp9_settings.frameDroppingOn = false;
| - vp9_settings.numberOfTemporalLayers = num_temporal_layers;
| - encoder_config_.encoder_specific_settings = &vp9_settings;
| - }
| -
| - stream->temporal_layer_thresholds_bps.clear();
| - if (num_temporal_layers > 1) {
| - stream->temporal_layer_thresholds_bps.push_back(
| - stream->target_bitrate_bps);
| - }
| + stream->width = params.common.width;
| + stream->height = params.common.height;
| + stream->min_bitrate_bps = params.common.min_bitrate_bps;
| + stream->target_bitrate_bps = params.common.target_bitrate_bps;
| + stream->max_bitrate_bps = params.common.max_bitrate_bps;
| + stream->max_framerate = static_cast<int>(params.common.fps);
| +
| + stream->temporal_layer_thresholds_bps.clear();
| + if (params.common.num_temporal_layers > 1) {
| + stream->temporal_layer_thresholds_bps.push_back(stream->target_bitrate_bps);
| }
|
| - CreateMatchingReceiveConfigs(&recv_transport);
| - receive_configs_[0].renderer = &analyzer;
| + CreateMatchingReceiveConfigs(recv_transport);
| +
| receive_configs_[0].rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
| receive_configs_[0].rtp.rtx[kSendRtxPayloadType].ssrc = kSendRtxSsrcs[0];
| receive_configs_[0].rtp.rtx[kSendRtxPayloadType].payload_type =
| kSendRtxPayloadType;
|
| - for (auto& config : receive_configs_)
| - config.pre_decode_callback = &analyzer;
| - CreateStreams();
| - analyzer.input_ = send_stream_->Input();
| - analyzer.send_stream_ = send_stream_;
| + encoder_config_.min_transmit_bitrate_bps = params.common.min_transmit_bps;
| +}
| +
| +void VideoQualityTest::SetupScreenshare(const Params& params) {
| + RTC_CHECK(params.screenshare.enabled);
| +
| + // Fill out codec settings.
| + encoder_config_.content_type = VideoEncoderConfig::ContentType::kScreen;
| + if (params.common.codec == "VP8") {
| + codec_settings_.VP8 = VideoEncoder::GetDefaultVp8Settings();
| + codec_settings_.VP8.denoisingOn = false;
| + codec_settings_.VP8.frameDroppingOn = false;
| + codec_settings_.VP8.numberOfTemporalLayers =
| + static_cast<unsigned char>(params.common.num_temporal_layers);
| + encoder_config_.encoder_specific_settings = &codec_settings_.VP8;
| + } else if (params.common.codec == "VP9") {
| + codec_settings_.VP9 = VideoEncoder::GetDefaultVp9Settings();
| + codec_settings_.VP9.denoisingOn = false;
| + codec_settings_.VP9.frameDroppingOn = false;
| + codec_settings_.VP9.numberOfTemporalLayers =
| + static_cast<unsigned char>(params.common.num_temporal_layers);
| + encoder_config_.encoder_specific_settings = &codec_settings_.VP9;
| + }
|
| + // Setup frame generator.
| + const size_t kWidth = 1850;
| + const size_t kHeight = 1110;
| std::vector<std::string> slides;
| slides.push_back(test::ResourcePath("web_screenshot_1850_1110", "yuv"));
| slides.push_back(test::ResourcePath("presentation_1850_1110", "yuv"));
| slides.push_back(test::ResourcePath("photo_1850_1110", "yuv"));
| slides.push_back(test::ResourcePath("difficult_photo_1850_1110", "yuv"));
| - size_t kSlidesWidth = 1850;
| - size_t kSlidesHeight = 1110;
| -
| - Clock* clock = Clock::GetRealTimeClock();
| - rtc::scoped_ptr<test::FrameGenerator> frame_generator;
| -
| - switch (params.mode) {
| - case ContentMode::kRealTimeVideo:
| - frame_generator.reset(test::FrameGenerator::CreateFromYuvFile(
| - std::vector<std::string>(1,
| - test::ResourcePath(params.clip.name, "yuv")),
| - params.clip.width, params.clip.height, 1));
| - break;
| - case ContentMode::kScreensharingScrollingImage:
| - frame_generator.reset(
| - test::FrameGenerator::CreateScrollingInputFromYuvFiles(
| - clock, slides, kSlidesWidth, kSlidesHeight, params.clip.width,
| - params.clip.height, 2000,
| - 8000)); // Scroll for 2 seconds, then pause for 8.
| - break;
| - case ContentMode::kScreensharingStaticImage:
| - frame_generator.reset(test::FrameGenerator::CreateFromYuvFile(
| - slides, kSlidesWidth, kSlidesHeight,
| - 10 * params.clip.fps)); // Cycle image every 10 seconds.
| - break;
| +
| + if (params.screenshare.scroll_duration == 0) {
| + // Cycle image every slide_change_interval seconds.
| + frame_generator_.reset(test::FrameGenerator::CreateFromYuvFile(
| + slides, kWidth, kHeight,
| + params.screenshare.slide_change_interval * params.common.fps));
| + } else {
| + RTC_CHECK_LE(params.common.width, kWidth);
| + RTC_CHECK_LE(params.common.height, kHeight);
| + RTC_CHECK_GT(params.screenshare.slide_change_interval, 0);
| + const int kPauseDurationMs = (params.screenshare.slide_change_interval -
| + params.screenshare.scroll_duration) * 1000;
| + RTC_CHECK_LE(params.screenshare.scroll_duration,
| + params.screenshare.slide_change_interval);
| +
| + frame_generator_.reset(
| + test::FrameGenerator::CreateScrollingInputFromYuvFiles(
| + clock_, slides, kWidth, kHeight, params.common.width,
| + params.common.height, params.screenshare.scroll_duration * 1000,
| + kPauseDurationMs));
| + }
| +}
| +
| +void VideoQualityTest::CreateCapturer(const Params& params,
| + VideoCaptureInput* input) {
| + if (params.screenshare.enabled) {
| + frame_generator_capturer_.reset(new test::FrameGeneratorCapturer(
| + clock_, input, frame_generator_.release(), params.common.fps));
| + EXPECT_TRUE(frame_generator_capturer_->Init());
| + } else {
| + if (params.video.clip_name.empty()) {
| + frame_generator_capturer_.reset(test::FrameGeneratorCapturer::Create(
| + input, params.common.width, params.common.height, params.common.fps,
| + clock_));
| + EXPECT_TRUE(frame_generator_capturer_->Init());
| + } else {
| + frame_generator_capturer_.reset(
| + test::FrameGeneratorCapturer::CreateFromYuvFile(
| + input, test::ResourcePath(params.video.clip_name, "yuv"),
| + params.common.width, params.common.height, params.common.fps,
| + clock_));
| + ASSERT_TRUE(frame_generator_capturer_.get() != nullptr)
| + << "Could not create capturer for " << params.video.clip_name
| + << ".yuv. Is this resource file present?";
| + }
| + }
| +}
| +
| +void VideoQualityTest::RunWithAnalyzer(const Params& params) {
| + // TODO(ivica): Merge with RunWithRenderer and use a flag / argument to
| + // differentiate between the analyzer and the renderer case.
| + ValidateParams(params);
| +
| + FILE* graph_data_output_file = nullptr;
| + if (!params.analyzer.graph_data_output_filename.empty()) {
| + graph_data_output_file =
| + fopen(params.analyzer.graph_data_output_filename.c_str(), "w");
| + RTC_CHECK(graph_data_output_file != nullptr)
| + << "Can't open the file "
| + << params.analyzer.graph_data_output_filename << "!";
| }
|
| - ASSERT_TRUE(frame_generator.get() != nullptr);
| - frame_generator_capturer_.reset(new test::FrameGeneratorCapturer(
| - clock, &analyzer, frame_generator.release(), params.clip.fps));
| - ASSERT_TRUE(frame_generator_capturer_->Init());
| + test::LayerFilteringTransport send_transport(
| + params.pipe, kPayloadTypeVP8, kPayloadTypeVP9,
| + static_cast<uint8_t>(params.common.tl_discard_threshold), 0);
| + test::DirectTransport recv_transport(params.pipe);
| + VideoAnalyzer analyzer(
| + nullptr, &send_transport, params.analyzer.test_label,
| + params.analyzer.avg_psnr_threshold, params.analyzer.avg_ssim_threshold,
| + params.analyzer.test_durations_secs * params.common.fps,
| + graph_data_output_file);
| +
| + Call::Config call_config;
| + call_config.bitrate_config = params.common.call_bitrate_config;
| + CreateCalls(call_config, call_config);
| +
| + analyzer.SetReceiver(receiver_call_->Receiver());
| + send_transport.SetReceiver(&analyzer);
| + recv_transport.SetReceiver(sender_call_->Receiver());
| +
| + SetupFullStack(params, &analyzer, &recv_transport);
| + receive_configs_[0].renderer = &analyzer;
| + for (auto& config : receive_configs_)
| + config.pre_decode_callback = &analyzer;
| +
| + if (params.screenshare.enabled)
| + SetupScreenshare(params);
| +
| + CreateCapturer(params, &analyzer);
| +
| + CreateStreams();
| + analyzer.input_ = send_stream_->Input();
| + analyzer.send_stream_ = send_stream_;
|
| Start();
|
| @@ -705,185 +755,62 @@ void FullStackTest::RunTest(const FullStackTestParams& params) {
| Stop();
|
| DestroyStreams();
| -}
|
| -TEST_F(FullStackTest, ParisQcifWithoutPacketLoss) {
| - FullStackTestParams paris_qcif = {"net_delay_0_0_plr_0",
| - {"paris_qcif", 176, 144, 30},
| - ContentMode::kRealTimeVideo,
| - 300000,
| - 300000,
| - 300000,
| - 36.0,
| - 0.96,
| - kFullStackTestDurationSecs,
| - "VP8"};
| - RunTest(paris_qcif);
| + if (graph_data_output_file)
| + fclose(graph_data_output_file);
| }
|
| -TEST_F(FullStackTest, ForemanCifWithoutPacketLoss) {
| - // TODO(pbos): Decide on psnr/ssim thresholds for foreman_cif.
| - FullStackTestParams foreman_cif = {"foreman_cif_net_delay_0_0_plr_0",
| - {"foreman_cif", 352, 288, 30},
| - ContentMode::kRealTimeVideo,
| - 700000,
| - 700000,
| - 700000,
| - 0.0,
| - 0.0,
| - kFullStackTestDurationSecs,
| - "VP8"};
| - RunTest(foreman_cif);
| -}
| +void VideoQualityTest::RunWithVideoRenderer(const Params& params) {
| + ValidateParams(params);
|
| -TEST_F(FullStackTest, ForemanCifPlr5) {
| - FullStackTestParams foreman_cif = {"foreman_cif_delay_50_0_plr_5",
| - {"foreman_cif", 352, 288, 30},
| - ContentMode::kRealTimeVideo,
| - 30000,
| - 500000,
| - 2000000,
| - 0.0,
| - 0.0,
| - kFullStackTestDurationSecs,
| - "VP8"};
| - foreman_cif.link.loss_percent = 5;
| - foreman_cif.link.queue_delay_ms = 50;
| - RunTest(foreman_cif);
| -}
| + rtc::scoped_ptr<test::VideoRenderer> local_preview(
| + test::VideoRenderer::Create("Local Preview", params.common.width,
| + params.common.height));
| + rtc::scoped_ptr<test::VideoRenderer> loopback_video(
| + test::VideoRenderer::Create("Loopback Video", params.common.width,
| + params.common.height));
|
| -TEST_F(FullStackTest, ForemanCif500kbps) {
| - FullStackTestParams foreman_cif = {"foreman_cif_500kbps",
| - {"foreman_cif", 352, 288, 30},
| - ContentMode::kRealTimeVideo,
| - 30000,
| - 500000,
| - 2000000,
| - 0.0,
| - 0.0,
| - kFullStackTestDurationSecs,
| - "VP8"};
| - foreman_cif.link.queue_length_packets = 0;
| - foreman_cif.link.queue_delay_ms = 0;
| - foreman_cif.link.link_capacity_kbps = 500;
| - RunTest(foreman_cif);
| -}
| + // TODO(ivica): Remove bitrate_config and use the default Call::Config(), to
| + // match the full stack tests.
| + Call::Config call_config;
| + call_config.bitrate_config = params.common.call_bitrate_config;
| + rtc::scoped_ptr<Call> call(Call::Create(call_config));
|
| -TEST_F(FullStackTest, ForemanCif500kbpsLimitedQueue) {
| - FullStackTestParams foreman_cif = {"foreman_cif_500kbps_32pkts_queue",
| - {"foreman_cif", 352, 288, 30},
| - ContentMode::kRealTimeVideo,
| - 30000,
| - 500000,
| - 2000000,
| - 0.0,
| - 0.0,
| - kFullStackTestDurationSecs,
| - "VP8"};
| - foreman_cif.link.queue_length_packets = 32;
| - foreman_cif.link.queue_delay_ms = 0;
| - foreman_cif.link.link_capacity_kbps = 500;
| - RunTest(foreman_cif);
| -}
| + test::LayerFilteringTransport transport(
| + params.pipe, kPayloadTypeVP8, kPayloadTypeVP9,
| + static_cast<uint8_t>(params.common.tl_discard_threshold), 0);
| + // TODO(ivica): Use two calls to be able to merge with RunWithAnalyzer or at
| + // least share as much code as possible. That way this test would also match
| + // the full stack tests better.
| + transport.SetReceiver(call->Receiver());
|
| -TEST_F(FullStackTest, ForemanCif500kbps100ms) {
| - FullStackTestParams foreman_cif = {"foreman_cif_500kbps_100ms",
| - {"foreman_cif", 352, 288, 30},
| - ContentMode::kRealTimeVideo,
| - 30000,
| - 500000,
| - 2000000,
| - 0.0,
| - 0.0,
| - kFullStackTestDurationSecs,
| - "VP8"};
| - foreman_cif.link.queue_length_packets = 0;
| - foreman_cif.link.queue_delay_ms = 100;
| - foreman_cif.link.link_capacity_kbps = 500;
| - RunTest(foreman_cif);
| -}
| + SetupFullStack(params, &transport, &transport);
| + send_config_.local_renderer = local_preview.get();
| + receive_configs_[0].renderer = loopback_video.get();
|
| -TEST_F(FullStackTest, ForemanCif500kbps100msLimitedQueue) {
| - FullStackTestParams foreman_cif = {"foreman_cif_500kbps_100ms_32pkts_queue",
| - {"foreman_cif", 352, 288, 30},
| - ContentMode::kRealTimeVideo,
| - 30000,
| - 500000,
| - 2000000,
| - 0.0,
| - 0.0,
| - kFullStackTestDurationSecs,
| - "VP8"};
| - foreman_cif.link.queue_length_packets = 32;
| - foreman_cif.link.queue_delay_ms = 100;
| - foreman_cif.link.link_capacity_kbps = 500;
| - RunTest(foreman_cif);
| -}
| + if (params.screenshare.enabled)
| + SetupScreenshare(params);
|
| -TEST_F(FullStackTest, ForemanCif1000kbps100msLimitedQueue) {
| - FullStackTestParams foreman_cif = {"foreman_cif_1000kbps_100ms_32pkts_queue",
| - {"foreman_cif", 352, 288, 30},
| - ContentMode::kRealTimeVideo,
| - 30000,
| - 2000000,
| - 2000000,
| - 0.0,
| - 0.0,
| - kFullStackTestDurationSecs,
| - "VP8"};
| - foreman_cif.link.queue_length_packets = 32;
| - foreman_cif.link.queue_delay_ms = 100;
| - foreman_cif.link.link_capacity_kbps = 1000;
| - RunTest(foreman_cif);
| -}
| + send_stream_ = call->CreateVideoSendStream(send_config_, encoder_config_);
| + CreateCapturer(params, send_stream_->Input());
|
| -// Temporarily disabled on Android due to low test timeouts.
| -// https://code.google.com/p/chromium/issues/detail?id=513170
| -#include "webrtc/test/testsupport/gtest_disable.h"
| -TEST_F(FullStackTest, DISABLED_ON_ANDROID(ScreenshareSlidesVP8_2TL)) {
| - FullStackTestParams screenshare_params = {
| - "screenshare_slides",
| - {"screenshare_slides", 1850, 1110, 5},
| - ContentMode::kScreensharingStaticImage,
| - 50000,
| - 200000,
| - 2000000,
| - 0.0,
| - 0.0,
| - kFullStackTestDurationSecs,
| - "VP8"};
| - RunTest(screenshare_params);
| -}
| + VideoReceiveStream* receive_stream =
| + call->CreateVideoReceiveStream(receive_configs_[0]);
|
| -TEST_F(FullStackTest, DISABLED_ON_ANDROID(ScreenshareSlidesVP8_2TL_Scroll)) {
| - FullStackTestParams screenshare_params = {
| - "screenshare_slides_scrolling",
| - // Crop height by two, scrolling vertically only.
| - {"screenshare_slides_scrolling", 1850, 1110 / 2, 5},
| - ContentMode::kScreensharingScrollingImage,
| - 50000,
| - 200000,
| - 2000000,
| - 0.0,
| - 0.0,
| - kFullStackTestDurationSecs,
| - "VP8"};
| - RunTest(screenshare_params);
| -}
| + receive_stream->Start();
| + send_stream_->Start();
| + frame_generator_capturer_->Start();
| +
| + test::PressEnterToContinue();
|
| -// Disabled on Android along with VP8 screenshare above.
| -TEST_F(FullStackTest, DISABLED_ON_ANDROID(ScreenshareSlidesVP9_2TL)) {
| - FullStackTestParams screenshare_params = {
| - "screenshare_slides_vp9_2tl",
| - {"screenshare_slides", 1850, 1110, 5},
| - ContentMode::kScreensharingStaticImage,
| - 50000,
| - 200000,
| - 2000000,
| - 0.0,
| - 0.0,
| - kFullStackTestDurationSecs,
| - "VP9"};
| - RunTest(screenshare_params);
| + frame_generator_capturer_->Stop();
| + send_stream_->Stop();
| + receive_stream->Stop();
| +
| + call->DestroyVideoReceiveStream(receive_stream);
| + call->DestroyVideoSendStream(send_stream_);
| +
| + transport.StopSending();
| }
| +
| } // namespace webrtc
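For orientation, the sketch below shows how a test could drive the VideoQualityTest interface this change introduces. It is illustrative only: the Params member names are taken from their uses in the diff above (params.common.*, params.video.clip_name, params.screenshare.enabled, params.analyzer.*), the values mirror the removed ForemanCifWithoutPacketLoss full-stack test, and the FullStackTest fixture is assumed rather than shown here; the authoritative Params definition lives in webrtc/video/video_quality_test.h, which is not part of this diff.

// Hypothetical caller, for illustration only; not part of the patch above.
class FullStackTest : public VideoQualityTest {};  // assumed fixture

TEST_F(FullStackTest, ForemanCifWithoutPacketLoss) {
  VideoQualityTest::Params params;
  params.common.width = 352;               // foreman_cif resolution
  params.common.height = 288;
  params.common.fps = 30;
  params.common.min_bitrate_bps = 700000;  // max >= target >= min, as ValidateParams() checks
  params.common.target_bitrate_bps = 700000;
  params.common.max_bitrate_bps = 700000;
  params.common.codec = "VP8";
  params.common.num_temporal_layers = 1;
  params.common.tl_discard_threshold = 0;  // must stay below num_temporal_layers
  params.video.clip_name = "foreman_cif";  // resolved via test::ResourcePath(clip_name, "yuv")
  params.screenshare.enabled = false;      // real-time video path; SetupScreenshare() is skipped
  params.analyzer.test_label = "foreman_cif_net_delay_0_0_plr_0";
  params.analyzer.avg_psnr_threshold = 0.0;
  params.analyzer.avg_ssim_threshold = 0.0;
  params.analyzer.test_durations_secs = 60;
  RunWithAnalyzer(params);  // or RunWithVideoRenderer(params) for an on-screen loopback
}

RunWithAnalyzer() routes the send side through a LayerFilteringTransport into the VideoAnalyzer and checks the PSNR/SSIM thresholds, while RunWithVideoRenderer() loops a single Call back into local preview and loopback renderers and waits for Enter; both start from the same Params.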
|