| Index: webrtc/video/full_stack_before.cc
|
| diff --git a/webrtc/video/video_quality_test.cc b/webrtc/video/full_stack_before.cc
|
| similarity index 55%
|
| copy from webrtc/video/video_quality_test.cc
|
| copy to webrtc/video/full_stack_before.cc
|
| index c452e11895f082f30788a3d30503da1e81d92abc..9128c3e46e2011dc9b71019c07c3493edd83c0ff 100644
|
| --- a/webrtc/video/video_quality_test.cc
|
| +++ b/webrtc/video/full_stack_before.cc
|
| @@ -1,5 +1,5 @@
|
| /*
|
| - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
|
| + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
|
| *
|
| * Use of this source code is governed by a BSD-style license
|
| * that can be found in the LICENSE file in the root of the source
|
| @@ -9,54 +9,87 @@
|
| */
|
| #include <stdio.h>
|
|
|
| -#include <algorithm>
|
| #include <deque>
|
| #include <map>
|
| -#include <vector>
|
|
|
| #include "testing/gtest/include/gtest/gtest.h"
|
|
|
| -#include "webrtc/base/checks.h"
|
| #include "webrtc/base/format_macros.h"
|
| #include "webrtc/base/scoped_ptr.h"
|
| +#include "webrtc/base/thread_annotations.h"
|
| #include "webrtc/call.h"
|
| #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
|
| +#include "webrtc/frame_callback.h"
|
| #include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
|
| +#include "webrtc/system_wrappers/interface/clock.h"
|
| #include "webrtc/system_wrappers/interface/cpu_info.h"
|
| -#include "webrtc/test/layer_filtering_transport.h"
|
| -#include "webrtc/test/run_loop.h"
|
| +#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
|
| +#include "webrtc/system_wrappers/interface/event_wrapper.h"
|
| +#include "webrtc/system_wrappers/interface/sleep.h"
|
| +#include "webrtc/test/call_test.h"
|
| +#include "webrtc/test/direct_transport.h"
|
| +#include "webrtc/test/encoder_settings.h"
|
| +#include "webrtc/test/fake_encoder.h"
|
| +#include "webrtc/test/frame_generator.h"
|
| +#include "webrtc/test/frame_generator_capturer.h"
|
| #include "webrtc/test/statistics.h"
|
| #include "webrtc/test/testsupport/fileutils.h"
|
| -#include "webrtc/test/video_renderer.h"
|
| -#include "webrtc/video/video_quality_test.h"
|
| +#include "webrtc/typedefs.h"
|
|
|
| namespace webrtc {
|
|
|
| -static const int kTransportSeqExtensionId =
|
| - VideoQualityTest::kAbsSendTimeExtensionId + 1;
|
| +enum class ContentMode {
|
| + kRealTimeVideo,
|
| + kScreensharingStaticImage,
|
| + kScreensharingScrollingImage,
|
| +};
|
| +
|
| +struct FullStackTestBeforeParams {
|
| + const char* test_label;
|
| + struct {
|
| + const char* name;
|
| + size_t width, height;
|
| + int fps;
|
| + } clip;
|
| + ContentMode mode;
|
| + int min_bitrate_bps;
|
| + int target_bitrate_bps;
|
| + int max_bitrate_bps;
|
| + double avg_psnr_threshold;
|
| + double avg_ssim_threshold;
|
| + int test_durations_secs;
|
| + std::string codec;
|
| + FakeNetworkPipe::Config link;
|
| + std::string graph_data_output_filename;
|
| +};
|
| +
|
| +class FullStackTestBefore : public test::CallTest {
|
| + protected:
|
| + void RunTest(const FullStackTestBeforeParams& params);
|
| +};
|
| +
|
| +static const int kFullStackTestBeforeDurationSecs = 60;
|
| static const int kSendStatsPollingIntervalMs = 1000;
|
| -static const int kPayloadTypeVP8 = 123;
|
| -static const int kPayloadTypeVP9 = 124;
|
|
|
| -class VideoAnalyzer : public PacketReceiver,
|
| +class VideoAnalyzerBefore : public PacketReceiver,
|
| public Transport,
|
| public VideoRenderer,
|
| public VideoCaptureInput,
|
| public EncodedFrameObserver {
|
| public:
|
| - VideoAnalyzer(VideoCaptureInput* input,
|
| + VideoAnalyzerBefore(VideoCaptureInput* input,
|
| Transport* transport,
|
| - const std::string& test_label,
|
| + const char* test_label,
|
| double avg_psnr_threshold,
|
| double avg_ssim_threshold,
|
| int duration_frames,
|
| - FILE* graph_data_output_file)
|
| + const std::string& graph_data_output_filename)
|
| : input_(input),
|
| transport_(transport),
|
| receiver_(nullptr),
|
| send_stream_(nullptr),
|
| test_label_(test_label),
|
| - graph_data_output_file_(graph_data_output_file),
|
| + graph_data_output_filename_(graph_data_output_filename),
|
| frames_to_process_(duration_frames),
|
| frames_recorded_(0),
|
| frames_processed_(0),
|
| @@ -98,7 +131,7 @@ class VideoAnalyzer : public PacketReceiver,
|
| EXPECT_TRUE(stats_polling_thread_->Start());
|
| }
|
|
|
| - ~VideoAnalyzer() {
|
| + ~VideoAnalyzerBefore() {
|
| for (ThreadWrapper* thread : comparison_thread_pool_) {
|
| EXPECT_TRUE(thread->Stop());
|
| delete thread;
|
| @@ -148,7 +181,8 @@ class VideoAnalyzer : public PacketReceiver,
|
| {
|
| rtc::CritScope lock(&crit_);
|
| if (rtp_timestamp_delta_ == 0) {
|
| - rtp_timestamp_delta_ = header.timestamp - first_send_frame_.timestamp();
|
| + rtp_timestamp_delta_ =
|
| + header.timestamp - first_send_frame_.timestamp();
|
| first_send_frame_.Reset();
|
| }
|
| uint32_t timestamp = header.timestamp - rtp_timestamp_delta_;
|
| @@ -206,7 +240,7 @@ class VideoAnalyzer : public PacketReceiver,
|
| int last_frames_processed = -1;
|
| EventTypeWrapper eventType;
|
| int iteration = 0;
|
| - while ((eventType = done_->Wait(VideoQualityTest::kDefaultTimeoutMs)) !=
|
| + while ((eventType = done_->Wait(FullStackTestBefore::kDefaultTimeoutMs)) !=
|
| kEventSignaled) {
|
| int frames_processed;
|
| {
|
| @@ -332,7 +366,7 @@ class VideoAnalyzer : public PacketReceiver,
|
| }
|
|
|
| static bool PollStatsThread(void* obj) {
|
| - return static_cast<VideoAnalyzer*>(obj)->PollStats();
|
| + return static_cast<VideoAnalyzerBefore*>(obj)->PollStats();
|
| }
|
|
|
| bool PollStats() {
|
| @@ -359,7 +393,7 @@ class VideoAnalyzer : public PacketReceiver,
|
| }
|
|
|
| static bool FrameComparisonThread(void* obj) {
|
| - return static_cast<VideoAnalyzer*>(obj)->CompareFrames();
|
| + return static_cast<VideoAnalyzerBefore*>(obj)->CompareFrames();
|
| }
|
|
|
| bool CompareFrames() {
|
| @@ -385,7 +419,7 @@ class VideoAnalyzer : public PacketReceiver,
|
|
|
| if (FrameProcessed()) {
|
| PrintResults();
|
| - if (graph_data_output_file_)
|
| + if (!graph_data_output_filename_.empty())
|
| PrintSamplesToFile();
|
| done_->Set();
|
| comparison_available_event_->Set();
|
| @@ -439,7 +473,7 @@ class VideoAnalyzer : public PacketReceiver,
|
| PrintResult("psnr", psnr_, " dB");
|
| PrintResult("ssim", ssim_, "");
|
| PrintResult("sender_time", sender_time_, " ms");
|
| - printf("RESULT dropped_frames: %s = %d frames\n", test_label_.c_str(),
|
| + printf("RESULT dropped_frames: %s = %d frames\n", test_label_,
|
| dropped_frames_);
|
| PrintResult("receiver_time", receiver_time_, " ms");
|
| PrintResult("total_delay_incl_network", end_to_end_, " ms");
|
| @@ -462,11 +496,11 @@ class VideoAnalyzer : public PacketReceiver,
|
| int64_t input_time_ms = comparison.reference.ntp_time_ms();
|
|
|
| rtc::CritScope crit(&comparison_lock_);
|
| - if (graph_data_output_file_) {
|
| - samples_.push_back(
|
| - Sample(comparison.dropped, input_time_ms, comparison.send_time_ms,
|
| - comparison.recv_time_ms, comparison.encoded_frame_size, psnr,
|
| - ssim, comparison.render_time_ms));
|
| + if (!graph_data_output_filename_.empty()) {
|
| + samples_.push_back(Sample(
|
| + comparison.dropped, input_time_ms, comparison.send_time_ms,
|
| + comparison.recv_time_ms, comparison.encoded_frame_size, psnr, ssim,
|
| + comparison.render_time_ms));
|
| }
|
| psnr_.AddSample(psnr);
|
| ssim_.AddSample(ssim);
|
| @@ -491,21 +525,23 @@ class VideoAnalyzer : public PacketReceiver,
|
| const char* unit) {
|
| printf("RESULT %s: %s = {%f, %f}%s\n",
|
| result_type,
|
| - test_label_.c_str(),
|
| + test_label_,
|
| stats.Mean(),
|
| stats.StandardDeviation(),
|
| unit);
|
| }
|
|
|
| void PrintSamplesToFile(void) {
|
| - FILE* out = graph_data_output_file_;
|
| + FILE* out = fopen(graph_data_output_filename_.c_str(), "w");
|
| + RTC_CHECK(out != nullptr) << "Couldn't open file: "
|
| + << graph_data_output_filename_;
|
| +
|
| rtc::CritScope crit(&comparison_lock_);
|
| std::sort(samples_.begin(), samples_.end(),
|
| - [](const Sample& A, const Sample& B) -> bool {
|
| - return A.input_time_ms < B.input_time_ms;
|
| - });
|
| + [](const Sample& A, const Sample& B)
|
| + -> bool { return A.input_time_ms < B.input_time_ms; });
|
|
|
| - fprintf(out, "%s\n", test_label_.c_str());
|
| + fprintf(out, "%s\n", test_label_);
|
| fprintf(out, "%" PRIuS "\n", samples_.size());
|
| fprintf(out,
|
| "dropped "
|
| @@ -522,10 +558,11 @@ class VideoAnalyzer : public PacketReceiver,
|
| sample.encoded_frame_size, sample.psnr, sample.ssim,
|
| sample.render_time_ms);
|
| }
|
| + fclose(out);
|
| }
|
|
|
| - const std::string test_label_;
|
| - FILE* const graph_data_output_file_;
|
| + const char* const test_label_;
|
| + std::string graph_data_output_filename_;
|
| std::vector<Sample> samples_ GUARDED_BY(comparison_lock_);
|
| test::Statistics sender_time_ GUARDED_BY(comparison_lock_);
|
| test::Statistics receiver_time_ GUARDED_BY(comparison_lock_);
|
| @@ -548,6 +585,7 @@ class VideoAnalyzer : public PacketReceiver,
|
|
|
| rtc::CriticalSection crit_;
|
| std::deque<VideoFrame> frames_ GUARDED_BY(crit_);
|
| + std::deque<VideoSendStream::Stats> send_stats_ GUARDED_BY(crit_);
|
| VideoFrame last_rendered_frame_ GUARDED_BY(crit_);
|
| std::map<uint32_t, int64_t> send_times_ GUARDED_BY(crit_);
|
| std::map<uint32_t, int64_t> recv_times_ GUARDED_BY(crit_);
|
| @@ -564,278 +602,346 @@ class VideoAnalyzer : public PacketReceiver,
|
| const rtc::scoped_ptr<EventWrapper> done_;
|
| };
|
|
|
| -VideoQualityTest::VideoQualityTest() : clock_(Clock::GetRealTimeClock()) {}
|
| -
|
| -void VideoQualityTest::ValidateParams(const Params& params) {
|
| - RTC_CHECK_GE(params.common.max_bitrate_bps, params.common.target_bitrate_bps);
|
| - RTC_CHECK_GE(params.common.target_bitrate_bps, params.common.min_bitrate_bps);
|
| - RTC_CHECK_LT(params.common.tl_discard_threshold,
|
| - params.common.num_temporal_layers);
|
| -}
|
| +void FullStackTestBefore::RunTest(const FullStackTestBeforeParams& params) {
|
| + // TODO(ivica): Add num_temporal_layers as a param.
|
| + unsigned char num_temporal_layers =
|
| + params.graph_data_output_filename.empty() ? 2 : 1;
|
|
|
| -void VideoQualityTest::TestBody() {}
|
| + test::DirectTransport send_transport(params.link);
|
| + test::DirectTransport recv_transport(params.link);
|
| + VideoAnalyzerBefore analyzer(nullptr, &send_transport, params.test_label,
|
| + params.avg_psnr_threshold, params.avg_ssim_threshold,
|
| + params.test_durations_secs * params.clip.fps,
|
| + params.graph_data_output_filename);
|
|
|
| -void VideoQualityTest::SetupFullStack(const Params& params,
|
| - Transport* send_transport,
|
| - Transport* recv_transport) {
|
| - if (params.logs)
|
| - trace_to_stderr_.reset(new test::TraceToStderr);
|
| + CreateCalls(Call::Config(), Call::Config());
|
|
|
| - CreateSendConfig(1, send_transport);
|
| + analyzer.SetReceiver(receiver_call_->Receiver());
|
| + send_transport.SetReceiver(&analyzer);
|
| + recv_transport.SetReceiver(sender_call_->Receiver());
|
|
|
| - int payload_type;
|
| - if (params.common.codec == "VP8") {
|
| - encoder_.reset(VideoEncoder::Create(VideoEncoder::kVp8));
|
| - payload_type = kPayloadTypeVP8;
|
| - } else if (params.common.codec == "VP9") {
|
| - encoder_.reset(VideoEncoder::Create(VideoEncoder::kVp9));
|
| - payload_type = kPayloadTypeVP9;
|
| + CreateSendConfig(1, &analyzer);
|
| +
|
| + rtc::scoped_ptr<VideoEncoder> encoder;
|
| + if (params.codec == "VP8") {
|
| + encoder =
|
| + rtc::scoped_ptr<VideoEncoder>(VideoEncoder::Create(VideoEncoder::kVp8));
|
| + send_config_.encoder_settings.encoder = encoder.get();
|
| + send_config_.encoder_settings.payload_name = "VP8";
|
| + } else if (params.codec == "VP9") {
|
| + encoder =
|
| + rtc::scoped_ptr<VideoEncoder>(VideoEncoder::Create(VideoEncoder::kVp9));
|
| + send_config_.encoder_settings.encoder = encoder.get();
|
| + send_config_.encoder_settings.payload_name = "VP9";
|
| } else {
|
| RTC_NOTREACHED() << "Codec not supported!";
|
| return;
|
| }
|
| - send_config_.encoder_settings.encoder = encoder_.get();
|
| - send_config_.encoder_settings.payload_name = params.common.codec;
|
| - send_config_.encoder_settings.payload_type = payload_type;
|
| + send_config_.encoder_settings.payload_type = 124;
|
|
|
| send_config_.rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
|
| send_config_.rtp.rtx.ssrcs.push_back(kSendRtxSsrcs[0]);
|
| send_config_.rtp.rtx.payload_type = kSendRtxPayloadType;
|
|
|
| - send_config_.rtp.extensions.clear();
|
| - if (params.common.send_side_bwe) {
|
| - send_config_.rtp.extensions.push_back(RtpExtension(
|
| - RtpExtension::kTransportSequenceNumber, kTransportSeqExtensionId));
|
| - } else {
|
| - send_config_.rtp.extensions.push_back(
|
| - RtpExtension(RtpExtension::kAbsSendTime, kAbsSendTimeExtensionId));
|
| - }
|
| -
|
| - // Automatically fill out streams[0] with params.
|
| VideoStream* stream = &encoder_config_.streams[0];
|
| - stream->width = params.common.width;
|
| - stream->height = params.common.height;
|
| - stream->min_bitrate_bps = params.common.min_bitrate_bps;
|
| - stream->target_bitrate_bps = params.common.target_bitrate_bps;
|
| - stream->max_bitrate_bps = params.common.max_bitrate_bps;
|
| - stream->max_framerate = static_cast<int>(params.common.fps);
|
| -
|
| - stream->temporal_layer_thresholds_bps.clear();
|
| - if (params.common.num_temporal_layers > 1) {
|
| - stream->temporal_layer_thresholds_bps.push_back(stream->target_bitrate_bps);
|
| - }
|
| + stream->width = params.clip.width;
|
| + stream->height = params.clip.height;
|
| + stream->min_bitrate_bps = params.min_bitrate_bps;
|
| + stream->target_bitrate_bps = params.target_bitrate_bps;
|
| + stream->max_bitrate_bps = params.max_bitrate_bps;
|
| + stream->max_framerate = params.clip.fps;
|
| +
|
| + VideoCodecVP8 vp8_settings;
|
| + VideoCodecVP9 vp9_settings;
|
| + if (params.mode == ContentMode::kScreensharingStaticImage ||
|
| + params.mode == ContentMode::kScreensharingScrollingImage) {
|
| + encoder_config_.content_type = VideoEncoderConfig::ContentType::kScreen;
|
| + encoder_config_.min_transmit_bitrate_bps = 400 * 1000;
|
| + if (params.codec == "VP8") {
|
| + vp8_settings = VideoEncoder::GetDefaultVp8Settings();
|
| + vp8_settings.denoisingOn = false;
|
| + vp8_settings.frameDroppingOn = false;
|
| + vp8_settings.numberOfTemporalLayers = num_temporal_layers;
|
| + encoder_config_.encoder_specific_settings = &vp8_settings;
|
| + } else if (params.codec == "VP9") {
|
| + vp9_settings = VideoEncoder::GetDefaultVp9Settings();
|
| + vp9_settings.denoisingOn = false;
|
| + vp9_settings.frameDroppingOn = false;
|
| + vp9_settings.numberOfTemporalLayers = num_temporal_layers;
|
| + encoder_config_.encoder_specific_settings = &vp9_settings;
|
| + }
|
|
|
| - CreateMatchingReceiveConfigs(recv_transport);
|
| + stream->temporal_layer_thresholds_bps.clear();
|
| + if (num_temporal_layers > 1) {
|
| + stream->temporal_layer_thresholds_bps.push_back(
|
| + stream->target_bitrate_bps);
|
| + }
|
| + }
|
|
|
| + CreateMatchingReceiveConfigs(&recv_transport);
|
| + receive_configs_[0].renderer = &analyzer;
|
| receive_configs_[0].rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
|
| receive_configs_[0].rtp.rtx[kSendRtxPayloadType].ssrc = kSendRtxSsrcs[0];
|
| receive_configs_[0].rtp.rtx[kSendRtxPayloadType].payload_type =
|
| kSendRtxPayloadType;
|
|
|
| - encoder_config_.min_transmit_bitrate_bps = params.common.min_transmit_bps;
|
| -}
|
| -
|
| -void VideoQualityTest::SetupScreenshare(const Params& params) {
|
| - RTC_CHECK(params.screenshare.enabled);
|
| -
|
| - // Fill out codec settings.
|
| - encoder_config_.content_type = VideoEncoderConfig::ContentType::kScreen;
|
| - if (params.common.codec == "VP8") {
|
| - codec_settings_.VP8 = VideoEncoder::GetDefaultVp8Settings();
|
| - codec_settings_.VP8.denoisingOn = false;
|
| - codec_settings_.VP8.frameDroppingOn = false;
|
| - codec_settings_.VP8.numberOfTemporalLayers =
|
| - static_cast<unsigned char>(params.common.num_temporal_layers);
|
| - encoder_config_.encoder_specific_settings = &codec_settings_.VP8;
|
| - } else if (params.common.codec == "VP9") {
|
| - codec_settings_.VP9 = VideoEncoder::GetDefaultVp9Settings();
|
| - codec_settings_.VP9.denoisingOn = false;
|
| - codec_settings_.VP9.frameDroppingOn = false;
|
| - codec_settings_.VP9.numberOfTemporalLayers =
|
| - static_cast<unsigned char>(params.common.num_temporal_layers);
|
| - encoder_config_.encoder_specific_settings = &codec_settings_.VP9;
|
| - }
|
| + for (auto& config : receive_configs_)
|
| + config.pre_decode_callback = &analyzer;
|
| + CreateStreams();
|
| + analyzer.input_ = send_stream_->Input();
|
| + analyzer.send_stream_ = send_stream_;
|
|
|
| - // Setup frame generator.
|
| - const size_t kWidth = 1850;
|
| - const size_t kHeight = 1110;
|
| std::vector<std::string> slides;
|
| slides.push_back(test::ResourcePath("web_screenshot_1850_1110", "yuv"));
|
| slides.push_back(test::ResourcePath("presentation_1850_1110", "yuv"));
|
| slides.push_back(test::ResourcePath("photo_1850_1110", "yuv"));
|
| slides.push_back(test::ResourcePath("difficult_photo_1850_1110", "yuv"));
|
| -
|
| - if (params.screenshare.scroll_duration == 0) {
|
| - // Cycle image every slide_change_interval seconds.
|
| - frame_generator_.reset(test::FrameGenerator::CreateFromYuvFile(
|
| - slides, kWidth, kHeight,
|
| - params.screenshare.slide_change_interval * params.common.fps));
|
| - } else {
|
| - RTC_CHECK_LE(params.common.width, kWidth);
|
| - RTC_CHECK_LE(params.common.height, kHeight);
|
| - RTC_CHECK_GT(params.screenshare.slide_change_interval, 0);
|
| - const int kPauseDurationMs = (params.screenshare.slide_change_interval -
|
| - params.screenshare.scroll_duration) * 1000;
|
| - RTC_CHECK_LE(params.screenshare.scroll_duration,
|
| - params.screenshare.slide_change_interval);
|
| -
|
| - if (params.screenshare.scroll_duration) {
|
| - frame_generator_.reset(
|
| + size_t kSlidesWidth = 1850;
|
| + size_t kSlidesHeight = 1110;
|
| +
|
| + Clock* clock = Clock::GetRealTimeClock();
|
| + rtc::scoped_ptr<test::FrameGenerator> frame_generator;
|
| +
|
| + switch (params.mode) {
|
| + case ContentMode::kRealTimeVideo:
|
| + frame_generator.reset(test::FrameGenerator::CreateFromYuvFile(
|
| + std::vector<std::string>(1,
|
| + test::ResourcePath(params.clip.name, "yuv")),
|
| + params.clip.width, params.clip.height, 1));
|
| + break;
|
| + case ContentMode::kScreensharingScrollingImage:
|
| + frame_generator.reset(
|
| test::FrameGenerator::CreateScrollingInputFromYuvFiles(
|
| - clock_, slides, kWidth, kHeight, params.common.width,
|
| - params.common.height, params.screenshare.scroll_duration * 1000,
|
| - kPauseDurationMs));
|
| - } else {
|
| - frame_generator_.reset(test::FrameGenerator::CreateFromYuvFile(
|
| - slides, kWidth, kHeight,
|
| - params.screenshare.slide_change_interval * params.common.fps));
|
| - }
|
| + clock, slides, kSlidesWidth, kSlidesHeight, params.clip.width,
|
| + params.clip.height, 2000,
|
| + 8000)); // Scroll for 2 seconds, then pause for 8.
|
| + break;
|
| + case ContentMode::kScreensharingStaticImage:
|
| + frame_generator.reset(test::FrameGenerator::CreateFromYuvFile(
|
| + slides, kSlidesWidth, kSlidesHeight,
|
| + 10 * params.clip.fps)); // Cycle image every 10 seconds.
|
| + break;
|
| }
|
| -}
|
|
|
| -void VideoQualityTest::CreateCapturer(const Params& params,
|
| - VideoCaptureInput* input) {
|
| - if (params.screenshare.enabled) {
|
| - test::FrameGeneratorCapturer *frame_generator_capturer =
|
| - new test::FrameGeneratorCapturer(
|
| - clock_, input, frame_generator_.release(), params.common.fps);
|
| - EXPECT_TRUE(frame_generator_capturer->Init());
|
| - capturer_.reset(frame_generator_capturer);
|
| - } else {
|
| - if (params.video.clip_name.empty()) {
|
| - capturer_.reset(test::VideoCapturer::Create(
|
| - input, params.common.width, params.common.height, params.common.fps,
|
| - clock_));
|
| - } else {
|
| - capturer_.reset(test::FrameGeneratorCapturer::CreateFromYuvFile(
|
| - input, test::ResourcePath(params.video.clip_name, "yuv"),
|
| - params.common.width, params.common.height, params.common.fps,
|
| - clock_));
|
| - ASSERT_TRUE(capturer_.get() != nullptr)
|
| - << "Could not create capturer for " << params.video.clip_name
|
| - << ".yuv. Is this resource file present?";
|
| - }
|
| - }
|
| -}
|
| -
|
| -void VideoQualityTest::RunWithAnalyzer(const Params& params) {
|
| - // TODO(ivica): Merge with RunWithRenderer and use a flag / argument to
|
| - // differentiate between the analyzer and the renderer case.
|
| - ValidateParams(params);
|
| -
|
| - FILE* graph_data_output_file = nullptr;
|
| - if (!params.analyzer.graph_data_output_filename.empty()) {
|
| - graph_data_output_file =
|
| - fopen(params.analyzer.graph_data_output_filename.c_str(), "w");
|
| - RTC_CHECK(graph_data_output_file != nullptr)
|
| - << "Can't open the file "
|
| - << params.analyzer.graph_data_output_filename << "!";
|
| - }
|
| -
|
| - test::LayerFilteringTransport send_transport(
|
| - params.pipe, kPayloadTypeVP8, kPayloadTypeVP9,
|
| - static_cast<uint8_t>(params.common.tl_discard_threshold), 0);
|
| - test::DirectTransport recv_transport(params.pipe);
|
| - VideoAnalyzer analyzer(
|
| - nullptr, &send_transport, params.analyzer.test_label,
|
| - params.analyzer.avg_psnr_threshold, params.analyzer.avg_ssim_threshold,
|
| - params.analyzer.test_durations_secs * params.common.fps,
|
| - graph_data_output_file);
|
| -
|
| - Call::Config call_config;
|
| - call_config.bitrate_config = params.common.call_bitrate_config;
|
| - CreateCalls(call_config, call_config);
|
| -
|
| - analyzer.SetReceiver(receiver_call_->Receiver());
|
| - send_transport.SetReceiver(&analyzer);
|
| - recv_transport.SetReceiver(sender_call_->Receiver());
|
| -
|
| - SetupFullStack(params, &analyzer, &recv_transport);
|
| - receive_configs_[0].renderer = &analyzer;
|
| - for (auto& config : receive_configs_)
|
| - config.pre_decode_callback = &analyzer;
|
| -
|
| - if (params.screenshare.enabled)
|
| - SetupScreenshare(params);
|
| -
|
| - CreateCapturer(params, &analyzer);
|
| + ASSERT_TRUE(frame_generator.get() != nullptr);
|
| + frame_generator_capturer_.reset(new test::FrameGeneratorCapturer(
|
| + clock, &analyzer, frame_generator.release(), params.clip.fps));
|
| + ASSERT_TRUE(frame_generator_capturer_->Init());
|
|
|
| - CreateStreams();
|
| - analyzer.input_ = send_stream_->Input();
|
| - analyzer.send_stream_ = send_stream_;
|
| -
|
| - send_stream_->Start();
|
| - for (size_t i = 0; i < receive_streams_.size(); ++i)
|
| - receive_streams_[i]->Start();
|
| - capturer_->Start();
|
| + Start();
|
|
|
| analyzer.Wait();
|
|
|
| send_transport.StopSending();
|
| recv_transport.StopSending();
|
|
|
| - capturer_->Stop();
|
| - for (size_t i = 0; i < receive_streams_.size(); ++i)
|
| - receive_streams_[i]->Stop();
|
| - send_stream_->Stop();
|
| + Stop();
|
|
|
| DestroyStreams();
|
| -
|
| - if (graph_data_output_file)
|
| - fclose(graph_data_output_file);
|
| }
|
|
|
| -void VideoQualityTest::RunWithVideoRenderer(const Params& params) {
|
| - ValidateParams(params);
|
| -
|
| - rtc::scoped_ptr<test::VideoRenderer> local_preview(
|
| - test::VideoRenderer::Create("Local Preview", params.common.width,
|
| - params.common.height));
|
| - rtc::scoped_ptr<test::VideoRenderer> loopback_video(
|
| - test::VideoRenderer::Create("Loopback Video", params.common.width,
|
| - params.common.height));
|
| -
|
| - // TODO(ivica): Remove bitrate_config and use the default Call::Config(), to
|
| - // match the full stack tests.
|
| - Call::Config call_config;
|
| - call_config.bitrate_config = params.common.call_bitrate_config;
|
| - rtc::scoped_ptr<Call> call(Call::Create(call_config));
|
| -
|
| - test::LayerFilteringTransport transport(
|
| - params.pipe, kPayloadTypeVP8, kPayloadTypeVP9,
|
| - static_cast<uint8_t>(params.common.tl_discard_threshold), 0);
|
| - // TODO(ivica): Use two calls to be able to merge with RunWithAnalyzer or at
|
| - // least share as much code as possible. That way this test would also match
|
| - // the full stack tests better.
|
| - transport.SetReceiver(call->Receiver());
|
| -
|
| - SetupFullStack(params, &transport, &transport);
|
| - send_config_.local_renderer = local_preview.get();
|
| - receive_configs_[0].renderer = loopback_video.get();
|
| -
|
| - if (params.screenshare.enabled)
|
| - SetupScreenshare(params);
|
| -
|
| - send_stream_ = call->CreateVideoSendStream(send_config_, encoder_config_);
|
| - CreateCapturer(params, send_stream_->Input());
|
| -
|
| - VideoReceiveStream* receive_stream =
|
| - call->CreateVideoReceiveStream(receive_configs_[0]);
|
| -
|
| - receive_stream->Start();
|
| - send_stream_->Start();
|
| - capturer_->Start();
|
| -
|
| - test::PressEnterToContinue();
|
| -
|
| - capturer_->Stop();
|
| - send_stream_->Stop();
|
| - receive_stream->Stop();
|
| -
|
| - call->DestroyVideoReceiveStream(receive_stream);
|
| - call->DestroyVideoSendStream(send_stream_);
|
| +// TEST_F(FullStackTestBefore, ParisQcifWithoutPacketLoss) {
|
| +// FullStackTestBeforeParams paris_qcif = {"net_delay_0_0_plr_0",
|
| +// {"paris_qcif", 176, 144, 30},
|
| +// ContentMode::kRealTimeVideo,
|
| +// 300000,
|
| +// 300000,
|
| +// 300000,
|
| +// 36.0,
|
| +// 0.96,
|
| +// kFullStackTestBeforeDurationSecs,
|
| +// "VP8"};
|
| +// RunTest(paris_qcif);
|
| +// }
|
| +//
|
| +// TEST_F(FullStackTestBefore, ForemanCifWithoutPacketLoss) {
|
| +// // TODO(pbos): Decide on psnr/ssim thresholds for foreman_cif.
|
| +// FullStackTestBeforeParams foreman_cif = {"foreman_cif_net_delay_0_0_plr_0",
|
| +// {"foreman_cif", 352, 288, 30},
|
| +// ContentMode::kRealTimeVideo,
|
| +// 700000,
|
| +// 700000,
|
| +// 700000,
|
| +// 0.0,
|
| +// 0.0,
|
| +// kFullStackTestBeforeDurationSecs,
|
| +// "VP8"};
|
| +// RunTest(foreman_cif);
|
| +// }
|
| +//
|
| +// TEST_F(FullStackTestBefore, ForemanCifPlr5) {
|
| +// FullStackTestBeforeParams foreman_cif = {"foreman_cif_delay_50_0_plr_5",
|
| +// {"foreman_cif", 352, 288, 30},
|
| +// ContentMode::kRealTimeVideo,
|
| +// 30000,
|
| +// 500000,
|
| +// 2000000,
|
| +// 0.0,
|
| +// 0.0,
|
| +// kFullStackTestBeforeDurationSecs,
|
| +// "VP8"};
|
| +// foreman_cif.link.loss_percent = 5;
|
| +// foreman_cif.link.queue_delay_ms = 50;
|
| +// RunTest(foreman_cif);
|
| +// }
|
| +//
|
| +// TEST_F(FullStackTestBefore, ForemanCif500kbps) {
|
| +// FullStackTestBeforeParams foreman_cif = {"foreman_cif_500kbps",
|
| +// {"foreman_cif", 352, 288, 30},
|
| +// ContentMode::kRealTimeVideo,
|
| +// 30000,
|
| +// 500000,
|
| +// 2000000,
|
| +// 0.0,
|
| +// 0.0,
|
| +// kFullStackTestBeforeDurationSecs,
|
| +// "VP8"};
|
| +// foreman_cif.link.queue_length_packets = 0;
|
| +// foreman_cif.link.queue_delay_ms = 0;
|
| +// foreman_cif.link.link_capacity_kbps = 500;
|
| +// RunTest(foreman_cif);
|
| +// }
|
| +//
|
| +// TEST_F(FullStackTestBefore, ForemanCif500kbpsLimitedQueue) {
|
| +// FullStackTestBeforeParams foreman_cif = {"foreman_cif_500kbps_32pkts_queue",
|
| +// {"foreman_cif", 352, 288, 30},
|
| +// ContentMode::kRealTimeVideo,
|
| +// 30000,
|
| +// 500000,
|
| +// 2000000,
|
| +// 0.0,
|
| +// 0.0,
|
| +// kFullStackTestBeforeDurationSecs,
|
| +// "VP8"};
|
| +// foreman_cif.link.queue_length_packets = 32;
|
| +// foreman_cif.link.queue_delay_ms = 0;
|
| +// foreman_cif.link.link_capacity_kbps = 500;
|
| +// RunTest(foreman_cif);
|
| +// }
|
| +//
|
| +// TEST_F(FullStackTestBefore, ForemanCif500kbps100ms) {
|
| +// FullStackTestBeforeParams foreman_cif = {"foreman_cif_500kbps_100ms",
|
| +// {"foreman_cif", 352, 288, 30},
|
| +// ContentMode::kRealTimeVideo,
|
| +// 30000,
|
| +// 500000,
|
| +// 2000000,
|
| +// 0.0,
|
| +// 0.0,
|
| +// kFullStackTestBeforeDurationSecs,
|
| +// "VP8"};
|
| +// foreman_cif.link.queue_length_packets = 0;
|
| +// foreman_cif.link.queue_delay_ms = 100;
|
| +// foreman_cif.link.link_capacity_kbps = 500;
|
| +// RunTest(foreman_cif);
|
| +// }
|
| +//
|
| +// TEST_F(FullStackTestBefore, ForemanCif500kbps100msLimitedQueue) {
|
| +// FullStackTestBeforeParams foreman_cif = {"foreman_cif_500kbps_100ms_32pkts_queue",
|
| +// {"foreman_cif", 352, 288, 30},
|
| +// ContentMode::kRealTimeVideo,
|
| +// 30000,
|
| +// 500000,
|
| +// 2000000,
|
| +// 0.0,
|
| +// 0.0,
|
| +// kFullStackTestBeforeDurationSecs,
|
| +// "VP8"};
|
| +// foreman_cif.link.queue_length_packets = 32;
|
| +// foreman_cif.link.queue_delay_ms = 100;
|
| +// foreman_cif.link.link_capacity_kbps = 500;
|
| +// RunTest(foreman_cif);
|
| +// }
|
| +//
|
| +// TEST_F(FullStackTestBefore, ForemanCif1000kbps100msLimitedQueue) {
|
| +// FullStackTestBeforeParams foreman_cif = {"foreman_cif_1000kbps_100ms_32pkts_queue",
|
| +// {"foreman_cif", 352, 288, 30},
|
| +// ContentMode::kRealTimeVideo,
|
| +// 30000,
|
| +// 2000000,
|
| +// 2000000,
|
| +// 0.0,
|
| +// 0.0,
|
| +// kFullStackTestBeforeDurationSecs,
|
| +// "VP8"};
|
| +// foreman_cif.link.queue_length_packets = 32;
|
| +// foreman_cif.link.queue_delay_ms = 100;
|
| +// foreman_cif.link.link_capacity_kbps = 1000;
|
| +// RunTest(foreman_cif);
|
| +// }
|
| +//
|
| +// // Temporarily disabled on Android due to low test timeouts.
|
| +// // https://code.google.com/p/chromium/issues/detail?id=513170
|
| +// #include "webrtc/test/testsupport/gtest_disable.h"
|
| +// TEST_F(FullStackTestBefore, DISABLED_ON_ANDROID(ScreenshareSlidesVP8_2TL)) {
|
| +// FullStackTestBeforeParams screenshare_params = {
|
| +// "screenshare_slides",
|
| +// {"screenshare_slides", 1850, 1110, 5},
|
| +// ContentMode::kScreensharingStaticImage,
|
| +// 50000,
|
| +// 200000,
|
| +// 2000000,
|
| +// 0.0,
|
| +// 0.0,
|
| +// kFullStackTestBeforeDurationSecs,
|
| +// "VP8"};
|
| +// RunTest(screenshare_params);
|
| +// }
|
| +//
|
| +// TEST_F(FullStackTestBefore, DISABLED_ON_ANDROID(ScreenshareSlidesVP8_2TL_Scroll)) {
|
| +// FullStackTestBeforeParams screenshare_params = {
|
| +// "screenshare_slides_scrolling",
|
| +// // Crop height by two, scrolling vertically only.
|
| +// {"screenshare_slides_scrolling", 1850, 1110 / 2, 5},
|
| +// ContentMode::kScreensharingScrollingImage,
|
| +// 50000,
|
| +// 200000,
|
| +// 2000000,
|
| +// 0.0,
|
| +// 0.0,
|
| +// kFullStackTestBeforeDurationSecs,
|
| +// "VP8"};
|
| +// RunTest(screenshare_params);
|
| +// }
|
| +
|
| +#define SCREENSHARE_VP8_2TL { \
|
| + FullStackTestBeforeParams screenshare_params = { \
|
| + "screenshare_slides", \
|
| + {"screenshare_slides", 1850, 1110, 5}, \
|
| + ContentMode::kScreensharingStaticImage, \
|
| + 50000, \
|
| + 200000, \
|
| + 2000000, \
|
| + 0.0, \
|
| + 0.0, \
|
| + kFullStackTestBeforeDurationSecs, \
|
| + "VP8"}; \
|
| + RunTest(screenshare_params); \
|
| +}
|
|
|
| - transport.StopSending();
|
| +#define SCREENSHARE_VP9_2TL { \
|
| + FullStackTestBeforeParams screenshare_params = { \
|
| + "screenshare_slides_vp9_2tl", \
|
| + {"screenshare_slides", 1850, 1110, 5}, \
|
| + ContentMode::kScreensharingStaticImage, \
|
| + 50000, \
|
| + 200000, \
|
| + 2000000, \
|
| + 0.0, \
|
| + 0.0, \
|
| + kFullStackTestBeforeDurationSecs, \
|
| + "VP9"}; \
|
| + RunTest(screenshare_params); \
|
| }
|
|
|
| +TEST_F(FullStackTestBefore, ScreenshareSlidesVP8_2TL_Before_0) SCREENSHARE_VP8_2TL;

| +TEST_F(FullStackTestBefore, ScreenshareSlidesVP8_2TL_Before_1) SCREENSHARE_VP8_2TL;

| +TEST_F(FullStackTestBefore, ScreenshareSlidesVP8_2TL_Before_2) SCREENSHARE_VP8_2TL;

| +TEST_F(FullStackTestBefore, ScreenshareSlidesVP8_2TL_Before_3) SCREENSHARE_VP8_2TL;

| +TEST_F(FullStackTestBefore, ScreenshareSlidesVP8_2TL_Before_4) SCREENSHARE_VP8_2TL;

| +TEST_F(FullStackTestBefore, ScreenshareSlidesVP8_2TL_Before_5) SCREENSHARE_VP8_2TL;

| +TEST_F(FullStackTestBefore, ScreenshareSlidesVP8_2TL_Before_6) SCREENSHARE_VP8_2TL;

| +TEST_F(FullStackTestBefore, ScreenshareSlidesVP8_2TL_Before_7) SCREENSHARE_VP8_2TL;

| +TEST_F(FullStackTestBefore, ScreenshareSlidesVP8_2TL_Before_8) SCREENSHARE_VP8_2TL;

| +TEST_F(FullStackTestBefore, ScreenshareSlidesVP8_2TL_Before_9) SCREENSHARE_VP8_2TL;
|
| } // namespace webrtc
|
|
|