Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(116)

Unified Diff: webrtc/video/video_quality_test.cc

Issue 2463733002: Revert of "Separating video settings in VideoQualityTest". (Closed)
Patch Set: Created 4 years, 2 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View side-by-side diff with in-line comments
Download patch
« no previous file with comments | « webrtc/video/video_quality_test.h ('k') | no next file » | no next file with comments »
Expand Comments ('e') | Collapse Comments ('c') | Show/Hide Comments ('s')
Index: webrtc/video/video_quality_test.cc
diff --git a/webrtc/video/video_quality_test.cc b/webrtc/video/video_quality_test.cc
index c4d13e65cfe1812c5e96a3eacc50f368b41162aa..dabe50067697b11dd0901d0d2a47113341bcf65b 100644
--- a/webrtc/video/video_quality_test.cc
+++ b/webrtc/video/video_quality_test.cc
@@ -820,26 +820,13 @@
VideoQualityTest::VideoQualityTest()
: clock_(Clock::GetRealTimeClock()), receive_logs_(0), send_logs_(0) {}
-VideoQualityTest::Params::Params()
- : call({false, Call::Config::BitrateConfig()}),
- video({false, 640, 480, 30, 50, 800, 800, false, "VP8", 1, -1, 0, false,
- "", ""}),
- audio({false, false}),
- screenshare({false, 10, 0}),
- analyzer({"", 0.0, 0.0, 0, "", ""}),
- pipe(),
- logs(false),
- ss({std::vector<VideoStream>(), 0, 0, -1, std::vector<SpatialLayer>()}) {}
-
-VideoQualityTest::Params::~Params() = default;
-
void VideoQualityTest::TestBody() {}
std::string VideoQualityTest::GenerateGraphTitle() const {
std::stringstream ss;
- ss << params_.video.codec;
- ss << " (" << params_.video.target_bitrate_bps / 1000 << "kbps";
- ss << ", " << params_.video.fps << " FPS";
+ ss << params_.common.codec;
+ ss << " (" << params_.common.target_bitrate_bps / 1000 << "kbps";
+ ss << ", " << params_.common.fps << " FPS";
if (params_.screenshare.scroll_duration)
ss << ", " << params_.screenshare.scroll_duration << "s scroll";
if (params_.ss.streams.size() > 1)
@@ -864,23 +851,25 @@
// retransmitting the wrong packets.
RTC_CHECK(params_.ss.selected_sl == -1 ||
params_.ss.selected_sl == params_.ss.num_spatial_layers - 1);
- RTC_CHECK(params_.video.selected_tl == -1 ||
- params_.video.selected_tl ==
- params_.video.num_temporal_layers - 1);
+ RTC_CHECK(params_.common.selected_tl == -1 ||
+ params_.common.selected_tl ==
+ params_.common.num_temporal_layers - 1);
}
// TODO(ivica): Should max_bitrate_bps == -1 represent inf max bitrate, as it
// does in some parts of the code?
- RTC_CHECK_GE(params_.video.max_bitrate_bps, params_.video.target_bitrate_bps);
- RTC_CHECK_GE(params_.video.target_bitrate_bps, params_.video.min_bitrate_bps);
- RTC_CHECK_LT(params_.video.selected_tl, params_.video.num_temporal_layers);
+ RTC_CHECK_GE(params_.common.max_bitrate_bps,
+ params_.common.target_bitrate_bps);
+ RTC_CHECK_GE(params_.common.target_bitrate_bps,
+ params_.common.min_bitrate_bps);
+ RTC_CHECK_LT(params_.common.selected_tl, params_.common.num_temporal_layers);
RTC_CHECK_LT(params_.ss.selected_stream, params_.ss.streams.size());
for (const VideoStream& stream : params_.ss.streams) {
RTC_CHECK_GE(stream.min_bitrate_bps, 0);
RTC_CHECK_GE(stream.target_bitrate_bps, stream.min_bitrate_bps);
RTC_CHECK_GE(stream.max_bitrate_bps, stream.target_bitrate_bps);
RTC_CHECK_EQ(static_cast<int>(stream.temporal_layer_thresholds_bps.size()),
- params_.video.num_temporal_layers - 1);
+ params_.common.num_temporal_layers - 1);
}
// TODO(ivica): Should we check if the sum of all streams/layers is equal to
// the total bitrate? We anyway have to update them in the case bitrate
@@ -890,9 +879,9 @@
RTC_CHECK(params_.ss.spatial_layers.empty() ||
params_.ss.spatial_layers.size() ==
static_cast<size_t>(params_.ss.num_spatial_layers));
- if (params_.video.codec == "VP8") {
+ if (params_.common.codec == "VP8") {
RTC_CHECK_EQ(params_.ss.num_spatial_layers, 1);
- } else if (params_.video.codec == "VP9") {
+ } else if (params_.common.codec == "VP9") {
RTC_CHECK_EQ(params_.ss.streams.size(), 1u);
}
}
@@ -928,14 +917,14 @@
// Static.
VideoStream VideoQualityTest::DefaultVideoStream(const Params& params) {
VideoStream stream;
- stream.width = params.video.width;
- stream.height = params.video.height;
- stream.max_framerate = params.video.fps;
- stream.min_bitrate_bps = params.video.min_bitrate_bps;
- stream.target_bitrate_bps = params.video.target_bitrate_bps;
- stream.max_bitrate_bps = params.video.max_bitrate_bps;
+ stream.width = params.common.width;
+ stream.height = params.common.height;
+ stream.max_framerate = params.common.fps;
+ stream.min_bitrate_bps = params.common.min_bitrate_bps;
+ stream.target_bitrate_bps = params.common.target_bitrate_bps;
+ stream.max_bitrate_bps = params.common.max_bitrate_bps;
stream.max_qp = 52;
- if (params.video.num_temporal_layers == 2)
+ if (params.common.num_temporal_layers == 2)
stream.temporal_layer_thresholds_bps.push_back(stream.target_bitrate_bps);
return stream;
}
@@ -978,7 +967,7 @@
stream.temporal_layer_thresholds_bps.end(), v.begin() + 7, v.end());
} else {
// Automatic TL thresholds for more than two layers not supported.
- RTC_CHECK_LE(params->video.num_temporal_layers, 2);
+ RTC_CHECK_LE(params->common.num_temporal_layers, 2);
}
params->ss.streams.push_back(stream);
}
@@ -1010,13 +999,13 @@
CreateSendConfig(num_streams, 0, send_transport);
int payload_type;
- if (params_.video.codec == "H264") {
+ if (params_.common.codec == "H264") {
encoder_.reset(VideoEncoder::Create(VideoEncoder::kH264));
payload_type = kPayloadTypeH264;
- } else if (params_.video.codec == "VP8") {
+ } else if (params_.common.codec == "VP8") {
encoder_.reset(VideoEncoder::Create(VideoEncoder::kVp8));
payload_type = kPayloadTypeVP8;
- } else if (params_.video.codec == "VP9") {
+ } else if (params_.common.codec == "VP9") {
encoder_.reset(VideoEncoder::Create(VideoEncoder::kVp9));
payload_type = kPayloadTypeVP9;
} else {
@@ -1024,7 +1013,7 @@
return;
}
video_send_config_.encoder_settings.encoder = encoder_.get();
- video_send_config_.encoder_settings.payload_name = params_.video.codec;
+ video_send_config_.encoder_settings.payload_name = params_.common.codec;
video_send_config_.encoder_settings.payload_type = payload_type;
video_send_config_.rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
video_send_config_.rtp.rtx.payload_type = kSendRtxPayloadType;
@@ -1032,7 +1021,7 @@
video_send_config_.rtp.rtx.ssrcs.push_back(kSendRtxSsrcs[i]);
video_send_config_.rtp.extensions.clear();
- if (params_.call.send_side_bwe) {
+ if (params_.common.send_side_bwe) {
video_send_config_.rtp.extensions.push_back(
RtpExtension(RtpExtension::kTransportSequenceNumberUri,
test::kTransportSequenceNumberExtensionId));
@@ -1042,7 +1031,7 @@
}
video_encoder_config_.min_transmit_bitrate_bps =
- params_.video.min_transmit_bps;
+ params_.common.min_transmit_bps;
video_encoder_config_.number_of_streams = params_.ss.streams.size();
video_encoder_config_.max_bitrate_bps = 0;
@@ -1062,7 +1051,7 @@
video_receive_configs_[i].rtp.rtx[payload_type].ssrc = kSendRtxSsrcs[i];
video_receive_configs_[i].rtp.rtx[payload_type].payload_type =
kSendRtxPayloadType;
- video_receive_configs_[i].rtp.transport_cc = params_.call.send_side_bwe;
+ video_receive_configs_[i].rtp.transport_cc = params_.common.send_side_bwe;
}
}
@@ -1071,20 +1060,20 @@
// Fill out codec settings.
video_encoder_config_.content_type = VideoEncoderConfig::ContentType::kScreen;
- if (params_.video.codec == "VP8") {
+ if (params_.common.codec == "VP8") {
VideoCodecVP8 vp8_settings = VideoEncoder::GetDefaultVp8Settings();
vp8_settings.denoisingOn = false;
vp8_settings.frameDroppingOn = false;
vp8_settings.numberOfTemporalLayers =
- static_cast<unsigned char>(params_.video.num_temporal_layers);
+ static_cast<unsigned char>(params_.common.num_temporal_layers);
video_encoder_config_.encoder_specific_settings = new rtc::RefCountedObject<
VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings);
- } else if (params_.video.codec == "VP9") {
+ } else if (params_.common.codec == "VP9") {
VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings();
vp9_settings.denoisingOn = false;
vp9_settings.frameDroppingOn = false;
vp9_settings.numberOfTemporalLayers =
- static_cast<unsigned char>(params_.video.num_temporal_layers);
+ static_cast<unsigned char>(params_.common.num_temporal_layers);
vp9_settings.numberOfSpatialLayers =
static_cast<unsigned char>(params_.ss.num_spatial_layers);
video_encoder_config_.encoder_specific_settings = new rtc::RefCountedObject<
@@ -1104,10 +1093,10 @@
// Cycle image every slide_change_interval seconds.
frame_generator_.reset(test::FrameGenerator::CreateFromYuvFile(
slides, kWidth, kHeight,
- params_.screenshare.slide_change_interval * params_.video.fps));
+ params_.screenshare.slide_change_interval * params_.common.fps));
} else {
- RTC_CHECK_LE(params_.video.width, kWidth);
- RTC_CHECK_LE(params_.video.height, kHeight);
+ RTC_CHECK_LE(params_.common.width, kWidth);
+ RTC_CHECK_LE(params_.common.height, kHeight);
RTC_CHECK_GT(params_.screenshare.slide_change_interval, 0);
const int kPauseDurationMs = (params_.screenshare.slide_change_interval -
params_.screenshare.scroll_duration) *
@@ -1117,8 +1106,8 @@
frame_generator_.reset(
test::FrameGenerator::CreateScrollingInputFromYuvFiles(
- clock_, slides, kWidth, kHeight, params_.video.width,
- params_.video.height, params_.screenshare.scroll_duration * 1000,
+ clock_, slides, kWidth, kHeight, params_.common.width,
+ params_.common.height, params_.screenshare.scroll_duration * 1000,
kPauseDurationMs));
}
}
@@ -1127,17 +1116,17 @@
if (params_.screenshare.enabled) {
test::FrameGeneratorCapturer* frame_generator_capturer =
new test::FrameGeneratorCapturer(clock_, frame_generator_.release(),
- params_.video.fps);
+ params_.common.fps);
EXPECT_TRUE(frame_generator_capturer->Init());
capturer_.reset(frame_generator_capturer);
} else {
if (params_.video.clip_name.empty()) {
capturer_.reset(test::VcmCapturer::Create(
- params_.video.width, params_.video.height, params_.video.fps));
+ params_.common.width, params_.common.height, params_.common.fps));
} else {
capturer_.reset(test::FrameGeneratorCapturer::CreateFromYuvFile(
test::ResourcePath(params_.video.clip_name, "yuv"),
- params_.video.width, params_.video.height, params_.video.fps,
+ params_.common.width, params_.common.height, params_.common.fps,
clock_));
ASSERT_TRUE(capturer_) << "Could not create capturer for "
<< params_.video.clip_name
@@ -1149,7 +1138,7 @@
void VideoQualityTest::RunWithAnalyzer(const Params& params) {
params_ = params;
- RTC_CHECK(!params_.audio.enabled);
+ RTC_CHECK(!params_.audio);
// TODO(ivica): Merge with RunWithRenderer and use a flag / argument to
// differentiate between the analyzer and the renderer case.
CheckParams();
@@ -1165,13 +1154,13 @@
webrtc::RtcEventLogNullImpl event_log;
Call::Config call_config(&event_log_);
- call_config.bitrate_config = params.call.call_bitrate_config;
+ call_config.bitrate_config = params.common.call_bitrate_config;
CreateCalls(call_config, call_config);
test::LayerFilteringTransport send_transport(
- params_.pipe, sender_call_.get(), kPayloadTypeVP8, kPayloadTypeVP9,
- params_.video.selected_tl, params_.ss.selected_sl);
- test::DirectTransport recv_transport(params_.pipe, receiver_call_.get());
+ params.pipe, sender_call_.get(), kPayloadTypeVP8, kPayloadTypeVP9,
+ params.common.selected_tl, params_.ss.selected_sl);
+ test::DirectTransport recv_transport(params.pipe, receiver_call_.get());
std::string graph_title = params_.analyzer.graph_title;
if (graph_title.empty())
@@ -1187,8 +1176,8 @@
? params_.ss.selected_sl
: params_.ss.num_spatial_layers - 1;
bool disable_quality_check =
- selected_stream.width != params_.video.width ||
- selected_stream.height != params_.video.height ||
+ selected_stream.width != params_.common.width ||
+ selected_stream.height != params_.common.height ||
(!params_.ss.spatial_layers.empty() &&
params_.ss.spatial_layers[selected_sl].scaling_factor_num !=
params_.ss.spatial_layers[selected_sl].scaling_factor_den);
@@ -1202,7 +1191,7 @@
&send_transport, params_.analyzer.test_label,
disable_quality_check ? -1.1 : params_.analyzer.avg_psnr_threshold,
disable_quality_check ? -1.1 : params_.analyzer.avg_ssim_threshold,
- params_.analyzer.test_durations_secs * params_.video.fps,
+ params_.analyzer.test_durations_secs * params_.common.fps,
graph_data_output_file, graph_title,
kVideoSendSsrcs[params_.ss.selected_stream]);
@@ -1257,8 +1246,8 @@
CheckParams();
std::unique_ptr<test::VideoRenderer> local_preview(
- test::VideoRenderer::Create("Local Preview", params_.video.width,
- params_.video.height));
+ test::VideoRenderer::Create("Local Preview", params_.common.width,
+ params_.common.height));
size_t stream_id = params_.ss.selected_stream;
std::string title = "Loopback Video";
if (params_.ss.streams.size() > 1) {
@@ -1276,10 +1265,10 @@
// match the full stack tests.
webrtc::RtcEventLogNullImpl event_log;
Call::Config call_config(&event_log_);
- call_config.bitrate_config = params_.call.call_bitrate_config;
+ call_config.bitrate_config = params_.common.call_bitrate_config;
::VoiceEngineState voe;
- if (params_.audio.enabled) {
+ if (params_.audio) {
CreateVoiceEngine(&voe, decoder_factory_);
AudioState::Config audio_state_config;
audio_state_config.voice_engine = voe.voice_engine;
@@ -1290,7 +1279,7 @@
test::LayerFilteringTransport transport(
params.pipe, call.get(), kPayloadTypeVP8, kPayloadTypeVP9,
- params.video.selected_tl, params_.ss.selected_sl);
+ params.common.selected_tl, params_.ss.selected_sl);
// TODO(ivica): Use two calls to be able to merge with RunWithAnalyzer or at
// least share as much code as possible. That way this test would also match
// the full stack tests better.
@@ -1300,13 +1289,13 @@
video_send_config_.pre_encode_callback = local_preview.get();
video_receive_configs_[stream_id].renderer = loopback_video.get();
- if (params_.audio.enabled && params_.audio.sync_video)
+ if (params_.audio && params_.audio_video_sync)
video_receive_configs_[stream_id].sync_group = kSyncGroup;
video_send_config_.suspend_below_min_bitrate =
- params_.video.suspend_below_min_bitrate;
-
- if (params.video.fec) {
+ params_.common.suspend_below_min_bitrate;
+
+ if (params.common.fec) {
video_send_config_.rtp.ulpfec.red_payload_type = kRedPayloadType;
video_send_config_.rtp.ulpfec.ulpfec_payload_type = kUlpfecPayloadType;
video_receive_configs_[stream_id].rtp.ulpfec.red_payload_type =
@@ -1326,7 +1315,7 @@
video_send_stream_->SetSource(capturer_.get());
AudioReceiveStream* audio_receive_stream = nullptr;
- if (params_.audio.enabled) {
+ if (params_.audio) {
audio_send_config_ = AudioSendStream::Config(&transport);
audio_send_config_.voe_channel_id = voe.send_channel_id;
audio_send_config_.rtp.ssrc = kAudioSendSsrc;
@@ -1334,7 +1323,7 @@
// Add extension to enable audio send side BWE, and allow audio bit rate
// adaptation.
audio_send_config_.rtp.extensions.clear();
- if (params_.call.send_side_bwe) {
+ if (params_.common.send_side_bwe) {
audio_send_config_.rtp.extensions.push_back(webrtc::RtpExtension(
webrtc::RtpExtension::kTransportSequenceNumberUri,
test::kTransportSequenceNumberExtensionId));
@@ -1349,10 +1338,10 @@
audio_config.rtcp_send_transport = &transport;
audio_config.voe_channel_id = voe.receive_channel_id;
audio_config.rtp.remote_ssrc = audio_send_config_.rtp.ssrc;
- audio_config.rtp.transport_cc = params_.call.send_side_bwe;
+ audio_config.rtp.transport_cc = params_.common.send_side_bwe;
audio_config.rtp.extensions = audio_send_config_.rtp.extensions;
audio_config.decoder_factory = decoder_factory_;
- if (params_.audio.sync_video)
+ if (params_.audio_video_sync)
audio_config.sync_group = kSyncGroup;
audio_receive_stream = call->CreateAudioReceiveStream(audio_config);
@@ -1369,7 +1358,7 @@
video_send_stream_->Start();
capturer_->Start();
- if (params_.audio.enabled) {
+ if (params_.audio) {
// Start receiving audio.
audio_receive_stream->Start();
EXPECT_EQ(0, voe.base->StartPlayout(voe.receive_channel_id));
@@ -1382,7 +1371,7 @@
test::PressEnterToContinue();
- if (params_.audio.enabled) {
+ if (params_.audio) {
// Stop sending audio.
EXPECT_EQ(0, voe.base->StopSend(voe.send_channel_id));
audio_send_stream_->Stop();
@@ -1401,22 +1390,22 @@
call->DestroyVideoReceiveStream(video_receive_stream);
call->DestroyVideoSendStream(video_send_stream_);
- if (params_.audio.enabled) {
- call->DestroyAudioSendStream(audio_send_stream_);
- call->DestroyAudioReceiveStream(audio_receive_stream);
+ if (params_.audio) {
+ call->DestroyAudioSendStream(audio_send_stream_);
+ call->DestroyAudioReceiveStream(audio_receive_stream);
}
transport.StopSending();
- if (params_.audio.enabled)
+ if (params_.audio)
DestroyVoiceEngine(&voe);
}
void VideoQualityTest::StartEncodedFrameLogs(VideoSendStream* stream) {
- if (!params_.video.encoded_frame_base_path.empty()) {
+ if (!params_.common.encoded_frame_base_path.empty()) {
std::ostringstream str;
str << send_logs_++;
std::string prefix =
- params_.video.encoded_frame_base_path + "." + str.str() + ".send.";
+ params_.common.encoded_frame_base_path + "." + str.str() + ".send.";
stream->EnableEncodedFrameRecording(
std::vector<rtc::PlatformFile>(
{rtc::CreatePlatformFile(prefix + "1.ivf"),
@@ -1426,11 +1415,11 @@
}
}
void VideoQualityTest::StartEncodedFrameLogs(VideoReceiveStream* stream) {
- if (!params_.video.encoded_frame_base_path.empty()) {
+ if (!params_.common.encoded_frame_base_path.empty()) {
std::ostringstream str;
str << receive_logs_++;
std::string path =
- params_.video.encoded_frame_base_path + "." + str.str() + ".recv.ivf";
+ params_.common.encoded_frame_base_path + "." + str.str() + ".recv.ivf";
stream->EnableEncodedFrameRecording(rtc::CreatePlatformFile(path),
10000000);
}
« no previous file with comments | « webrtc/video/video_quality_test.h ('k') | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698