Index: webrtc/video/video_loopback.cc
diff --git a/webrtc/video/video_loopback.cc b/webrtc/video/video_loopback.cc
index 6c2a1630fda8c1c4c16f63f56cbb4d7f6cb5fd1f..66bafbb6f00165bb0c894ec1d6f05a216ca746d3 100644
--- a/webrtc/video/video_loopback.cc
+++ b/webrtc/video/video_loopback.cc
@@ -10,7 +10,7 @@
 #include <stdio.h>
-#include "gflags/gflags.h"
+#include "webrtc/base/flags.h"
 #include "webrtc/test/field_trial.h"
 #include "webrtc/test/gtest.h"
 #include "webrtc/test/run_test.h"
@@ -20,171 +20,177 @@ namespace webrtc {
 namespace flags {
 // Flags common with screenshare loopback, with different default values.
-DEFINE_int32(width, 640, "Video width.");
+DEFINE_int(width, 640, "Video width.");
 size_t Width() {
-  return static_cast<size_t>(FLAGS_width);
+  return static_cast<size_t>(FLAG_width);
 }
-DEFINE_int32(height, 480, "Video height.");
+DEFINE_int(height, 480, "Video height.");
 size_t Height() {
-  return static_cast<size_t>(FLAGS_height);
+  return static_cast<size_t>(FLAG_height);
 }
-DEFINE_int32(fps, 30, "Frames per second.");
+DEFINE_int(fps, 30, "Frames per second.");
 int Fps() {
-  return static_cast<int>(FLAGS_fps);
+  return static_cast<int>(FLAG_fps);
 }
-DEFINE_int32(capture_device_index, 0, "Capture device to select");
+DEFINE_int(capture_device_index, 0, "Capture device to select");
 size_t GetCaptureDevice() {
-  return static_cast<size_t>(FLAGS_capture_device_index);
+  return static_cast<size_t>(FLAG_capture_device_index);
 }
-DEFINE_int32(min_bitrate, 50, "Call and stream min bitrate in kbps.");
+DEFINE_int(min_bitrate, 50, "Call and stream min bitrate in kbps.");
 int MinBitrateKbps() {
-  return static_cast<int>(FLAGS_min_bitrate);
+  return static_cast<int>(FLAG_min_bitrate);
 }
-DEFINE_int32(start_bitrate, 300, "Call start bitrate in kbps.");
+DEFINE_int(start_bitrate, 300, "Call start bitrate in kbps.");
 int StartBitrateKbps() {
-  return static_cast<int>(FLAGS_start_bitrate);
+  return static_cast<int>(FLAG_start_bitrate);
 }
-DEFINE_int32(target_bitrate, 800, "Stream target bitrate in kbps.");
+DEFINE_int(target_bitrate, 800, "Stream target bitrate in kbps.");
 int TargetBitrateKbps() {
-  return static_cast<int>(FLAGS_target_bitrate);
+  return static_cast<int>(FLAG_target_bitrate);
 }
-DEFINE_int32(max_bitrate, 800, "Call and stream max bitrate in kbps.");
+DEFINE_int(max_bitrate, 800, "Call and stream max bitrate in kbps.");
 int MaxBitrateKbps() {
-  return static_cast<int>(FLAGS_max_bitrate);
+  return static_cast<int>(FLAG_max_bitrate);
 }
 DEFINE_bool(suspend_below_min_bitrate,
             false,
             "Suspends video below the configured min bitrate.");
-DEFINE_int32(num_temporal_layers,
-             1,
-             "Number of temporal layers. Set to 1-4 to override.");
+DEFINE_int(num_temporal_layers,
+           1,
+           "Number of temporal layers. Set to 1-4 to override.");
 int NumTemporalLayers() {
-  return static_cast<int>(FLAGS_num_temporal_layers);
+  return static_cast<int>(FLAG_num_temporal_layers);
 }
 // Flags common with screenshare loopback, with equal default values.
 DEFINE_string(codec, "VP8", "Video codec to use.");
 std::string Codec() {
-  return static_cast<std::string>(FLAGS_codec);
+  return static_cast<std::string>(FLAG_codec);
 }
-DEFINE_int32(selected_tl,
-             -1,
-             "Temporal layer to show or analyze. -1 to disable filtering.");
+DEFINE_int(selected_tl,
+           -1,
+           "Temporal layer to show or analyze. -1 to disable filtering.");
 int SelectedTL() {
-  return static_cast<int>(FLAGS_selected_tl);
+  return static_cast<int>(FLAG_selected_tl);
 }
-DEFINE_int32(
+DEFINE_int(
     duration,
     0,
     "Duration of the test in seconds. If 0, rendered will be shown instead.");
 int DurationSecs() {
-  return static_cast<int>(FLAGS_duration);
+  return static_cast<int>(FLAG_duration);
 }
 DEFINE_string(output_filename, "", "Target graph data filename.");
 std::string OutputFilename() {
-  return static_cast<std::string>(FLAGS_output_filename);
+  return static_cast<std::string>(FLAG_output_filename);
 }
 DEFINE_string(graph_title,
               "",
               "If empty, title will be generated automatically.");
 std::string GraphTitle() {
-  return static_cast<std::string>(FLAGS_graph_title);
+  return static_cast<std::string>(FLAG_graph_title);
 }
-DEFINE_int32(loss_percent, 0, "Percentage of packets randomly lost.");
+DEFINE_int(loss_percent, 0, "Percentage of packets randomly lost.");
 int LossPercent() {
-  return static_cast<int>(FLAGS_loss_percent);
+  return static_cast<int>(FLAG_loss_percent);
 }
-DEFINE_int32(avg_burst_loss_length,
-             -1,
-             "Average burst length of lost packets.");
+DEFINE_int(avg_burst_loss_length, -1, "Average burst length of lost packets.");
 int AvgBurstLossLength() {
-  return static_cast<int>(FLAGS_avg_burst_loss_length);
+  return static_cast<int>(FLAG_avg_burst_loss_length);
 }
-DEFINE_int32(link_capacity,
-             0,
-             "Capacity (kbps) of the fake link. 0 means infinite.");
+DEFINE_int(link_capacity,
+           0,
+           "Capacity (kbps) of the fake link. 0 means infinite.");
 int LinkCapacityKbps() {
-  return static_cast<int>(FLAGS_link_capacity);
+  return static_cast<int>(FLAG_link_capacity);
 }
-DEFINE_int32(queue_size, 0, "Size of the bottleneck link queue in packets.");
+DEFINE_int(queue_size, 0, "Size of the bottleneck link queue in packets.");
 int QueueSize() {
-  return static_cast<int>(FLAGS_queue_size);
+  return static_cast<int>(FLAG_queue_size);
 }
-DEFINE_int32(avg_propagation_delay_ms,
-             0,
-             "Average link propagation delay in ms.");
+DEFINE_int(avg_propagation_delay_ms,
+           0,
+           "Average link propagation delay in ms.");
 int AvgPropagationDelayMs() {
-  return static_cast<int>(FLAGS_avg_propagation_delay_ms);
+  return static_cast<int>(FLAG_avg_propagation_delay_ms);
 }
-DEFINE_int32(std_propagation_delay_ms,
-             0,
-             "Link propagation delay standard deviation in ms.");
+DEFINE_int(std_propagation_delay_ms,
+           0,
+           "Link propagation delay standard deviation in ms.");
 int StdPropagationDelayMs() {
-  return static_cast<int>(FLAGS_std_propagation_delay_ms);
+  return static_cast<int>(FLAG_std_propagation_delay_ms);
 }
-DEFINE_int32(selected_stream, 0, "ID of the stream to show or analyze.");
+DEFINE_int(num_streams, 0, "Number of streams to show or analyze.");
+int NumStreams() {
+  return static_cast<int>(FLAG_num_streams);
+}
+
+DEFINE_int(selected_stream,
+           0,
+           "ID of the stream to show or analyze. "
+           "Set to the number of streams to show them all.");
 int SelectedStream() {
-  return static_cast<int>(FLAGS_selected_stream);
+  return static_cast<int>(FLAG_selected_stream);
 }
-DEFINE_int32(num_spatial_layers, 1, "Number of spatial layers to use.");
+DEFINE_int(num_spatial_layers, 1, "Number of spatial layers to use.");
 int NumSpatialLayers() {
-  return static_cast<int>(FLAGS_num_spatial_layers);
+  return static_cast<int>(FLAG_num_spatial_layers);
 }
-DEFINE_int32(selected_sl,
-             -1,
-             "Spatial layer to show or analyze. -1 to disable filtering.");
+DEFINE_int(selected_sl,
+           -1,
+           "Spatial layer to show or analyze. -1 to disable filtering.");
 int SelectedSL() {
-  return static_cast<int>(FLAGS_selected_sl);
+  return static_cast<int>(FLAG_selected_sl);
 }
 DEFINE_string(stream0,
               "",
               "Comma separated values describing VideoStream for stream #0.");
 std::string Stream0() {
-  return static_cast<std::string>(FLAGS_stream0);
+  return static_cast<std::string>(FLAG_stream0);
 }
 DEFINE_string(stream1,
               "",
               "Comma separated values describing VideoStream for stream #1.");
 std::string Stream1() {
-  return static_cast<std::string>(FLAGS_stream1);
+  return static_cast<std::string>(FLAG_stream1);
 }
 DEFINE_string(sl0,
               "",
               "Comma separated values describing SpatialLayer for layer #0.");
 std::string SL0() {
-  return static_cast<std::string>(FLAGS_sl0);
+  return static_cast<std::string>(FLAG_sl0);
 }
 DEFINE_string(sl1,
               "",
               "Comma separated values describing SpatialLayer for layer #1.");
 std::string SL1() {
-  return static_cast<std::string>(FLAGS_sl1);
+  return static_cast<std::string>(FLAG_sl1);
 }
 DEFINE_string(encoded_frame_path,
@@ -192,7 +198,7 @@ DEFINE_string(encoded_frame_path,
               "The base path for encoded frame logs. Created files will have "
               "the form <encoded_frame_path>.<n>.(recv|send.<m>).ivf");
 std::string EncodedFramePath() {
-  return static_cast<std::string>(FLAGS_encoded_frame_path);
+  return static_cast<std::string>(FLAG_encoded_frame_path);
 }
 DEFINE_bool(logs, false, "print logs to stderr");
@@ -227,9 +233,11 @@ DEFINE_string(clip,
               "",
               "Name of the clip to show. If empty, using chroma generator.");
 std::string Clip() {
-  return static_cast<std::string>(FLAGS_clip);
+  return static_cast<std::string>(FLAG_clip);
 }
+DEFINE_bool(help, false, "prints this message");
+
 }  // namespace flags
 void Loopback() {
@@ -240,7 +248,7 @@ void Loopback() {
   pipe_config.queue_length_packets = flags::QueueSize();
   pipe_config.queue_delay_ms = flags::AvgPropagationDelayMs();
   pipe_config.delay_standard_deviation_ms = flags::StdPropagationDelayMs();
-  pipe_config.allow_reordering = flags::FLAGS_allow_reordering;
+  pipe_config.allow_reordering = flags::FLAG_allow_reordering;
   Call::Config::BitrateConfig call_bitrate_config;
   call_bitrate_config.min_bitrate_bps = flags::MinBitrateKbps() * 1000;
@@ -248,31 +256,36 @@ void Loopback() {
   call_bitrate_config.max_bitrate_bps = flags::MaxBitrateKbps() * 1000;
   VideoQualityTest::Params params;
-  params.call = {flags::FLAGS_send_side_bwe, call_bitrate_config};
-  params.video = {flags::FLAGS_video,
+  params.call = {flags::FLAG_send_side_bwe, call_bitrate_config};
+  params.video = {flags::FLAG_video,
                   flags::Width(),
                   flags::Height(),
                   flags::Fps(),
                   flags::MinBitrateKbps() * 1000,
                   flags::TargetBitrateKbps() * 1000,
                   flags::MaxBitrateKbps() * 1000,
-                  flags::FLAGS_suspend_below_min_bitrate,
+                  flags::FLAG_suspend_below_min_bitrate,
                   flags::Codec(),
                   flags::NumTemporalLayers(),
                   flags::SelectedTL(),
                   0,  // No min transmit bitrate.
-                  flags::FLAGS_use_ulpfec,
-                  flags::FLAGS_use_flexfec,
+                  flags::FLAG_use_ulpfec,
+                  flags::FLAG_use_flexfec,
                   flags::EncodedFramePath(),
                   flags::Clip(),
                   flags::GetCaptureDevice()};
-  params.audio = {flags::FLAGS_audio, flags::FLAGS_audio_video_sync,
-                  flags::FLAGS_audio_dtx};
+  params.audio = {flags::FLAG_audio, flags::FLAG_audio_video_sync,
+                  flags::FLAG_audio_dtx};
   params.screenshare.enabled = false;
   params.analyzer = {"video", 0.0, 0.0, flags::DurationSecs(),
                      flags::OutputFilename(), flags::GraphTitle()};
   params.pipe = pipe_config;
-  params.logs = flags::FLAGS_logs;
+  params.logs = flags::FLAG_logs;
+
+  if (flags::NumStreams() > 1 && flags::Stream0().empty() &&
+      flags::Stream1().empty()) {
+    params.ss.infer_streams = true;
+  }
   std::vector<std::string> stream_descriptors;
   stream_descriptors.push_back(flags::Stream0());
@@ -281,7 +294,7 @@
   SL_descriptors.push_back(flags::SL0());
   SL_descriptors.push_back(flags::SL1());
   VideoQualityTest::FillScalabilitySettings(
-      &params, stream_descriptors, flags::SelectedStream(),
+      &params, stream_descriptors, flags::NumStreams(), flags::SelectedStream(),
       flags::NumSpatialLayers(), flags::SelectedSL(), SL_descriptors);
   VideoQualityTest test;
@@ -295,9 +308,16 @@ void Loopback() {
 int main(int argc, char* argv[]) {
   ::testing::InitGoogleTest(&argc, argv);
-  google::ParseCommandLineFlags(&argc, &argv, true);
-  webrtc::test::InitFieldTrialsFromString(
-      webrtc::flags::FLAGS_force_fieldtrials);
+  rtc::FlagList::SetFlagsFromCommandLine(&argc, argv, true);
+  if (webrtc::flags::FLAG_help) {
+    rtc::FlagList::Print(nullptr, false);
+    return 0;
+  }
+
+  // InitFieldTrialsFromString needs a reference to an std::string instance,
+  // with a scope that outlives the test.
+  std::string field_trials = webrtc::flags::FLAG_force_fieldtrials;
+  webrtc::test::InitFieldTrialsFromString(field_trials);
   webrtc::test::RunTest(webrtc::Loopback);
   return 0;
 }
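
Note on the flag API this patch migrates to: gflags' DEFINE_int32/FLAGS_<name> and google::ParseCommandLineFlags are replaced by the DEFINE_int/FLAG_<name> macros from webrtc/base/flags.h and the rtc::FlagList parser. The sketch below is a minimal standalone illustration of that pattern, not part of the change; it assumes nothing beyond the macros and calls that already appear in the diff (DEFINE_int, DEFINE_bool, FLAG_<name>, rtc::FlagList::SetFlagsFromCommandLine, rtc::FlagList::Print), and the file name and flag names are made up for the example.

// flag_sketch.cc: minimal example of the webrtc/base/flags.h pattern.
#include <stdio.h>

#include "webrtc/base/flags.h"

// Each DEFINE_* macro registers a flag and creates a FLAG_<name> variable
// initialized to the given default.
DEFINE_int(width, 640, "Video width.");
DEFINE_bool(help, false, "prints this message");

int main(int argc, char* argv[]) {
  // Unlike gflags' ParseCommandLineFlags(&argc, &argv, true), this parser
  // takes argv directly; the trailing 'true' mirrors the call in the patch.
  rtc::FlagList::SetFlagsFromCommandLine(&argc, argv, true);
  if (FLAG_help) {
    // The patch defines its own --help flag and prints the registered flags,
    // so the same is done here.
    rtc::FlagList::Print(nullptr, false);
    return 0;
  }
  printf("width = %d\n", FLAG_width);
  return 0;
}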