Index: webrtc/video/video_quality_test.cc |
diff --git a/webrtc/video/video_quality_test.cc b/webrtc/video/video_quality_test.cc |
index 3fa07db7fdbcc3f5a3ce60193b019daeb5229ddf..dbfe66593bf14d88387410dab9ec25fa8f9a5a61 100644 |
--- a/webrtc/video/video_quality_test.cc |
+++ b/webrtc/video/video_quality_test.cc |
@@ -54,6 +54,12 @@ constexpr int kSendStatsPollingIntervalMs = 1000; |
constexpr int kPayloadTypeH264 = 122; |
constexpr int kPayloadTypeVP8 = 123; |
constexpr int kPayloadTypeVP9 = 124; |
+const std::map<uint8_t, webrtc::MediaType> additional_pt_map = { |
nisse-webrtc
2017/04/06 12:23:56
What do you think about defining *all* payload types in this map, rather than only the additional ones?
minyue-webrtc
2017/04/06 18:45:10
Sure, I will try that.
|
+ {kPayloadTypeH264, webrtc::MediaType::VIDEO}, |
+ {kPayloadTypeVP8, webrtc::MediaType::VIDEO}, |
+ {kPayloadTypeVP9, webrtc::MediaType::VIDEO}, |
+}; |
+ |
constexpr size_t kMaxComparisons = 10; |
constexpr char kSyncGroup[] = "av_sync"; |
constexpr int kOpusMinBitrateBps = 6000; |
@@ -1589,11 +1595,15 @@ void VideoQualityTest::RunWithAnalyzer(const Params& params) { |
call_config.bitrate_config = params.call.call_bitrate_config; |
CreateCalls(call_config, call_config); |
+ std::map<uint8_t, MediaType> payload_type_map = payload_type_map_; |
+ payload_type_map.insert(additional_pt_map.begin(), additional_pt_map.end()); |
+ |
test::LayerFilteringTransport send_transport( |
params_.pipe, sender_call_.get(), kPayloadTypeVP8, kPayloadTypeVP9, |
- params_.video.selected_tl, params_.ss.selected_sl); |
- test::DirectTransport recv_transport( |
- params_.pipe, receiver_call_.get(), MediaType::VIDEO); |
+ params_.video.selected_tl, params_.ss.selected_sl, payload_type_map); |
+ |
+ test::DirectTransport recv_transport(params_.pipe, receiver_call_.get(), |
+ payload_type_map); |
std::string graph_title = params_.analyzer.graph_title; |
if (graph_title.empty()) |
@@ -1712,7 +1722,7 @@ void VideoQualityTest::SetupAudio(int send_channel_id, |
audio_send_config_.max_bitrate_bps = kOpusBitrateFbBps; |
} |
audio_send_config_.send_codec_spec.codec_inst = |
- CodecInst{120, "OPUS", 48000, 960, 2, 64000}; |
+ CodecInst{kAudioSendPayloadType, "OPUS", 48000, 960, 2, 64000}; |
audio_send_config_.send_codec_spec.enable_opus_dtx = params_.audio.dtx; |
audio_send_stream_ = call->CreateAudioSendStream(audio_send_config_); |
@@ -1724,6 +1734,7 @@ void VideoQualityTest::SetupAudio(int send_channel_id, |
audio_config.rtp.transport_cc = params_.call.send_side_bwe; |
audio_config.rtp.extensions = audio_send_config_.rtp.extensions; |
audio_config.decoder_factory = decoder_factory_; |
+ audio_config.decoder_map = {{kAudioSendPayloadType, {"OPUS", 48000, 2}}}; |
if (params_.video.enabled && params_.audio.sync_video) |
audio_config.sync_group = kSyncGroup; |
@@ -1753,9 +1764,12 @@ void VideoQualityTest::RunWithRenderers(const Params& params) { |
// TODO(minyue): consider if this is a good transport even for audio only |
// calls. |
+ std::map<uint8_t, MediaType> payload_type_map = payload_type_map_; |
+ payload_type_map.insert(additional_pt_map.begin(), additional_pt_map.end()); |
test::LayerFilteringTransport transport( |
params.pipe, call.get(), kPayloadTypeVP8, kPayloadTypeVP9, |
- params.video.selected_tl, params_.ss.selected_sl); |
+ params.video.selected_tl, params_.ss.selected_sl, payload_type_map); |
+ |
// TODO(ivica): Use two calls to be able to merge with RunWithAnalyzer or at |
// least share as much code as possible. That way this test would also match |
// the full stack tests better. |