Index: webrtc/video/receive_statistics_proxy.cc |
diff --git a/webrtc/video/receive_statistics_proxy.cc b/webrtc/video/receive_statistics_proxy.cc |
index 38e91d7a8ef2a09bcc476add136152aae4475cba..c5e0c757c588444d901ed7fce7a2e1bad417e80b 100644 |
--- a/webrtc/video/receive_statistics_proxy.cc |
+++ b/webrtc/video/receive_statistics_proxy.cc |
@@ -12,8 +12,10 @@ |
#include <algorithm> |
#include <cmath> |
+#include <sstream> |
#include <utility> |
+#include "webrtc/modules/pacing/alr_detector.h" |
#include "webrtc/modules/video_coding/include/video_codec_interface.h" |
#include "webrtc/rtc_base/checks.h" |
#include "webrtc/rtc_base/logging.h" |
@@ -46,6 +48,20 @@ const int kHighVarianceThreshold = 2; |
// How large window we use to calculate the framerate/bitrate. |
const int kRateStatisticsWindowSizeMs = 1000; |
+ |
+std::string UmaPrefixForContentType(VideoContentType content_type) { |
+ std::stringstream ss; |
+ ss << "WebRTC.Video"; |
+ int simulcast_id = content_type.GetSimulcastId(); |
+ if (content_type.IsScreenshare()) { |
+ ss << ".Screenshare"; |
+ } |
+ if (simulcast_id > 0) { |
+ ss << "_S" << simulcast_id - 1; |
+ } |
+ return ss.str(); |
+} |
+ |
} // namespace |
ReceiveStatisticsProxy::ReceiveStatisticsProxy( |
@@ -75,10 +91,6 @@ ReceiveStatisticsProxy::ReceiveStatisticsProxy( |
render_fps_tracker_(100, 10u), |
render_pixel_tracker_(100, 10u), |
total_byte_tracker_(100, 10u), // bucket_interval_ms, bucket_count |
- e2e_delay_max_ms_video_(-1), |
- e2e_delay_max_ms_screenshare_(-1), |
- interframe_delay_max_ms_video_(-1), |
- interframe_delay_max_ms_screenshare_(-1), |
freq_offset_counter_(clock, nullptr, kFreqOffsetProcessIntervalMs), |
first_report_block_time_ms_(-1), |
avg_rtt_ms_(0), |
@@ -96,6 +108,22 @@ ReceiveStatisticsProxy::~ReceiveStatisticsProxy() { |
} |
void ReceiveStatisticsProxy::UpdateHistograms() { |
+ // Set synthetic field trial based on received experiment id. |
+ // Since experiment is set at the send side and is not changed during the |
+ // call, any received content type may be used. |
sprang_webrtc 2017/07/26 14:13:43:
Unfortunately this is not necessarily correct. I t… [reviewer comment truncated in extraction]
ilnik 2017/07/26 14:49:48:
This is a big problem, I realize now. What if seve… [reviewer comment truncated in extraction]
 |
+ int experiment_id = last_content_type_.GetExperimentId(); |
+ if (experiment_id != 0) { // 0 means no experiment is active. |
+ std::stringstream group_name; |
+ group_name << "Group-" << experiment_id - 1; |
+ field_trial::RegisterSyntheticFieldTrial( |
+ AlrDetector::kScreenshareProbingBweExperimentNameOnReceiveSide, |
+ group_name.str()); |
+ } else { |
+ // No experiment group specified. |
+ field_trial::RegisterSyntheticFieldTrial( |
+ AlrDetector::kScreenshareProbingBweExperimentNameOnReceiveSide, ""); |
+ } |
+ |
RTC_HISTOGRAM_COUNTS_100000( |
"WebRTC.Video.ReceiveStreamLifetimeInSeconds", |
(clock_->TimeInMilliseconds() - start_ms_) / 1000); |
@@ -186,62 +214,46 @@ void ReceiveStatisticsProxy::UpdateHistograms() { |
if (delay_ms != -1) |
RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.OnewayDelayInMs", delay_ms); |
- int e2e_delay_ms_video = e2e_delay_counter_video_.Avg(kMinRequiredSamples); |
- if (e2e_delay_ms_video != -1) { |
- RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.EndToEndDelayInMs", |
- e2e_delay_ms_video); |
- LOG(LS_INFO) << "WebRTC.Video.EndToEndDelayInMs " << e2e_delay_ms_video; |
- } |
- |
- int e2e_delay_ms_screenshare = |
- e2e_delay_counter_screenshare_.Avg(kMinRequiredSamples); |
- if (e2e_delay_ms_screenshare != -1) { |
- RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.Screenshare.EndToEndDelayInMs", |
- e2e_delay_ms_screenshare); |
- } |
- |
- int e2e_delay_max_ms_video = e2e_delay_max_ms_video_; |
- if (e2e_delay_max_ms_video != -1) { |
- RTC_HISTOGRAM_COUNTS_100000("WebRTC.Video.EndToEndDelayMaxInMs", |
- e2e_delay_max_ms_video); |
- } |
- |
- int e2e_delay_max_ms_screenshare = e2e_delay_max_ms_screenshare_; |
- if (e2e_delay_max_ms_screenshare != -1) { |
- RTC_HISTOGRAM_COUNTS_100000("WebRTC.Video.Screenshare.EndToEndDelayMaxInMs", |
- e2e_delay_max_ms_screenshare); |
- } |
- |
- int interframe_delay_ms_screenshare = |
- interframe_delay_counter_screenshare_.Avg(kMinRequiredSamples); |
- if (interframe_delay_ms_screenshare != -1) { |
- RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.Screenshare.InterframeDelayInMs", |
- interframe_delay_ms_screenshare); |
- } |
- |
- int interframe_delay_ms_video = |
- interframe_delay_counter_video_.Avg(kMinRequiredSamples); |
- if (interframe_delay_ms_video != -1) { |
- RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.InterframeDelayInMs", |
- interframe_delay_ms_video); |
- } |
- |
- int interframe_delay_max_ms_screenshare = |
- interframe_delay_max_ms_screenshare_; |
- if (interframe_delay_max_ms_screenshare != -1) { |
- RTC_HISTOGRAM_COUNTS_10000( |
- "WebRTC.Video.Screenshare.InterframeDelayMaxInMs", |
- interframe_delay_ms_screenshare); |
- } |
+ for (auto it : content_specific_stats_) { |
sprang_webrtc 2017/07/26 14:13:43:
const auto& it ?
 |
+ auto content_type = it.first; |
+ auto stats = it.second; |
sprang_webrtc 2017/07/26 14:13:43:
auto is good for |it| here, since its type is kind… [reviewer comment truncated in extraction]
 |
+ std::string uma_prefix = UmaPrefixForContentType(content_type); |
+ // The same line of code can't report different histograms because of how |
+ // macro is done. Additional index needed to be different for all different |
+ // names. Since experiment can't change during call, we could report 4*2 |
+ // different content types (4 simulcast_ids x 2 content types). |
+ // Conveniently all of these are stored in 3 lower bits in VideoContentType. |
+ int idx = content_type.content_type & 0x07; |
+ int e2e_delay_ms = stats.e2e_delay_counter.Avg(kMinRequiredSamples); |
+ if (e2e_delay_ms != -1) { |
+ RTC_HISTOGRAMS_COUNTS_10000(idx, uma_prefix + ".EndToEndDelayInMs", |
+ e2e_delay_ms); |
+ LOG(LS_INFO) << uma_prefix + ".EndToEndDelayInMs " << e2e_delay_ms; |
+ } |
+ int e2e_delay_max_ms = stats.e2e_delay_counter.Max(); |
- int interframe_delay_max_ms_video = interframe_delay_max_ms_video_; |
- if (interframe_delay_max_ms_video != -1) { |
- RTC_HISTOGRAM_COUNTS_10000( |
- "WebRTC.Video.InterframeDelayMaxInMs", |
- interframe_delay_ms_video); |
+ if (e2e_delay_max_ms != -1) { |
+ RTC_HISTOGRAMS_COUNTS_100000(idx, uma_prefix + ".EndToEndDelayMaxInMs", |
+ e2e_delay_max_ms); |
+ LOG(LS_INFO) << uma_prefix + ".EndToEndDelayMaxInMs " << e2e_delay_max_ms; |
+ } |
+ int interframe_delay_ms = |
+ stats.interframe_delay_counter.Avg(kMinRequiredSamples); |
+ if (interframe_delay_ms != -1) { |
+ RTC_HISTOGRAMS_COUNTS_10000(idx, uma_prefix + ".InterframeDelayInMs", |
+ interframe_delay_ms); |
+ LOG(LS_INFO) << uma_prefix + ".InterframeDelayInMs " |
+ << interframe_delay_ms; |
+ } |
+ int interframe_delay_max_ms = stats.interframe_delay_counter.Max(); |
+ if (interframe_delay_max_ms != -1) { |
+ RTC_HISTOGRAMS_COUNTS_10000(idx, uma_prefix + ".InterframeDelayMaxInMs", |
+ interframe_delay_max_ms); |
+ LOG(LS_INFO) << uma_prefix + ".InterframeDelayMaxInMs " |
+ << interframe_delay_max_ms; |
+ } |
} |
- |
StreamDataCounters rtp = stats_.rtp_stats; |
StreamDataCounters rtx; |
for (auto it : rtx_stats_) |
@@ -574,17 +586,13 @@ void ReceiveStatisticsProxy::OnDecodedFrame(rtc::Optional<uint8_t> qp, |
int64_t interframe_delay_ms = now - *last_decoded_frame_time_ms_; |
RTC_DCHECK_GE(interframe_delay_ms, 0); |
stats_.interframe_delay_sum_ms += interframe_delay_ms; |
- if (last_content_type_ == VideoContentType::SCREENSHARE) { |
- interframe_delay_counter_screenshare_.Add(interframe_delay_ms); |
- if (interframe_delay_max_ms_screenshare_ < interframe_delay_ms) { |
- interframe_delay_max_ms_screenshare_ = interframe_delay_ms; |
- } |
- } else { |
- interframe_delay_counter_video_.Add(interframe_delay_ms); |
- if (interframe_delay_max_ms_video_ < interframe_delay_ms) { |
- interframe_delay_max_ms_video_ = interframe_delay_ms; |
- } |
+ auto it = content_specific_stats_.find(last_content_type_); |
+ if (it == content_specific_stats_.end()) { |
+ content_specific_stats_[last_content_type_] = ContentSpecificStats(); |
+ it = content_specific_stats_.find(last_content_type_); |
} |
+ ContentSpecificStats* stats = &it->second; |
+ stats->interframe_delay_counter.Add(interframe_delay_ms); |
} |
last_decoded_frame_time_ms_.emplace(now); |
} |
@@ -609,14 +617,13 @@ void ReceiveStatisticsProxy::OnRenderedFrame(const VideoFrame& frame) { |
if (frame.ntp_time_ms() > 0) { |
int64_t delay_ms = clock_->CurrentNtpInMilliseconds() - frame.ntp_time_ms(); |
if (delay_ms >= 0) { |
- if (last_content_type_ == VideoContentType::SCREENSHARE) { |
- e2e_delay_max_ms_screenshare_ = |
- std::max(delay_ms, e2e_delay_max_ms_screenshare_); |
- e2e_delay_counter_screenshare_.Add(delay_ms); |
- } else { |
- e2e_delay_max_ms_video_ = std::max(delay_ms, e2e_delay_max_ms_video_); |
- e2e_delay_counter_video_.Add(delay_ms); |
+ auto it = content_specific_stats_.find(last_content_type_); |
+ if (it == content_specific_stats_.end()) { |
+ content_specific_stats_[last_content_type_] = ContentSpecificStats(); |
+ it = content_specific_stats_.find(last_content_type_); |
} |
+ ContentSpecificStats* stats = &it->second; |
+ stats->e2e_delay_counter.Add(delay_ms); |
} |
} |
@@ -687,6 +694,9 @@ void ReceiveStatisticsProxy::OnPreDecode( |
void ReceiveStatisticsProxy::SampleCounter::Add(int sample) { |
sum += sample; |
++num_samples; |
+ if (!max || *max < sample) { |
+ max.emplace(sample); |
+ } |
} |
int ReceiveStatisticsProxy::SampleCounter::Avg( |
@@ -696,9 +706,18 @@ int ReceiveStatisticsProxy::SampleCounter::Avg( |
return static_cast<int>(sum / num_samples); |
} |
+int ReceiveStatisticsProxy::SampleCounter::Max() const { |
+ if (!max) { |
+ return -1; |
+ } else { |
+ return static_cast<int>(*max); |
+ } |
sprang_webrtc 2017/07/26 14:13:43:
return max.value_or(-1);
 |
+} |
+ |
void ReceiveStatisticsProxy::SampleCounter::Reset() { |
num_samples = 0; |
sum = 0; |
+ max.reset(); |
} |
void ReceiveStatisticsProxy::OnRttUpdate(int64_t avg_rtt_ms, |