Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(1042)

Unified Diff: webrtc/video/receive_statistics_proxy.cc

Issue 2986893002: Piggybacking simulcast id and ALR experiment id into video content type extension. (Closed)
Patch Set: Add metrics sliced on AlrExperiment group Created 3 years, 4 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View side-by-side diff with in-line comments
Download patch
Index: webrtc/video/receive_statistics_proxy.cc
diff --git a/webrtc/video/receive_statistics_proxy.cc b/webrtc/video/receive_statistics_proxy.cc
index 61c264fcbe4eae9a547ac6201f9fd71bbe920999..8dab4bd2a6d32b0e184fe78c1be0bf13c1260ab7 100644
--- a/webrtc/video/receive_statistics_proxy.cc
+++ b/webrtc/video/receive_statistics_proxy.cc
@@ -12,8 +12,10 @@
#include <algorithm>
#include <cmath>
+#include <sstream>
#include <utility>
+#include "webrtc/modules/pacing/alr_detector.h"
#include "webrtc/modules/video_coding/include/video_codec_interface.h"
#include "webrtc/rtc_base/checks.h"
#include "webrtc/rtc_base/logging.h"
@@ -45,6 +47,42 @@ const int kHighVarianceThreshold = 2;
// How large window we use to calculate the framerate/bitrate.
const int kRateStatisticsWindowSizeMs = 1000;
+
+std::string UmaPrefixForContentType(VideoContentType content_type) {
+ std::stringstream ss;
+ ss << "WebRTC.Video";
+ if (content_type.IsScreenshare()) {
+ ss << ".Screenshare";
+ }
+ return ss.str();
+}
+
+std::string UmaSuffixForContentType(VideoContentType content_type) {
+ std::stringstream ss;
+ int simulcast_id = content_type.GetSimulcastId();
+ if (simulcast_id > 0) {
+ ss << ".S" << simulcast_id - 1;
+ }
+ return ss.str();
+}
+
+std::string UmaPrefixForExperiment(VideoContentType content_type) {
+ std::stringstream ss;
+ ss << "WebRTC.Video.AlrExperiment";
+ if (content_type.IsScreenshare()) {
+ ss << ".Screenshare";
+ }
+ return ss.str();
+}
+
+std::string UmaSuffixForExperiment(VideoContentType content_type) {
+ std::stringstream ss;
+ int experiment_id = content_type.GetExperimentId();
+ if (experiment_id > 0) {
+ ss << ".Group" << experiment_id - 1;
+ }
+ return ss.str();
+}
} // namespace
ReceiveStatisticsProxy::ReceiveStatisticsProxy(
@@ -74,10 +112,6 @@ ReceiveStatisticsProxy::ReceiveStatisticsProxy(
render_fps_tracker_(100, 10u),
render_pixel_tracker_(100, 10u),
total_byte_tracker_(100, 10u), // bucket_interval_ms, bucket_count
- e2e_delay_max_ms_video_(-1),
- e2e_delay_max_ms_screenshare_(-1),
- interframe_delay_max_ms_video_(-1),
- interframe_delay_max_ms_screenshare_(-1),
freq_offset_counter_(clock, nullptr, kFreqOffsetProcessIntervalMs),
first_report_block_time_ms_(-1),
avg_rtt_ms_(0),
@@ -95,6 +129,9 @@ ReceiveStatisticsProxy::~ReceiveStatisticsProxy() {
}
void ReceiveStatisticsProxy::UpdateHistograms() {
+  // Used to report experiment-specific metrics below.
+ int experiment_id = last_content_type_.GetExperimentId();
+
RTC_HISTOGRAM_COUNTS_100000(
"WebRTC.Video.ReceiveStreamLifetimeInSeconds",
(clock_->TimeInMilliseconds() - start_ms_) / 1000);
@@ -185,53 +222,79 @@ void ReceiveStatisticsProxy::UpdateHistograms() {
if (delay_ms != -1)
RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.OnewayDelayInMs", delay_ms);
- int e2e_delay_ms_video = e2e_delay_counter_video_.Avg(kMinRequiredSamples);
- if (e2e_delay_ms_video != -1) {
- RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.EndToEndDelayInMs",
- e2e_delay_ms_video);
- LOG(LS_INFO) << "WebRTC.Video.EndToEndDelayInMs " << e2e_delay_ms_video;
- }
-
- int e2e_delay_ms_screenshare =
- e2e_delay_counter_screenshare_.Avg(kMinRequiredSamples);
- if (e2e_delay_ms_screenshare != -1) {
- RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.Screenshare.EndToEndDelayInMs",
- e2e_delay_ms_screenshare);
- }
-
- int e2e_delay_max_ms_video = e2e_delay_max_ms_video_;
- if (e2e_delay_max_ms_video != -1) {
- RTC_HISTOGRAM_COUNTS_100000("WebRTC.Video.EndToEndDelayMaxInMs",
- e2e_delay_max_ms_video);
+ int avg_interframe_delay = 0;
+ int max_interframe_delay = -1;
+ int interframe_delay_slices = 0;
+ int avg_e2e_delay = 0;
+ int max_e2e_delay = -1;
+ int e2e_delay_slices = 0;
+
+ // Slice on content type and simulcast id.
+ for (auto it : content_specific_stats_) {
+ auto content_type = it.first;
+ auto stats = it.second;
+ std::string uma_prefix = UmaPrefixForContentType(content_type);
+ std::string uma_suffix = UmaSuffixForContentType(content_type);
+    // The same line of code can't report different histograms because of how
+    // the macro is implemented. The additional index has to be unique for each
+    // distinct histogram name. Since the experiment can't change during a
+    // call, we can report 4*2 different slices (4 simulcast ids x 2 content
+    // types). Conveniently, all of these are stored in the 3 lower bits of
+    // VideoContentType.
+ int idx = content_type.content_type & 0x07;
+ int e2e_delay_ms = stats.e2e_delay_counter.Avg(kMinRequiredSamples);
+ if (e2e_delay_ms != -1) {
+ RTC_HISTOGRAMS_COUNTS_10000(
+ idx, uma_prefix + ".EndToEndDelayInMs" + uma_suffix, e2e_delay_ms);
+ LOG(LS_INFO) << uma_prefix << ".EndToEndDelayInMs" << uma_suffix << " "
+ << e2e_delay_ms;
+ avg_e2e_delay += e2e_delay_ms;
+ ++e2e_delay_slices;
+ }
+ int e2e_delay_max_ms = stats.e2e_delay_counter.Max();
+
+ if (e2e_delay_max_ms != -1 && e2e_delay_ms != -1) {
+ RTC_HISTOGRAMS_COUNTS_100000(
+ idx, uma_prefix + ".EndToEndDelayMaxInMs" + uma_suffix,
+ e2e_delay_max_ms);
+ LOG(LS_INFO) << uma_prefix << ".EndToEndDelayMaxInMs" << uma_suffix << " "
+ << e2e_delay_max_ms;
+ if (e2e_delay_max_ms > max_e2e_delay)
+ max_e2e_delay = e2e_delay_max_ms;
+ }
+ int interframe_delay_ms =
+ stats.interframe_delay_counter.Avg(kMinRequiredSamples);
+ if (interframe_delay_ms != -1) {
+ RTC_HISTOGRAMS_COUNTS_10000(
+ idx, uma_prefix + ".InterframeDelayInMs" + uma_suffix,
+ interframe_delay_ms);
+ LOG(LS_INFO) << uma_prefix << ".InterframeDelayInMs" << uma_suffix << " "
+ << interframe_delay_ms;
+ avg_interframe_delay += interframe_delay_ms;
+ ++interframe_delay_slices;
+ }
+ int interframe_delay_max_ms = stats.interframe_delay_counter.Max();
+ if (interframe_delay_max_ms != -1 && interframe_delay_ms != -1) {
+ RTC_HISTOGRAMS_COUNTS_10000(
+ idx, uma_prefix + ".InterframeDelayMaxInMs" + uma_suffix,
+ interframe_delay_max_ms);
+ LOG(LS_INFO) << uma_prefix << ".InterframeDelayMaxInMs" << uma_suffix
+ << " " << interframe_delay_max_ms;
+ if (interframe_delay_max_ms > max_interframe_delay)
+ max_interframe_delay = interframe_delay_max_ms;
+ }
}
-
- int e2e_delay_max_ms_screenshare = e2e_delay_max_ms_screenshare_;
- if (e2e_delay_max_ms_screenshare != -1) {
- RTC_HISTOGRAM_COUNTS_100000("WebRTC.Video.Screenshare.EndToEndDelayMaxInMs",
- e2e_delay_max_ms_screenshare);
+ if (e2e_delay_slices > 0) {
+ avg_e2e_delay /= e2e_delay_slices;
+ } else {
+ avg_e2e_delay = -1;
}
-
- int interframe_delay_ms_screenshare =
- interframe_delay_counter_screenshare_.Avg(kMinRequiredSamples);
- if (interframe_delay_ms_screenshare != -1) {
- RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.Screenshare.InterframeDelayInMs",
- interframe_delay_ms_screenshare);
- RTC_DCHECK_GE(interframe_delay_max_ms_screenshare_,
- interframe_delay_ms_screenshare);
- RTC_HISTOGRAM_COUNTS_10000(
- "WebRTC.Video.Screenshare.InterframeDelayMaxInMs",
- interframe_delay_max_ms_screenshare_);
+ if (interframe_delay_slices > 0) {
+ avg_interframe_delay /= interframe_delay_slices;
+ } else {
+ avg_interframe_delay = -1;
}
- int interframe_delay_ms_video =
- interframe_delay_counter_video_.Avg(kMinRequiredSamples);
- if (interframe_delay_ms_video != -1) {
- RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.InterframeDelayInMs",
- interframe_delay_ms_video);
- RTC_DCHECK_GE(interframe_delay_max_ms_video_, interframe_delay_ms_video);
- RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.InterframeDelayMaxInMs",
- interframe_delay_max_ms_video_);
- }
+ int media_bitrate = 0;
StreamDataCounters rtp = stats_.rtp_stats;
StreamDataCounters rtx;
@@ -246,9 +309,10 @@ void ReceiveStatisticsProxy::UpdateHistograms() {
"WebRTC.Video.BitrateReceivedInKbps",
static_cast<int>(rtp_rtx.transmitted.TotalBytes() * 8 / elapsed_sec /
1000));
- RTC_HISTOGRAM_COUNTS_10000(
- "WebRTC.Video.MediaBitrateReceivedInKbps",
- static_cast<int>(rtp.MediaPayloadBytes() * 8 / elapsed_sec / 1000));
+ media_bitrate =
+ static_cast<int>(rtp.MediaPayloadBytes() * 8 / elapsed_sec / 1000);
+ RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.MediaBitrateReceivedInKbps",
+ media_bitrate);
RTC_HISTOGRAM_COUNTS_10000(
"WebRTC.Video.PaddingBitrateReceivedInKbps",
static_cast<int>(rtp_rtx.transmitted.padding_bytes * 8 / elapsed_sec /
@@ -280,6 +344,46 @@ void ReceiveStatisticsProxy::UpdateHistograms() {
}
}
+ // Report metrics sliced on ALR experiment group.
+ if (experiment_id > 0 && content_specific_stats_.size() > 0) {
+ VideoContentType content_type = content_specific_stats_.begin()->first;
+ std::string uma_prefix = UmaPrefixForExperiment(content_type);
+ std::string uma_suffix = UmaSuffixForExperiment(content_type);
+ if (avg_e2e_delay != -1) {
+ RTC_HISTOGRAM_COUNTS_10000(uma_prefix + ".EndToEndDelayInMs" + uma_suffix,
+ avg_e2e_delay);
+ LOG(LS_INFO) << uma_prefix << ".EndToEndDelayInMs" << uma_suffix << " "
+ << avg_e2e_delay;
+ }
+ if (max_e2e_delay != -1) {
+ RTC_HISTOGRAM_COUNTS_100000(
+ uma_prefix + ".EndToEndDelayMaxInMs" + uma_suffix, max_e2e_delay);
+ LOG(LS_INFO) << uma_prefix << ".EndToEndDelayMaxInMs" << uma_suffix << " "
+ << max_e2e_delay;
+ }
+ if (avg_interframe_delay != -1) {
+ RTC_HISTOGRAM_COUNTS_10000(
+ uma_prefix + ".InterframeDelayInMs" + uma_suffix,
+ avg_interframe_delay);
+ LOG(LS_INFO) << uma_prefix << ".InterframeDelayInMs" << uma_suffix << " "
+ << avg_interframe_delay;
+ }
+ if (max_interframe_delay != -1) {
+ RTC_HISTOGRAM_COUNTS_10000(
+ uma_prefix + ".InterframeDelayMaxInMs" + uma_suffix,
+ max_interframe_delay);
+ LOG(LS_INFO) << uma_prefix << ".InterframeDelayMaxInMs" << uma_suffix
+ << " " << max_interframe_delay;
+ }
+ if (media_bitrate != 0) {
+ RTC_HISTOGRAM_COUNTS_10000(
+ uma_prefix + ".MediaBitrateReceivedInKbps" + uma_suffix,
+ media_bitrate);
+ LOG(LS_INFO) << uma_prefix << ".MediaBitrateReceivedInKbps" << uma_suffix
+ << " " << media_bitrate;
+ }
+ }
+
sprang_webrtc 2017/08/24 09:13:16 Why isn't this reported in the same way as for the
ilnik 2017/08/25 12:35:07 Because currently we can't get slice on both simul
if (num_certain_states_ >= kBadCallMinRequiredSamples) {
RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.BadCall.Any",
100 * num_bad_states_ / num_certain_states_);
@@ -545,17 +649,13 @@ void ReceiveStatisticsProxy::OnDecodedFrame(rtc::Optional<uint8_t> qp,
int64_t interframe_delay_ms = now - *last_decoded_frame_time_ms_;
RTC_DCHECK_GE(interframe_delay_ms, 0);
stats_.interframe_delay_sum_ms += interframe_delay_ms;
- if (last_content_type_ == VideoContentType::SCREENSHARE) {
- interframe_delay_counter_screenshare_.Add(interframe_delay_ms);
- if (interframe_delay_max_ms_screenshare_ < interframe_delay_ms) {
- interframe_delay_max_ms_screenshare_ = interframe_delay_ms;
- }
- } else {
- interframe_delay_counter_video_.Add(interframe_delay_ms);
- if (interframe_delay_max_ms_video_ < interframe_delay_ms) {
- interframe_delay_max_ms_video_ = interframe_delay_ms;
- }
+ auto it = content_specific_stats_.find(last_content_type_);
+ if (it == content_specific_stats_.end()) {
+ content_specific_stats_[last_content_type_] = ContentSpecificStats();
+ it = content_specific_stats_.find(last_content_type_);
}
sprang_webrtc 2017/08/24 09:13:16 I think you can just do auto it = content_specif
ilnik 2017/08/25 12:35:07 Redone using plain [] operator. According to docum
+ ContentSpecificStats* stats = &it->second;
+ stats->interframe_delay_counter.Add(interframe_delay_ms);
}
last_decoded_frame_time_ms_.emplace(now);
}
@@ -580,14 +680,13 @@ void ReceiveStatisticsProxy::OnRenderedFrame(const VideoFrame& frame) {
if (frame.ntp_time_ms() > 0) {
int64_t delay_ms = clock_->CurrentNtpInMilliseconds() - frame.ntp_time_ms();
if (delay_ms >= 0) {
- if (last_content_type_ == VideoContentType::SCREENSHARE) {
- e2e_delay_max_ms_screenshare_ =
- std::max(delay_ms, e2e_delay_max_ms_screenshare_);
- e2e_delay_counter_screenshare_.Add(delay_ms);
- } else {
- e2e_delay_max_ms_video_ = std::max(delay_ms, e2e_delay_max_ms_video_);
- e2e_delay_counter_video_.Add(delay_ms);
+ auto it = content_specific_stats_.find(last_content_type_);
+ if (it == content_specific_stats_.end()) {
+ content_specific_stats_[last_content_type_] = ContentSpecificStats();
+ it = content_specific_stats_.find(last_content_type_);
}
+ ContentSpecificStats* stats = &it->second;
+ stats->e2e_delay_counter.Add(delay_ms);
}
}
}
@@ -659,6 +758,9 @@ void ReceiveStatisticsProxy::OnStreamInactive() {
void ReceiveStatisticsProxy::SampleCounter::Add(int sample) {
sum += sample;
++num_samples;
+ if (!max || *max < sample) {
+ max.emplace(sample);
+ }
}
int ReceiveStatisticsProxy::SampleCounter::Avg(
@@ -668,9 +770,18 @@ int ReceiveStatisticsProxy::SampleCounter::Avg(
return static_cast<int>(sum / num_samples);
}
+int ReceiveStatisticsProxy::SampleCounter::Max() const {
+ if (!max) {
+ return -1;
+ } else {
+ return *max;
+ }
+}
+
// Clears all accumulated state (sum, count and recorded maximum) so the
// counter can be reused for a new measurement interval.
void ReceiveStatisticsProxy::SampleCounter::Reset() {
  sum = 0;
  num_samples = 0;
  max.reset();
}
void ReceiveStatisticsProxy::OnRttUpdate(int64_t avg_rtt_ms,

Powered by Google App Engine
This is Rietveld 408576698