Chromium Code Reviews

Unified Diff: webrtc/video/receive_statistics_proxy.cc

Issue 2986893002: Piggybacking simulcast id and ALR experiment id into video content type extension. (Closed)
Patch Set: Cleanup, created 3 years, 4 months ago
Index: webrtc/video/receive_statistics_proxy.cc
diff --git a/webrtc/video/receive_statistics_proxy.cc b/webrtc/video/receive_statistics_proxy.cc
index 4f231694aa1b5efd7c7daa82f497858fd7153924..8e308ca401e248f834542bb100b95be73ad355a9 100644
--- a/webrtc/video/receive_statistics_proxy.cc
+++ b/webrtc/video/receive_statistics_proxy.cc
@@ -12,8 +12,10 @@
#include <algorithm>
#include <cmath>
+#include <sstream>
#include <utility>
+#include "webrtc/modules/pacing/alr_detector.h"
#include "webrtc/modules/video_coding/include/video_codec_interface.h"
#include "webrtc/rtc_base/checks.h"
#include "webrtc/rtc_base/logging.h"
@@ -48,6 +50,28 @@ const int kMovingMaxWindowMs = 10000;
// How large window we use to calculate the framerate/bitrate.
const int kRateStatisticsWindowSizeMs = 1000;
+
+std::string UmaPrefixForContentType(VideoContentType content_type) {
+ std::stringstream ss;
+ ss << "WebRTC.Video";
+ if (content_type.IsScreenshare()) {
+ ss << ".Screenshare";
+ }
+ return ss.str();
+}
+
+std::string UmaSuffixForContentType(VideoContentType content_type) {
+ std::stringstream ss;
+ int simulcast_id = content_type.GetSimulcastId();
+ if (simulcast_id > 0) {
+ ss << ".S" << simulcast_id - 1;
+ }
+ int experiment_id = content_type.GetExperimentId();
+ if (experiment_id > 0) {
+ ss << ".ExperimentGroup" << experiment_id - 1;
+ }
+ return ss.str();
+}
} // namespace
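As a concrete illustration of how the two helpers above compose a full histogram name, a standalone sketch follows; the plain bool/int parameters stand in for the real VideoContentType accessors.

// Standalone sketch: shows how the prefix and suffix helpers compose a name
// such as "WebRTC.Video.Screenshare.EndToEndDelayInMs.S1".
#include <iostream>
#include <sstream>
#include <string>

std::string MetricName(bool is_screenshare, int simulcast_id,
                       int experiment_id, const std::string& metric) {
  std::stringstream ss;
  ss << "WebRTC.Video";
  if (is_screenshare)
    ss << ".Screenshare";
  ss << "." << metric;
  // As in the helpers above, 0 means "no id set" and the reported index is
  // the stored id minus one.
  if (simulcast_id > 0)
    ss << ".S" << simulcast_id - 1;
  if (experiment_id > 0)
    ss << ".ExperimentGroup" << experiment_id - 1;
  return ss.str();
}

int main() {
  std::cout << MetricName(true, 2, 0, "EndToEndDelayInMs") << "\n";
  // Prints: WebRTC.Video.Screenshare.EndToEndDelayInMs.S1
}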
ReceiveStatisticsProxy::ReceiveStatisticsProxy(
@@ -77,10 +101,6 @@ ReceiveStatisticsProxy::ReceiveStatisticsProxy(
render_fps_tracker_(100, 10u),
render_pixel_tracker_(100, 10u),
total_byte_tracker_(100, 10u), // bucket_interval_ms, bucket_count
- e2e_delay_max_ms_video_(-1),
- e2e_delay_max_ms_screenshare_(-1),
- interframe_delay_max_ms_video_(-1),
- interframe_delay_max_ms_screenshare_(-1),
interframe_delay_max_moving_(kMovingMaxWindowMs),
freq_offset_counter_(clock, nullptr, kFreqOffsetProcessIntervalMs),
first_report_block_time_ms_(-1),
@@ -99,9 +119,11 @@ ReceiveStatisticsProxy::~ReceiveStatisticsProxy() {
}
void ReceiveStatisticsProxy::UpdateHistograms() {
- RTC_HISTOGRAM_COUNTS_100000(
- "WebRTC.Video.ReceiveStreamLifetimeInSeconds",
- (clock_->TimeInMilliseconds() - start_ms_) / 1000);
+ int stream_duration_sec = (clock_->TimeInMilliseconds() - start_ms_) / 1000;
+ RTC_HISTOGRAM_COUNTS_100000("WebRTC.Video.ReceiveStreamLifetimeInSeconds",
+ stream_duration_sec);
+ LOG(LS_INFO) << "WebRTC.Video.ReceiveStreamLifetimeInSeconds "
+ << stream_duration_sec;
sprang_webrtc 2017/08/28 16:25:02 Check that we have received at least one frame bef
ilnik 2017/08/29 07:56:26 Done.
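Regarding the comment above: a minimal standalone sketch of the requested guard, assuming the condition is simply "at least one decoded frame"; the exact condition in the follow-up patch set may differ.

#include <cstdint>
#include <iostream>

// Sketch only: skip the lifetime histogram for streams that never delivered
// a frame, as requested in the review comment above.
void MaybeReportLifetime(int64_t now_ms, int64_t start_ms,
                         uint32_t frames_decoded) {
  if (frames_decoded == 0)
    return;
  int stream_duration_sec = static_cast<int>((now_ms - start_ms) / 1000);
  std::cout << "WebRTC.Video.ReceiveStreamLifetimeInSeconds "
            << stream_duration_sec << "\n";
}

int main() {
  MaybeReportLifetime(65000, 5000, 300);  // Prints 60.
}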
if (first_report_block_time_ms_ != -1 &&
((clock_->TimeInMilliseconds() - first_report_block_time_ms_) / 1000) >=
@@ -124,14 +146,7 @@ void ReceiveStatisticsProxy::UpdateHistograms() {
"WebRTC.Video.RenderSqrtPixelsPerSecond",
round(render_pixel_tracker_.ComputeTotalRate()));
}
- int width = render_width_counter_.Avg(kMinRequiredSamples);
- int height = render_height_counter_.Avg(kMinRequiredSamples);
- if (width != -1) {
- RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.ReceivedWidthInPixels", width);
- RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.ReceivedHeightInPixels", height);
- LOG(LS_INFO) << "WebRTC.Video.ReceivedWidthInPixels " << width;
- LOG(LS_INFO) << "WebRTC.Video.ReceivedHeightInPixels " << height;
- }
+
int sync_offset_ms = sync_offset_counter_.Avg(kMinRequiredSamples);
if (sync_offset_ms != -1) {
RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.AVSyncOffsetInMs", sync_offset_ms);
@@ -189,52 +204,131 @@ void ReceiveStatisticsProxy::UpdateHistograms() {
if (delay_ms != -1)
RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.OnewayDelayInMs", delay_ms);
- int e2e_delay_ms_video = e2e_delay_counter_video_.Avg(kMinRequiredSamples);
- if (e2e_delay_ms_video != -1) {
- RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.EndToEndDelayInMs",
- e2e_delay_ms_video);
- LOG(LS_INFO) << "WebRTC.Video.EndToEndDelayInMs " << e2e_delay_ms_video;
- }
+ // Aggregate content_specific_stats_ by removing experiment or simulcast
+ // information.
+ std::map<VideoContentType, ContentSpecificStats> aggregated_stats;
+ for (auto it : content_specific_stats_) {
+ // Calculate simulcast specific metrics (".S0" ... ".S2" suffixes):
+ VideoContentType content_type = it.first;
+ if (content_type.GetSimulcastId() > 0) {
+ // Aggregate on experiment id.
+ content_type.SetExperimentId(0);
+ aggregated_stats[content_type].Add(it.second);
+ }
+ // Calculate experiment specific metrics:
sprang_webrtc 2017/08/28 16:25:02 : ?
ilnik 2017/08/29 07:56:26 Done.
+ content_type = it.first;
+ if (content_type.GetExperimentId() > 0) {
+ // Aggregate on simulcast id.
+ content_type.SetSimulcastId(0);
+ aggregated_stats[content_type].Add(it.second);
+ }
+ // calculate aggregated metrics (no suffixes. Aggregated on everything).
sprang_webrtc 2017/08/28 16:25:02 nit: s/calculate/Calculate
ilnik 2017/08/29 07:56:26 Done.
+ content_type = it.first;
+ content_type.SetSimulcastId(0);
+ content_type.SetExperimentId(0);
+ aggregated_stats[content_type].Add(it.second);
sprang_webrtc 2017/08/28 16:25:02 We shouldn't do the addition if ss == 0 && exp ==
ilnik 2017/08/29 07:56:27 We add to the separate map. We need to make this a
sprang_webrtc 2017/08/29 08:04:38 Ah, you're right. I misread.
+ }
+
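To make the folding above concrete: one entry keyed on (screenshare, simulcast id 2, experiment id 1) contributes to exactly three aggregated keys, one per reported slice. A standalone sketch with a simplified tuple key in place of VideoContentType:

#include <iostream>
#include <map>
#include <tuple>

// Simplified key: (is_screenshare, simulcast_id, experiment_id). The real
// VideoContentType packs these fields into one extension byte.
using Key = std::tuple<bool, int, int>;

int main() {
  std::map<Key, int> aggregated;   // value: how many source entries were folded in
  const Key original{true, 2, 1};  // screenshare, ".S1" slice, ".ExperimentGroup0" slice

  // Simulcast slice: keep the simulcast id, zero out the experiment id.
  aggregated[Key{std::get<0>(original), std::get<1>(original), 0}]++;
  // Experiment slice: keep the experiment id, zero out the simulcast id.
  aggregated[Key{std::get<0>(original), 0, std::get<2>(original)}]++;
  // Fully aggregated slice: zero out both.
  aggregated[Key{std::get<0>(original), 0, 0}]++;

  std::cout << "aggregated keys: " << aggregated.size() << "\n";  // 3
}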
+ for (auto it : aggregated_stats) {
+ // for the metric Foo we report the following slices:
sprang_webrtc 2017/08/28 16:25:02 nit: Capital F
ilnik 2017/08/29 07:56:26 Done.
+ // WebRTC.Video.Foo,
+ // WebRTC.Video.Screenshare.Foo,
+ // WebRTC.Video.Foo.S[0-3],
+ // WebRTC.Video.Foo.ExperimentGroup[0-7],
+ // WebRTC.Video.Screenshare.Foo.S[0-3],
+ // WebRTC.Video.Screenshare.Foo.ExperimentGroup[0-7].
+ auto content_type = it.first;
+ auto stats = it.second;
+ std::string uma_prefix = UmaPrefixForContentType(content_type);
+ std::string uma_suffix = UmaSuffixForContentType(content_type);
+ // Metrics can be sliced on either simulcast id or experiment id but not
+ // both.
+ RTC_DCHECK(content_type.GetExperimentId() == 0 ||
+ content_type.GetSimulcastId() == 0);
+
+ int e2e_delay_ms = stats.e2e_delay_counter.Avg(kMinRequiredSamples);
+ if (e2e_delay_ms != -1) {
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(
+ uma_prefix + ".EndToEndDelayInMs" + uma_suffix, e2e_delay_ms);
+ LOG(LS_INFO) << uma_prefix << ".EndToEndDelayInMs" << uma_suffix << " "
+ << e2e_delay_ms;
+ }
+ int e2e_delay_max_ms = stats.e2e_delay_counter.Max();
+ if (e2e_delay_max_ms != -1 && e2e_delay_ms != -1) {
+ RTC_HISTOGRAM_COUNTS_SPARSE_100000(
+ uma_prefix + ".EndToEndDelayMaxInMs" + uma_suffix, e2e_delay_max_ms);
+ LOG(LS_INFO) << uma_prefix << ".EndToEndDelayMaxInMs" << uma_suffix << " "
+ << e2e_delay_max_ms;
+ }
+ int interframe_delay_ms =
+ stats.interframe_delay_counter.Avg(kMinRequiredSamples);
+ if (interframe_delay_ms != -1) {
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(
+ uma_prefix + ".InterframeDelayInMs" + uma_suffix,
+ interframe_delay_ms);
+ LOG(LS_INFO) << uma_prefix << ".InterframeDelayInMs" << uma_suffix << " "
+ << interframe_delay_ms;
+ }
+ int interframe_delay_max_ms = stats.interframe_delay_counter.Max();
+ if (interframe_delay_max_ms != -1 && interframe_delay_ms != -1) {
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(
+ uma_prefix + ".InterframeDelayMaxInMs" + uma_suffix,
+ interframe_delay_max_ms);
+ LOG(LS_INFO) << uma_prefix << ".InterframeDelayMaxInMs" << uma_suffix
+ << " " << interframe_delay_max_ms;
+ }
- int e2e_delay_ms_screenshare =
- e2e_delay_counter_screenshare_.Avg(kMinRequiredSamples);
- if (e2e_delay_ms_screenshare != -1) {
- RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.Screenshare.EndToEndDelayInMs",
- e2e_delay_ms_screenshare);
- }
+ int width = stats.received_width.Avg(kMinRequiredSamples);
+ if (width != -1) {
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(
+ uma_prefix + ".ReceivedWidthInPixels" + uma_suffix, width);
+ LOG(LS_INFO) << uma_prefix << ".ReceivedWidthInPixels" << uma_suffix
+ << " " << width;
+ }
- int e2e_delay_max_ms_video = e2e_delay_max_ms_video_;
- if (e2e_delay_max_ms_video != -1) {
- RTC_HISTOGRAM_COUNTS_100000("WebRTC.Video.EndToEndDelayMaxInMs",
- e2e_delay_max_ms_video);
- }
+ int height = stats.received_height.Avg(kMinRequiredSamples);
+ if (height != -1) {
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(
+ uma_prefix + ".ReceivedHeightInPixels" + uma_suffix, height);
+ LOG(LS_INFO) << uma_prefix << ".ReceivedHeightInPixels" << uma_suffix
+ << " " << height;
+ }
- int e2e_delay_max_ms_screenshare = e2e_delay_max_ms_screenshare_;
- if (e2e_delay_max_ms_screenshare != -1) {
- RTC_HISTOGRAM_COUNTS_100000("WebRTC.Video.Screenshare.EndToEndDelayMaxInMs",
- e2e_delay_max_ms_screenshare);
- }
+ if (content_type != VideoContentType::UNSPECIFIED) {
+ // Don't report these 3 metrics unsliced, as more precise variants
+ // are reported separately in this method.
+ float flow_duration_sec = stats.flow_duration_ms / 1000.0;
+ if (flow_duration_sec >= metrics::kMinRunTimeInSeconds) {
+ int media_bitrate_kbps = static_cast<int>(stats.total_media_bytes * 8 /
+ flow_duration_sec / 1000);
+ RTC_HISTOGRAM_COUNTS_SPARSE_10000(
+ uma_prefix + ".MediaBitrateReceivedInKbps" + uma_suffix,
+ media_bitrate_kbps);
+ LOG(LS_INFO) << uma_prefix << ".MediaBitrateReceivedInKbps"
+ << uma_suffix << " " << media_bitrate_kbps;
+ }
- int interframe_delay_ms_screenshare =
- interframe_delay_counter_screenshare_.Avg(kMinRequiredSamples);
- if (interframe_delay_ms_screenshare != -1) {
- RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.Screenshare.InterframeDelayInMs",
- interframe_delay_ms_screenshare);
- RTC_DCHECK_GE(interframe_delay_max_ms_screenshare_,
- interframe_delay_ms_screenshare);
- RTC_HISTOGRAM_COUNTS_10000(
- "WebRTC.Video.Screenshare.InterframeDelayMaxInMs",
- interframe_delay_max_ms_screenshare_);
- }
+ int num_total_frames =
+ stats.frame_counts.key_frames + stats.frame_counts.delta_frames;
+ if (num_total_frames >= kMinRequiredSamples) {
+ int num_key_frames = stats.frame_counts.key_frames;
+ int key_frames_permille =
+ (num_key_frames * 1000 + num_total_frames / 2) / num_total_frames;
+ RTC_HISTOGRAM_COUNTS_SPARSE_1000(
+ uma_prefix + ".KeyFramesReceivedInPermille" + uma_suffix,
+ key_frames_permille);
+ LOG(LS_INFO) << uma_prefix << ".KeyFramesReceivedInPermille"
+ << uma_suffix << " " << key_frames_permille;
+ }
- int interframe_delay_ms_video =
- interframe_delay_counter_video_.Avg(kMinRequiredSamples);
- if (interframe_delay_ms_video != -1) {
- RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.InterframeDelayInMs",
- interframe_delay_ms_video);
- RTC_DCHECK_GE(interframe_delay_max_ms_video_, interframe_delay_ms_video);
- RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.InterframeDelayMaxInMs",
- interframe_delay_max_ms_video_);
+ int qp = stats.qp_counter.Avg(kMinRequiredSamples);
+ if (qp != -1) {
+ RTC_HISTOGRAM_COUNTS_SPARSE_200(
+ uma_prefix + ".Decoded.Vp8.Qp" + uma_suffix, qp);
+ LOG(LS_INFO) << uma_prefix << ".Decoded.Vp8.Qp" << uma_suffix << " "
+ << qp;
+ }
+ }
}
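One small subtlety in the loop above: the key-frame ratio is rounded to the nearest permille by adding half the denominator before the integer division. A quick standalone check with made-up counts:

#include <cassert>

int main() {
  // Made-up counts: 7 key frames out of 450 frames in total.
  int num_key_frames = 7;
  int num_total_frames = 450;
  int key_frames_permille =
      (num_key_frames * 1000 + num_total_frames / 2) / num_total_frames;
  // 7/450 is about 15.6 per mille; adding num_total_frames/2 rounds to nearest.
  assert(key_frames_permille == 16);
  return 0;
}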
StreamDataCounters rtp = stats_.rtp_stats;
@@ -250,9 +344,12 @@ void ReceiveStatisticsProxy::UpdateHistograms() {
"WebRTC.Video.BitrateReceivedInKbps",
static_cast<int>(rtp_rtx.transmitted.TotalBytes() * 8 / elapsed_sec /
1000));
- RTC_HISTOGRAM_COUNTS_10000(
- "WebRTC.Video.MediaBitrateReceivedInKbps",
- static_cast<int>(rtp.MediaPayloadBytes() * 8 / elapsed_sec / 1000));
+ int media_bitrate_kbs =
+ static_cast<int>(rtp.MediaPayloadBytes() * 8 / elapsed_sec / 1000);
+ RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.MediaBitrateReceivedInKbps",
+ media_bitrate_kbs);
+ LOG(LS_INFO) << "WebRTC.Video.MediaBitrateReceivedInKbps "
+ << media_bitrate_kbs;
RTC_HISTOGRAM_COUNTS_10000(
"WebRTC.Video.PaddingBitrateReceivedInKbps",
static_cast<int>(rtp_rtx.transmitted.padding_bytes * 8 / elapsed_sec /
@@ -529,6 +626,9 @@ void ReceiveStatisticsProxy::OnDecodedFrame(rtc::Optional<uint8_t> qp,
uint64_t now = clock_->TimeInMilliseconds();
rtc::CritScope lock(&crit_);
+
+ ContentSpecificStats* content_specific_stats =
+ &content_specific_stats_[content_type];
++stats_.frames_decoded;
if (qp) {
if (!stats_.qp_sum) {
@@ -540,6 +640,7 @@ void ReceiveStatisticsProxy::OnDecodedFrame(rtc::Optional<uint8_t> qp,
stats_.qp_sum = rtc::Optional<uint64_t>(0);
}
*stats_.qp_sum += *qp;
+ content_specific_stats->qp_counter.Add(*qp);
} else if (stats_.qp_sum) {
LOG(LS_WARNING)
<< "QP sum was already set and no QP was given for a frame.";
@@ -551,17 +652,8 @@ void ReceiveStatisticsProxy::OnDecodedFrame(rtc::Optional<uint8_t> qp,
int64_t interframe_delay_ms = now - *last_decoded_frame_time_ms_;
RTC_DCHECK_GE(interframe_delay_ms, 0);
interframe_delay_max_moving_.Add(interframe_delay_ms, now);
- if (last_content_type_ == VideoContentType::SCREENSHARE) {
- interframe_delay_counter_screenshare_.Add(interframe_delay_ms);
- if (interframe_delay_max_ms_screenshare_ < interframe_delay_ms) {
- interframe_delay_max_ms_screenshare_ = interframe_delay_ms;
- }
- } else {
- interframe_delay_counter_video_.Add(interframe_delay_ms);
- if (interframe_delay_max_ms_video_ < interframe_delay_ms) {
- interframe_delay_max_ms_video_ = interframe_delay_ms;
- }
- }
+ content_specific_stats->interframe_delay_counter.Add(interframe_delay_ms);
+ content_specific_stats->flow_duration_ms += interframe_delay_ms;
}
last_decoded_frame_time_ms_.emplace(now);
}
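Note that flow_duration_ms accumulated here is the sum of inter-frame delays, i.e. roughly the time the stream was actively delivering frames; UpdateHistograms() divides the per-content byte count by it to produce MediaBitrateReceivedInKbps. A standalone check of that arithmetic with made-up numbers:

#include <cassert>
#include <cstdint>

int main() {
  // Made-up numbers: 2,500,000 media bytes over 20 s of active flow.
  int64_t total_media_bytes = 2500000;
  int64_t flow_duration_ms = 20000;
  float flow_duration_sec = flow_duration_ms / 1000.0f;
  int media_bitrate_kbps =
      static_cast<int>(total_media_bytes * 8 / flow_duration_sec / 1000);
  assert(media_bitrate_kbps == 1000);  // 20 Mbit over 20 s = 1000 kbps.
  return 0;
}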
@@ -572,28 +664,22 @@ void ReceiveStatisticsProxy::OnRenderedFrame(const VideoFrame& frame) {
RTC_DCHECK_GT(width, 0);
RTC_DCHECK_GT(height, 0);
uint64_t now = clock_->TimeInMilliseconds();
-
rtc::CritScope lock(&crit_);
+ ContentSpecificStats* content_specific_stats =
+ &content_specific_stats_[last_content_type_];
renders_fps_estimator_.Update(1, now);
++stats_.frames_rendered;
stats_.width = width;
stats_.height = height;
- render_width_counter_.Add(width);
- render_height_counter_.Add(height);
render_fps_tracker_.AddSamples(1);
render_pixel_tracker_.AddSamples(sqrt(width * height));
+ content_specific_stats->received_width.Add(width);
+ content_specific_stats->received_height.Add(height);
if (frame.ntp_time_ms() > 0) {
int64_t delay_ms = clock_->CurrentNtpInMilliseconds() - frame.ntp_time_ms();
if (delay_ms >= 0) {
- if (last_content_type_ == VideoContentType::SCREENSHARE) {
- e2e_delay_max_ms_screenshare_ =
- std::max(delay_ms, e2e_delay_max_ms_screenshare_);
- e2e_delay_counter_screenshare_.Add(delay_ms);
- } else {
- e2e_delay_max_ms_video_ = std::max(delay_ms, e2e_delay_max_ms_video_);
- e2e_delay_counter_video_.Add(delay_ms);
- }
+ content_specific_stats->e2e_delay_counter.Add(delay_ms);
}
}
}
@@ -618,13 +704,23 @@ void ReceiveStatisticsProxy::OnReceiveRatesUpdated(uint32_t bitRate,
}
void ReceiveStatisticsProxy::OnCompleteFrame(bool is_keyframe,
- size_t size_bytes) {
+ size_t size_bytes,
+ VideoContentType content_type) {
rtc::CritScope lock(&crit_);
if (is_keyframe)
++stats_.frame_counts.key_frames;
else
++stats_.frame_counts.delta_frames;
+ ContentSpecificStats* content_specific_stats =
+ &content_specific_stats_[content_type];
+
+ content_specific_stats->total_media_bytes += size_bytes;
+ if (is_keyframe)
+ ++content_specific_stats->frame_counts.key_frames;
+ else
+ ++content_specific_stats->frame_counts.delta_frames;
sprang_webrtc 2017/08/28 16:25:02 nit: Use {} for if/else
ilnik 2017/08/29 07:56:27 Done.
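For reference, the braced form asked for above would presumably look like this (sketch of the expected follow-up, not the committed code):

  if (is_keyframe) {
    ++content_specific_stats->frame_counts.key_frames;
  } else {
    ++content_specific_stats->frame_counts.delta_frames;
  }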
+
int64_t now_ms = clock_->TimeInMilliseconds();
frame_window_.insert(std::make_pair(now_ms, size_bytes));
UpdateFramerate(now_ms);
@@ -665,6 +761,16 @@ void ReceiveStatisticsProxy::OnStreamInactive() {
void ReceiveStatisticsProxy::SampleCounter::Add(int sample) {
sum += sample;
++num_samples;
+ if (!max || *max < sample) {
sprang_webrtc 2017/08/28 16:25:02 nit: maybe more readable with if (!max || sample >
ilnik 2017/08/29 07:56:26 Done.
+ max.emplace(sample);
+ }
+}
+
+void ReceiveStatisticsProxy::SampleCounter::Add(const SampleCounter& other) {
+ sum += other.sum;
+ num_samples += other.num_samples;
+ if (other.max && (!max || *max < *other.max))
+ max = other.max;
}
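The merge above keeps the larger of the two maxima and sums both the totals and the sample counts, so Avg() over the merged counter equals the combined average. A standalone sketch with a simplified counter, using std::optional in place of rtc::Optional:

#include <cassert>
#include <optional>

struct Counter {
  int sum = 0;
  int num_samples = 0;
  std::optional<int> max;

  void Add(int sample) {
    sum += sample;
    ++num_samples;
    if (!max || sample > *max)
      max = sample;
  }
  void Add(const Counter& other) {
    sum += other.sum;
    num_samples += other.num_samples;
    if (other.max && (!max || *max < *other.max))
      max = other.max;
  }
};

int main() {
  Counter a;
  Counter b;
  a.Add(10);
  a.Add(20);  // a: sum 30, 2 samples, max 20
  b.Add(40);  // b: sum 40, 1 sample, max 40
  a.Add(b);
  assert(a.sum == 70 && a.num_samples == 3 && *a.max == 40);
  return 0;
}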
int ReceiveStatisticsProxy::SampleCounter::Avg(
@@ -674,9 +780,18 @@ int ReceiveStatisticsProxy::SampleCounter::Avg(
return static_cast<int>(sum / num_samples);
}
+int ReceiveStatisticsProxy::SampleCounter::Max() const {
+ if (!max) {
+ return -1;
+ } else {
+ return *max;
+ }
sprang_webrtc 2017/08/28 16:25:02 nit: return max.value_or(-1);
ilnik 2017/08/29 07:56:27 Done.
+}
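With the value_or() suggestion above, Max() collapses to a single line (assuming rtc::Optional mirrors std::optional here, as the reviewer's snippet implies):

int ReceiveStatisticsProxy::SampleCounter::Max() const {
  return max.value_or(-1);
}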
+
void ReceiveStatisticsProxy::SampleCounter::Reset() {
num_samples = 0;
sum = 0;
+ max.reset();
}
void ReceiveStatisticsProxy::OnRttUpdate(int64_t avg_rtt_ms,
@@ -685,4 +800,17 @@ void ReceiveStatisticsProxy::OnRttUpdate(int64_t avg_rtt_ms,
avg_rtt_ms_ = avg_rtt_ms;
}
+void ReceiveStatisticsProxy::ContentSpecificStats::Add(
+ const ContentSpecificStats& other) {
+ e2e_delay_counter.Add(other.e2e_delay_counter);
+ interframe_delay_counter.Add(other.interframe_delay_counter);
+ flow_duration_ms += other.flow_duration_ms;
+ total_media_bytes += other.total_media_bytes;
+ received_height.Add(other.received_height);
+ received_width.Add(other.received_width);
+ qp_counter.Add(other.qp_counter);
+ frame_counts.key_frames += other.frame_counts.key_frames;
+ frame_counts.delta_frames += other.frame_counts.delta_frames;
+}
+
} // namespace webrtc
