Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 /* | 1 /* |
| 2 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| 11 #include "webrtc/video/receive_statistics_proxy.h" | 11 #include "webrtc/video/receive_statistics_proxy.h" |
| 12 | 12 |
| 13 #include <algorithm> | 13 #include <algorithm> |
| 14 #include <cmath> | 14 #include <cmath> |
| 15 #include <sstream> | |
| 15 #include <utility> | 16 #include <utility> |
| 16 | 17 |
| 18 #include "webrtc/modules/pacing/alr_detector.h" | |
| 17 #include "webrtc/modules/video_coding/include/video_codec_interface.h" | 19 #include "webrtc/modules/video_coding/include/video_codec_interface.h" |
| 18 #include "webrtc/rtc_base/checks.h" | 20 #include "webrtc/rtc_base/checks.h" |
| 19 #include "webrtc/rtc_base/logging.h" | 21 #include "webrtc/rtc_base/logging.h" |
| 20 #include "webrtc/system_wrappers/include/clock.h" | 22 #include "webrtc/system_wrappers/include/clock.h" |
| 21 #include "webrtc/system_wrappers/include/field_trial.h" | 23 #include "webrtc/system_wrappers/include/field_trial.h" |
| 22 #include "webrtc/system_wrappers/include/metrics.h" | 24 #include "webrtc/system_wrappers/include/metrics.h" |
| 23 | 25 |
| 24 namespace webrtc { | 26 namespace webrtc { |
| 25 namespace { | 27 namespace { |
| 26 // Periodic time interval for processing samples for |freq_offset_counter_|. | 28 // Periodic time interval for processing samples for |freq_offset_counter_|. |
| (...skipping 14 matching lines...) | |
| 41 const int kLowQpThresholdVp8 = 60; | 43 const int kLowQpThresholdVp8 = 60; |
| 42 const int kHighQpThresholdVp8 = 70; | 44 const int kHighQpThresholdVp8 = 70; |
| 43 const int kLowVarianceThreshold = 1; | 45 const int kLowVarianceThreshold = 1; |
| 44 const int kHighVarianceThreshold = 2; | 46 const int kHighVarianceThreshold = 2; |
| 45 | 47 |
| 46 // Some metrics are reported as a maximum over this period. | 48 // Some metrics are reported as a maximum over this period. |
| 47 const int kMovingMaxWindowMs = 10000; | 49 const int kMovingMaxWindowMs = 10000; |
| 48 | 50 |
| 49 // How large window we use to calculate the framerate/bitrate. | 51 // How large window we use to calculate the framerate/bitrate. |
| 50 const int kRateStatisticsWindowSizeMs = 1000; | 52 const int kRateStatisticsWindowSizeMs = 1000; |
| 53 | |
| 54 std::string UmaPrefixForContentType(VideoContentType content_type) { | |
| 55 std::stringstream ss; | |
| 56 ss << "WebRTC.Video"; | |
| 57 if (content_type.IsScreenshare()) { | |
| 58 ss << ".Screenshare"; | |
| 59 } | |
| 60 return ss.str(); | |
| 61 } | |
| 62 | |
| 63 std::string UmaSuffixForContentType(VideoContentType content_type) { | |
| 64 std::stringstream ss; | |
| 65 int simulcast_id = content_type.GetSimulcastId(); | |
| 66 if (simulcast_id > 0) { | |
| 67 ss << ".S" << simulcast_id - 1; | |
| 68 } | |
| 69 int experiment_id = content_type.GetExperimentId(); | |
| 70 if (experiment_id > 0) { | |
| 71 ss << ".ExperimentGroup" << experiment_id - 1; | |
| 72 } | |
| 73 return ss.str(); | |
| 74 } | |
| 51 } // namespace | 75 } // namespace |
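Editorial note, for readers skimming the naming scheme added above: the prefix and suffix helpers are concatenated around a metric name when the histograms are reported later in this file. The following is a minimal, self-contained sketch (illustrative only, not part of the CL); `ContentType` is a hypothetical stand-in for `webrtc::VideoContentType`, assumed to expose the same getters used here.

```cpp
// Sketch of how a full histogram name is assembled from the helpers above.
#include <iostream>
#include <string>

struct ContentType {
  bool screenshare = false;
  int simulcast_id = 0;   // 0 means "not set"; N > 0 maps to suffix ".S{N-1}".
  int experiment_id = 0;  // 0 means "not set"; N > 0 maps to ".ExperimentGroup{N-1}".
  bool IsScreenshare() const { return screenshare; }
  int GetSimulcastId() const { return simulcast_id; }
  int GetExperimentId() const { return experiment_id; }
};

std::string Prefix(const ContentType& t) {
  return t.IsScreenshare() ? "WebRTC.Video.Screenshare" : "WebRTC.Video";
}

std::string Suffix(const ContentType& t) {
  std::string s;
  if (t.GetSimulcastId() > 0)
    s += ".S" + std::to_string(t.GetSimulcastId() - 1);
  if (t.GetExperimentId() > 0)
    s += ".ExperimentGroup" + std::to_string(t.GetExperimentId() - 1);
  return s;
}

int main() {
  ContentType t;
  t.screenshare = true;
  t.simulcast_id = 2;  // Second simulcast stream.
  // Prints: WebRTC.Video.Screenshare.EndToEndDelayInMs.S1
  std::cout << Prefix(t) + ".EndToEndDelayInMs" + Suffix(t) << std::endl;
}
```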
| 52 | 76 |
| 53 ReceiveStatisticsProxy::ReceiveStatisticsProxy( | 77 ReceiveStatisticsProxy::ReceiveStatisticsProxy( |
| 54 const VideoReceiveStream::Config* config, | 78 const VideoReceiveStream::Config* config, |
| 55 Clock* clock) | 79 Clock* clock) |
| 56 : clock_(clock), | 80 : clock_(clock), |
| 57 config_(*config), | 81 config_(*config), |
| 58 start_ms_(clock->TimeInMilliseconds()), | 82 start_ms_(clock->TimeInMilliseconds()), |
| 59 last_sample_time_(clock->TimeInMilliseconds()), | 83 last_sample_time_(clock->TimeInMilliseconds()), |
| 60 fps_threshold_(kLowFpsThreshold, | 84 fps_threshold_(kLowFpsThreshold, |
| 61 kHighFpsThreshold, | 85 kHighFpsThreshold, |
| 62 kBadFraction, | 86 kBadFraction, |
| 63 kNumMeasurements), | 87 kNumMeasurements), |
| 64 qp_threshold_(kLowQpThresholdVp8, | 88 qp_threshold_(kLowQpThresholdVp8, |
| 65 kHighQpThresholdVp8, | 89 kHighQpThresholdVp8, |
| 66 kBadFraction, | 90 kBadFraction, |
| 67 kNumMeasurements), | 91 kNumMeasurements), |
| 68 variance_threshold_(kLowVarianceThreshold, | 92 variance_threshold_(kLowVarianceThreshold, |
| 69 kHighVarianceThreshold, | 93 kHighVarianceThreshold, |
| 70 kBadFraction, | 94 kBadFraction, |
| 71 kNumMeasurementsVariance), | 95 kNumMeasurementsVariance), |
| 72 num_bad_states_(0), | 96 num_bad_states_(0), |
| 73 num_certain_states_(0), | 97 num_certain_states_(0), |
| 74 // 1000ms window, scale 1000 for ms to s. | 98 // 1000ms window, scale 1000 for ms to s. |
| 75 decode_fps_estimator_(1000, 1000), | 99 decode_fps_estimator_(1000, 1000), |
| 76 renders_fps_estimator_(1000, 1000), | 100 renders_fps_estimator_(1000, 1000), |
| 77 render_fps_tracker_(100, 10u), | 101 render_fps_tracker_(100, 10u), |
| 78 render_pixel_tracker_(100, 10u), | 102 render_pixel_tracker_(100, 10u), |
| 79 total_byte_tracker_(100, 10u), // bucket_interval_ms, bucket_count | 103 total_byte_tracker_(100, 10u), // bucket_interval_ms, bucket_count |
| 80 e2e_delay_max_ms_video_(-1), | |
| 81 e2e_delay_max_ms_screenshare_(-1), | |
| 82 interframe_delay_max_ms_video_(-1), | |
| 83 interframe_delay_max_ms_screenshare_(-1), | |
| 84 interframe_delay_max_moving_(kMovingMaxWindowMs), | 104 interframe_delay_max_moving_(kMovingMaxWindowMs), |
| 85 freq_offset_counter_(clock, nullptr, kFreqOffsetProcessIntervalMs), | 105 freq_offset_counter_(clock, nullptr, kFreqOffsetProcessIntervalMs), |
| 86 first_report_block_time_ms_(-1), | 106 first_report_block_time_ms_(-1), |
| 87 avg_rtt_ms_(0), | 107 avg_rtt_ms_(0), |
| 88 last_content_type_(VideoContentType::UNSPECIFIED) { | 108 last_content_type_(VideoContentType::UNSPECIFIED) { |
| 89 stats_.ssrc = config_.rtp.remote_ssrc; | 109 stats_.ssrc = config_.rtp.remote_ssrc; |
| 90 // TODO(brandtr): Replace |rtx_stats_| with a single instance of | 110 // TODO(brandtr): Replace |rtx_stats_| with a single instance of |
| 91 // StreamDataCounters. | 111 // StreamDataCounters. |
| 92 if (config_.rtp.rtx_ssrc) { | 112 if (config_.rtp.rtx_ssrc) { |
| 93 rtx_stats_[config_.rtp.rtx_ssrc] = StreamDataCounters(); | 113 rtx_stats_[config_.rtp.rtx_ssrc] = StreamDataCounters(); |
| 94 } | 114 } |
| 95 } | 115 } |
| 96 | 116 |
| 97 ReceiveStatisticsProxy::~ReceiveStatisticsProxy() { | 117 ReceiveStatisticsProxy::~ReceiveStatisticsProxy() { |
| 98 UpdateHistograms(); | 118 UpdateHistograms(); |
| 99 } | 119 } |
| 100 | 120 |
| 101 void ReceiveStatisticsProxy::UpdateHistograms() { | 121 void ReceiveStatisticsProxy::UpdateHistograms() { |
| 102 RTC_HISTOGRAM_COUNTS_100000( | 122 int stream_duration_sec = (clock_->TimeInMilliseconds() - start_ms_) / 1000; |
| 103 "WebRTC.Video.ReceiveStreamLifetimeInSeconds", | 123 RTC_HISTOGRAM_COUNTS_100000("WebRTC.Video.ReceiveStreamLifetimeInSeconds", |
| 104 (clock_->TimeInMilliseconds() - start_ms_) / 1000); | 124 stream_duration_sec); |
| 125 LOG(LS_INFO) << "WebRTC.Video.ReceiveStreamLifetimeInSeconds " | |
| 126 << stream_duration_sec; | |
sprang_webrtc (2017/08/28 16:25:02): Check that we have received at least one frame bef
ilnik (2017/08/29 07:56:26): Done.
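The fix the reviewer asks for ("check that we have received at least one frame") lands in a later patch set that is not shown in this diff. A minimal sketch of the kind of guard being requested might look like the following, assuming the frame counters already maintained by this class are the ones consulted:

```cpp
// Hypothetical guard (assumption, not the actual change that landed):
// only report the stream-lifetime histogram if at least one frame arrived.
if (stats_.frame_counts.key_frames > 0 ||
    stats_.frame_counts.delta_frames > 0) {
  RTC_HISTOGRAM_COUNTS_100000("WebRTC.Video.ReceiveStreamLifetimeInSeconds",
                              stream_duration_sec);
  LOG(LS_INFO) << "WebRTC.Video.ReceiveStreamLifetimeInSeconds "
               << stream_duration_sec;
}
```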
| 105 | 127 |
| 106 if (first_report_block_time_ms_ != -1 && | 128 if (first_report_block_time_ms_ != -1 && |
| 107 ((clock_->TimeInMilliseconds() - first_report_block_time_ms_) / 1000) >= | 129 ((clock_->TimeInMilliseconds() - first_report_block_time_ms_) / 1000) >= |
| 108 metrics::kMinRunTimeInSeconds) { | 130 metrics::kMinRunTimeInSeconds) { |
| 109 int fraction_lost = report_block_stats_.FractionLostInPercent(); | 131 int fraction_lost = report_block_stats_.FractionLostInPercent(); |
| 110 if (fraction_lost != -1) { | 132 if (fraction_lost != -1) { |
| 111 RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.ReceivedPacketsLostInPercent", | 133 RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.ReceivedPacketsLostInPercent", |
| 112 fraction_lost); | 134 fraction_lost); |
| 113 LOG(LS_INFO) << "WebRTC.Video.ReceivedPacketsLostInPercent " | 135 LOG(LS_INFO) << "WebRTC.Video.ReceivedPacketsLostInPercent " |
| 114 << fraction_lost; | 136 << fraction_lost; |
| 115 } | 137 } |
| 116 } | 138 } |
| 117 | 139 |
| 118 const int kMinRequiredSamples = 200; | 140 const int kMinRequiredSamples = 200; |
| 119 int samples = static_cast<int>(render_fps_tracker_.TotalSampleCount()); | 141 int samples = static_cast<int>(render_fps_tracker_.TotalSampleCount()); |
| 120 if (samples >= kMinRequiredSamples) { | 142 if (samples >= kMinRequiredSamples) { |
| 121 RTC_HISTOGRAM_COUNTS_100("WebRTC.Video.RenderFramesPerSecond", | 143 RTC_HISTOGRAM_COUNTS_100("WebRTC.Video.RenderFramesPerSecond", |
| 122 round(render_fps_tracker_.ComputeTotalRate())); | 144 round(render_fps_tracker_.ComputeTotalRate())); |
| 123 RTC_HISTOGRAM_COUNTS_100000( | 145 RTC_HISTOGRAM_COUNTS_100000( |
| 124 "WebRTC.Video.RenderSqrtPixelsPerSecond", | 146 "WebRTC.Video.RenderSqrtPixelsPerSecond", |
| 125 round(render_pixel_tracker_.ComputeTotalRate())); | 147 round(render_pixel_tracker_.ComputeTotalRate())); |
| 126 } | 148 } |
| 127 int width = render_width_counter_.Avg(kMinRequiredSamples); | 149 |
| 128 int height = render_height_counter_.Avg(kMinRequiredSamples); | |
| 129 if (width != -1) { | |
| 130 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.ReceivedWidthInPixels", width); | |
| 131 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.ReceivedHeightInPixels", height); | |
| 132 LOG(LS_INFO) << "WebRTC.Video.ReceivedWidthInPixels " << width; | |
| 133 LOG(LS_INFO) << "WebRTC.Video.ReceivedHeightInPixels " << height; | |
| 134 } | |
| 135 int sync_offset_ms = sync_offset_counter_.Avg(kMinRequiredSamples); | 150 int sync_offset_ms = sync_offset_counter_.Avg(kMinRequiredSamples); |
| 136 if (sync_offset_ms != -1) { | 151 if (sync_offset_ms != -1) { |
| 137 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.AVSyncOffsetInMs", sync_offset_ms); | 152 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.AVSyncOffsetInMs", sync_offset_ms); |
| 138 LOG(LS_INFO) << "WebRTC.Video.AVSyncOffsetInMs " << sync_offset_ms; | 153 LOG(LS_INFO) << "WebRTC.Video.AVSyncOffsetInMs " << sync_offset_ms; |
| 139 } | 154 } |
| 140 AggregatedStats freq_offset_stats = freq_offset_counter_.GetStats(); | 155 AggregatedStats freq_offset_stats = freq_offset_counter_.GetStats(); |
| 141 if (freq_offset_stats.num_samples > 0) { | 156 if (freq_offset_stats.num_samples > 0) { |
| 142 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.RtpToNtpFreqOffsetInKhz", | 157 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.RtpToNtpFreqOffsetInKhz", |
| 143 freq_offset_stats.average); | 158 freq_offset_stats.average); |
| 144 LOG(LS_INFO) << "WebRTC.Video.RtpToNtpFreqOffsetInKhz, " | 159 LOG(LS_INFO) << "WebRTC.Video.RtpToNtpFreqOffsetInKhz, " |
| (...skipping 37 matching lines...) | |
| 182 int current_delay_ms = current_delay_counter_.Avg(kMinRequiredSamples); | 197 int current_delay_ms = current_delay_counter_.Avg(kMinRequiredSamples); |
| 183 if (current_delay_ms != -1) { | 198 if (current_delay_ms != -1) { |
| 184 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.CurrentDelayInMs", | 199 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.CurrentDelayInMs", |
| 185 current_delay_ms); | 200 current_delay_ms); |
| 186 LOG(LS_INFO) << "WebRTC.Video.CurrentDelayInMs " << current_delay_ms; | 201 LOG(LS_INFO) << "WebRTC.Video.CurrentDelayInMs " << current_delay_ms; |
| 187 } | 202 } |
| 188 int delay_ms = delay_counter_.Avg(kMinRequiredSamples); | 203 int delay_ms = delay_counter_.Avg(kMinRequiredSamples); |
| 189 if (delay_ms != -1) | 204 if (delay_ms != -1) |
| 190 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.OnewayDelayInMs", delay_ms); | 205 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.OnewayDelayInMs", delay_ms); |
| 191 | 206 |
| 192 int e2e_delay_ms_video = e2e_delay_counter_video_.Avg(kMinRequiredSamples); | 207 // Aggregate content_specific_stats_ by removing experiment or simulcast |
| 193 if (e2e_delay_ms_video != -1) { | 208 // information; |
| 194 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.EndToEndDelayInMs", | 209 std::map<VideoContentType, ContentSpecificStats> aggregated_stats; |
| 195 e2e_delay_ms_video); | 210 for (auto it : content_specific_stats_) { |
| 196 LOG(LS_INFO) << "WebRTC.Video.EndToEndDelayInMs " << e2e_delay_ms_video; | 211 // Calculate simulcast specific metrics (".S0" ... ".S2" suffixes): |
| 212 VideoContentType content_type = it.first; | |
| 213 if (content_type.GetSimulcastId() > 0) { | |
| 214 // Aggregate on experiment id. | |
| 215 content_type.SetExperimentId(0); | |
| 216 aggregated_stats[content_type].Add(it.second); | |
| 217 } | |
| 218 // Calculate experiment specific metrics: | |
sprang_webrtc (2017/08/28 16:25:02): : ?
ilnik (2017/08/29 07:56:26): Done.
| 219 content_type = it.first; | |
| 220 if (content_type.GetExperimentId() > 0) { | |
| 221 // Aggregate on simulcast id. | |
| 222 content_type.SetSimulcastId(0); | |
| 223 aggregated_stats[content_type].Add(it.second); | |
| 224 } | |
| 225 // calculate aggregated metrics (no suffixes. Aggregated on everything). | |
sprang_webrtc (2017/08/28 16:25:02): nit: s/calculate/Calculate
ilnik (2017/08/29 07:56:26): Done.
| 226 content_type = it.first; | |
| 227 content_type.SetSimulcastId(0); | |
| 228 content_type.SetExperimentId(0); | |
| 229 aggregated_stats[content_type].Add(it.second); | |
sprang_webrtc (2017/08/28 16:25:02): We shouldn't do the addition if ss == 0 && exp ==
ilnik (2017/08/29 07:56:27): We add to the separate map. We need to make this a
sprang_webrtc (2017/08/29 08:04:38): Ah, you're right. I misread.
| 197 } | 230 } |
| 198 | 231 |
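Editorial note on the aggregation loop above: each per-content-type entry fans out into up to three aggregated buckets (simulcast-only slice, experiment-only slice, and the fully aggregated slice). The sketch below illustrates that fan-out with plain tuples instead of `VideoContentType`; it is illustrative only and not part of the CL.

```cpp
// Fan-out performed by the aggregation loop, restated with a simple tuple key.
// An entry keyed by {screenshare, simulcast_id, experiment_id} contributes to:
//   {screenshare, simulcast_id, 0}  - simulcast slice (if simulcast_id > 0)
//   {screenshare, 0, experiment_id} - experiment slice (if experiment_id > 0)
//   {screenshare, 0, 0}             - fully aggregated slice (always)
#include <map>
#include <tuple>

using Key = std::tuple<bool, int, int>;  // screenshare, simulcast, experiment

void Aggregate(const std::map<Key, int>& per_type, std::map<Key, int>* out) {
  for (const auto& it : per_type) {
    bool screenshare;
    int simulcast, experiment;
    std::tie(screenshare, simulcast, experiment) = it.first;
    if (simulcast > 0)
      (*out)[Key(screenshare, simulcast, 0)] += it.second;
    if (experiment > 0)
      (*out)[Key(screenshare, 0, experiment)] += it.second;
    (*out)[Key(screenshare, 0, 0)] += it.second;
  }
}

int main() {
  // One screenshare entry on simulcast stream 2, experiment group 1...
  std::map<Key, int> per_type = {{Key(true, 2, 1), 5}};
  std::map<Key, int> aggregated;
  Aggregate(per_type, &aggregated);
  // ...fans out into three buckets: {true,2,0}, {true,0,1} and {true,0,0}.
  return aggregated.size() == 3 ? 0 : 1;
}
```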
| 199 int e2e_delay_ms_screenshare = | 232 for (auto it : aggregated_stats) { |
| 200 e2e_delay_counter_screenshare_.Avg(kMinRequiredSamples); | 233 // for the metric Foo we report the following slices: |
sprang_webrtc (2017/08/28 16:25:02): nit: Capital F
ilnik (2017/08/29 07:56:26): Done.
| 201 if (e2e_delay_ms_screenshare != -1) { | 234 // WebRTC.Video.Foo, |
| 202 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.Screenshare.EndToEndDelayInMs", | 235 // WebRTC.Video.Screenshare.Foo, |
| 203 e2e_delay_ms_screenshare); | 236 // WebRTC.Video.Foo.S[0-3], |
| 204 } | 237 // WebRTC.Video.Foo.ExperimentGroup[0-7], |
| 238 // WebRTC.Video.Screenshare.Foo.S[0-3], | |
| 239 // WebRTC.Video.Screenshare.Foo.ExperimentGroup[0-7]. | |
| 240 auto content_type = it.first; | |
| 241 auto stats = it.second; | |
| 242 std::string uma_prefix = UmaPrefixForContentType(content_type); | |
| 243 std::string uma_suffix = UmaSuffixForContentType(content_type); | |
| 244 // Metrics can be sliced on either simulcast id or experiment id but not | |
| 245 // both. | |
| 246 RTC_DCHECK(content_type.GetExperimentId() == 0 || | |
| 247 content_type.GetSimulcastId() == 0); | |
| 205 | 248 |
| 206 int e2e_delay_max_ms_video = e2e_delay_max_ms_video_; | 249 int e2e_delay_ms = stats.e2e_delay_counter.Avg(kMinRequiredSamples); |
| 207 if (e2e_delay_max_ms_video != -1) { | 250 if (e2e_delay_ms != -1) { |
| 208 RTC_HISTOGRAM_COUNTS_100000("WebRTC.Video.EndToEndDelayMaxInMs", | 251 RTC_HISTOGRAM_COUNTS_SPARSE_10000( |
| 209 e2e_delay_max_ms_video); | 252 uma_prefix + ".EndToEndDelayInMs" + uma_suffix, e2e_delay_ms); |
| 210 } | 253 LOG(LS_INFO) << uma_prefix << ".EndToEndDelayInMs" << uma_suffix << " " |
| 254 << e2e_delay_ms; | |
| 255 } | |
| 256 int e2e_delay_max_ms = stats.e2e_delay_counter.Max(); | |
| 257 if (e2e_delay_max_ms != -1 && e2e_delay_ms != -1) { | |
| 258 RTC_HISTOGRAM_COUNTS_SPARSE_100000( | |
| 259 uma_prefix + ".EndToEndDelayMaxInMs" + uma_suffix, e2e_delay_max_ms); | |
| 260 LOG(LS_INFO) << uma_prefix << ".EndToEndDelayMaxInMs" << uma_suffix << " " | |
| 261 << e2e_delay_max_ms; | |
| 262 } | |
| 263 int interframe_delay_ms = | |
| 264 stats.interframe_delay_counter.Avg(kMinRequiredSamples); | |
| 265 if (interframe_delay_ms != -1) { | |
| 266 RTC_HISTOGRAM_COUNTS_SPARSE_10000( | |
| 267 uma_prefix + ".InterframeDelayInMs" + uma_suffix, | |
| 268 interframe_delay_ms); | |
| 269 LOG(LS_INFO) << uma_prefix << ".InterframeDelayInMs" << uma_suffix << " " | |
| 270 << interframe_delay_ms; | |
| 271 } | |
| 272 int interframe_delay_max_ms = stats.interframe_delay_counter.Max(); | |
| 273 if (interframe_delay_max_ms != -1 && interframe_delay_ms != -1) { | |
| 274 RTC_HISTOGRAM_COUNTS_SPARSE_10000( | |
| 275 uma_prefix + ".InterframeDelayMaxInMs" + uma_suffix, | |
| 276 interframe_delay_max_ms); | |
| 277 LOG(LS_INFO) << uma_prefix << ".InterframeDelayMaxInMs" << uma_suffix | |
| 278 << " " << interframe_delay_max_ms; | |
| 279 } | |
| 211 | 280 |
| 212 int e2e_delay_max_ms_screenshare = e2e_delay_max_ms_screenshare_; | 281 int width = stats.received_width.Avg(kMinRequiredSamples); |
| 213 if (e2e_delay_max_ms_screenshare != -1) { | 282 if (width != -1) { |
| 214 RTC_HISTOGRAM_COUNTS_100000("WebRTC.Video.Screenshare.EndToEndDelayMaxInMs", | 283 RTC_HISTOGRAM_COUNTS_SPARSE_10000( |
| 215 e2e_delay_max_ms_screenshare); | 284 uma_prefix + ".ReceivedWidthInPixels" + uma_suffix, width); |
| 216 } | 285 LOG(LS_INFO) << uma_prefix << ".ReceivedWidthInPixels" << uma_suffix |
| 286 << " " << width; | |
| 287 } | |
| 217 | 288 |
| 218 int interframe_delay_ms_screenshare = | 289 int height = stats.received_height.Avg(kMinRequiredSamples); |
| 219 interframe_delay_counter_screenshare_.Avg(kMinRequiredSamples); | 290 if (height != -1) { |
| 220 if (interframe_delay_ms_screenshare != -1) { | 291 RTC_HISTOGRAM_COUNTS_SPARSE_10000( |
| 221 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.Screenshare.InterframeDelayInMs", | 292 uma_prefix + ".ReceivedHeightInPixels" + uma_suffix, height); |
| 222 interframe_delay_ms_screenshare); | 293 LOG(LS_INFO) << uma_prefix << ".ReceivedHeightInPixels" << uma_suffix |
| 223 RTC_DCHECK_GE(interframe_delay_max_ms_screenshare_, | 294 << " " << height; |
| 224 interframe_delay_ms_screenshare); | 295 } |
| 225 RTC_HISTOGRAM_COUNTS_10000( | |
| 226 "WebRTC.Video.Screenshare.InterframeDelayMaxInMs", | |
| 227 interframe_delay_max_ms_screenshare_); | |
| 228 } | |
| 229 | 296 |
| 230 int interframe_delay_ms_video = | 297 if (content_type != VideoContentType::UNSPECIFIED) { |
| 231 interframe_delay_counter_video_.Avg(kMinRequiredSamples); | 298 // Don't report these 3 metrics unsliced, as more precise variants |
| 232 if (interframe_delay_ms_video != -1) { | 299 // are reported separately in this method. |
| 233 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.InterframeDelayInMs", | 300 float flow_duration_sec = stats.flow_duration_ms / 1000.0; |
| 234 interframe_delay_ms_video); | 301 if (flow_duration_sec >= metrics::kMinRunTimeInSeconds) { |
| 235 RTC_DCHECK_GE(interframe_delay_max_ms_video_, interframe_delay_ms_video); | 302 int media_bitrate_kbps = static_cast<int>(stats.total_media_bytes * 8 / |
| 236 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.InterframeDelayMaxInMs", | 303 flow_duration_sec / 1000); |
| 237 interframe_delay_max_ms_video_); | 304 RTC_HISTOGRAM_COUNTS_SPARSE_10000( |
| 305 uma_prefix + ".MediaBitrateReceivedInKbps" + uma_suffix, | |
| 306 media_bitrate_kbps); | |
| 307 LOG(LS_INFO) << uma_prefix << ".MediaBitrateReceivedInKbps" | |
| 308 << uma_suffix << " " << media_bitrate_kbps; | |
| 309 } | |
| 310 | |
| 311 int num_total_frames = | |
| 312 stats.frame_counts.key_frames + stats.frame_counts.delta_frames; | |
| 313 if (num_total_frames >= kMinRequiredSamples) { | |
| 314 int num_key_frames = stats.frame_counts.key_frames; | |
| 315 int key_frames_permille = | |
| 316 (num_key_frames * 1000 + num_total_frames / 2) / num_total_frames; | |
| 317 RTC_HISTOGRAM_COUNTS_SPARSE_1000( | |
| 318 uma_prefix + ".KeyFramesReceivedInPermille" + uma_suffix, | |
| 319 key_frames_permille); | |
| 320 LOG(LS_INFO) << uma_prefix << ".KeyFramesReceivedInPermille" | |
| 321 << uma_suffix << " " << key_frames_permille; | |
| 322 } | |
| 323 | |
| 324 int qp = stats.qp_counter.Avg(kMinRequiredSamples); | |
| 325 if (qp != -1) { | |
| 326 RTC_HISTOGRAM_COUNTS_SPARSE_200( | |
| 327 uma_prefix + ".Decoded.Vp8.Qp" + uma_suffix, qp); | |
| 328 LOG(LS_INFO) << uma_prefix << ".Decoded.Vp8.Qp" << uma_suffix << " " | |
| 329 << qp; | |
| 330 } | |
| 331 } | |
| 238 } | 332 } |
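One arithmetic detail in the reporting loop above is worth calling out: `KeyFramesReceivedInPermille` adds half the denominator before the integer division, which rounds to the nearest permille instead of truncating. A small standalone check (illustrative only, values chosen for the example):

```cpp
// Rounded-permille computation as used above: adding num_total_frames / 2
// before dividing rounds to nearest instead of truncating toward zero.
#include <cassert>

int KeyFramesPermille(int num_key_frames, int num_total_frames) {
  return (num_key_frames * 1000 + num_total_frames / 2) / num_total_frames;
}

int main() {
  assert(KeyFramesPermille(7, 300) == 23);   // exact 23.33 -> 23
  assert(KeyFramesPermille(8, 300) == 27);   // exact 26.67 -> 27
  assert(KeyFramesPermille(30, 600) == 50);  // exact 50.0  -> 50
  return 0;
}
```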
| 239 | 333 |
| 240 StreamDataCounters rtp = stats_.rtp_stats; | 334 StreamDataCounters rtp = stats_.rtp_stats; |
| 241 StreamDataCounters rtx; | 335 StreamDataCounters rtx; |
| 242 for (auto it : rtx_stats_) | 336 for (auto it : rtx_stats_) |
| 243 rtx.Add(it.second); | 337 rtx.Add(it.second); |
| 244 StreamDataCounters rtp_rtx = rtp; | 338 StreamDataCounters rtp_rtx = rtp; |
| 245 rtp_rtx.Add(rtx); | 339 rtp_rtx.Add(rtx); |
| 246 int64_t elapsed_sec = | 340 int64_t elapsed_sec = |
| 247 rtp_rtx.TimeSinceFirstPacketInMs(clock_->TimeInMilliseconds()) / 1000; | 341 rtp_rtx.TimeSinceFirstPacketInMs(clock_->TimeInMilliseconds()) / 1000; |
| 248 if (elapsed_sec >= metrics::kMinRunTimeInSeconds) { | 342 if (elapsed_sec >= metrics::kMinRunTimeInSeconds) { |
| 249 RTC_HISTOGRAM_COUNTS_10000( | 343 RTC_HISTOGRAM_COUNTS_10000( |
| 250 "WebRTC.Video.BitrateReceivedInKbps", | 344 "WebRTC.Video.BitrateReceivedInKbps", |
| 251 static_cast<int>(rtp_rtx.transmitted.TotalBytes() * 8 / elapsed_sec / | 345 static_cast<int>(rtp_rtx.transmitted.TotalBytes() * 8 / elapsed_sec / |
| 252 1000)); | 346 1000)); |
| 253 RTC_HISTOGRAM_COUNTS_10000( | 347 int media_bitrate_kbs = |
| 254 "WebRTC.Video.MediaBitrateReceivedInKbps", | 348 static_cast<int>(rtp.MediaPayloadBytes() * 8 / elapsed_sec / 1000); |
| 255 static_cast<int>(rtp.MediaPayloadBytes() * 8 / elapsed_sec / 1000)); | 349 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.MediaBitrateReceivedInKbps", |
| 350 media_bitrate_kbs); | |
| 351 LOG(LS_INFO) << "WebRTC.Video.MediaBitrateReceivedInKbps " | |
| 352 << media_bitrate_kbs; | |
| 256 RTC_HISTOGRAM_COUNTS_10000( | 353 RTC_HISTOGRAM_COUNTS_10000( |
| 257 "WebRTC.Video.PaddingBitrateReceivedInKbps", | 354 "WebRTC.Video.PaddingBitrateReceivedInKbps", |
| 258 static_cast<int>(rtp_rtx.transmitted.padding_bytes * 8 / elapsed_sec / | 355 static_cast<int>(rtp_rtx.transmitted.padding_bytes * 8 / elapsed_sec / |
| 259 1000)); | 356 1000)); |
| 260 RTC_HISTOGRAM_COUNTS_10000( | 357 RTC_HISTOGRAM_COUNTS_10000( |
| 261 "WebRTC.Video.RetransmittedBitrateReceivedInKbps", | 358 "WebRTC.Video.RetransmittedBitrateReceivedInKbps", |
| 262 static_cast<int>(rtp_rtx.retransmitted.TotalBytes() * 8 / elapsed_sec / | 359 static_cast<int>(rtp_rtx.retransmitted.TotalBytes() * 8 / elapsed_sec / |
| 263 1000)); | 360 1000)); |
| 264 if (!rtx_stats_.empty()) { | 361 if (!rtx_stats_.empty()) { |
| 265 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.RtxBitrateReceivedInKbps", | 362 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.RtxBitrateReceivedInKbps", |
| (...skipping 256 matching lines...) | |
| 522 } | 619 } |
| 523 if (total_bytes > last_total_bytes) | 620 if (total_bytes > last_total_bytes) |
| 524 total_byte_tracker_.AddSamples(total_bytes - last_total_bytes); | 621 total_byte_tracker_.AddSamples(total_bytes - last_total_bytes); |
| 525 } | 622 } |
| 526 | 623 |
| 527 void ReceiveStatisticsProxy::OnDecodedFrame(rtc::Optional<uint8_t> qp, | 624 void ReceiveStatisticsProxy::OnDecodedFrame(rtc::Optional<uint8_t> qp, |
| 528 VideoContentType content_type) { | 625 VideoContentType content_type) { |
| 529 uint64_t now = clock_->TimeInMilliseconds(); | 626 uint64_t now = clock_->TimeInMilliseconds(); |
| 530 | 627 |
| 531 rtc::CritScope lock(&crit_); | 628 rtc::CritScope lock(&crit_); |
| 629 | |
| 630 ContentSpecificStats* content_specific_stats = | |
| 631 &content_specific_stats_[content_type]; | |
| 532 ++stats_.frames_decoded; | 632 ++stats_.frames_decoded; |
| 533 if (qp) { | 633 if (qp) { |
| 534 if (!stats_.qp_sum) { | 634 if (!stats_.qp_sum) { |
| 535 if (stats_.frames_decoded != 1) { | 635 if (stats_.frames_decoded != 1) { |
| 536 LOG(LS_WARNING) | 636 LOG(LS_WARNING) |
| 537 << "Frames decoded was not 1 when first qp value was received."; | 637 << "Frames decoded was not 1 when first qp value was received."; |
| 538 stats_.frames_decoded = 1; | 638 stats_.frames_decoded = 1; |
| 539 } | 639 } |
| 540 stats_.qp_sum = rtc::Optional<uint64_t>(0); | 640 stats_.qp_sum = rtc::Optional<uint64_t>(0); |
| 541 } | 641 } |
| 542 *stats_.qp_sum += *qp; | 642 *stats_.qp_sum += *qp; |
| 643 content_specific_stats->qp_counter.Add(*qp); | |
| 543 } else if (stats_.qp_sum) { | 644 } else if (stats_.qp_sum) { |
| 544 LOG(LS_WARNING) | 645 LOG(LS_WARNING) |
| 545 << "QP sum was already set and no QP was given for a frame."; | 646 << "QP sum was already set and no QP was given for a frame."; |
| 546 stats_.qp_sum = rtc::Optional<uint64_t>(); | 647 stats_.qp_sum = rtc::Optional<uint64_t>(); |
| 547 } | 648 } |
| 548 last_content_type_ = content_type; | 649 last_content_type_ = content_type; |
| 549 decode_fps_estimator_.Update(1, now); | 650 decode_fps_estimator_.Update(1, now); |
| 550 if (last_decoded_frame_time_ms_) { | 651 if (last_decoded_frame_time_ms_) { |
| 551 int64_t interframe_delay_ms = now - *last_decoded_frame_time_ms_; | 652 int64_t interframe_delay_ms = now - *last_decoded_frame_time_ms_; |
| 552 RTC_DCHECK_GE(interframe_delay_ms, 0); | 653 RTC_DCHECK_GE(interframe_delay_ms, 0); |
| 553 interframe_delay_max_moving_.Add(interframe_delay_ms, now); | 654 interframe_delay_max_moving_.Add(interframe_delay_ms, now); |
| 554 if (last_content_type_ == VideoContentType::SCREENSHARE) { | 655 content_specific_stats->interframe_delay_counter.Add(interframe_delay_ms); |
| 555 interframe_delay_counter_screenshare_.Add(interframe_delay_ms); | 656 content_specific_stats->flow_duration_ms += interframe_delay_ms; |
| 556 if (interframe_delay_max_ms_screenshare_ < interframe_delay_ms) { | |
| 557 interframe_delay_max_ms_screenshare_ = interframe_delay_ms; | |
| 558 } | |
| 559 } else { | |
| 560 interframe_delay_counter_video_.Add(interframe_delay_ms); | |
| 561 if (interframe_delay_max_ms_video_ < interframe_delay_ms) { | |
| 562 interframe_delay_max_ms_video_ = interframe_delay_ms; | |
| 563 } | |
| 564 } | |
| 565 } | 657 } |
| 566 last_decoded_frame_time_ms_.emplace(now); | 658 last_decoded_frame_time_ms_.emplace(now); |
| 567 } | 659 } |
| 568 | 660 |
| 569 void ReceiveStatisticsProxy::OnRenderedFrame(const VideoFrame& frame) { | 661 void ReceiveStatisticsProxy::OnRenderedFrame(const VideoFrame& frame) { |
| 570 int width = frame.width(); | 662 int width = frame.width(); |
| 571 int height = frame.height(); | 663 int height = frame.height(); |
| 572 RTC_DCHECK_GT(width, 0); | 664 RTC_DCHECK_GT(width, 0); |
| 573 RTC_DCHECK_GT(height, 0); | 665 RTC_DCHECK_GT(height, 0); |
| 574 uint64_t now = clock_->TimeInMilliseconds(); | 666 uint64_t now = clock_->TimeInMilliseconds(); |
| 575 | |
| 576 rtc::CritScope lock(&crit_); | 667 rtc::CritScope lock(&crit_); |
| 668 ContentSpecificStats* content_specific_stats = | |
| 669 &content_specific_stats_[last_content_type_]; | |
| 577 renders_fps_estimator_.Update(1, now); | 670 renders_fps_estimator_.Update(1, now); |
| 578 ++stats_.frames_rendered; | 671 ++stats_.frames_rendered; |
| 579 stats_.width = width; | 672 stats_.width = width; |
| 580 stats_.height = height; | 673 stats_.height = height; |
| 581 render_width_counter_.Add(width); | |
| 582 render_height_counter_.Add(height); | |
| 583 render_fps_tracker_.AddSamples(1); | 674 render_fps_tracker_.AddSamples(1); |
| 584 render_pixel_tracker_.AddSamples(sqrt(width * height)); | 675 render_pixel_tracker_.AddSamples(sqrt(width * height)); |
| 676 content_specific_stats->received_width.Add(width); | |
| 677 content_specific_stats->received_height.Add(height); | |
| 585 | 678 |
| 586 if (frame.ntp_time_ms() > 0) { | 679 if (frame.ntp_time_ms() > 0) { |
| 587 int64_t delay_ms = clock_->CurrentNtpInMilliseconds() - frame.ntp_time_ms(); | 680 int64_t delay_ms = clock_->CurrentNtpInMilliseconds() - frame.ntp_time_ms(); |
| 588 if (delay_ms >= 0) { | 681 if (delay_ms >= 0) { |
| 589 if (last_content_type_ == VideoContentType::SCREENSHARE) { | 682 content_specific_stats->e2e_delay_counter.Add(delay_ms); |
| 590 e2e_delay_max_ms_screenshare_ = | |
| 591 std::max(delay_ms, e2e_delay_max_ms_screenshare_); | |
| 592 e2e_delay_counter_screenshare_.Add(delay_ms); | |
| 593 } else { | |
| 594 e2e_delay_max_ms_video_ = std::max(delay_ms, e2e_delay_max_ms_video_); | |
| 595 e2e_delay_counter_video_.Add(delay_ms); | |
| 596 } | |
| 597 } | 683 } |
| 598 } | 684 } |
| 599 } | 685 } |
| 600 | 686 |
| 601 void ReceiveStatisticsProxy::OnSyncOffsetUpdated(int64_t sync_offset_ms, | 687 void ReceiveStatisticsProxy::OnSyncOffsetUpdated(int64_t sync_offset_ms, |
| 602 double estimated_freq_khz) { | 688 double estimated_freq_khz) { |
| 603 rtc::CritScope lock(&crit_); | 689 rtc::CritScope lock(&crit_); |
| 604 sync_offset_counter_.Add(std::abs(sync_offset_ms)); | 690 sync_offset_counter_.Add(std::abs(sync_offset_ms)); |
| 605 stats_.sync_offset_ms = sync_offset_ms; | 691 stats_.sync_offset_ms = sync_offset_ms; |
| 606 | 692 |
| 607 const double kMaxFreqKhz = 10000.0; | 693 const double kMaxFreqKhz = 10000.0; |
| 608 int offset_khz = kMaxFreqKhz; | 694 int offset_khz = kMaxFreqKhz; |
| 609 // Should not be zero or negative. If so, report max. | 695 // Should not be zero or negative. If so, report max. |
| 610 if (estimated_freq_khz < kMaxFreqKhz && estimated_freq_khz > 0.0) | 696 if (estimated_freq_khz < kMaxFreqKhz && estimated_freq_khz > 0.0) |
| 611 offset_khz = static_cast<int>(std::fabs(estimated_freq_khz - 90.0) + 0.5); | 697 offset_khz = static_cast<int>(std::fabs(estimated_freq_khz - 90.0) + 0.5); |
| 612 | 698 |
| 613 freq_offset_counter_.Add(offset_khz); | 699 freq_offset_counter_.Add(offset_khz); |
| 614 } | 700 } |
| 615 | 701 |
| 616 void ReceiveStatisticsProxy::OnReceiveRatesUpdated(uint32_t bitRate, | 702 void ReceiveStatisticsProxy::OnReceiveRatesUpdated(uint32_t bitRate, |
| 617 uint32_t frameRate) { | 703 uint32_t frameRate) { |
| 618 } | 704 } |
| 619 | 705 |
| 620 void ReceiveStatisticsProxy::OnCompleteFrame(bool is_keyframe, | 706 void ReceiveStatisticsProxy::OnCompleteFrame(bool is_keyframe, |
| 621 size_t size_bytes) { | 707 size_t size_bytes, |
| 708 VideoContentType content_type) { | |
| 622 rtc::CritScope lock(&crit_); | 709 rtc::CritScope lock(&crit_); |
| 623 if (is_keyframe) | 710 if (is_keyframe) |
| 624 ++stats_.frame_counts.key_frames; | 711 ++stats_.frame_counts.key_frames; |
| 625 else | 712 else |
| 626 ++stats_.frame_counts.delta_frames; | 713 ++stats_.frame_counts.delta_frames; |
| 627 | 714 |
| 715 ContentSpecificStats* content_specific_stats = | |
| 716 &content_specific_stats_[content_type]; | |
| 717 | |
| 718 content_specific_stats->total_media_bytes += size_bytes; | |
| 719 if (is_keyframe) | |
| 720 ++content_specific_stats->frame_counts.key_frames; | |
| 721 else | |
| 722 ++content_specific_stats->frame_counts.delta_frames; | |
|
sprang_webrtc
2017/08/28 16:25:02
nit: Use {} for if/else
ilnik
2017/08/29 07:56:27
Done.
| |
| 723 | |
| 628 int64_t now_ms = clock_->TimeInMilliseconds(); | 724 int64_t now_ms = clock_->TimeInMilliseconds(); |
| 629 frame_window_.insert(std::make_pair(now_ms, size_bytes)); | 725 frame_window_.insert(std::make_pair(now_ms, size_bytes)); |
| 630 UpdateFramerate(now_ms); | 726 UpdateFramerate(now_ms); |
| 631 } | 727 } |
| 632 | 728 |
| 633 void ReceiveStatisticsProxy::OnFrameCountsUpdated( | 729 void ReceiveStatisticsProxy::OnFrameCountsUpdated( |
| 634 const FrameCounts& frame_counts) { | 730 const FrameCounts& frame_counts) { |
| 635 rtc::CritScope lock(&crit_); | 731 rtc::CritScope lock(&crit_); |
| 636 stats_.frame_counts = frame_counts; | 732 stats_.frame_counts = frame_counts; |
| 637 } | 733 } |
| (...skipping 20 matching lines...) | |
| 658 // TODO(sprang): Figure out any other state that should be reset. | 754 // TODO(sprang): Figure out any other state that should be reset. |
| 659 | 755 |
| 660 rtc::CritScope lock(&crit_); | 756 rtc::CritScope lock(&crit_); |
| 661 // Don't report inter-frame delay if stream was paused. | 757 // Don't report inter-frame delay if stream was paused. |
| 662 last_decoded_frame_time_ms_.reset(); | 758 last_decoded_frame_time_ms_.reset(); |
| 663 } | 759 } |
| 664 | 760 |
| 665 void ReceiveStatisticsProxy::SampleCounter::Add(int sample) { | 761 void ReceiveStatisticsProxy::SampleCounter::Add(int sample) { |
| 666 sum += sample; | 762 sum += sample; |
| 667 ++num_samples; | 763 ++num_samples; |
| 764 if (!max || *max < sample) { | |
sprang_webrtc (2017/08/28 16:25:02): nit: maybe more readable with if (!max || sample >
ilnik (2017/08/29 07:56:26): Done.
| 765 max.emplace(sample); | |
| 766 } | |
| 767 } | |
| 768 | |
| 769 void ReceiveStatisticsProxy::SampleCounter::Add(const SampleCounter& other) { | |
| 770 sum += other.sum; | |
| 771 num_samples += other.num_samples; | |
| 772 if (other.max && (!max || *max < *other.max)) | |
| 773 max = other.max; | |
| 668 } | 774 } |
| 669 | 775 |
| 670 int ReceiveStatisticsProxy::SampleCounter::Avg( | 776 int ReceiveStatisticsProxy::SampleCounter::Avg( |
| 671 int64_t min_required_samples) const { | 777 int64_t min_required_samples) const { |
| 672 if (num_samples < min_required_samples || num_samples == 0) | 778 if (num_samples < min_required_samples || num_samples == 0) |
| 673 return -1; | 779 return -1; |
| 674 return static_cast<int>(sum / num_samples); | 780 return static_cast<int>(sum / num_samples); |
| 675 } | 781 } |
| 676 | 782 |
| 783 int ReceiveStatisticsProxy::SampleCounter::Max() const { | |
| 784 if (!max) { | |
| 785 return -1; | |
| 786 } else { | |
| 787 return *max; | |
| 788 } | |
sprang_webrtc (2017/08/28 16:25:02): nit: return max.value_or(-1);
ilnik (2017/08/29 07:56:27): Done.
| 789 } | |
| 790 | |
| 677 void ReceiveStatisticsProxy::SampleCounter::Reset() { | 791 void ReceiveStatisticsProxy::SampleCounter::Reset() { |
| 678 num_samples = 0; | 792 num_samples = 0; |
| 679 sum = 0; | 793 sum = 0; |
| 794 max.reset(); | |
| 680 } | 795 } |
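For readers unfamiliar with the `SampleCounter` helper, the sketch below restates its behavior in standalone form, folding in both reviewer nits (the `sample > *max` comparison and `max.value_or(-1)`); it uses `std::optional` as a stand-in for `rtc::Optional` and is illustrative only, not the CL code.

```cpp
// Behavioral sketch of SampleCounter: Avg() is -1 until enough samples have
// arrived, Max() is -1 while no sample was ever added.
#include <cassert>
#include <cstdint>
#include <optional>

struct MiniSampleCounter {
  int64_t sum = 0;
  int64_t num_samples = 0;
  std::optional<int> max;

  void Add(int sample) {
    sum += sample;
    ++num_samples;
    if (!max || sample > *max)
      max = sample;
  }
  int Avg(int64_t min_required_samples) const {
    return (num_samples < min_required_samples || num_samples == 0)
               ? -1
               : static_cast<int>(sum / num_samples);
  }
  int Max() const { return max.value_or(-1); }
};

int main() {
  MiniSampleCounter c;
  assert(c.Max() == -1);
  c.Add(10);
  c.Add(30);
  assert(c.Avg(3) == -1);  // Below the required sample count.
  assert(c.Avg(2) == 20);
  assert(c.Max() == 30);
  return 0;
}
```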
| 681 | 796 |
| 682 void ReceiveStatisticsProxy::OnRttUpdate(int64_t avg_rtt_ms, | 797 void ReceiveStatisticsProxy::OnRttUpdate(int64_t avg_rtt_ms, |
| 683 int64_t max_rtt_ms) { | 798 int64_t max_rtt_ms) { |
| 684 rtc::CritScope lock(&crit_); | 799 rtc::CritScope lock(&crit_); |
| 685 avg_rtt_ms_ = avg_rtt_ms; | 800 avg_rtt_ms_ = avg_rtt_ms; |
| 686 } | 801 } |
| 687 | 802 |
| 803 void ReceiveStatisticsProxy::ContentSpecificStats::Add( | |
| 804 const ContentSpecificStats& other) { | |
| 805 e2e_delay_counter.Add(other.e2e_delay_counter); | |
| 806 interframe_delay_counter.Add(other.interframe_delay_counter); | |
| 807 flow_duration_ms += other.flow_duration_ms; | |
| 808 total_media_bytes += other.total_media_bytes; | |
| 809 received_height.Add(other.received_height); | |
| 810 received_width.Add(other.received_width); | |
| 811 qp_counter.Add(other.qp_counter); | |
| 812 frame_counts.key_frames += other.frame_counts.key_frames; | |
| 813 frame_counts.delta_frames += other.frame_counts.delta_frames; | |
| 814 } | |
| 815 | |
| 688 } // namespace webrtc | 816 } // namespace webrtc |