Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 /* | 1 /* |
| 2 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| 11 #include "webrtc/video/receive_statistics_proxy.h" | 11 #include "webrtc/video/receive_statistics_proxy.h" |
| 12 | 12 |
| 13 #include <algorithm> | 13 #include <algorithm> |
| 14 #include <cmath> | 14 #include <cmath> |
| 15 #include <sstream> | |
| 15 #include <utility> | 16 #include <utility> |
| 16 | 17 |
| 18 #include "webrtc/modules/pacing/alr_detector.h" | |
| 17 #include "webrtc/modules/video_coding/include/video_codec_interface.h" | 19 #include "webrtc/modules/video_coding/include/video_codec_interface.h" |
| 18 #include "webrtc/rtc_base/checks.h" | 20 #include "webrtc/rtc_base/checks.h" |
| 19 #include "webrtc/rtc_base/logging.h" | 21 #include "webrtc/rtc_base/logging.h" |
| 20 #include "webrtc/system_wrappers/include/clock.h" | 22 #include "webrtc/system_wrappers/include/clock.h" |
| 21 #include "webrtc/system_wrappers/include/field_trial.h" | 23 #include "webrtc/system_wrappers/include/field_trial.h" |
| 22 #include "webrtc/system_wrappers/include/metrics.h" | 24 #include "webrtc/system_wrappers/include/metrics.h" |
| 23 | 25 |
| 24 namespace webrtc { | 26 namespace webrtc { |
| 25 namespace { | 27 namespace { |
| 26 // Periodic time interval for processing samples for |freq_offset_counter_|. | 28 // Periodic time interval for processing samples for |freq_offset_counter_|. |
| (...skipping 11 matching lines...) | |
| 38 const int kHighFpsThreshold = 14; | 40 const int kHighFpsThreshold = 14; |
| 39 // For qp and fps variance: | 41 // For qp and fps variance: |
| 40 // Low means low enough to be good, high means high enough to be bad | 42 // Low means low enough to be good, high means high enough to be bad |
| 41 const int kLowQpThresholdVp8 = 60; | 43 const int kLowQpThresholdVp8 = 60; |
| 42 const int kHighQpThresholdVp8 = 70; | 44 const int kHighQpThresholdVp8 = 70; |
| 43 const int kLowVarianceThreshold = 1; | 45 const int kLowVarianceThreshold = 1; |
| 44 const int kHighVarianceThreshold = 2; | 46 const int kHighVarianceThreshold = 2; |
| 45 | 47 |
| 46 // Size of the window we use to calculate the framerate/bitrate. | 48 // Size of the window we use to calculate the framerate/bitrate. |
| 47 const int kRateStatisticsWindowSizeMs = 1000; | 49 const int kRateStatisticsWindowSizeMs = 1000; |
| 50 | |
| 51 std::string UmaPrefixForContentType(VideoContentType content_type) { | |
| 52 std::stringstream ss; | |
| 53 ss << "WebRTC.Video"; | |
| 54 if (content_type.IsScreenshare()) { | |
| 55 ss << ".Screenshare"; | |
| 56 } | |
| 57 return ss.str(); | |
| 58 } | |
| 59 | |
| 60 std::string UmaSpatialSuffixForContentType(VideoContentType content_type) { | |
| 61 std::stringstream ss; | |
| 62 int simulcast_id = content_type.GetSimulcastId(); | |
| 63 if (simulcast_id > 0) { | |
| 64 ss << ".S" << simulcast_id - 1; | |
| 65 } | |
| 66 return ss.str(); | |
| 67 } | |
| 68 | |
| 69 std::string UmaExperimentSuffixConentType(VideoContentType content_type) { | |
sprang_webrtc 2017/08/25 13:56:29: nit: UmaExperimentSuffixForContentType
ilnik 2017/08/25 14:14:27: Done.
| 70 std::stringstream ss; | |
| 71 int experiment_id = content_type.GetExperimentId(); | |
| 72 if (experiment_id > 0) { | |
| 73 ss << ".ExperimentGroup" << experiment_id - 1; | |
| 74 } | |
| 75 return ss.str(); | |
| 76 } | |
| 48 } // namespace | 77 } // namespace |
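
For concreteness, here is a hedged, self-contained sketch (not part of this CL) of how the helpers above compose a sliced metric name. `FakeContentType` and its return values are assumptions chosen for illustration; only the string-building logic mirrors the helpers in the diff.

```cpp
#include <iostream>
#include <string>

// Hypothetical stand-in for VideoContentType, with assumed accessor values.
struct FakeContentType {
  bool IsScreenshare() const { return true; }
  int GetSimulcastId() const { return 2; }   // 0 would mean "no simulcast info".
  int GetExperimentId() const { return 1; }  // 0 would mean "no experiment".
};

int main() {
  FakeContentType content_type;
  // Mirrors UmaPrefixForContentType().
  std::string prefix = "WebRTC.Video";
  if (content_type.IsScreenshare())
    prefix += ".Screenshare";
  // Mirrors UmaSpatialSuffixForContentType(): the suffix is zero-based.
  std::string spatial_suffix;
  if (content_type.GetSimulcastId() > 0)
    spatial_suffix = ".S" + std::to_string(content_type.GetSimulcastId() - 1);
  // Mirrors UmaExperimentSuffixConentType().
  std::string experiment_suffix;
  if (content_type.GetExperimentId() > 0)
    experiment_suffix =
        ".ExperimentGroup" + std::to_string(content_type.GetExperimentId() - 1);
  // Prints "WebRTC.Video.Screenshare.EndToEndDelayInMs.S1".
  std::cout << prefix + ".EndToEndDelayInMs" + spatial_suffix << "\n";
  // Prints "WebRTC.Video.Screenshare.EndToEndDelayInMs.ExperimentGroup0".
  std::cout << prefix + ".EndToEndDelayInMs" + experiment_suffix << "\n";
}
```
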
| 49 | 78 |
| 50 ReceiveStatisticsProxy::ReceiveStatisticsProxy( | 79 ReceiveStatisticsProxy::ReceiveStatisticsProxy( |
| 51 const VideoReceiveStream::Config* config, | 80 const VideoReceiveStream::Config* config, |
| 52 Clock* clock) | 81 Clock* clock) |
| 53 : clock_(clock), | 82 : clock_(clock), |
| 54 config_(*config), | 83 config_(*config), |
| 55 start_ms_(clock->TimeInMilliseconds()), | 84 start_ms_(clock->TimeInMilliseconds()), |
| 56 last_sample_time_(clock->TimeInMilliseconds()), | 85 last_sample_time_(clock->TimeInMilliseconds()), |
| 57 fps_threshold_(kLowFpsThreshold, | 86 fps_threshold_(kLowFpsThreshold, |
| 58 kHighFpsThreshold, | 87 kHighFpsThreshold, |
| 59 kBadFraction, | 88 kBadFraction, |
| 60 kNumMeasurements), | 89 kNumMeasurements), |
| 61 qp_threshold_(kLowQpThresholdVp8, | 90 qp_threshold_(kLowQpThresholdVp8, |
| 62 kHighQpThresholdVp8, | 91 kHighQpThresholdVp8, |
| 63 kBadFraction, | 92 kBadFraction, |
| 64 kNumMeasurements), | 93 kNumMeasurements), |
| 65 variance_threshold_(kLowVarianceThreshold, | 94 variance_threshold_(kLowVarianceThreshold, |
| 66 kHighVarianceThreshold, | 95 kHighVarianceThreshold, |
| 67 kBadFraction, | 96 kBadFraction, |
| 68 kNumMeasurementsVariance), | 97 kNumMeasurementsVariance), |
| 69 num_bad_states_(0), | 98 num_bad_states_(0), |
| 70 num_certain_states_(0), | 99 num_certain_states_(0), |
| 71 // 1000ms window, scale 1000 for ms to s. | 100 // 1000ms window, scale 1000 for ms to s. |
| 72 decode_fps_estimator_(1000, 1000), | 101 decode_fps_estimator_(1000, 1000), |
| 73 renders_fps_estimator_(1000, 1000), | 102 renders_fps_estimator_(1000, 1000), |
| 74 render_fps_tracker_(100, 10u), | 103 render_fps_tracker_(100, 10u), |
| 75 render_pixel_tracker_(100, 10u), | 104 render_pixel_tracker_(100, 10u), |
| 76 total_byte_tracker_(100, 10u), // bucket_interval_ms, bucket_count | 105 total_byte_tracker_(100, 10u), // bucket_interval_ms, bucket_count |
| 77 e2e_delay_max_ms_video_(-1), | |
| 78 e2e_delay_max_ms_screenshare_(-1), | |
| 79 interframe_delay_max_ms_video_(-1), | |
| 80 interframe_delay_max_ms_screenshare_(-1), | |
| 81 freq_offset_counter_(clock, nullptr, kFreqOffsetProcessIntervalMs), | 106 freq_offset_counter_(clock, nullptr, kFreqOffsetProcessIntervalMs), |
| 82 first_report_block_time_ms_(-1), | 107 first_report_block_time_ms_(-1), |
| 83 avg_rtt_ms_(0), | 108 avg_rtt_ms_(0), |
| 84 last_content_type_(VideoContentType::UNSPECIFIED) { | 109 last_content_type_(VideoContentType::UNSPECIFIED) { |
| 85 stats_.ssrc = config_.rtp.remote_ssrc; | 110 stats_.ssrc = config_.rtp.remote_ssrc; |
| 86 // TODO(brandtr): Replace |rtx_stats_| with a single instance of | 111 // TODO(brandtr): Replace |rtx_stats_| with a single instance of |
| 87 // StreamDataCounters. | 112 // StreamDataCounters. |
| 88 if (config_.rtp.rtx_ssrc) { | 113 if (config_.rtp.rtx_ssrc) { |
| 89 rtx_stats_[config_.rtp.rtx_ssrc] = StreamDataCounters(); | 114 rtx_stats_[config_.rtp.rtx_ssrc] = StreamDataCounters(); |
| 90 } | 115 } |
| 91 } | 116 } |
| 92 | 117 |
| 93 ReceiveStatisticsProxy::~ReceiveStatisticsProxy() { | 118 ReceiveStatisticsProxy::~ReceiveStatisticsProxy() { |
| 94 UpdateHistograms(); | 119 UpdateHistograms(); |
| 95 } | 120 } |
| 96 | 121 |
| 97 void ReceiveStatisticsProxy::UpdateHistograms() { | 122 void ReceiveStatisticsProxy::UpdateHistograms() { |
| 98 RTC_HISTOGRAM_COUNTS_100000( | 123 const int kMinRequiredSamples = 200; |
| 99 "WebRTC.Video.ReceiveStreamLifetimeInSeconds", | |
| 100 (clock_->TimeInMilliseconds() - start_ms_) / 1000); | |
| 101 | 124 |
| 102 if (first_report_block_time_ms_ != -1 && | |
| 103 ((clock_->TimeInMilliseconds() - first_report_block_time_ms_) / 1000) >= | |
| 104 metrics::kMinRunTimeInSeconds) { | |
| 105 int fraction_lost = report_block_stats_.FractionLostInPercent(); | |
| 106 if (fraction_lost != -1) { | |
| 107 RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.ReceivedPacketsLostInPercent", | |
| 108 fraction_lost); | |
| 109 LOG(LS_INFO) << "WebRTC.Video.ReceivedPacketsLostInPercent " | |
| 110 << fraction_lost; | |
| 111 } | |
| 112 } | |
| 113 | |
| 114 const int kMinRequiredSamples = 200; | |
| 115 int samples = static_cast<int>(render_fps_tracker_.TotalSampleCount()); | 125 int samples = static_cast<int>(render_fps_tracker_.TotalSampleCount()); |
| 116 if (samples >= kMinRequiredSamples) { | 126 if (samples >= kMinRequiredSamples) { |
| 117 RTC_HISTOGRAM_COUNTS_100("WebRTC.Video.RenderFramesPerSecond", | 127 RTC_HISTOGRAM_COUNTS_100("WebRTC.Video.RenderFramesPerSecond", |
| 118 round(render_fps_tracker_.ComputeTotalRate())); | 128 round(render_fps_tracker_.ComputeTotalRate())); |
| 119 RTC_HISTOGRAM_COUNTS_100000( | 129 RTC_HISTOGRAM_COUNTS_100000( |
| 120 "WebRTC.Video.RenderSqrtPixelsPerSecond", | 130 "WebRTC.Video.RenderSqrtPixelsPerSecond", |
| 121 round(render_pixel_tracker_.ComputeTotalRate())); | 131 round(render_pixel_tracker_.ComputeTotalRate())); |
| 122 } | 132 } |
| 133 | |
| 134 // Slice on content type and simulcast id. | |
| 135 for (auto it : content_specific_stats_) { | |
| 136 auto content_type = it.first; | |
| 137 if (content_type.GetExperimentId() == 0 && | |
| 138 content_type.GetSimulcastId() == 0) { | |
| 139 // The name of a metric sliced on simulcast is the same as one sliced on | |
| 140 // experiment, because neither carries any slice information in this case. | |
| 141 // Don't report the metric here, to avoid duplicating it. | |
| 142 continue; | |
| 143 } | |
| 144 auto stats = it.second; | |
| 145 std::string uma_prefix = UmaPrefixForContentType(content_type); | |
| 146 std::string uma_suffix = UmaSpatialSuffixForContentType(content_type); | |
| 147 // The same line of code can't report different histograms because of how the | |
| 148 // macro is implemented: an additional index must differ for every distinct | |
| 149 // name (a simplified sketch of this pitfall follows the loop). Since the | |
| 150 // experiment can't change during a call, we report 4*2 different slices | |
| 151 // (4 simulcast_ids x 2 content types), all conveniently stored in the 3 lower bits of VideoContentType. | |
| 152 int idx = content_type.content_type & 0x07; | |
| 153 int e2e_delay_ms = stats.e2e_delay_counter.Avg(kMinRequiredSamples); | |
| 154 if (e2e_delay_ms != -1) { | |
| 155 RTC_HISTOGRAMS_COUNTS_10000( | |
| 156 idx, uma_prefix + ".EndToEndDelayInMs" + uma_suffix, e2e_delay_ms); | |
| 157 LOG(LS_INFO) << uma_prefix << ".EndToEndDelayInMs" << uma_suffix << " " | |
| 158 << e2e_delay_ms; | |
| 159 } | |
| 160 int e2e_delay_max_ms = stats.e2e_delay_counter.Max(); | |
| 161 if (e2e_delay_max_ms != -1 && e2e_delay_ms != -1) { | |
| 162 RTC_HISTOGRAMS_COUNTS_100000( | |
| 163 idx, uma_prefix + ".EndToEndDelayMaxInMs" + uma_suffix, | |
| 164 e2e_delay_max_ms); | |
| 165 LOG(LS_INFO) << uma_prefix << ".EndToEndDelayMaxInMs" << uma_suffix << " " | |
| 166 << e2e_delay_max_ms; | |
| 167 } | |
| 168 int interframe_delay_ms = | |
| 169 stats.interframe_delay_counter.Avg(kMinRequiredSamples); | |
| 170 if (interframe_delay_ms != -1) { | |
| 171 RTC_HISTOGRAMS_COUNTS_10000( | |
| 172 idx, uma_prefix + ".InterframeDelayInMs" + uma_suffix, | |
| 173 interframe_delay_ms); | |
| 174 LOG(LS_INFO) << uma_prefix << ".InterframeDelayInMs" << uma_suffix << " " | |
| 175 << interframe_delay_ms; | |
| 176 } | |
| 177 int interframe_delay_max_ms = stats.interframe_delay_counter.Max(); | |
| 178 if (interframe_delay_max_ms != -1 && interframe_delay_ms != -1) { | |
| 179 RTC_HISTOGRAMS_COUNTS_10000( | |
| 180 idx, uma_prefix + ".InterframeDelayMaxInMs" + uma_suffix, | |
| 181 interframe_delay_max_ms); | |
| 182 LOG(LS_INFO) << uma_prefix << ".InterframeDelayMaxInMs" << uma_suffix | |
| 183 << " " << interframe_delay_max_ms; | |
| 184 } | |
| 185 | |
| 186 float flow_duration_sec = stats.flow_duration_ms / 1000.0; | |
| 187 if (flow_duration_sec >= metrics::kMinRunTimeInSeconds) { | |
| 188 int media_bitrate_kbps = static_cast<int>(stats.total_media_bytes * 8 / | |
| 189 flow_duration_sec / 1000); | |
| 190 RTC_HISTOGRAM_COUNTS_10000( | |
| 191 uma_prefix + ".MediaBitrateReceivedInKbps" + uma_suffix, | |
| 192 media_bitrate_kbps); | |
| 193 LOG(LS_INFO) << uma_prefix << ".MediaBitrateReceivedInKbps" << uma_suffix | |
| 194 << " " << media_bitrate_kbps; | |
| 195 } | |
| 196 | |
| 197 int qp = stats.qp_counter.Avg(kMinRequiredSamples); | |
| 198 if (qp != -1) { | |
| 199 RTC_HISTOGRAMS_COUNTS_200( | |
| 200 idx, uma_prefix + ".Decoded.Vp8.Qp" + uma_suffix, qp); | |
| 201 LOG(LS_INFO) << uma_prefix << ".Decoded.Vp8.Qp" << uma_suffix << " " | |
| 202 << qp; | |
| 203 } | |
| 204 | |
| 205 int width = stats.received_width.Avg(kMinRequiredSamples); | |
| 206 if (width != -1) { | |
| 207 RTC_HISTOGRAMS_COUNTS_10000( | |
| 208 idx, uma_prefix + ".ReceivedWidthInPixels" + uma_suffix, width); | |
| 209 LOG(LS_INFO) << uma_prefix << ".ReceivedWidthInPixels" << uma_suffix | |
| 210 << " " << width; | |
| 211 } | |
| 212 | |
| 213 int height = stats.received_height.Avg(kMinRequiredSamples); | |
| 214 if (height != -1) { | |
| 215 RTC_HISTOGRAMS_COUNTS_10000( | |
| 216 idx, uma_prefix + ".ReceivedHeightInPixels" + uma_suffix, height); | |
| 217 LOG(LS_INFO) << uma_prefix << ".ReceivedHeightInPixels" << uma_suffix | |
| 218 << " " << height; | |
| 219 } | |
| 220 } | |
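
The comment in the loop above (new lines 147-149) refers to the call-site caching that the histogram macros rely on. The sketch below is a simplified illustration of that pitfall, not the actual webrtc/system_wrappers/include/metrics.h implementation; `SketchHistogram`, `Registry()` and `SKETCH_HISTOGRAM_ADD` are invented names.

```cpp
#include <cassert>
#include <map>
#include <string>

// Invented types for illustration only.
struct SketchHistogram {
  std::string name;
  void Add(int sample) { /* record |sample| under |name| */ }
};

static std::map<std::string, SketchHistogram>& Registry() {
  static std::map<std::string, SketchHistogram> registry;
  return registry;
}

// One function-local static per macro expansion site: if |name| varied between
// calls at the same site, samples would keep going to whichever histogram was
// created first. An indexed macro variant expands to one site per index, so
// every distinct name used in a loop gets its own cached pointer.
#define SKETCH_HISTOGRAM_ADD(name, sample)                                  \
  do {                                                                      \
    static SketchHistogram* cached =                                        \
        &Registry().emplace((name), SketchHistogram{(name)}).first->second; \
    assert(cached->name == (name));                                         \
    cached->Add(sample);                                                    \
  } while (0)
```

For example, calling `SKETCH_HISTOGRAM_ADD(uma_prefix + ".Decoded.Vp8.Qp" + uma_suffix, qp)` from inside a loop like the one above would trip the assert as soon as the suffix changed between iterations, which is why the indexed RTC_HISTOGRAMS_* variants are used there.
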
| 221 | |
| 222 std::string uma_prefix = UmaPrefixForContentType(last_content_type_); | |
| 223 std::string uma_experiment_suffix = | |
| 224 UmaExperimentSuffixConentType(last_content_type_); | |
sprang_webrtc 2017/08/25 13:56:29: I think these might unfortunately change during th…
ilnik 2017/08/25 14:14:27: Done.
| 225 int idx = (last_content_type_.IsScreenshare() ? 1 : 0) + | |
| 226 (last_content_type_.GetExperimentId() << 1); | |
| 227 | |
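
As a small worked example of the index computed just above (values chosen for illustration): a screenshare stream in experiment group id 1 yields idx = 1 + (1 << 1) = 3, while a camera stream outside any experiment yields idx = 0 + (0 << 1) = 0, so each combination selects a different expansion slot of the indexed RTC_HISTOGRAMS_* macros below.
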
| 228 if (first_report_block_time_ms_ != -1 && | |
| 229 ((clock_->TimeInMilliseconds() - first_report_block_time_ms_) / 1000) >= | |
| 230 metrics::kMinRunTimeInSeconds) { | |
| 231 int fraction_lost = report_block_stats_.FractionLostInPercent(); | |
| 232 if (fraction_lost != -1) { | |
| 233 RTC_HISTOGRAMS_PERCENTAGE( | |
| 234 idx, | |
| 235 uma_prefix + ".ReceivedPacketsLostInPercent" + uma_experiment_suffix, | |
| 236 fraction_lost); | |
| 237 LOG(LS_INFO) << uma_prefix << ".ReceivedPacketsLostInPercent" | |
| 238 << uma_experiment_suffix << " " << fraction_lost; | |
| 239 } | |
| 240 } | |
| 241 | |
| 242 int stream_duration_sec = (clock_->TimeInMilliseconds() - start_ms_) / 1000; | |
| 243 RTC_HISTOGRAMS_COUNTS_100000( | |
| 244 idx, | |
| 245 uma_prefix + ".ReceiveStreamLifetimeInSeconds" + uma_experiment_suffix, | |
| 246 stream_duration_sec); | |
| 247 LOG(LS_INFO) << uma_prefix << ".ReceiveStreamLifetimeInSeconds" | |
| 248 << uma_experiment_suffix << " " << stream_duration_sec; | |
| 249 | |
| 250 int e2e_delay_ms = e2e_delay_counter.Avg(kMinRequiredSamples); | |
| 251 if (e2e_delay_ms != -1) { | |
| 252 RTC_HISTOGRAMS_COUNTS_10000( | |
| 253 idx, uma_prefix + ".EndToEndDelayInMs" + uma_experiment_suffix, | |
| 254 e2e_delay_ms); | |
| 255 LOG(LS_INFO) << uma_prefix << ".EndToEndDelayInMs" << uma_experiment_suffix | |
| 256 << " " << e2e_delay_ms; | |
| 257 } | |
| 258 int e2e_delay_max_ms = e2e_delay_counter.Max(); | |
| 259 if (e2e_delay_max_ms != -1 && e2e_delay_ms != -1) { | |
| 260 RTC_HISTOGRAMS_COUNTS_100000( | |
| 261 idx, uma_prefix + ".EndToEndDelayMaxInMs" + uma_experiment_suffix, | |
| 262 e2e_delay_max_ms); | |
| 263 LOG(LS_INFO) << uma_prefix << ".EndToEndDelayMaxInMs" | |
| 264 << uma_experiment_suffix << " " << e2e_delay_max_ms; | |
| 265 } | |
| 266 int interframe_delay_ms = interframe_delay_counter.Avg(kMinRequiredSamples); | |
| 267 if (interframe_delay_ms != -1) { | |
| 268 RTC_HISTOGRAMS_COUNTS_10000( | |
| 269 idx, uma_prefix + ".InterframeDelayInMs" + uma_experiment_suffix, | |
| 270 interframe_delay_ms); | |
| 271 LOG(LS_INFO) << uma_prefix << ".InterframeDelayInMs" | |
| 272 << uma_experiment_suffix << " " << interframe_delay_ms; | |
| 273 } | |
| 274 int interframe_delay_max_ms = interframe_delay_counter.Max(); | |
| 275 if (interframe_delay_max_ms != -1 && interframe_delay_ms != -1) { | |
| 276 RTC_HISTOGRAMS_COUNTS_10000( | |
| 277 idx, uma_prefix + ".InterframeDelayMaxInMs" + uma_experiment_suffix, | |
| 278 interframe_delay_max_ms); | |
| 279 LOG(LS_INFO) << uma_prefix << ".InterframeDelayMaxInMs" | |
| 280 << uma_experiment_suffix << " " << interframe_delay_max_ms; | |
| 281 } | |
| 282 | |
| 123 int width = render_width_counter_.Avg(kMinRequiredSamples); | 283 int width = render_width_counter_.Avg(kMinRequiredSamples); |
| 124 int height = render_height_counter_.Avg(kMinRequiredSamples); | 284 int height = render_height_counter_.Avg(kMinRequiredSamples); |
| 125 if (width != -1) { | 285 if (width != -1) { |
| 126 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.ReceivedWidthInPixels", width); | 286 RTC_HISTOGRAMS_COUNTS_10000( |
| 127 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.ReceivedHeightInPixels", height); | 287 idx, uma_prefix + ".ReceivedWidthInPixels" + uma_experiment_suffix, |
| 128 LOG(LS_INFO) << "WebRTC.Video.ReceivedWidthInPixels " << width; | 288 width); |
| 129 LOG(LS_INFO) << "WebRTC.Video.ReceivedHeightInPixels " << height; | 289 RTC_HISTOGRAMS_COUNTS_10000( |
| 290 idx, uma_prefix + ".ReceivedHeightInPixels" + uma_experiment_suffix, | |
| 291 height); | |
| 292 LOG(LS_INFO) << uma_prefix << ".ReceivedWidthInPixels" | |
| 293 << uma_experiment_suffix << " " << width; | |
| 294 LOG(LS_INFO) << uma_prefix << ".ReceivedHeightInPixels" | |
| 295 << uma_experiment_suffix << " " << height; | |
| 130 } | 296 } |
| 131 int sync_offset_ms = sync_offset_counter_.Avg(kMinRequiredSamples); | 297 int sync_offset_ms = sync_offset_counter_.Avg(kMinRequiredSamples); |
| 132 if (sync_offset_ms != -1) { | 298 if (sync_offset_ms != -1) { |
| 133 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.AVSyncOffsetInMs", sync_offset_ms); | 299 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.AVSyncOffsetInMs", sync_offset_ms); |
| 134 LOG(LS_INFO) << "WebRTC.Video.AVSyncOffsetInMs " << sync_offset_ms; | 300 LOG(LS_INFO) << "WebRTC.Video.AVSyncOffsetInMs " << sync_offset_ms; |
| 135 } | 301 } |
| 136 AggregatedStats freq_offset_stats = freq_offset_counter_.GetStats(); | 302 AggregatedStats freq_offset_stats = freq_offset_counter_.GetStats(); |
| 137 if (freq_offset_stats.num_samples > 0) { | 303 if (freq_offset_stats.num_samples > 0) { |
| 138 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.RtpToNtpFreqOffsetInKhz", | 304 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.RtpToNtpFreqOffsetInKhz", |
| 139 freq_offset_stats.average); | 305 freq_offset_stats.average); |
| 140 LOG(LS_INFO) << "WebRTC.Video.RtpToNtpFreqOffsetInKhz, " | 306 LOG(LS_INFO) << "WebRTC.Video.RtpToNtpFreqOffsetInKhz, " |
| 141 << freq_offset_stats.ToString(); | 307 << freq_offset_stats.ToString(); |
| 142 } | 308 } |
| 143 | 309 |
| 144 int num_total_frames = | 310 int num_total_frames = |
| 145 stats_.frame_counts.key_frames + stats_.frame_counts.delta_frames; | 311 stats_.frame_counts.key_frames + stats_.frame_counts.delta_frames; |
| 146 if (num_total_frames >= kMinRequiredSamples) { | 312 if (num_total_frames >= kMinRequiredSamples) { |
| 147 int num_key_frames = stats_.frame_counts.key_frames; | 313 int num_key_frames = stats_.frame_counts.key_frames; |
| 148 int key_frames_permille = | 314 int key_frames_permille = |
| 149 (num_key_frames * 1000 + num_total_frames / 2) / num_total_frames; | 315 (num_key_frames * 1000 + num_total_frames / 2) / num_total_frames; |
| 150 RTC_HISTOGRAM_COUNTS_1000("WebRTC.Video.KeyFramesReceivedInPermille", | 316 RTC_HISTOGRAMS_COUNTS_1000( |
| 151 key_frames_permille); | 317 idx, |
| 152 LOG(LS_INFO) << "WebRTC.Video.KeyFramesReceivedInPermille " | 318 uma_prefix + ".KeyFramesReceivedInPermille" + uma_experiment_suffix, |
| 153 << key_frames_permille; | 319 key_frames_permille); |
| 320 LOG(LS_INFO) << uma_prefix << ".KeyFramesReceivedInPermille" | |
| 321 << uma_experiment_suffix << " " << key_frames_permille; | |
| 154 } | 322 } |
| 155 | 323 |
| 156 int qp = qp_counters_.vp8.Avg(kMinRequiredSamples); | 324 int qp = qp_counters_.vp8.Avg(kMinRequiredSamples); |
| 157 if (qp != -1) { | 325 if (qp != -1) { |
| 158 RTC_HISTOGRAM_COUNTS_200("WebRTC.Video.Decoded.Vp8.Qp", qp); | 326 RTC_HISTOGRAMS_COUNTS_200( |
| 159 LOG(LS_INFO) << "WebRTC.Video.Decoded.Vp8.Qp " << qp; | 327 idx, uma_prefix + ".Decoded.Vp8.Qp" + uma_experiment_suffix, qp); |
| 328 LOG(LS_INFO) << uma_prefix << ".Decoded.Vp8.Qp" << uma_experiment_suffix | |
| 329 << " " << qp; | |
| 160 } | 330 } |
| 161 int decode_ms = decode_time_counter_.Avg(kMinRequiredSamples); | 331 int decode_ms = decode_time_counter_.Avg(kMinRequiredSamples); |
| 162 if (decode_ms != -1) { | 332 if (decode_ms != -1) { |
| 163 RTC_HISTOGRAM_COUNTS_1000("WebRTC.Video.DecodeTimeInMs", decode_ms); | 333 RTC_HISTOGRAM_COUNTS_1000("WebRTC.Video.DecodeTimeInMs", decode_ms); |
| 164 LOG(LS_INFO) << "WebRTC.Video.DecodeTimeInMs " << decode_ms; | 334 LOG(LS_INFO) << "WebRTC.Video.DecodeTimeInMs " << decode_ms; |
| 165 } | 335 } |
| 166 int jb_delay_ms = jitter_buffer_delay_counter_.Avg(kMinRequiredSamples); | 336 int jb_delay_ms = jitter_buffer_delay_counter_.Avg(kMinRequiredSamples); |
| 167 if (jb_delay_ms != -1) { | 337 if (jb_delay_ms != -1) { |
| 168 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.JitterBufferDelayInMs", | 338 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.JitterBufferDelayInMs", |
| 169 jb_delay_ms); | 339 jb_delay_ms); |
| 170 LOG(LS_INFO) << "WebRTC.Video.JitterBufferDelayInMs " << jb_delay_ms; | 340 LOG(LS_INFO) << "WebRTC.Video.JitterBufferDelayInMs " << jb_delay_ms; |
| 171 } | 341 } |
| 172 | 342 |
| 173 int target_delay_ms = target_delay_counter_.Avg(kMinRequiredSamples); | 343 int target_delay_ms = target_delay_counter_.Avg(kMinRequiredSamples); |
| 174 if (target_delay_ms != -1) { | 344 if (target_delay_ms != -1) { |
| 175 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.TargetDelayInMs", target_delay_ms); | 345 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.TargetDelayInMs", target_delay_ms); |
| 176 LOG(LS_INFO) << "WebRTC.Video.TargetDelayInMs " << target_delay_ms; | 346 LOG(LS_INFO) << "WebRTC.Video.TargetDelayInMs " << target_delay_ms; |
| 177 } | 347 } |
| 178 int current_delay_ms = current_delay_counter_.Avg(kMinRequiredSamples); | 348 int current_delay_ms = current_delay_counter_.Avg(kMinRequiredSamples); |
| 179 if (current_delay_ms != -1) { | 349 if (current_delay_ms != -1) { |
| 180 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.CurrentDelayInMs", | 350 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.CurrentDelayInMs", |
| 181 current_delay_ms); | 351 current_delay_ms); |
| 182 LOG(LS_INFO) << "WebRTC.Video.CurrentDelayInMs " << current_delay_ms; | 352 LOG(LS_INFO) << "WebRTC.Video.CurrentDelayInMs " << current_delay_ms; |
| 183 } | 353 } |
| 184 int delay_ms = delay_counter_.Avg(kMinRequiredSamples); | 354 int delay_ms = delay_counter_.Avg(kMinRequiredSamples); |
| 185 if (delay_ms != -1) | 355 if (delay_ms != -1) |
| 186 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.OnewayDelayInMs", delay_ms); | 356 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.OnewayDelayInMs", delay_ms); |
| 187 | 357 |
| 188 int e2e_delay_ms_video = e2e_delay_counter_video_.Avg(kMinRequiredSamples); | |
| 189 if (e2e_delay_ms_video != -1) { | |
| 190 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.EndToEndDelayInMs", | |
| 191 e2e_delay_ms_video); | |
| 192 LOG(LS_INFO) << "WebRTC.Video.EndToEndDelayInMs " << e2e_delay_ms_video; | |
| 193 } | |
| 194 | |
| 195 int e2e_delay_ms_screenshare = | |
| 196 e2e_delay_counter_screenshare_.Avg(kMinRequiredSamples); | |
| 197 if (e2e_delay_ms_screenshare != -1) { | |
| 198 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.Screenshare.EndToEndDelayInMs", | |
| 199 e2e_delay_ms_screenshare); | |
| 200 } | |
| 201 | |
| 202 int e2e_delay_max_ms_video = e2e_delay_max_ms_video_; | |
| 203 if (e2e_delay_max_ms_video != -1) { | |
| 204 RTC_HISTOGRAM_COUNTS_100000("WebRTC.Video.EndToEndDelayMaxInMs", | |
| 205 e2e_delay_max_ms_video); | |
| 206 } | |
| 207 | |
| 208 int e2e_delay_max_ms_screenshare = e2e_delay_max_ms_screenshare_; | |
| 209 if (e2e_delay_max_ms_screenshare != -1) { | |
| 210 RTC_HISTOGRAM_COUNTS_100000("WebRTC.Video.Screenshare.EndToEndDelayMaxInMs", | |
| 211 e2e_delay_max_ms_screenshare); | |
| 212 } | |
| 213 | |
| 214 int interframe_delay_ms_screenshare = | |
| 215 interframe_delay_counter_screenshare_.Avg(kMinRequiredSamples); | |
| 216 if (interframe_delay_ms_screenshare != -1) { | |
| 217 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.Screenshare.InterframeDelayInMs", | |
| 218 interframe_delay_ms_screenshare); | |
| 219 RTC_DCHECK_GE(interframe_delay_max_ms_screenshare_, | |
| 220 interframe_delay_ms_screenshare); | |
| 221 RTC_HISTOGRAM_COUNTS_10000( | |
| 222 "WebRTC.Video.Screenshare.InterframeDelayMaxInMs", | |
| 223 interframe_delay_max_ms_screenshare_); | |
| 224 } | |
| 225 | |
| 226 int interframe_delay_ms_video = | |
| 227 interframe_delay_counter_video_.Avg(kMinRequiredSamples); | |
| 228 if (interframe_delay_ms_video != -1) { | |
| 229 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.InterframeDelayInMs", | |
| 230 interframe_delay_ms_video); | |
| 231 RTC_DCHECK_GE(interframe_delay_max_ms_video_, interframe_delay_ms_video); | |
| 232 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.InterframeDelayMaxInMs", | |
| 233 interframe_delay_max_ms_video_); | |
| 234 } | |
| 235 | |
| 236 StreamDataCounters rtp = stats_.rtp_stats; | 358 StreamDataCounters rtp = stats_.rtp_stats; |
| 237 StreamDataCounters rtx; | 359 StreamDataCounters rtx; |
| 238 for (auto it : rtx_stats_) | 360 for (auto it : rtx_stats_) |
| 239 rtx.Add(it.second); | 361 rtx.Add(it.second); |
| 240 StreamDataCounters rtp_rtx = rtp; | 362 StreamDataCounters rtp_rtx = rtp; |
| 241 rtp_rtx.Add(rtx); | 363 rtp_rtx.Add(rtx); |
| 242 int64_t elapsed_sec = | 364 int64_t elapsed_sec = |
| 243 rtp_rtx.TimeSinceFirstPacketInMs(clock_->TimeInMilliseconds()) / 1000; | 365 rtp_rtx.TimeSinceFirstPacketInMs(clock_->TimeInMilliseconds()) / 1000; |
| 244 if (elapsed_sec >= metrics::kMinRunTimeInSeconds) { | 366 if (elapsed_sec >= metrics::kMinRunTimeInSeconds) { |
| 245 RTC_HISTOGRAM_COUNTS_10000( | 367 RTC_HISTOGRAM_COUNTS_10000( |
| 246 "WebRTC.Video.BitrateReceivedInKbps", | 368 "WebRTC.Video.BitrateReceivedInKbps", |
| 247 static_cast<int>(rtp_rtx.transmitted.TotalBytes() * 8 / elapsed_sec / | 369 static_cast<int>(rtp_rtx.transmitted.TotalBytes() * 8 / elapsed_sec / |
| 248 1000)); | 370 1000)); |
| 249 RTC_HISTOGRAM_COUNTS_10000( | 371 int media_bitrate_kbs = |
| 250 "WebRTC.Video.MediaBitrateReceivedInKbps", | 372 static_cast<int>(rtp.MediaPayloadBytes() * 8 / elapsed_sec / 1000); |
| 251 static_cast<int>(rtp.MediaPayloadBytes() * 8 / elapsed_sec / 1000)); | 373 RTC_HISTOGRAMS_COUNTS_10000( |
| 374 idx, uma_prefix + ".MediaBitrateReceivedInKbps" + uma_experiment_suffix, | |
| 375 media_bitrate_kbs); | |
| 376 LOG(LS_INFO) << uma_prefix << ".MediaBitrateReceivedInKbps" | |
| 377 << uma_experiment_suffix << " " << media_bitrate_kbs; | |
| 252 RTC_HISTOGRAM_COUNTS_10000( | 378 RTC_HISTOGRAM_COUNTS_10000( |
| 253 "WebRTC.Video.PaddingBitrateReceivedInKbps", | 379 "WebRTC.Video.PaddingBitrateReceivedInKbps", |
| 254 static_cast<int>(rtp_rtx.transmitted.padding_bytes * 8 / elapsed_sec / | 380 static_cast<int>(rtp_rtx.transmitted.padding_bytes * 8 / elapsed_sec / |
| 255 1000)); | 381 1000)); |
| 256 RTC_HISTOGRAM_COUNTS_10000( | 382 RTC_HISTOGRAM_COUNTS_10000( |
| 257 "WebRTC.Video.RetransmittedBitrateReceivedInKbps", | 383 "WebRTC.Video.RetransmittedBitrateReceivedInKbps", |
| 258 static_cast<int>(rtp_rtx.retransmitted.TotalBytes() * 8 / elapsed_sec / | 384 static_cast<int>(rtp_rtx.retransmitted.TotalBytes() * 8 / elapsed_sec / |
| 259 1000)); | 385 1000)); |
| 260 if (!rtx_stats_.empty()) { | 386 if (!rtx_stats_.empty()) { |
| 261 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.RtxBitrateReceivedInKbps", | 387 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.RtxBitrateReceivedInKbps", |
| (...skipping 254 matching lines...) | |
| 516 } | 642 } |
| 517 if (total_bytes > last_total_bytes) | 643 if (total_bytes > last_total_bytes) |
| 518 total_byte_tracker_.AddSamples(total_bytes - last_total_bytes); | 644 total_byte_tracker_.AddSamples(total_bytes - last_total_bytes); |
| 519 } | 645 } |
| 520 | 646 |
| 521 void ReceiveStatisticsProxy::OnDecodedFrame(rtc::Optional<uint8_t> qp, | 647 void ReceiveStatisticsProxy::OnDecodedFrame(rtc::Optional<uint8_t> qp, |
| 522 VideoContentType content_type) { | 648 VideoContentType content_type) { |
| 523 uint64_t now = clock_->TimeInMilliseconds(); | 649 uint64_t now = clock_->TimeInMilliseconds(); |
| 524 | 650 |
| 525 rtc::CritScope lock(&crit_); | 651 rtc::CritScope lock(&crit_); |
| 652 | |
| 653 ContentSpecificStats* content_specific_stats = | |
| 654 &content_specific_stats_[content_type]; | |
| 526 ++stats_.frames_decoded; | 655 ++stats_.frames_decoded; |
| 527 if (qp) { | 656 if (qp) { |
| 528 if (!stats_.qp_sum) { | 657 if (!stats_.qp_sum) { |
| 529 if (stats_.frames_decoded != 1) { | 658 if (stats_.frames_decoded != 1) { |
| 530 LOG(LS_WARNING) | 659 LOG(LS_WARNING) |
| 531 << "Frames decoded was not 1 when first qp value was received."; | 660 << "Frames decoded was not 1 when first qp value was received."; |
| 532 stats_.frames_decoded = 1; | 661 stats_.frames_decoded = 1; |
| 533 } | 662 } |
| 534 stats_.qp_sum = rtc::Optional<uint64_t>(0); | 663 stats_.qp_sum = rtc::Optional<uint64_t>(0); |
| 535 } | 664 } |
| 536 *stats_.qp_sum += *qp; | 665 *stats_.qp_sum += *qp; |
| 666 content_specific_stats->qp_counter.Add(*qp); | |
| 537 } else if (stats_.qp_sum) { | 667 } else if (stats_.qp_sum) { |
| 538 LOG(LS_WARNING) | 668 LOG(LS_WARNING) |
| 539 << "QP sum was already set and no QP was given for a frame."; | 669 << "QP sum was already set and no QP was given for a frame."; |
| 540 stats_.qp_sum = rtc::Optional<uint64_t>(); | 670 stats_.qp_sum = rtc::Optional<uint64_t>(); |
| 541 } | 671 } |
| 542 last_content_type_ = content_type; | 672 last_content_type_ = content_type; |
| 543 decode_fps_estimator_.Update(1, now); | 673 decode_fps_estimator_.Update(1, now); |
| 544 if (last_decoded_frame_time_ms_) { | 674 if (last_decoded_frame_time_ms_) { |
| 545 int64_t interframe_delay_ms = now - *last_decoded_frame_time_ms_; | 675 int64_t interframe_delay_ms = now - *last_decoded_frame_time_ms_; |
| 546 RTC_DCHECK_GE(interframe_delay_ms, 0); | 676 RTC_DCHECK_GE(interframe_delay_ms, 0); |
| 547 stats_.interframe_delay_sum_ms += interframe_delay_ms; | 677 stats_.interframe_delay_sum_ms += interframe_delay_ms; |
| 548 if (last_content_type_ == VideoContentType::SCREENSHARE) { | 678 content_specific_stats->interframe_delay_counter.Add(interframe_delay_ms); |
| 549 interframe_delay_counter_screenshare_.Add(interframe_delay_ms); | 679 content_specific_stats->flow_duration_ms += interframe_delay_ms; |
| 550 if (interframe_delay_max_ms_screenshare_ < interframe_delay_ms) { | 680 interframe_delay_counter.Add(interframe_delay_ms); |
| 551 interframe_delay_max_ms_screenshare_ = interframe_delay_ms; | |
| 552 } | |
| 553 } else { | |
| 554 interframe_delay_counter_video_.Add(interframe_delay_ms); | |
| 555 if (interframe_delay_max_ms_video_ < interframe_delay_ms) { | |
| 556 interframe_delay_max_ms_video_ = interframe_delay_ms; | |
| 557 } | |
| 558 } | |
| 559 } | 681 } |
| 560 last_decoded_frame_time_ms_.emplace(now); | 682 last_decoded_frame_time_ms_.emplace(now); |
| 561 } | 683 } |
| 562 | 684 |
| 563 void ReceiveStatisticsProxy::OnRenderedFrame(const VideoFrame& frame) { | 685 void ReceiveStatisticsProxy::OnRenderedFrame(const VideoFrame& frame) { |
| 564 int width = frame.width(); | 686 int width = frame.width(); |
| 565 int height = frame.height(); | 687 int height = frame.height(); |
| 566 RTC_DCHECK_GT(width, 0); | 688 RTC_DCHECK_GT(width, 0); |
| 567 RTC_DCHECK_GT(height, 0); | 689 RTC_DCHECK_GT(height, 0); |
| 568 uint64_t now = clock_->TimeInMilliseconds(); | 690 uint64_t now = clock_->TimeInMilliseconds(); |
| 569 | |
| 570 rtc::CritScope lock(&crit_); | 691 rtc::CritScope lock(&crit_); |
| 692 ContentSpecificStats* content_specific_stats = | |
| 693 &content_specific_stats_[last_content_type_]; | |
| 571 renders_fps_estimator_.Update(1, now); | 694 renders_fps_estimator_.Update(1, now); |
| 572 ++stats_.frames_rendered; | 695 ++stats_.frames_rendered; |
| 573 stats_.width = width; | 696 stats_.width = width; |
| 574 stats_.height = height; | 697 stats_.height = height; |
| 575 render_width_counter_.Add(width); | 698 render_width_counter_.Add(width); |
| 576 render_height_counter_.Add(height); | 699 render_height_counter_.Add(height); |
| 577 render_fps_tracker_.AddSamples(1); | 700 render_fps_tracker_.AddSamples(1); |
| 578 render_pixel_tracker_.AddSamples(sqrt(width * height)); | 701 render_pixel_tracker_.AddSamples(sqrt(width * height)); |
| 702 content_specific_stats->received_width.Add(width); | |
| 703 content_specific_stats->received_height.Add(height); | |
| 579 | 704 |
| 580 if (frame.ntp_time_ms() > 0) { | 705 if (frame.ntp_time_ms() > 0) { |
| 581 int64_t delay_ms = clock_->CurrentNtpInMilliseconds() - frame.ntp_time_ms(); | 706 int64_t delay_ms = clock_->CurrentNtpInMilliseconds() - frame.ntp_time_ms(); |
| 582 if (delay_ms >= 0) { | 707 if (delay_ms >= 0) { |
| 583 if (last_content_type_ == VideoContentType::SCREENSHARE) { | 708 content_specific_stats->e2e_delay_counter.Add(delay_ms); |
| 584 e2e_delay_max_ms_screenshare_ = | 709 e2e_delay_counter.Add(delay_ms); |
| 585 std::max(delay_ms, e2e_delay_max_ms_screenshare_); | |
| 586 e2e_delay_counter_screenshare_.Add(delay_ms); | |
| 587 } else { | |
| 588 e2e_delay_max_ms_video_ = std::max(delay_ms, e2e_delay_max_ms_video_); | |
| 589 e2e_delay_counter_video_.Add(delay_ms); | |
| 590 } | |
| 591 } | 710 } |
| 592 } | 711 } |
| 593 } | 712 } |
| 594 | 713 |
| 595 void ReceiveStatisticsProxy::OnSyncOffsetUpdated(int64_t sync_offset_ms, | 714 void ReceiveStatisticsProxy::OnSyncOffsetUpdated(int64_t sync_offset_ms, |
| 596 double estimated_freq_khz) { | 715 double estimated_freq_khz) { |
| 597 rtc::CritScope lock(&crit_); | 716 rtc::CritScope lock(&crit_); |
| 598 sync_offset_counter_.Add(std::abs(sync_offset_ms)); | 717 sync_offset_counter_.Add(std::abs(sync_offset_ms)); |
| 599 stats_.sync_offset_ms = sync_offset_ms; | 718 stats_.sync_offset_ms = sync_offset_ms; |
| 600 | 719 |
| (...skipping 11 matching lines...) | |
| 612 } | 731 } |
| 613 | 732 |
| 614 void ReceiveStatisticsProxy::OnCompleteFrame(bool is_keyframe, | 733 void ReceiveStatisticsProxy::OnCompleteFrame(bool is_keyframe, |
| 615 size_t size_bytes) { | 734 size_t size_bytes) { |
| 616 rtc::CritScope lock(&crit_); | 735 rtc::CritScope lock(&crit_); |
| 617 if (is_keyframe) | 736 if (is_keyframe) |
| 618 ++stats_.frame_counts.key_frames; | 737 ++stats_.frame_counts.key_frames; |
| 619 else | 738 else |
| 620 ++stats_.frame_counts.delta_frames; | 739 ++stats_.frame_counts.delta_frames; |
| 621 | 740 |
| 741 // TODO(ilnik): May be incorrect for the first frame of a stream with a new | |
| 742 // content type, which makes the |total_media_bytes| count slightly off. | |
| 743 ContentSpecificStats* content_specific_stats = | |
| 744 &content_specific_stats_[last_content_type_]; | |
| 745 | |
| 746 content_specific_stats->total_media_bytes += size_bytes; | |
| 747 | |
| 622 int64_t now_ms = clock_->TimeInMilliseconds(); | 748 int64_t now_ms = clock_->TimeInMilliseconds(); |
| 623 frame_window_.insert(std::make_pair(now_ms, size_bytes)); | 749 frame_window_.insert(std::make_pair(now_ms, size_bytes)); |
| 624 UpdateFramerate(now_ms); | 750 UpdateFramerate(now_ms); |
| 625 } | 751 } |
| 626 | 752 |
| 627 void ReceiveStatisticsProxy::OnFrameCountsUpdated( | 753 void ReceiveStatisticsProxy::OnFrameCountsUpdated( |
| 628 const FrameCounts& frame_counts) { | 754 const FrameCounts& frame_counts) { |
| 629 rtc::CritScope lock(&crit_); | 755 rtc::CritScope lock(&crit_); |
| 630 stats_.frame_counts = frame_counts; | 756 stats_.frame_counts = frame_counts; |
| 631 } | 757 } |
| (...skipping 20 matching lines...) | |
| 652 // TODO(sprang): Figure out any other state that should be reset. | 778 // TODO(sprang): Figure out any other state that should be reset. |
| 653 | 779 |
| 654 rtc::CritScope lock(&crit_); | 780 rtc::CritScope lock(&crit_); |
| 655 // Don't report inter-frame delay if stream was paused. | 781 // Don't report inter-frame delay if stream was paused. |
| 656 last_decoded_frame_time_ms_.reset(); | 782 last_decoded_frame_time_ms_.reset(); |
| 657 } | 783 } |
| 658 | 784 |
| 659 void ReceiveStatisticsProxy::SampleCounter::Add(int sample) { | 785 void ReceiveStatisticsProxy::SampleCounter::Add(int sample) { |
| 660 sum += sample; | 786 sum += sample; |
| 661 ++num_samples; | 787 ++num_samples; |
| 788 if (!max || *max < sample) { | |
| 789 max.emplace(sample); | |
| 790 } | |
| 662 } | 791 } |
| 663 | 792 |
| 664 int ReceiveStatisticsProxy::SampleCounter::Avg( | 793 int ReceiveStatisticsProxy::SampleCounter::Avg( |
| 665 int64_t min_required_samples) const { | 794 int64_t min_required_samples) const { |
| 666 if (num_samples < min_required_samples || num_samples == 0) | 795 if (num_samples < min_required_samples || num_samples == 0) |
| 667 return -1; | 796 return -1; |
| 668 return static_cast<int>(sum / num_samples); | 797 return static_cast<int>(sum / num_samples); |
| 669 } | 798 } |
| 670 | 799 |
| 800 int ReceiveStatisticsProxy::SampleCounter::Max() const { | |
| 801 if (!max) { | |
| 802 return -1; | |
| 803 } else { | |
| 804 return *max; | |
| 805 } | |
| 806 } | |
| 807 | |
| 671 void ReceiveStatisticsProxy::SampleCounter::Reset() { | 808 void ReceiveStatisticsProxy::SampleCounter::Reset() { |
| 672 num_samples = 0; | 809 num_samples = 0; |
| 673 sum = 0; | 810 sum = 0; |
| 811 max.reset(); | |
| 674 } | 812 } |
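
The SampleCounter declaration (a struct nested in ReceiveStatisticsProxy) lives in receive_statistics_proxy.h, which is not part of this diff. The sketch below is an inferred shape consistent with the definitions above, not a copy of the actual header.

```cpp
// Assumed declaration, inferred from the member functions defined in this file.
struct SampleCounter {
  void Add(int sample);
  int Avg(int64_t min_required_samples) const;  // -1 until enough samples.
  int Max() const;                              // -1 until at least one sample.
  void Reset();

  int64_t sum = 0;
  int64_t num_samples = 0;
  rtc::Optional<int> max;  // Largest sample seen since the last Reset().
};
```
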
| 675 | 813 |
| 676 void ReceiveStatisticsProxy::OnRttUpdate(int64_t avg_rtt_ms, | 814 void ReceiveStatisticsProxy::OnRttUpdate(int64_t avg_rtt_ms, |
| 677 int64_t max_rtt_ms) { | 815 int64_t max_rtt_ms) { |
| 678 rtc::CritScope lock(&crit_); | 816 rtc::CritScope lock(&crit_); |
| 679 avg_rtt_ms_ = avg_rtt_ms; | 817 avg_rtt_ms_ = avg_rtt_ms; |
| 680 } | 818 } |
| 681 | 819 |
| 682 } // namespace webrtc | 820 } // namespace webrtc |