OLD | NEW |
1 /* | 1 /* |
2 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
11 #include "webrtc/video/receive_statistics_proxy.h" | 11 #include "webrtc/video/receive_statistics_proxy.h" |
12 | 12 |
13 #include <algorithm> | 13 #include <algorithm> |
14 #include <cmath> | 14 #include <cmath> |
| 15 #include <sstream> |
15 #include <utility> | 16 #include <utility> |
16 | 17 |
| 18 #include "webrtc/modules/pacing/alr_detector.h" |
17 #include "webrtc/modules/video_coding/include/video_codec_interface.h" | 19 #include "webrtc/modules/video_coding/include/video_codec_interface.h" |
18 #include "webrtc/rtc_base/checks.h" | 20 #include "webrtc/rtc_base/checks.h" |
19 #include "webrtc/rtc_base/logging.h" | 21 #include "webrtc/rtc_base/logging.h" |
20 #include "webrtc/system_wrappers/include/clock.h" | 22 #include "webrtc/system_wrappers/include/clock.h" |
21 #include "webrtc/system_wrappers/include/field_trial.h" | 23 #include "webrtc/system_wrappers/include/field_trial.h" |
22 #include "webrtc/system_wrappers/include/metrics.h" | 24 #include "webrtc/system_wrappers/include/metrics.h" |
23 | 25 |
24 namespace webrtc { | 26 namespace webrtc { |
25 namespace { | 27 namespace { |
26 // Periodic time interval for processing samples for |freq_offset_counter_|. | 28 // Periodic time interval for processing samples for |freq_offset_counter_|. |
(...skipping 14 matching lines...) |
41 const int kLowQpThresholdVp8 = 60; | 43 const int kLowQpThresholdVp8 = 60; |
42 const int kHighQpThresholdVp8 = 70; | 44 const int kHighQpThresholdVp8 = 70; |
43 const int kLowVarianceThreshold = 1; | 45 const int kLowVarianceThreshold = 1; |
44 const int kHighVarianceThreshold = 2; | 46 const int kHighVarianceThreshold = 2; |
45 | 47 |
46 // Some metrics are reported as a maximum over this period. | 48 // Some metrics are reported as a maximum over this period. |
47 const int kMovingMaxWindowMs = 10000; | 49 const int kMovingMaxWindowMs = 10000; |
48 | 50 |
49 // How large window we use to calculate the framerate/bitrate. | 51 // How large window we use to calculate the framerate/bitrate. |
50 const int kRateStatisticsWindowSizeMs = 1000; | 52 const int kRateStatisticsWindowSizeMs = 1000; |
| 53 |
| 54 std::string UmaPrefixForContentType(VideoContentType content_type) { |
| 55 std::stringstream ss; |
| 56 ss << "WebRTC.Video"; |
| 57 if (content_type.IsScreenshare()) { |
| 58 ss << ".Screenshare"; |
| 59 } |
| 60 return ss.str(); |
| 61 } |
| 62 |
| 63 std::string UmaSuffixForContentType(VideoContentType content_type) { |
| 64 std::stringstream ss; |
| 65 int simulcast_id = content_type.GetSimulcastId(); |
| 66 if (simulcast_id > 0) { |
| 67 ss << ".S" << simulcast_id - 1; |
| 68 } |
| 69 int experiment_id = content_type.GetExperimentId(); |
| 70 if (experiment_id > 0) { |
| 71 ss << ".ExperimentGroup" << experiment_id - 1; |
| 72 } |
| 73 return ss.str(); |
| 74 } |
51 } // namespace | 75 } // namespace |
52 | 76 |
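For orientation, the reporting code further down composes these two helpers as prefix + "." + metric + suffix, yielding names such as "WebRTC.Video.Screenshare.EndToEndDelayInMs.S1". A minimal standalone sketch of that composition, with plain bool/int parameters standing in for the VideoContentType accessors (the function and parameter names here are illustrative, not the real API):

  #include <iostream>
  #include <sstream>
  #include <string>

  // Illustrative stand-in for UmaPrefixForContentType + UmaSuffixForContentType.
  std::string FullHistogramName(const std::string& metric,
                                bool screenshare,
                                int simulcast_id,
                                int experiment_id) {
    std::stringstream ss;
    ss << "WebRTC.Video";
    if (screenshare)
      ss << ".Screenshare";
    ss << "." << metric;
    if (simulcast_id > 0)
      ss << ".S" << simulcast_id - 1;  // Simulcast slices are reported 0-based.
    if (experiment_id > 0)
      ss << ".ExperimentGroup" << experiment_id - 1;
    return ss.str();
  }

  int main() {
    // Prints "WebRTC.Video.Screenshare.EndToEndDelayInMs.S1".
    std::cout << FullHistogramName("EndToEndDelayInMs", true, 2, 0) << std::endl;
  }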
53 ReceiveStatisticsProxy::ReceiveStatisticsProxy( | 77 ReceiveStatisticsProxy::ReceiveStatisticsProxy( |
54 const VideoReceiveStream::Config* config, | 78 const VideoReceiveStream::Config* config, |
55 Clock* clock) | 79 Clock* clock) |
56 : clock_(clock), | 80 : clock_(clock), |
57 config_(*config), | 81 config_(*config), |
58 start_ms_(clock->TimeInMilliseconds()), | 82 start_ms_(clock->TimeInMilliseconds()), |
59 last_sample_time_(clock->TimeInMilliseconds()), | 83 last_sample_time_(clock->TimeInMilliseconds()), |
60 fps_threshold_(kLowFpsThreshold, | 84 fps_threshold_(kLowFpsThreshold, |
61 kHighFpsThreshold, | 85 kHighFpsThreshold, |
62 kBadFraction, | 86 kBadFraction, |
63 kNumMeasurements), | 87 kNumMeasurements), |
64 qp_threshold_(kLowQpThresholdVp8, | 88 qp_threshold_(kLowQpThresholdVp8, |
65 kHighQpThresholdVp8, | 89 kHighQpThresholdVp8, |
66 kBadFraction, | 90 kBadFraction, |
67 kNumMeasurements), | 91 kNumMeasurements), |
68 variance_threshold_(kLowVarianceThreshold, | 92 variance_threshold_(kLowVarianceThreshold, |
69 kHighVarianceThreshold, | 93 kHighVarianceThreshold, |
70 kBadFraction, | 94 kBadFraction, |
71 kNumMeasurementsVariance), | 95 kNumMeasurementsVariance), |
72 num_bad_states_(0), | 96 num_bad_states_(0), |
73 num_certain_states_(0), | 97 num_certain_states_(0), |
74 // 1000ms window, scale 1000 for ms to s. | 98 // 1000ms window, scale 1000 for ms to s. |
75 decode_fps_estimator_(1000, 1000), | 99 decode_fps_estimator_(1000, 1000), |
76 renders_fps_estimator_(1000, 1000), | 100 renders_fps_estimator_(1000, 1000), |
77 render_fps_tracker_(100, 10u), | 101 render_fps_tracker_(100, 10u), |
78 render_pixel_tracker_(100, 10u), | 102 render_pixel_tracker_(100, 10u), |
79 total_byte_tracker_(100, 10u), // bucket_interval_ms, bucket_count | 103 total_byte_tracker_(100, 10u), // bucket_interval_ms, bucket_count |
80 e2e_delay_max_ms_video_(-1), | |
81 e2e_delay_max_ms_screenshare_(-1), | |
82 interframe_delay_max_ms_video_(-1), | |
83 interframe_delay_max_ms_screenshare_(-1), | |
84 interframe_delay_max_moving_(kMovingMaxWindowMs), | 104 interframe_delay_max_moving_(kMovingMaxWindowMs), |
85 freq_offset_counter_(clock, nullptr, kFreqOffsetProcessIntervalMs), | 105 freq_offset_counter_(clock, nullptr, kFreqOffsetProcessIntervalMs), |
86 first_report_block_time_ms_(-1), | 106 first_report_block_time_ms_(-1), |
87 avg_rtt_ms_(0), | 107 avg_rtt_ms_(0), |
88 last_content_type_(VideoContentType::UNSPECIFIED) { | 108 last_content_type_(VideoContentType::UNSPECIFIED) { |
89 stats_.ssrc = config_.rtp.remote_ssrc; | 109 stats_.ssrc = config_.rtp.remote_ssrc; |
90 // TODO(brandtr): Replace |rtx_stats_| with a single instance of | 110 // TODO(brandtr): Replace |rtx_stats_| with a single instance of |
91 // StreamDataCounters. | 111 // StreamDataCounters. |
92 if (config_.rtp.rtx_ssrc) { | 112 if (config_.rtp.rtx_ssrc) { |
93 rtx_stats_[config_.rtp.rtx_ssrc] = StreamDataCounters(); | 113 rtx_stats_[config_.rtp.rtx_ssrc] = StreamDataCounters(); |
94 } | 114 } |
95 } | 115 } |
96 | 116 |
97 ReceiveStatisticsProxy::~ReceiveStatisticsProxy() { | 117 ReceiveStatisticsProxy::~ReceiveStatisticsProxy() { |
98 UpdateHistograms(); | 118 UpdateHistograms(); |
99 } | 119 } |
100 | 120 |
101 void ReceiveStatisticsProxy::UpdateHistograms() { | 121 void ReceiveStatisticsProxy::UpdateHistograms() { |
102 RTC_HISTOGRAM_COUNTS_100000( | 122 int stream_duration_sec = (clock_->TimeInMilliseconds() - start_ms_) / 1000; |
103 "WebRTC.Video.ReceiveStreamLifetimeInSeconds", | 123 if (stats_.frame_counts.key_frames > 0 || |
104 (clock_->TimeInMilliseconds() - start_ms_) / 1000); | 124 stats_.frame_counts.delta_frames > 0) { |
| 125 RTC_HISTOGRAM_COUNTS_100000("WebRTC.Video.ReceiveStreamLifetimeInSeconds", |
| 126 stream_duration_sec); |
| 127 LOG(LS_INFO) << "WebRTC.Video.ReceiveStreamLifetimeInSeconds " |
| 128 << stream_duration_sec; |
| 129 } |
105 | 130 |
106 if (first_report_block_time_ms_ != -1 && | 131 if (first_report_block_time_ms_ != -1 && |
107 ((clock_->TimeInMilliseconds() - first_report_block_time_ms_) / 1000) >= | 132 ((clock_->TimeInMilliseconds() - first_report_block_time_ms_) / 1000) >= |
108 metrics::kMinRunTimeInSeconds) { | 133 metrics::kMinRunTimeInSeconds) { |
109 int fraction_lost = report_block_stats_.FractionLostInPercent(); | 134 int fraction_lost = report_block_stats_.FractionLostInPercent(); |
110 if (fraction_lost != -1) { | 135 if (fraction_lost != -1) { |
111 RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.ReceivedPacketsLostInPercent", | 136 RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.ReceivedPacketsLostInPercent", |
112 fraction_lost); | 137 fraction_lost); |
113 LOG(LS_INFO) << "WebRTC.Video.ReceivedPacketsLostInPercent " | 138 LOG(LS_INFO) << "WebRTC.Video.ReceivedPacketsLostInPercent " |
114 << fraction_lost; | 139 << fraction_lost; |
115 } | 140 } |
116 } | 141 } |
117 | 142 |
118 const int kMinRequiredSamples = 200; | 143 const int kMinRequiredSamples = 200; |
119 int samples = static_cast<int>(render_fps_tracker_.TotalSampleCount()); | 144 int samples = static_cast<int>(render_fps_tracker_.TotalSampleCount()); |
120 if (samples >= kMinRequiredSamples) { | 145 if (samples >= kMinRequiredSamples) { |
121 RTC_HISTOGRAM_COUNTS_100("WebRTC.Video.RenderFramesPerSecond", | 146 RTC_HISTOGRAM_COUNTS_100("WebRTC.Video.RenderFramesPerSecond", |
122 round(render_fps_tracker_.ComputeTotalRate())); | 147 round(render_fps_tracker_.ComputeTotalRate())); |
123 RTC_HISTOGRAM_COUNTS_100000( | 148 RTC_HISTOGRAM_COUNTS_100000( |
124 "WebRTC.Video.RenderSqrtPixelsPerSecond", | 149 "WebRTC.Video.RenderSqrtPixelsPerSecond", |
125 round(render_pixel_tracker_.ComputeTotalRate())); | 150 round(render_pixel_tracker_.ComputeTotalRate())); |
126 } | 151 } |
127 int width = render_width_counter_.Avg(kMinRequiredSamples); | 152 |
128 int height = render_height_counter_.Avg(kMinRequiredSamples); | |
129 if (width != -1) { | |
130 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.ReceivedWidthInPixels", width); | |
131 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.ReceivedHeightInPixels", height); | |
132 LOG(LS_INFO) << "WebRTC.Video.ReceivedWidthInPixels " << width; | |
133 LOG(LS_INFO) << "WebRTC.Video.ReceivedHeightInPixels " << height; | |
134 } | |
135 int sync_offset_ms = sync_offset_counter_.Avg(kMinRequiredSamples); | 153 int sync_offset_ms = sync_offset_counter_.Avg(kMinRequiredSamples); |
136 if (sync_offset_ms != -1) { | 154 if (sync_offset_ms != -1) { |
137 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.AVSyncOffsetInMs", sync_offset_ms); | 155 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.AVSyncOffsetInMs", sync_offset_ms); |
138 LOG(LS_INFO) << "WebRTC.Video.AVSyncOffsetInMs " << sync_offset_ms; | 156 LOG(LS_INFO) << "WebRTC.Video.AVSyncOffsetInMs " << sync_offset_ms; |
139 } | 157 } |
140 AggregatedStats freq_offset_stats = freq_offset_counter_.GetStats(); | 158 AggregatedStats freq_offset_stats = freq_offset_counter_.GetStats(); |
141 if (freq_offset_stats.num_samples > 0) { | 159 if (freq_offset_stats.num_samples > 0) { |
142 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.RtpToNtpFreqOffsetInKhz", | 160 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.RtpToNtpFreqOffsetInKhz", |
143 freq_offset_stats.average); | 161 freq_offset_stats.average); |
144 LOG(LS_INFO) << "WebRTC.Video.RtpToNtpFreqOffsetInKhz, " | 162 LOG(LS_INFO) << "WebRTC.Video.RtpToNtpFreqOffsetInKhz, " |
(...skipping 37 matching lines...) |
182 int current_delay_ms = current_delay_counter_.Avg(kMinRequiredSamples); | 200 int current_delay_ms = current_delay_counter_.Avg(kMinRequiredSamples); |
183 if (current_delay_ms != -1) { | 201 if (current_delay_ms != -1) { |
184 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.CurrentDelayInMs", | 202 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.CurrentDelayInMs", |
185 current_delay_ms); | 203 current_delay_ms); |
186 LOG(LS_INFO) << "WebRTC.Video.CurrentDelayInMs " << current_delay_ms; | 204 LOG(LS_INFO) << "WebRTC.Video.CurrentDelayInMs " << current_delay_ms; |
187 } | 205 } |
188 int delay_ms = delay_counter_.Avg(kMinRequiredSamples); | 206 int delay_ms = delay_counter_.Avg(kMinRequiredSamples); |
189 if (delay_ms != -1) | 207 if (delay_ms != -1) |
190 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.OnewayDelayInMs", delay_ms); | 208 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.OnewayDelayInMs", delay_ms); |
191 | 209 |
192 int e2e_delay_ms_video = e2e_delay_counter_video_.Avg(kMinRequiredSamples); | 210 // Aggregate content_specific_stats_ by removing experiment or simulcast |
193 if (e2e_delay_ms_video != -1) { | 211 // information. |
194 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.EndToEndDelayInMs", | 212 std::map<VideoContentType, ContentSpecificStats> aggregated_stats; |
195 e2e_delay_ms_video); | 213 for (auto it : content_specific_stats_) { |
196 LOG(LS_INFO) << "WebRTC.Video.EndToEndDelayInMs " << e2e_delay_ms_video; | 214 // Calculate simulcast specific metrics (".S0" ... ".S2" suffixes). |
| 215 VideoContentType content_type = it.first; |
| 216 if (content_type.GetSimulcastId() > 0) { |
| 217 // Aggregate on experiment id. |
| 218 content_type.SetExperimentId(0); |
| 219 aggregated_stats[content_type].Add(it.second); |
| 220 } |
| 221 // Calculate experiment specific metrics (".ExperimentGroup[0-7]" suffixes). |
| 222 content_type = it.first; |
| 223 if (content_type.GetExperimentId() > 0) { |
| 224 // Aggregate on simulcast id. |
| 225 content_type.SetSimulcastId(0); |
| 226 aggregated_stats[content_type].Add(it.second); |
| 227 } |
| 228 // Calculate aggregated metrics (no suffixes. Aggregated on everything). |
| 229 content_type = it.first; |
| 230 content_type.SetSimulcastId(0); |
| 231 content_type.SetExperimentId(0); |
| 232 aggregated_stats[content_type].Add(it.second); |
197 } | 233 } |
198 | 234 |
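A quick illustration of the fan-out performed by the loop above, before the reporting loop below walks the aggregated buckets. This is a sketch under the simplifying assumption that the content-type key is just a (screenshare, simulcast_id, experiment_id) tuple and the stats are a plain sample count; an entry recorded for screenshare with simulcast id 2 and experiment id 1 contributes to three buckets (simulcast-sliced, experiment-sliced, fully aggregated), never to one with both ids set, which is what the RTC_DCHECK in the reporting loop relies on:

  #include <iostream>
  #include <map>
  #include <tuple>

  // (screenshare, simulcast_id, experiment_id) -> sample count.
  using Key = std::tuple<bool, int, int>;

  int main() {
    std::map<Key, int> per_type = {{std::make_tuple(true, 2, 1), 100}};
    std::map<Key, int> aggregated;
    for (const auto& it : per_type) {
      bool screenshare;
      int simulcast_id, experiment_id;
      std::tie(screenshare, simulcast_id, experiment_id) = it.first;
      if (simulcast_id > 0)  // Keep the simulcast slice, drop the experiment id.
        aggregated[std::make_tuple(screenshare, simulcast_id, 0)] += it.second;
      if (experiment_id > 0)  // Keep the experiment slice, drop the simulcast id.
        aggregated[std::make_tuple(screenshare, 0, experiment_id)] += it.second;
      aggregated[std::make_tuple(screenshare, 0, 0)] += it.second;  // Aggregate of everything.
    }
    std::cout << aggregated.size() << std::endl;  // Prints 3.
  }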
199 int e2e_delay_ms_screenshare = | 235 for (auto it : aggregated_stats) { |
200 e2e_delay_counter_screenshare_.Avg(kMinRequiredSamples); | 236 // For the metric Foo we report the following slices: |
201 if (e2e_delay_ms_screenshare != -1) { | 237 // WebRTC.Video.Foo, |
202 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.Screenshare.EndToEndDelayInMs", | 238 // WebRTC.Video.Screenshare.Foo, |
203 e2e_delay_ms_screenshare); | 239 // WebRTC.Video.Foo.S[0-3], |
204 } | 240 // WebRTC.Video.Foo.ExperimentGroup[0-7], |
| 241 // WebRTC.Video.Screenshare.Foo.S[0-3], |
| 242 // WebRTC.Video.Screenshare.Foo.ExperimentGroup[0-7]. |
| 243 auto content_type = it.first; |
| 244 auto stats = it.second; |
| 245 std::string uma_prefix = UmaPrefixForContentType(content_type); |
| 246 std::string uma_suffix = UmaSuffixForContentType(content_type); |
| 247 // Metrics can be sliced on either simulcast id or experiment id but not |
| 248 // both. |
| 249 RTC_DCHECK(content_type.GetExperimentId() == 0 || |
| 250 content_type.GetSimulcastId() == 0); |
205 | 251 |
206 int e2e_delay_max_ms_video = e2e_delay_max_ms_video_; | 252 int e2e_delay_ms = stats.e2e_delay_counter.Avg(kMinRequiredSamples); |
207 if (e2e_delay_max_ms_video != -1) { | 253 if (e2e_delay_ms != -1) { |
208 RTC_HISTOGRAM_COUNTS_100000("WebRTC.Video.EndToEndDelayMaxInMs", | 254 RTC_HISTOGRAM_COUNTS_SPARSE_10000( |
209 e2e_delay_max_ms_video); | 255 uma_prefix + ".EndToEndDelayInMs" + uma_suffix, e2e_delay_ms); |
210 } | 256 LOG(LS_INFO) << uma_prefix << ".EndToEndDelayInMs" << uma_suffix << " " |
| 257 << e2e_delay_ms; |
| 258 } |
| 259 int e2e_delay_max_ms = stats.e2e_delay_counter.Max(); |
| 260 if (e2e_delay_max_ms != -1 && e2e_delay_ms != -1) { |
| 261 RTC_HISTOGRAM_COUNTS_SPARSE_100000( |
| 262 uma_prefix + ".EndToEndDelayMaxInMs" + uma_suffix, e2e_delay_max_ms); |
| 263 LOG(LS_INFO) << uma_prefix << ".EndToEndDelayMaxInMs" << uma_suffix << " " |
| 264 << e2e_delay_max_ms; |
| 265 } |
| 266 int interframe_delay_ms = |
| 267 stats.interframe_delay_counter.Avg(kMinRequiredSamples); |
| 268 if (interframe_delay_ms != -1) { |
| 269 RTC_HISTOGRAM_COUNTS_SPARSE_10000( |
| 270 uma_prefix + ".InterframeDelayInMs" + uma_suffix, |
| 271 interframe_delay_ms); |
| 272 LOG(LS_INFO) << uma_prefix << ".InterframeDelayInMs" << uma_suffix << " " |
| 273 << interframe_delay_ms; |
| 274 } |
| 275 int interframe_delay_max_ms = stats.interframe_delay_counter.Max(); |
| 276 if (interframe_delay_max_ms != -1 && interframe_delay_ms != -1) { |
| 277 RTC_HISTOGRAM_COUNTS_SPARSE_10000( |
| 278 uma_prefix + ".InterframeDelayMaxInMs" + uma_suffix, |
| 279 interframe_delay_max_ms); |
| 280 LOG(LS_INFO) << uma_prefix << ".InterframeDelayMaxInMs" << uma_suffix |
| 281 << " " << interframe_delay_max_ms; |
| 282 } |
211 | 283 |
212 int e2e_delay_max_ms_screenshare = e2e_delay_max_ms_screenshare_; | 284 int width = stats.received_width.Avg(kMinRequiredSamples); |
213 if (e2e_delay_max_ms_screenshare != -1) { | 285 if (width != -1) { |
214 RTC_HISTOGRAM_COUNTS_100000("WebRTC.Video.Screenshare.EndToEndDelayMaxInMs", | 286 RTC_HISTOGRAM_COUNTS_SPARSE_10000( |
215 e2e_delay_max_ms_screenshare); | 287 uma_prefix + ".ReceivedWidthInPixels" + uma_suffix, width); |
216 } | 288 LOG(LS_INFO) << uma_prefix << ".ReceivedWidthInPixels" << uma_suffix |
| 289 << " " << width; |
| 290 } |
217 | 291 |
218 int interframe_delay_ms_screenshare = | 292 int height = stats.received_height.Avg(kMinRequiredSamples); |
219 interframe_delay_counter_screenshare_.Avg(kMinRequiredSamples); | 293 if (height != -1) { |
220 if (interframe_delay_ms_screenshare != -1) { | 294 RTC_HISTOGRAM_COUNTS_SPARSE_10000( |
221 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.Screenshare.InterframeDelayInMs", | 295 uma_prefix + ".ReceivedHeightInPixels" + uma_suffix, height); |
222 interframe_delay_ms_screenshare); | 296 LOG(LS_INFO) << uma_prefix << ".ReceivedHeightInPixels" << uma_suffix |
223 RTC_DCHECK_GE(interframe_delay_max_ms_screenshare_, | 297 << " " << height; |
224 interframe_delay_ms_screenshare); | 298 } |
225 RTC_HISTOGRAM_COUNTS_10000( | |
226 "WebRTC.Video.Screenshare.InterframeDelayMaxInMs", | |
227 interframe_delay_max_ms_screenshare_); | |
228 } | |
229 | 299 |
230 int interframe_delay_ms_video = | 300 if (content_type != VideoContentType::UNSPECIFIED) { |
231 interframe_delay_counter_video_.Avg(kMinRequiredSamples); | 301 // Don't report these 3 metrics unsliced, as more precise variants |
232 if (interframe_delay_ms_video != -1) { | 302 // are reported separately in this method. |
233 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.InterframeDelayInMs", | 303 float flow_duration_sec = stats.flow_duration_ms / 1000.0; |
234 interframe_delay_ms_video); | 304 if (flow_duration_sec >= metrics::kMinRunTimeInSeconds) { |
235 RTC_DCHECK_GE(interframe_delay_max_ms_video_, interframe_delay_ms_video); | 305 int media_bitrate_kbps = static_cast<int>(stats.total_media_bytes * 8 / |
236 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.InterframeDelayMaxInMs", | 306 flow_duration_sec / 1000); |
237 interframe_delay_max_ms_video_); | 307 RTC_HISTOGRAM_COUNTS_SPARSE_10000( |
| 308 uma_prefix + ".MediaBitrateReceivedInKbps" + uma_suffix, |
| 309 media_bitrate_kbps); |
| 310 LOG(LS_INFO) << uma_prefix << ".MediaBitrateReceivedInKbps" |
| 311 << uma_suffix << " " << media_bitrate_kbps; |
| 312 } |
| 313 |
| 314 int num_total_frames = |
| 315 stats.frame_counts.key_frames + stats.frame_counts.delta_frames; |
| 316 if (num_total_frames >= kMinRequiredSamples) { |
| 317 int num_key_frames = stats.frame_counts.key_frames; |
| 318 int key_frames_permille = |
| 319 (num_key_frames * 1000 + num_total_frames / 2) / num_total_frames; |
| 320 RTC_HISTOGRAM_COUNTS_SPARSE_1000( |
| 321 uma_prefix + ".KeyFramesReceivedInPermille" + uma_suffix, |
| 322 key_frames_permille); |
| 323 LOG(LS_INFO) << uma_prefix << ".KeyFramesReceivedInPermille" |
| 324 << uma_suffix << " " << key_frames_permille; |
| 325 } |
| 326 |
| 327 int qp = stats.qp_counter.Avg(kMinRequiredSamples); |
| 328 if (qp != -1) { |
| 329 RTC_HISTOGRAM_COUNTS_SPARSE_200( |
| 330 uma_prefix + ".Decoded.Vp8.Qp" + uma_suffix, qp); |
| 331 LOG(LS_INFO) << uma_prefix << ".Decoded.Vp8.Qp" << uma_suffix << " " |
| 332 << qp; |
| 333 } |
| 334 } |
238 } | 335 } |
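Two small notes on the per-content-type reporting loop above. The key-frame ratio rounds to the nearest permille: for instance, 30 key frames out of 500 total gives (30 * 1000 + 500 / 2) / 500 = 60 permille. The per-content-type media bitrate divides accumulated media bytes by the accumulated flow duration rather than by wall-clock stream lifetime, so e.g. 2,500,000 media bytes over 10 seconds of flow report as 2500000 * 8 / 10 / 1000 = 2000 kbps. The switch to the RTC_HISTOGRAM_COUNTS_SPARSE_* macros here is presumably needed because the histogram name is now built at runtime per content type; as I understand the metrics helpers, the non-sparse variants cache a histogram for a single fixed name per call site.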
239 | 336 |
240 StreamDataCounters rtp = stats_.rtp_stats; | 337 StreamDataCounters rtp = stats_.rtp_stats; |
241 StreamDataCounters rtx; | 338 StreamDataCounters rtx; |
242 for (auto it : rtx_stats_) | 339 for (auto it : rtx_stats_) |
243 rtx.Add(it.second); | 340 rtx.Add(it.second); |
244 StreamDataCounters rtp_rtx = rtp; | 341 StreamDataCounters rtp_rtx = rtp; |
245 rtp_rtx.Add(rtx); | 342 rtp_rtx.Add(rtx); |
246 int64_t elapsed_sec = | 343 int64_t elapsed_sec = |
247 rtp_rtx.TimeSinceFirstPacketInMs(clock_->TimeInMilliseconds()) / 1000; | 344 rtp_rtx.TimeSinceFirstPacketInMs(clock_->TimeInMilliseconds()) / 1000; |
248 if (elapsed_sec >= metrics::kMinRunTimeInSeconds) { | 345 if (elapsed_sec >= metrics::kMinRunTimeInSeconds) { |
249 RTC_HISTOGRAM_COUNTS_10000( | 346 RTC_HISTOGRAM_COUNTS_10000( |
250 "WebRTC.Video.BitrateReceivedInKbps", | 347 "WebRTC.Video.BitrateReceivedInKbps", |
251 static_cast<int>(rtp_rtx.transmitted.TotalBytes() * 8 / elapsed_sec / | 348 static_cast<int>(rtp_rtx.transmitted.TotalBytes() * 8 / elapsed_sec / |
252 1000)); | 349 1000)); |
253 RTC_HISTOGRAM_COUNTS_10000( | 350 int media_bitrate_kbs = |
254 "WebRTC.Video.MediaBitrateReceivedInKbps", | 351 static_cast<int>(rtp.MediaPayloadBytes() * 8 / elapsed_sec / 1000); |
255 static_cast<int>(rtp.MediaPayloadBytes() * 8 / elapsed_sec / 1000)); | 352 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.MediaBitrateReceivedInKbps", |
| 353 media_bitrate_kbs); |
| 354 LOG(LS_INFO) << "WebRTC.Video.MediaBitrateReceivedInKbps " |
| 355 << media_bitrate_kbs; |
256 RTC_HISTOGRAM_COUNTS_10000( | 356 RTC_HISTOGRAM_COUNTS_10000( |
257 "WebRTC.Video.PaddingBitrateReceivedInKbps", | 357 "WebRTC.Video.PaddingBitrateReceivedInKbps", |
258 static_cast<int>(rtp_rtx.transmitted.padding_bytes * 8 / elapsed_sec / | 358 static_cast<int>(rtp_rtx.transmitted.padding_bytes * 8 / elapsed_sec / |
259 1000)); | 359 1000)); |
260 RTC_HISTOGRAM_COUNTS_10000( | 360 RTC_HISTOGRAM_COUNTS_10000( |
261 "WebRTC.Video.RetransmittedBitrateReceivedInKbps", | 361 "WebRTC.Video.RetransmittedBitrateReceivedInKbps", |
262 static_cast<int>(rtp_rtx.retransmitted.TotalBytes() * 8 / elapsed_sec / | 362 static_cast<int>(rtp_rtx.retransmitted.TotalBytes() * 8 / elapsed_sec / |
263 1000)); | 363 1000)); |
264 if (!rtx_stats_.empty()) { | 364 if (!rtx_stats_.empty()) { |
265 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.RtxBitrateReceivedInKbps", | 365 RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.RtxBitrateReceivedInKbps", |
(...skipping 256 matching lines...) |
522 } | 622 } |
523 if (total_bytes > last_total_bytes) | 623 if (total_bytes > last_total_bytes) |
524 total_byte_tracker_.AddSamples(total_bytes - last_total_bytes); | 624 total_byte_tracker_.AddSamples(total_bytes - last_total_bytes); |
525 } | 625 } |
526 | 626 |
527 void ReceiveStatisticsProxy::OnDecodedFrame(rtc::Optional<uint8_t> qp, | 627 void ReceiveStatisticsProxy::OnDecodedFrame(rtc::Optional<uint8_t> qp, |
528 VideoContentType content_type) { | 628 VideoContentType content_type) { |
529 uint64_t now = clock_->TimeInMilliseconds(); | 629 uint64_t now = clock_->TimeInMilliseconds(); |
530 | 630 |
531 rtc::CritScope lock(&crit_); | 631 rtc::CritScope lock(&crit_); |
| 632 |
| 633 ContentSpecificStats* content_specific_stats = |
| 634 &content_specific_stats_[content_type]; |
532 ++stats_.frames_decoded; | 635 ++stats_.frames_decoded; |
533 if (qp) { | 636 if (qp) { |
534 if (!stats_.qp_sum) { | 637 if (!stats_.qp_sum) { |
535 if (stats_.frames_decoded != 1) { | 638 if (stats_.frames_decoded != 1) { |
536 LOG(LS_WARNING) | 639 LOG(LS_WARNING) |
537 << "Frames decoded was not 1 when first qp value was received."; | 640 << "Frames decoded was not 1 when first qp value was received."; |
538 stats_.frames_decoded = 1; | 641 stats_.frames_decoded = 1; |
539 } | 642 } |
540 stats_.qp_sum = rtc::Optional<uint64_t>(0); | 643 stats_.qp_sum = rtc::Optional<uint64_t>(0); |
541 } | 644 } |
542 *stats_.qp_sum += *qp; | 645 *stats_.qp_sum += *qp; |
| 646 content_specific_stats->qp_counter.Add(*qp); |
543 } else if (stats_.qp_sum) { | 647 } else if (stats_.qp_sum) { |
544 LOG(LS_WARNING) | 648 LOG(LS_WARNING) |
545 << "QP sum was already set and no QP was given for a frame."; | 649 << "QP sum was already set and no QP was given for a frame."; |
546 stats_.qp_sum = rtc::Optional<uint64_t>(); | 650 stats_.qp_sum = rtc::Optional<uint64_t>(); |
547 } | 651 } |
548 last_content_type_ = content_type; | 652 last_content_type_ = content_type; |
549 decode_fps_estimator_.Update(1, now); | 653 decode_fps_estimator_.Update(1, now); |
550 if (last_decoded_frame_time_ms_) { | 654 if (last_decoded_frame_time_ms_) { |
551 int64_t interframe_delay_ms = now - *last_decoded_frame_time_ms_; | 655 int64_t interframe_delay_ms = now - *last_decoded_frame_time_ms_; |
552 RTC_DCHECK_GE(interframe_delay_ms, 0); | 656 RTC_DCHECK_GE(interframe_delay_ms, 0); |
553 interframe_delay_max_moving_.Add(interframe_delay_ms, now); | 657 interframe_delay_max_moving_.Add(interframe_delay_ms, now); |
554 if (last_content_type_ == VideoContentType::SCREENSHARE) { | 658 content_specific_stats->interframe_delay_counter.Add(interframe_delay_ms); |
555 interframe_delay_counter_screenshare_.Add(interframe_delay_ms); | 659 content_specific_stats->flow_duration_ms += interframe_delay_ms; |
556 if (interframe_delay_max_ms_screenshare_ < interframe_delay_ms) { | |
557 interframe_delay_max_ms_screenshare_ = interframe_delay_ms; | |
558 } | |
559 } else { | |
560 interframe_delay_counter_video_.Add(interframe_delay_ms); | |
561 if (interframe_delay_max_ms_video_ < interframe_delay_ms) { | |
562 interframe_delay_max_ms_video_ = interframe_delay_ms; | |
563 } | |
564 } | |
565 } | 660 } |
566 last_decoded_frame_time_ms_.emplace(now); | 661 last_decoded_frame_time_ms_.emplace(now); |
567 } | 662 } |
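One bookkeeping detail worth noting above: flow_duration_ms is the sum of the observed inter-frame delays, so it approximates the time the stream was actively delivering frames; OnStreamInactive() resets last_decoded_frame_time_ms_, so paused periods add nothing. For example, frames decoded at t = 0, 33, 66 and 100 ms contribute 33 + 33 + 34 = 100 ms of flow time, and that sum is the denominator later used for the per-content-type MediaBitrateReceivedInKbps.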
568 | 663 |
569 void ReceiveStatisticsProxy::OnRenderedFrame(const VideoFrame& frame) { | 664 void ReceiveStatisticsProxy::OnRenderedFrame(const VideoFrame& frame) { |
570 int width = frame.width(); | 665 int width = frame.width(); |
571 int height = frame.height(); | 666 int height = frame.height(); |
572 RTC_DCHECK_GT(width, 0); | 667 RTC_DCHECK_GT(width, 0); |
573 RTC_DCHECK_GT(height, 0); | 668 RTC_DCHECK_GT(height, 0); |
574 uint64_t now = clock_->TimeInMilliseconds(); | 669 uint64_t now = clock_->TimeInMilliseconds(); |
575 | |
576 rtc::CritScope lock(&crit_); | 670 rtc::CritScope lock(&crit_); |
| 671 ContentSpecificStats* content_specific_stats = |
| 672 &content_specific_stats_[last_content_type_]; |
577 renders_fps_estimator_.Update(1, now); | 673 renders_fps_estimator_.Update(1, now); |
578 ++stats_.frames_rendered; | 674 ++stats_.frames_rendered; |
579 stats_.width = width; | 675 stats_.width = width; |
580 stats_.height = height; | 676 stats_.height = height; |
581 render_width_counter_.Add(width); | |
582 render_height_counter_.Add(height); | |
583 render_fps_tracker_.AddSamples(1); | 677 render_fps_tracker_.AddSamples(1); |
584 render_pixel_tracker_.AddSamples(sqrt(width * height)); | 678 render_pixel_tracker_.AddSamples(sqrt(width * height)); |
| 679 content_specific_stats->received_width.Add(width); |
| 680 content_specific_stats->received_height.Add(height); |
585 | 681 |
586 if (frame.ntp_time_ms() > 0) { | 682 if (frame.ntp_time_ms() > 0) { |
587 int64_t delay_ms = clock_->CurrentNtpInMilliseconds() - frame.ntp_time_ms(); | 683 int64_t delay_ms = clock_->CurrentNtpInMilliseconds() - frame.ntp_time_ms(); |
588 if (delay_ms >= 0) { | 684 if (delay_ms >= 0) { |
589 if (last_content_type_ == VideoContentType::SCREENSHARE) { | 685 content_specific_stats->e2e_delay_counter.Add(delay_ms); |
590 e2e_delay_max_ms_screenshare_ = | |
591 std::max(delay_ms, e2e_delay_max_ms_screenshare_); | |
592 e2e_delay_counter_screenshare_.Add(delay_ms); | |
593 } else { | |
594 e2e_delay_max_ms_video_ = std::max(delay_ms, e2e_delay_max_ms_video_); | |
595 e2e_delay_counter_video_.Add(delay_ms); | |
596 } | |
597 } | 686 } |
598 } | 687 } |
599 } | 688 } |
600 | 689 |
601 void ReceiveStatisticsProxy::OnSyncOffsetUpdated(int64_t sync_offset_ms, | 690 void ReceiveStatisticsProxy::OnSyncOffsetUpdated(int64_t sync_offset_ms, |
602 double estimated_freq_khz) { | 691 double estimated_freq_khz) { |
603 rtc::CritScope lock(&crit_); | 692 rtc::CritScope lock(&crit_); |
604 sync_offset_counter_.Add(std::abs(sync_offset_ms)); | 693 sync_offset_counter_.Add(std::abs(sync_offset_ms)); |
605 stats_.sync_offset_ms = sync_offset_ms; | 694 stats_.sync_offset_ms = sync_offset_ms; |
606 | 695 |
607 const double kMaxFreqKhz = 10000.0; | 696 const double kMaxFreqKhz = 10000.0; |
608 int offset_khz = kMaxFreqKhz; | 697 int offset_khz = kMaxFreqKhz; |
609 // Should not be zero or negative. If so, report max. | 698 // Should not be zero or negative. If so, report max. |
610 if (estimated_freq_khz < kMaxFreqKhz && estimated_freq_khz > 0.0) | 699 if (estimated_freq_khz < kMaxFreqKhz && estimated_freq_khz > 0.0) |
611 offset_khz = static_cast<int>(std::fabs(estimated_freq_khz - 90.0) + 0.5); | 700 offset_khz = static_cast<int>(std::fabs(estimated_freq_khz - 90.0) + 0.5); |
612 | 701 |
613 freq_offset_counter_.Add(offset_khz); | 702 freq_offset_counter_.Add(offset_khz); |
614 } | 703 } |
615 | 704 |
616 void ReceiveStatisticsProxy::OnReceiveRatesUpdated(uint32_t bitRate, | 705 void ReceiveStatisticsProxy::OnReceiveRatesUpdated(uint32_t bitRate, |
617 uint32_t frameRate) { | 706 uint32_t frameRate) { |
618 } | 707 } |
619 | 708 |
620 void ReceiveStatisticsProxy::OnCompleteFrame(bool is_keyframe, | 709 void ReceiveStatisticsProxy::OnCompleteFrame(bool is_keyframe, |
621 size_t size_bytes) { | 710 size_t size_bytes, |
| 711 VideoContentType content_type) { |
622 rtc::CritScope lock(&crit_); | 712 rtc::CritScope lock(&crit_); |
623 if (is_keyframe) | 713 if (is_keyframe) { |
624 ++stats_.frame_counts.key_frames; | 714 ++stats_.frame_counts.key_frames; |
625 else | 715 } else { |
626 ++stats_.frame_counts.delta_frames; | 716 ++stats_.frame_counts.delta_frames; |
| 717 } |
| 718 |
| 719 ContentSpecificStats* content_specific_stats = |
| 720 &content_specific_stats_[content_type]; |
| 721 |
| 722 content_specific_stats->total_media_bytes += size_bytes; |
| 723 if (is_keyframe) { |
| 724 ++content_specific_stats->frame_counts.key_frames; |
| 725 } else { |
| 726 ++content_specific_stats->frame_counts.delta_frames; |
| 727 } |
627 | 728 |
628 int64_t now_ms = clock_->TimeInMilliseconds(); | 729 int64_t now_ms = clock_->TimeInMilliseconds(); |
629 frame_window_.insert(std::make_pair(now_ms, size_bytes)); | 730 frame_window_.insert(std::make_pair(now_ms, size_bytes)); |
630 UpdateFramerate(now_ms); | 731 UpdateFramerate(now_ms); |
631 } | 732 } |
632 | 733 |
633 void ReceiveStatisticsProxy::OnFrameCountsUpdated( | 734 void ReceiveStatisticsProxy::OnFrameCountsUpdated( |
634 const FrameCounts& frame_counts) { | 735 const FrameCounts& frame_counts) { |
635 rtc::CritScope lock(&crit_); | 736 rtc::CritScope lock(&crit_); |
636 stats_.frame_counts = frame_counts; | 737 stats_.frame_counts = frame_counts; |
(...skipping 21 matching lines...) |
658 // TODO(sprang): Figure out any other state that should be reset. | 759 // TODO(sprang): Figure out any other state that should be reset. |
659 | 760 |
660 rtc::CritScope lock(&crit_); | 761 rtc::CritScope lock(&crit_); |
661 // Don't report inter-frame delay if stream was paused. | 762 // Don't report inter-frame delay if stream was paused. |
662 last_decoded_frame_time_ms_.reset(); | 763 last_decoded_frame_time_ms_.reset(); |
663 } | 764 } |
664 | 765 |
665 void ReceiveStatisticsProxy::SampleCounter::Add(int sample) { | 766 void ReceiveStatisticsProxy::SampleCounter::Add(int sample) { |
666 sum += sample; | 767 sum += sample; |
667 ++num_samples; | 768 ++num_samples; |
| 769 if (!max || sample > *max) { |
| 770 max.emplace(sample); |
| 771 } |
| 772 } |
| 773 |
| 774 void ReceiveStatisticsProxy::SampleCounter::Add(const SampleCounter& other) { |
| 775 sum += other.sum; |
| 776 num_samples += other.num_samples; |
| 777 if (other.max && (!max || *max < *other.max)) |
| 778 max = other.max; |
668 } | 779 } |
669 | 780 |
670 int ReceiveStatisticsProxy::SampleCounter::Avg( | 781 int ReceiveStatisticsProxy::SampleCounter::Avg( |
671 int64_t min_required_samples) const { | 782 int64_t min_required_samples) const { |
672 if (num_samples < min_required_samples || num_samples == 0) | 783 if (num_samples < min_required_samples || num_samples == 0) |
673 return -1; | 784 return -1; |
674 return static_cast<int>(sum / num_samples); | 785 return static_cast<int>(sum / num_samples); |
675 } | 786 } |
676 | 787 |
| 788 int ReceiveStatisticsProxy::SampleCounter::Max() const { |
| 789 return max.value_or(-1); |
| 790 } |
| 791 |
677 void ReceiveStatisticsProxy::SampleCounter::Reset() { | 792 void ReceiveStatisticsProxy::SampleCounter::Reset() { |
678 num_samples = 0; | 793 num_samples = 0; |
679 sum = 0; | 794 sum = 0; |
| 795 max.reset(); |
680 } | 796 } |
681 | 797 |
682 void ReceiveStatisticsProxy::OnRttUpdate(int64_t avg_rtt_ms, | 798 void ReceiveStatisticsProxy::OnRttUpdate(int64_t avg_rtt_ms, |
683 int64_t max_rtt_ms) { | 799 int64_t max_rtt_ms) { |
684 rtc::CritScope lock(&crit_); | 800 rtc::CritScope lock(&crit_); |
685 avg_rtt_ms_ = avg_rtt_ms; | 801 avg_rtt_ms_ = avg_rtt_ms; |
686 } | 802 } |
687 | 803 |
| 804 void ReceiveStatisticsProxy::ContentSpecificStats::Add( |
| 805 const ContentSpecificStats& other) { |
| 806 e2e_delay_counter.Add(other.e2e_delay_counter); |
| 807 interframe_delay_counter.Add(other.interframe_delay_counter); |
| 808 flow_duration_ms += other.flow_duration_ms; |
| 809 total_media_bytes += other.total_media_bytes; |
| 810 received_height.Add(other.received_height); |
| 811 received_width.Add(other.received_width); |
| 812 qp_counter.Add(other.qp_counter); |
| 813 frame_counts.key_frames += other.frame_counts.key_frames; |
| 814 frame_counts.delta_frames += other.frame_counts.delta_frames; |
| 815 } |
| 816 |
688 } // namespace webrtc | 817 } // namespace webrtc |