Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(112)

Side by Side Diff: webrtc/tools/event_log_visualizer/analyzer.cc

Issue 2965593002: Move webrtc/{tools => rtc_tools} (Closed)
Patch Set: Adding back root changes Created 3 years, 5 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
(Empty)
1 /*
2 * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
3 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
11 #include "webrtc/tools/event_log_visualizer/analyzer.h"
12
13 #include <algorithm>
14 #include <limits>
15 #include <map>
16 #include <sstream>
17 #include <string>
18 #include <utility>
19
20 #include "webrtc/base/checks.h"
21 #include "webrtc/base/format_macros.h"
22 #include "webrtc/base/logging.h"
23 #include "webrtc/base/ptr_util.h"
24 #include "webrtc/base/rate_statistics.h"
25 #include "webrtc/call/audio_receive_stream.h"
26 #include "webrtc/call/audio_send_stream.h"
27 #include "webrtc/call/call.h"
28 #include "webrtc/common_types.h"
29 #include "webrtc/modules/audio_coding/neteq/tools/audio_sink.h"
30 #include "webrtc/modules/audio_coding/neteq/tools/fake_decode_from_file.h"
31 #include "webrtc/modules/audio_coding/neteq/tools/neteq_delay_analyzer.h"
32 #include "webrtc/modules/audio_coding/neteq/tools/neteq_replacement_input.h"
33 #include "webrtc/modules/audio_coding/neteq/tools/neteq_test.h"
34 #include "webrtc/modules/audio_coding/neteq/tools/resample_input_audio_file.h"
35 #include "webrtc/modules/congestion_controller/include/congestion_controller.h"
36 #include "webrtc/modules/include/module_common_types.h"
37 #include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
38 #include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
39 #include "webrtc/modules/rtp_rtcp/source/rtcp_packet/common_header.h"
40 #include "webrtc/modules/rtp_rtcp/source/rtcp_packet/receiver_report.h"
41 #include "webrtc/modules/rtp_rtcp/source/rtcp_packet/remb.h"
42 #include "webrtc/modules/rtp_rtcp/source/rtcp_packet/sender_report.h"
43 #include "webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h"
44 #include "webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h"
45 #include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
46 #include "webrtc/video_receive_stream.h"
47 #include "webrtc/video_send_stream.h"
48
49 namespace webrtc {
50 namespace plotting {
51
52 namespace {
53
54 void SortPacketFeedbackVector(std::vector<PacketFeedback>* vec) {
55 auto pred = [](const PacketFeedback& packet_feedback) {
56 return packet_feedback.arrival_time_ms == PacketFeedback::kNotReceived;
57 };
58 vec->erase(std::remove_if(vec->begin(), vec->end(), pred), vec->end());
59 std::sort(vec->begin(), vec->end(), PacketFeedbackComparator());
60 }
61
// Formats an SSRC as a human-readable label, e.g. "SSRC 1234".
std::string SsrcToString(uint32_t ssrc) {
  return "SSRC " + std::to_string(ssrc);
}
67
// Checks whether an SSRC is contained in the list of desired SSRCs.
// Note that an empty SSRC list matches every SSRC.
bool MatchingSsrc(uint32_t ssrc, const std::vector<uint32_t>& desired_ssrc) {
  if (desired_ssrc.empty())
    return true;
  return std::any_of(desired_ssrc.begin(), desired_ssrc.end(),
                     [ssrc](uint32_t candidate) { return candidate == ssrc; });
}
76
// Converts an abs-send-time value to microseconds. The timestamp is a fixed
// point representation with 6 bits for seconds and 18 bits for fractions of
// a second, so dividing by 2^18 yields seconds and multiplying by 10^6
// converts the result to microseconds.
double AbsSendTimeToMicroseconds(int64_t abs_send_time) {
  constexpr double kMicrosecondsPerSecond = 1000000.0;
  constexpr double kAbsSendTimeResolution = static_cast<double>(1ul << 18);
  return static_cast<double>(abs_send_time) *
         (kMicrosecondsPerSecond / kAbsSendTimeResolution);
}
85
86 // Computes the difference |later| - |earlier| where |later| and |earlier|
87 // are counters that wrap at |modulus|. The difference is chosen to have the
88 // least absolute value. For example if |modulus| is 8, then the difference will
89 // be chosen in the range [-3, 4]. If |modulus| is 9, then the difference will
90 // be in [-4, 4].
91 int64_t WrappingDifference(uint32_t later, uint32_t earlier, int64_t modulus) {
92 RTC_DCHECK_LE(1, modulus);
93 RTC_DCHECK_LT(later, modulus);
94 RTC_DCHECK_LT(earlier, modulus);
95 int64_t difference =
96 static_cast<int64_t>(later) - static_cast<int64_t>(earlier);
97 int64_t max_difference = modulus / 2;
98 int64_t min_difference = max_difference - modulus + 1;
99 if (difference > max_difference) {
100 difference -= modulus;
101 }
102 if (difference < min_difference) {
103 difference += modulus;
104 }
105 if (difference > max_difference / 2 || difference < min_difference / 2) {
106 LOG(LS_WARNING) << "Difference between" << later << " and " << earlier
107 << " expected to be in the range (" << min_difference / 2
108 << "," << max_difference / 2 << ") but is " << difference
109 << ". Correct unwrapping is uncertain.";
110 }
111 return difference;
112 }
113
// Return default values for header extensions, to use on streams without stored
// mapping data. Currently this only applies to audio streams, since the mapping
// is not stored in the event log.
// TODO(ivoc): Remove this once this mapping is stored in the event log for
// audio streams. Tracking bug: webrtc:6399
webrtc::RtpHeaderExtensionMap GetDefaultHeaderExtensionMap() {
  webrtc::RtpHeaderExtensionMap default_map;
  // Register only the two extensions relevant here: audio level and
  // absolute send time, both under their default IDs.
  default_map.Register<AudioLevel>(webrtc::RtpExtension::kAudioLevelDefaultId);
  default_map.Register<AbsoluteSendTime>(
      webrtc::RtpExtension::kAbsSendTimeDefaultId);
  return default_map;
}
126
// Margins applied around the data area of every generated plot, expressed as
// fractions of the plot dimensions (passed to SetXAxis/Set*YAxis below).
constexpr float kLeftMargin = 0.01f;
constexpr float kRightMargin = 0.02f;
constexpr float kBottomMargin = 0.02f;
constexpr float kTopMargin = 0.05f;
131
132 rtc::Optional<double> NetworkDelayDiff_AbsSendTime(
133 const LoggedRtpPacket& old_packet,
134 const LoggedRtpPacket& new_packet) {
135 if (old_packet.header.extension.hasAbsoluteSendTime &&
136 new_packet.header.extension.hasAbsoluteSendTime) {
137 int64_t send_time_diff = WrappingDifference(
138 new_packet.header.extension.absoluteSendTime,
139 old_packet.header.extension.absoluteSendTime, 1ul << 24);
140 int64_t recv_time_diff = new_packet.timestamp - old_packet.timestamp;
141 double delay_change_us =
142 recv_time_diff - AbsSendTimeToMicroseconds(send_time_diff);
143 return rtc::Optional<double>(delay_change_us / 1000);
144 } else {
145 return rtc::Optional<double>();
146 }
147 }
148
// Computes the one-way delay change (in ms) between two received packets,
// based on the 32-bit RTP capture timestamp. Always returns a value, but the
// result is only correctly scaled for 90 kHz (video) streams; see TODO below.
rtc::Optional<double> NetworkDelayDiff_CaptureTime(
    const LoggedRtpPacket& old_packet,
    const LoggedRtpPacket& new_packet) {
  // RTP timestamps are 32-bit counters, hence the 2^32 wrap modulus.
  int64_t send_time_diff = WrappingDifference(
      new_packet.header.timestamp, old_packet.header.timestamp, 1ull << 32);
  int64_t recv_time_diff = new_packet.timestamp - old_packet.timestamp;

  const double kVideoSampleRate = 90000;
  // TODO(terelius): We treat all streams as video for now, even though
  // audio might be sampled at e.g. 16kHz, because it is really difficult to
  // figure out the true sampling rate of a stream. The effect is that the
  // delay will be scaled incorrectly for non-video streams.

  // recv_time_diff is in microseconds (hence /1000 to get ms); the send side
  // is in RTP ticks, converted to ms via the assumed 90 kHz sample rate.
  double delay_change =
      static_cast<double>(recv_time_diff) / 1000 -
      static_cast<double>(send_time_diff) / kVideoSampleRate * 1000;
  // More than +/-10 seconds of delay change almost certainly means the
  // timestamps are wrong (e.g. a clock jump or stream restart); log details.
  if (delay_change < -10000 || 10000 < delay_change) {
    LOG(LS_WARNING) << "Very large delay change. Timestamps correct?";
    LOG(LS_WARNING) << "Old capture time " << old_packet.header.timestamp
                    << ", received time " << old_packet.timestamp;
    LOG(LS_WARNING) << "New capture time " << new_packet.header.timestamp
                    << ", received time " << new_packet.timestamp;
    LOG(LS_WARNING) << "Receive time difference " << recv_time_diff << " = "
                    << static_cast<double>(recv_time_diff) / 1000000 << "s";
    LOG(LS_WARNING) << "Send time difference " << send_time_diff << " = "
                    << static_cast<double>(send_time_diff) / kVideoSampleRate
                    << "s";
  }
  return rtc::Optional<double>(delay_change);
}
179
180 // For each element in data, use |get_y()| to extract a y-coordinate and
181 // store the result in a TimeSeries.
182 template <typename DataType>
183 void ProcessPoints(
184 rtc::FunctionView<rtc::Optional<float>(const DataType&)> get_y,
185 const std::vector<DataType>& data,
186 uint64_t begin_time,
187 TimeSeries* result) {
188 for (size_t i = 0; i < data.size(); i++) {
189 float x = static_cast<float>(data[i].timestamp - begin_time) / 1000000;
190 rtc::Optional<float> y = get_y(data[i]);
191 if (y)
192 result->points.emplace_back(x, *y);
193 }
194 }
195
196 // For each pair of adjacent elements in |data|, use |get_y| to extract a
197 // y-coordinate and store the result in a TimeSeries. Note that the x-coordinate
198 // will be the time of the second element in the pair.
199 template <typename DataType, typename ResultType>
200 void ProcessPairs(
201 rtc::FunctionView<rtc::Optional<ResultType>(const DataType&,
202 const DataType&)> get_y,
203 const std::vector<DataType>& data,
204 uint64_t begin_time,
205 TimeSeries* result) {
206 for (size_t i = 1; i < data.size(); i++) {
207 float x = static_cast<float>(data[i].timestamp - begin_time) / 1000000;
208 rtc::Optional<ResultType> y = get_y(data[i - 1], data[i]);
209 if (y)
210 result->points.emplace_back(x, static_cast<float>(*y));
211 }
212 }
213
214 // For each element in data, use |extract()| to extract a y-coordinate and
215 // store the result in a TimeSeries.
216 template <typename DataType, typename ResultType>
217 void AccumulatePoints(
218 rtc::FunctionView<rtc::Optional<ResultType>(const DataType&)> extract,
219 const std::vector<DataType>& data,
220 uint64_t begin_time,
221 TimeSeries* result) {
222 ResultType sum = 0;
223 for (size_t i = 0; i < data.size(); i++) {
224 float x = static_cast<float>(data[i].timestamp - begin_time) / 1000000;
225 rtc::Optional<ResultType> y = extract(data[i]);
226 if (y) {
227 sum += *y;
228 result->points.emplace_back(x, static_cast<float>(sum));
229 }
230 }
231 }
232
// For each pair of adjacent elements in |data|, use |extract()| to extract a
// y-coordinate and store the result in a TimeSeries. Note that the x-coordinate
// will be the time of the second element in the pair.
template <typename DataType, typename ResultType>
void AccumulatePairs(
    rtc::FunctionView<rtc::Optional<ResultType>(const DataType&,
                                                const DataType&)> extract,
    const std::vector<DataType>& data,
    uint64_t begin_time,
    TimeSeries* result) {
  ResultType sum = 0;
  for (size_t i = 1; i < data.size(); i++) {
    float x = static_cast<float>(data[i].timestamp - begin_time) / 1000000;
    rtc::Optional<ResultType> y = extract(data[i - 1], data[i]);
    if (y)
      sum += *y;
    // Unlike AccumulatePoints, a point is emitted for every pair even when
    // |extract| returns no value — the accumulated sum simply stays flat.
    result->points.emplace_back(x, static_cast<float>(sum));
  }
}
252
// Calculates a moving average of |data| and stores the result in a TimeSeries.
// A data point is generated every |step| microseconds from |begin_time|
// to |end_time|. The value of each data point is the average of the data
// during the preceding |window_duration_us| microseconds.
template <typename DataType, typename ResultType>
void MovingAverage(
    rtc::FunctionView<rtc::Optional<ResultType>(const DataType&)> extract,
    const std::vector<DataType>& data,
    uint64_t begin_time,
    uint64_t end_time,
    uint64_t window_duration_us,
    uint64_t step,
    webrtc::plotting::TimeSeries* result) {
  // Sliding window over |data|: [window_index_begin, window_index_end) are
  // the elements currently inside the averaging window, and |sum_in_window|
  // holds the sum of their extracted values. Assumes |data| is sorted by
  // timestamp.
  size_t window_index_begin = 0;
  size_t window_index_end = 0;
  ResultType sum_in_window = 0;

  for (uint64_t t = begin_time; t < end_time + step; t += step) {
    // Grow the window: add every element with timestamp < t.
    while (window_index_end < data.size() &&
           data[window_index_end].timestamp < t) {
      rtc::Optional<ResultType> value = extract(data[window_index_end]);
      if (value)
        sum_in_window += *value;
      ++window_index_end;
    }
    // Shrink the window: drop elements older than t - window_duration_us.
    // NOTE(review): |t| and |window_duration_us| are unsigned, so this
    // subtraction wraps if t < window_duration_us; presumably timestamps are
    // large enough in practice that this cannot happen — confirm.
    while (window_index_begin < data.size() &&
           data[window_index_begin].timestamp < t - window_duration_us) {
      rtc::Optional<ResultType> value = extract(data[window_index_begin]);
      if (value)
        sum_in_window -= *value;
      ++window_index_begin;
    }
    float window_duration_s = static_cast<float>(window_duration_us) / 1000000;
    float x = static_cast<float>(t - begin_time) / 1000000;
    // Average is the window sum divided by the (fixed) window length in
    // seconds, i.e. a rate when the extracted values are sizes/counts.
    float y = sum_in_window / window_duration_s;
    result->points.emplace_back(x, y);
  }
}
291
292 } // namespace
293
294 EventLogAnalyzer::EventLogAnalyzer(const ParsedRtcEventLog& log)
295 : parsed_log_(log), window_duration_(250000), step_(10000) {
296 uint64_t first_timestamp = std::numeric_limits<uint64_t>::max();
297 uint64_t last_timestamp = std::numeric_limits<uint64_t>::min();
298
299 PacketDirection direction;
300 uint8_t header[IP_PACKET_SIZE];
301 size_t header_length;
302 size_t total_length;
303
304 uint8_t last_incoming_rtcp_packet[IP_PACKET_SIZE];
305 uint8_t last_incoming_rtcp_packet_length = 0;
306
307 // Make a default extension map for streams without configuration information.
308 // TODO(ivoc): Once configuration of audio streams is stored in the event log,
309 // this can be removed. Tracking bug: webrtc:6399
310 RtpHeaderExtensionMap default_extension_map = GetDefaultHeaderExtensionMap();
311
312 rtc::Optional<uint64_t> last_log_start;
313
314 for (size_t i = 0; i < parsed_log_.GetNumberOfEvents(); i++) {
315 ParsedRtcEventLog::EventType event_type = parsed_log_.GetEventType(i);
316 if (event_type != ParsedRtcEventLog::VIDEO_RECEIVER_CONFIG_EVENT &&
317 event_type != ParsedRtcEventLog::VIDEO_SENDER_CONFIG_EVENT &&
318 event_type != ParsedRtcEventLog::AUDIO_RECEIVER_CONFIG_EVENT &&
319 event_type != ParsedRtcEventLog::AUDIO_SENDER_CONFIG_EVENT &&
320 event_type != ParsedRtcEventLog::LOG_START &&
321 event_type != ParsedRtcEventLog::LOG_END) {
322 uint64_t timestamp = parsed_log_.GetTimestamp(i);
323 first_timestamp = std::min(first_timestamp, timestamp);
324 last_timestamp = std::max(last_timestamp, timestamp);
325 }
326
327 switch (parsed_log_.GetEventType(i)) {
328 case ParsedRtcEventLog::VIDEO_RECEIVER_CONFIG_EVENT: {
329 rtclog::StreamConfig config = parsed_log_.GetVideoReceiveConfig(i);
330 StreamId stream(config.remote_ssrc, kIncomingPacket);
331 video_ssrcs_.insert(stream);
332 StreamId rtx_stream(config.rtx_ssrc, kIncomingPacket);
333 video_ssrcs_.insert(rtx_stream);
334 rtx_ssrcs_.insert(rtx_stream);
335 break;
336 }
337 case ParsedRtcEventLog::VIDEO_SENDER_CONFIG_EVENT: {
338 std::vector<rtclog::StreamConfig> configs =
339 parsed_log_.GetVideoSendConfig(i);
340 for (const auto& config : configs) {
341 StreamId stream(config.local_ssrc, kOutgoingPacket);
342 video_ssrcs_.insert(stream);
343 StreamId rtx_stream(config.rtx_ssrc, kOutgoingPacket);
344 video_ssrcs_.insert(rtx_stream);
345 rtx_ssrcs_.insert(rtx_stream);
346 }
347 break;
348 }
349 case ParsedRtcEventLog::AUDIO_RECEIVER_CONFIG_EVENT: {
350 rtclog::StreamConfig config = parsed_log_.GetAudioReceiveConfig(i);
351 StreamId stream(config.remote_ssrc, kIncomingPacket);
352 audio_ssrcs_.insert(stream);
353 break;
354 }
355 case ParsedRtcEventLog::AUDIO_SENDER_CONFIG_EVENT: {
356 rtclog::StreamConfig config = parsed_log_.GetAudioSendConfig(i);
357 StreamId stream(config.local_ssrc, kOutgoingPacket);
358 audio_ssrcs_.insert(stream);
359 break;
360 }
361 case ParsedRtcEventLog::RTP_EVENT: {
362 RtpHeaderExtensionMap* extension_map = parsed_log_.GetRtpHeader(
363 i, &direction, header, &header_length, &total_length);
364 RtpUtility::RtpHeaderParser rtp_parser(header, header_length);
365 RTPHeader parsed_header;
366 if (extension_map != nullptr) {
367 rtp_parser.Parse(&parsed_header, extension_map);
368 } else {
369 // Use the default extension map.
370 // TODO(ivoc): Once configuration of audio streams is stored in the
371 // event log, this can be removed.
372 // Tracking bug: webrtc:6399
373 rtp_parser.Parse(&parsed_header, &default_extension_map);
374 }
375 uint64_t timestamp = parsed_log_.GetTimestamp(i);
376 StreamId stream(parsed_header.ssrc, direction);
377 rtp_packets_[stream].push_back(
378 LoggedRtpPacket(timestamp, parsed_header, total_length));
379 break;
380 }
381 case ParsedRtcEventLog::RTCP_EVENT: {
382 uint8_t packet[IP_PACKET_SIZE];
383 parsed_log_.GetRtcpPacket(i, &direction, packet, &total_length);
384 // Currently incoming RTCP packets are logged twice, both for audio and
385 // video. Only act on one of them. Compare against the previous parsed
386 // incoming RTCP packet.
387 if (direction == webrtc::kIncomingPacket) {
388 RTC_CHECK_LE(total_length, IP_PACKET_SIZE);
389 if (total_length == last_incoming_rtcp_packet_length &&
390 memcmp(last_incoming_rtcp_packet, packet, total_length) == 0) {
391 continue;
392 } else {
393 memcpy(last_incoming_rtcp_packet, packet, total_length);
394 last_incoming_rtcp_packet_length = total_length;
395 }
396 }
397 rtcp::CommonHeader header;
398 const uint8_t* packet_end = packet + total_length;
399 for (const uint8_t* block = packet; block < packet_end;
400 block = header.NextPacket()) {
401 RTC_CHECK(header.Parse(block, packet_end - block));
402 if (header.type() == rtcp::TransportFeedback::kPacketType &&
403 header.fmt() == rtcp::TransportFeedback::kFeedbackMessageType) {
404 std::unique_ptr<rtcp::TransportFeedback> rtcp_packet(
405 rtc::MakeUnique<rtcp::TransportFeedback>());
406 if (rtcp_packet->Parse(header)) {
407 uint32_t ssrc = rtcp_packet->sender_ssrc();
408 StreamId stream(ssrc, direction);
409 uint64_t timestamp = parsed_log_.GetTimestamp(i);
410 rtcp_packets_[stream].push_back(LoggedRtcpPacket(
411 timestamp, kRtcpTransportFeedback, std::move(rtcp_packet)));
412 }
413 } else if (header.type() == rtcp::SenderReport::kPacketType) {
414 std::unique_ptr<rtcp::SenderReport> rtcp_packet(
415 rtc::MakeUnique<rtcp::SenderReport>());
416 if (rtcp_packet->Parse(header)) {
417 uint32_t ssrc = rtcp_packet->sender_ssrc();
418 StreamId stream(ssrc, direction);
419 uint64_t timestamp = parsed_log_.GetTimestamp(i);
420 rtcp_packets_[stream].push_back(
421 LoggedRtcpPacket(timestamp, kRtcpSr, std::move(rtcp_packet)));
422 }
423 } else if (header.type() == rtcp::ReceiverReport::kPacketType) {
424 std::unique_ptr<rtcp::ReceiverReport> rtcp_packet(
425 rtc::MakeUnique<rtcp::ReceiverReport>());
426 if (rtcp_packet->Parse(header)) {
427 uint32_t ssrc = rtcp_packet->sender_ssrc();
428 StreamId stream(ssrc, direction);
429 uint64_t timestamp = parsed_log_.GetTimestamp(i);
430 rtcp_packets_[stream].push_back(
431 LoggedRtcpPacket(timestamp, kRtcpRr, std::move(rtcp_packet)));
432 }
433 } else if (header.type() == rtcp::Remb::kPacketType &&
434 header.fmt() == rtcp::Remb::kFeedbackMessageType) {
435 std::unique_ptr<rtcp::Remb> rtcp_packet(
436 rtc::MakeUnique<rtcp::Remb>());
437 if (rtcp_packet->Parse(header)) {
438 uint32_t ssrc = rtcp_packet->sender_ssrc();
439 StreamId stream(ssrc, direction);
440 uint64_t timestamp = parsed_log_.GetTimestamp(i);
441 rtcp_packets_[stream].push_back(LoggedRtcpPacket(
442 timestamp, kRtcpRemb, std::move(rtcp_packet)));
443 }
444 }
445 }
446 break;
447 }
448 case ParsedRtcEventLog::LOG_START: {
449 if (last_log_start) {
450 // A LOG_END event was missing. Use last_timestamp.
451 RTC_DCHECK_GE(last_timestamp, *last_log_start);
452 log_segments_.push_back(
453 std::make_pair(*last_log_start, last_timestamp));
454 }
455 last_log_start = rtc::Optional<uint64_t>(parsed_log_.GetTimestamp(i));
456 break;
457 }
458 case ParsedRtcEventLog::LOG_END: {
459 RTC_DCHECK(last_log_start);
460 log_segments_.push_back(
461 std::make_pair(*last_log_start, parsed_log_.GetTimestamp(i)));
462 last_log_start.reset();
463 break;
464 }
465 case ParsedRtcEventLog::AUDIO_PLAYOUT_EVENT: {
466 uint32_t this_ssrc;
467 parsed_log_.GetAudioPlayout(i, &this_ssrc);
468 audio_playout_events_[this_ssrc].push_back(parsed_log_.GetTimestamp(i));
469 break;
470 }
471 case ParsedRtcEventLog::LOSS_BASED_BWE_UPDATE: {
472 LossBasedBweUpdate bwe_update;
473 bwe_update.timestamp = parsed_log_.GetTimestamp(i);
474 parsed_log_.GetLossBasedBweUpdate(i, &bwe_update.new_bitrate,
475 &bwe_update.fraction_loss,
476 &bwe_update.expected_packets);
477 bwe_loss_updates_.push_back(bwe_update);
478 break;
479 }
480 case ParsedRtcEventLog::DELAY_BASED_BWE_UPDATE: {
481 bwe_delay_updates_.push_back(parsed_log_.GetDelayBasedBweUpdate(i));
482 break;
483 }
484 case ParsedRtcEventLog::AUDIO_NETWORK_ADAPTATION_EVENT: {
485 AudioNetworkAdaptationEvent ana_event;
486 ana_event.timestamp = parsed_log_.GetTimestamp(i);
487 parsed_log_.GetAudioNetworkAdaptation(i, &ana_event.config);
488 audio_network_adaptation_events_.push_back(ana_event);
489 break;
490 }
491 case ParsedRtcEventLog::BWE_PROBE_CLUSTER_CREATED_EVENT: {
492 bwe_probe_cluster_created_events_.push_back(
493 parsed_log_.GetBweProbeClusterCreated(i));
494 break;
495 }
496 case ParsedRtcEventLog::BWE_PROBE_RESULT_EVENT: {
497 bwe_probe_result_events_.push_back(parsed_log_.GetBweProbeResult(i));
498 break;
499 }
500 case ParsedRtcEventLog::UNKNOWN_EVENT: {
501 break;
502 }
503 }
504 }
505
506 if (last_timestamp < first_timestamp) {
507 // No useful events in the log.
508 first_timestamp = last_timestamp = 0;
509 }
510 begin_time_ = first_timestamp;
511 end_time_ = last_timestamp;
512 call_duration_s_ = static_cast<float>(end_time_ - begin_time_) / 1000000;
513 if (last_log_start) {
514 // The log was missing the last LOG_END event. Fake it.
515 log_segments_.push_back(std::make_pair(*last_log_start, end_time_));
516 }
517 }
518
// Observer that records the most recent bitrate estimate delivered through
// CongestionController::Observer::OnNetworkChanged, plus a flag telling
// whether a new estimate arrived since the last query. (Presumably used by
// the simulated-BWE plotting code later in this file — confirm at call site.)
class BitrateObserver : public CongestionController::Observer,
                        public RemoteBitrateObserver {
 public:
  BitrateObserver() : last_bitrate_bps_(0), bitrate_updated_(false) {}

  // TODO(minyue): remove this when old OnNetworkChanged is deprecated. See
  // https://bugs.chromium.org/p/webrtc/issues/detail?id=6796
  using CongestionController::Observer::OnNetworkChanged;

  // Records the new estimate; fraction_loss/rtt/probing interval are ignored.
  void OnNetworkChanged(uint32_t bitrate_bps,
                        uint8_t fraction_loss,
                        int64_t rtt_ms,
                        int64_t probing_interval_ms) override {
    last_bitrate_bps_ = bitrate_bps;
    bitrate_updated_ = true;
  }

  // RemoteBitrateObserver callback; intentionally a no-op here.
  void OnReceiveBitrateChanged(const std::vector<uint32_t>& ssrcs,
                               uint32_t bitrate) override {}

  uint32_t last_bitrate_bps() const { return last_bitrate_bps_; }
  // Returns true iff OnNetworkChanged fired since the previous call, and
  // clears the flag.
  bool GetAndResetBitrateUpdated() {
    bool bitrate_updated = bitrate_updated_;
    bitrate_updated_ = false;
    return bitrate_updated;
  }

 private:
  uint32_t last_bitrate_bps_;
  bool bitrate_updated_;
};
550
551 bool EventLogAnalyzer::IsRtxSsrc(StreamId stream_id) const {
552 return rtx_ssrcs_.count(stream_id) == 1;
553 }
554
555 bool EventLogAnalyzer::IsVideoSsrc(StreamId stream_id) const {
556 return video_ssrcs_.count(stream_id) == 1;
557 }
558
559 bool EventLogAnalyzer::IsAudioSsrc(StreamId stream_id) const {
560 return audio_ssrcs_.count(stream_id) == 1;
561 }
562
563 std::string EventLogAnalyzer::GetStreamName(StreamId stream_id) const {
564 std::stringstream name;
565 if (IsAudioSsrc(stream_id)) {
566 name << "Audio ";
567 } else if (IsVideoSsrc(stream_id)) {
568 name << "Video ";
569 } else {
570 name << "Unknown ";
571 }
572 if (IsRtxSsrc(stream_id))
573 name << "RTX ";
574 if (stream_id.GetDirection() == kIncomingPacket) {
575 name << "(In) ";
576 } else {
577 name << "(Out) ";
578 }
579 name << SsrcToString(stream_id.GetSsrc());
580 return name.str();
581 }
582
583 void EventLogAnalyzer::CreatePacketGraph(PacketDirection desired_direction,
584 Plot* plot) {
585 for (auto& kv : rtp_packets_) {
586 StreamId stream_id = kv.first;
587 const std::vector<LoggedRtpPacket>& packet_stream = kv.second;
588 // Filter on direction and SSRC.
589 if (stream_id.GetDirection() != desired_direction ||
590 !MatchingSsrc(stream_id.GetSsrc(), desired_ssrc_)) {
591 continue;
592 }
593
594 TimeSeries time_series(GetStreamName(stream_id), BAR_GRAPH);
595 ProcessPoints<LoggedRtpPacket>(
596 [](const LoggedRtpPacket& packet) -> rtc::Optional<float> {
597 return rtc::Optional<float>(packet.total_length);
598 },
599 packet_stream, begin_time_, &time_series);
600 plot->AppendTimeSeries(std::move(time_series));
601 }
602
603 plot->SetXAxis(0, call_duration_s_, "Time (s)", kLeftMargin, kRightMargin);
604 plot->SetSuggestedYAxis(0, 1, "Packet size (bytes)", kBottomMargin,
605 kTopMargin);
606 if (desired_direction == webrtc::PacketDirection::kIncomingPacket) {
607 plot->SetTitle("Incoming RTP packets");
608 } else if (desired_direction == webrtc::PacketDirection::kOutgoingPacket) {
609 plot->SetTitle("Outgoing RTP packets");
610 }
611 }
612
613 template <typename T>
614 void EventLogAnalyzer::CreateAccumulatedPacketsTimeSeries(
615 PacketDirection desired_direction,
616 Plot* plot,
617 const std::map<StreamId, std::vector<T>>& packets,
618 const std::string& label_prefix) {
619 for (auto& kv : packets) {
620 StreamId stream_id = kv.first;
621 const std::vector<T>& packet_stream = kv.second;
622 // Filter on direction and SSRC.
623 if (stream_id.GetDirection() != desired_direction ||
624 !MatchingSsrc(stream_id.GetSsrc(), desired_ssrc_)) {
625 continue;
626 }
627
628 std::string label = label_prefix + " " + GetStreamName(stream_id);
629 TimeSeries time_series(label, LINE_STEP_GRAPH);
630 for (size_t i = 0; i < packet_stream.size(); i++) {
631 float x = static_cast<float>(packet_stream[i].timestamp - begin_time_) /
632 1000000;
633 time_series.points.emplace_back(x, i + 1);
634 }
635
636 plot->AppendTimeSeries(std::move(time_series));
637 }
638 }
639
640 void EventLogAnalyzer::CreateAccumulatedPacketsGraph(
641 PacketDirection desired_direction,
642 Plot* plot) {
643 CreateAccumulatedPacketsTimeSeries(desired_direction, plot, rtp_packets_,
644 "RTP");
645 CreateAccumulatedPacketsTimeSeries(desired_direction, plot, rtcp_packets_,
646 "RTCP");
647
648 plot->SetXAxis(0, call_duration_s_, "Time (s)", kLeftMargin, kRightMargin);
649 plot->SetSuggestedYAxis(0, 1, "Received Packets", kBottomMargin, kTopMargin);
650 if (desired_direction == webrtc::PacketDirection::kIncomingPacket) {
651 plot->SetTitle("Accumulated Incoming RTP/RTCP packets");
652 } else if (desired_direction == webrtc::PacketDirection::kOutgoingPacket) {
653 plot->SetTitle("Accumulated Outgoing RTP/RTCP packets");
654 }
655 }
656
// For each SSRC, plot the time between the consecutive playouts.
void EventLogAnalyzer::CreatePlayoutGraph(Plot* plot) {
  // One series per SSRC; last_playout tracks the previous playout time so
  // that y can be the gap (in ms) since the last playout.
  std::map<uint32_t, TimeSeries> time_series;
  std::map<uint32_t, uint64_t> last_playout;

  uint32_t ssrc;

  for (size_t i = 0; i < parsed_log_.GetNumberOfEvents(); i++) {
    ParsedRtcEventLog::EventType event_type = parsed_log_.GetEventType(i);
    if (event_type == ParsedRtcEventLog::AUDIO_PLAYOUT_EVENT) {
      parsed_log_.GetAudioPlayout(i, &ssrc);
      uint64_t timestamp = parsed_log_.GetTimestamp(i);
      if (MatchingSsrc(ssrc, desired_ssrc_)) {
        // x: seconds since the start of the call; y: ms since last playout.
        float x = static_cast<float>(timestamp - begin_time_) / 1000000;
        float y = static_cast<float>(timestamp - last_playout[ssrc]) / 1000;
        if (time_series[ssrc].points.size() == 0) {
          // There was no previously logged playout for this SSRC.
          // Generate a point, but place it on the x-axis.
          y = 0;
        }
        time_series[ssrc].points.push_back(TimeSeriesPoint(x, y));
        last_playout[ssrc] = timestamp;
      }
    }
  }

  // Set labels and put in graph.
  for (auto& kv : time_series) {
    kv.second.label = SsrcToString(kv.first);
    kv.second.style = BAR_GRAPH;
    plot->AppendTimeSeries(std::move(kv.second));
  }

  plot->SetXAxis(0, call_duration_s_, "Time (s)", kLeftMargin, kRightMargin);
  plot->SetSuggestedYAxis(0, 1, "Time since last playout (ms)", kBottomMargin,
                          kTopMargin);
  plot->SetTitle("Audio playout");
}
695
696 // For audio SSRCs, plot the audio level.
697 void EventLogAnalyzer::CreateAudioLevelGraph(Plot* plot) {
698 std::map<StreamId, TimeSeries> time_series;
699
700 for (auto& kv : rtp_packets_) {
701 StreamId stream_id = kv.first;
702 const std::vector<LoggedRtpPacket>& packet_stream = kv.second;
703 // TODO(ivoc): When audio send/receive configs are stored in the event
704 // log, a check should be added here to only process audio
705 // streams. Tracking bug: webrtc:6399
706 for (auto& packet : packet_stream) {
707 if (packet.header.extension.hasAudioLevel) {
708 float x = static_cast<float>(packet.timestamp - begin_time_) / 1000000;
709 // The audio level is stored in -dBov (so e.g. -10 dBov is stored as 10)
710 // Here we convert it to dBov.
711 float y = static_cast<float>(-packet.header.extension.audioLevel);
712 time_series[stream_id].points.emplace_back(TimeSeriesPoint(x, y));
713 }
714 }
715 }
716
717 for (auto& series : time_series) {
718 series.second.label = GetStreamName(series.first);
719 series.second.style = LINE_GRAPH;
720 plot->AppendTimeSeries(std::move(series.second));
721 }
722
723 plot->SetXAxis(0, call_duration_s_, "Time (s)", kLeftMargin, kRightMargin);
724 plot->SetYAxis(-127, 0, "Audio level (dBov)", kBottomMargin,
725 kTopMargin);
726 plot->SetTitle("Audio level");
727 }
728
// For each incoming stream, plot the difference in sequence numbers between
// consecutive packets. (The previous comment, "plot the time between the
// consecutive playouts", was copy-pasted from CreatePlayoutGraph.)
void EventLogAnalyzer::CreateSequenceNumberGraph(Plot* plot) {
  for (auto& kv : rtp_packets_) {
    StreamId stream_id = kv.first;
    const std::vector<LoggedRtpPacket>& packet_stream = kv.second;
    // Filter on direction and SSRC.
    if (stream_id.GetDirection() != kIncomingPacket ||
        !MatchingSsrc(stream_id.GetSsrc(), desired_ssrc_)) {
      continue;
    }

    TimeSeries time_series(GetStreamName(stream_id), BAR_GRAPH);
    ProcessPairs<LoggedRtpPacket, float>(
        [](const LoggedRtpPacket& old_packet,
           const LoggedRtpPacket& new_packet) {
          // Sequence numbers are 16-bit counters, hence the 2^16 wrap modulus.
          int64_t diff =
              WrappingDifference(new_packet.header.sequenceNumber,
                                 old_packet.header.sequenceNumber, 1ul << 16);
          return rtc::Optional<float>(diff);
        },
        packet_stream, begin_time_, &time_series);
    plot->AppendTimeSeries(std::move(time_series));
  }

  plot->SetXAxis(0, call_duration_s_, "Time (s)", kLeftMargin, kRightMargin);
  plot->SetSuggestedYAxis(0, 1, "Difference since last packet", kBottomMargin,
                          kTopMargin);
  plot->SetTitle("Sequence number");
}
758
// Estimates the incoming loss rate per stream over a sliding one-second
// window, by comparing the number of packets actually received in the window
// with the number expected from the sequence-number range.
void EventLogAnalyzer::CreateIncomingPacketLossGraph(Plot* plot) {
  for (auto& kv : rtp_packets_) {
    StreamId stream_id = kv.first;
    const std::vector<LoggedRtpPacket>& packet_stream = kv.second;
    // Filter on direction and SSRC.
    if (stream_id.GetDirection() != kIncomingPacket ||
        !MatchingSsrc(stream_id.GetSsrc(), desired_ssrc_) ||
        packet_stream.size() == 0) {
      continue;
    }

    TimeSeries time_series(GetStreamName(stream_id), LINE_DOT_GRAPH);
    const uint64_t kWindowUs = 1000000;
    const uint64_t kStep = 1000000;
    // NOTE(review): these are locals despite the trailing underscore, which
    // the style guide reserves for members. Two separate unwrappers are
    // needed because each must see the sequence numbers in stream order:
    // one tracks the leading edge of the window, the other the trailing edge.
    SequenceNumberUnwrapper unwrapper_;
    SequenceNumberUnwrapper prior_unwrapper_;
    size_t window_index_begin = 0;
    size_t window_index_end = 0;
    // Start one below the first packet's sequence number so that the first
    // packet itself counts as expected/received.
    int64_t highest_seq_number =
        unwrapper_.Unwrap(packet_stream[0].header.sequenceNumber) - 1;
    int64_t highest_prior_seq_number =
        prior_unwrapper_.Unwrap(packet_stream[0].header.sequenceNumber) - 1;

    for (uint64_t t = begin_time_; t < end_time_ + kStep; t += kStep) {
      // Advance the leading edge: highest sequence number seen before t.
      while (window_index_end < packet_stream.size() &&
             packet_stream[window_index_end].timestamp < t) {
        int64_t sequence_number = unwrapper_.Unwrap(
            packet_stream[window_index_end].header.sequenceNumber);
        highest_seq_number = std::max(highest_seq_number, sequence_number);
        ++window_index_end;
      }
      // Advance the trailing edge: highest sequence number seen before
      // t - kWindowUs. NOTE(review): unsigned subtraction wraps if
      // t < kWindowUs; presumably timestamps are large enough that this
      // cannot happen — confirm.
      while (window_index_begin < packet_stream.size() &&
             packet_stream[window_index_begin].timestamp < t - kWindowUs) {
        int64_t sequence_number = prior_unwrapper_.Unwrap(
            packet_stream[window_index_begin].header.sequenceNumber);
        highest_prior_seq_number =
            std::max(highest_prior_seq_number, sequence_number);
        ++window_index_begin;
      }
      float x = static_cast<float>(t - begin_time_) / 1000000;
      // Expected = sequence-number span of the window; lost = expected minus
      // the packets actually present in the window.
      int64_t expected_packets = highest_seq_number - highest_prior_seq_number;
      if (expected_packets > 0) {
        int64_t received_packets = window_index_end - window_index_begin;
        int64_t lost_packets = expected_packets - received_packets;
        float y = static_cast<float>(lost_packets) / expected_packets * 100;
        time_series.points.emplace_back(x, y);
      }
    }
    plot->AppendTimeSeries(std::move(time_series));
  }

  plot->SetXAxis(0, call_duration_s_, "Time (s)", kLeftMargin, kRightMargin);
  plot->SetSuggestedYAxis(0, 1, "Estimated loss rate (%)", kBottomMargin,
                          kTopMargin);
  plot->SetTitle("Estimated incoming loss rate");
}
815
816 void EventLogAnalyzer::CreateDelayChangeGraph(Plot* plot) {
817 for (auto& kv : rtp_packets_) {
818 StreamId stream_id = kv.first;
819 const std::vector<LoggedRtpPacket>& packet_stream = kv.second;
820 // Filter on direction and SSRC.
821 if (stream_id.GetDirection() != kIncomingPacket ||
822 !MatchingSsrc(stream_id.GetSsrc(), desired_ssrc_) ||
823 IsAudioSsrc(stream_id) || !IsVideoSsrc(stream_id) ||
824 IsRtxSsrc(stream_id)) {
825 continue;
826 }
827
828 TimeSeries capture_time_data(GetStreamName(stream_id) + " capture-time",
829 BAR_GRAPH);
830 ProcessPairs<LoggedRtpPacket, double>(NetworkDelayDiff_CaptureTime,
831 packet_stream, begin_time_,
832 &capture_time_data);
833 plot->AppendTimeSeries(std::move(capture_time_data));
834
835 TimeSeries send_time_data(GetStreamName(stream_id) + " abs-send-time",
836 BAR_GRAPH);
837 ProcessPairs<LoggedRtpPacket, double>(NetworkDelayDiff_AbsSendTime,
838 packet_stream, begin_time_,
839 &send_time_data);
840 plot->AppendTimeSeries(std::move(send_time_data));
841 }
842
843 plot->SetXAxis(0, call_duration_s_, "Time (s)", kLeftMargin, kRightMargin);
844 plot->SetSuggestedYAxis(0, 1, "Latency change (ms)", kBottomMargin,
845 kTopMargin);
846 plot->SetTitle("Network latency change between consecutive packets");
847 }
848
849 void EventLogAnalyzer::CreateAccumulatedDelayChangeGraph(Plot* plot) {
850 for (auto& kv : rtp_packets_) {
851 StreamId stream_id = kv.first;
852 const std::vector<LoggedRtpPacket>& packet_stream = kv.second;
853 // Filter on direction and SSRC.
854 if (stream_id.GetDirection() != kIncomingPacket ||
855 !MatchingSsrc(stream_id.GetSsrc(), desired_ssrc_) ||
856 IsAudioSsrc(stream_id) || !IsVideoSsrc(stream_id) ||
857 IsRtxSsrc(stream_id)) {
858 continue;
859 }
860
861 TimeSeries capture_time_data(GetStreamName(stream_id) + " capture-time",
862 LINE_GRAPH);
863 AccumulatePairs<LoggedRtpPacket, double>(NetworkDelayDiff_CaptureTime,
864 packet_stream, begin_time_,
865 &capture_time_data);
866 plot->AppendTimeSeries(std::move(capture_time_data));
867
868 TimeSeries send_time_data(GetStreamName(stream_id) + " abs-send-time",
869 LINE_GRAPH);
870 AccumulatePairs<LoggedRtpPacket, double>(NetworkDelayDiff_AbsSendTime,
871 packet_stream, begin_time_,
872 &send_time_data);
873 plot->AppendTimeSeries(std::move(send_time_data));
874 }
875
876 plot->SetXAxis(0, call_duration_s_, "Time (s)", kLeftMargin, kRightMargin);
877 plot->SetSuggestedYAxis(0, 1, "Latency change (ms)", kBottomMargin,
878 kTopMargin);
879 plot->SetTitle("Accumulated network latency change");
880 }
881
882 // Plot the fraction of packets lost (as perceived by the loss-based BWE).
883 void EventLogAnalyzer::CreateFractionLossGraph(Plot* plot) {
884 TimeSeries time_series("Fraction lost", LINE_DOT_GRAPH);
885 for (auto& bwe_update : bwe_loss_updates_) {
886 float x = static_cast<float>(bwe_update.timestamp - begin_time_) / 1000000;
887 float y = static_cast<float>(bwe_update.fraction_loss) / 255 * 100;
888 time_series.points.emplace_back(x, y);
889 }
890
891 plot->SetXAxis(0, call_duration_s_, "Time (s)", kLeftMargin, kRightMargin);
892 plot->SetSuggestedYAxis(0, 10, "Percent lost packets", kBottomMargin,
893 kTopMargin);
894 plot->SetTitle("Reported packet loss");
895 plot->AppendTimeSeries(std::move(time_series));
896 }
897
// Plot the total bandwidth used by all RTP streams.
// Computes a moving average of the total bitrate in the given direction and,
// for outgoing traffic, overlays the send-side BWE estimates and probing
// events; REMB feedback from the opposite direction is overlaid in both cases.
void EventLogAnalyzer::CreateTotalBitrateGraph(
    PacketDirection desired_direction,
    Plot* plot) {
  // Minimal record of one logged RTP packet: log timestamp (us) and size.
  struct TimestampSize {
    TimestampSize(uint64_t t, size_t s) : timestamp(t), size(s) {}
    uint64_t timestamp;
    size_t size;
  };
  std::vector<TimestampSize> packets;

  // Out-parameters filled in by GetRtpHeader() for each RTP event.
  PacketDirection direction;
  size_t total_length;

  // Extract timestamps and sizes for the relevant packets.
  for (size_t i = 0; i < parsed_log_.GetNumberOfEvents(); i++) {
    ParsedRtcEventLog::EventType event_type = parsed_log_.GetEventType(i);
    if (event_type == ParsedRtcEventLog::RTP_EVENT) {
      parsed_log_.GetRtpHeader(i, &direction, nullptr, nullptr, &total_length);
      if (direction == desired_direction) {
        uint64_t timestamp = parsed_log_.GetTimestamp(i);
        packets.push_back(TimestampSize(timestamp, total_length));
      }
    }
  }

  // Sliding window [time - window_duration_, time) over the packet list;
  // bytes_in_window tracks the byte total between the two indices.
  size_t window_index_begin = 0;
  size_t window_index_end = 0;
  size_t bytes_in_window = 0;

  // Calculate a moving average of the bitrate and store in a TimeSeries.
  TimeSeries bitrate_series("Bitrate", LINE_GRAPH);
  for (uint64_t time = begin_time_; time < end_time_ + step_; time += step_) {
    // Absorb packets entering the window.
    while (window_index_end < packets.size() &&
           packets[window_index_end].timestamp < time) {
      bytes_in_window += packets[window_index_end].size;
      ++window_index_end;
    }
    // Drop packets that have fallen out of the window.
    while (window_index_begin < packets.size() &&
           packets[window_index_begin].timestamp < time - window_duration_) {
      RTC_DCHECK_LE(packets[window_index_begin].size, bytes_in_window);
      bytes_in_window -= packets[window_index_begin].size;
      ++window_index_begin;
    }
    float window_duration_in_seconds =
        static_cast<float>(window_duration_) / 1000000;
    float x = static_cast<float>(time - begin_time_) / 1000000;
    // Bytes over the window converted to kilobits per second.
    float y = bytes_in_window * 8 / window_duration_in_seconds / 1000;
    bitrate_series.points.emplace_back(x, y);
  }
  plot->AppendTimeSeries(std::move(bitrate_series));

  // Overlay the send-side bandwidth estimate over the outgoing bitrate.
  if (desired_direction == kOutgoingPacket) {
    TimeSeries loss_series("Loss-based estimate", LINE_STEP_GRAPH);
    for (auto& loss_update : bwe_loss_updates_) {
      float x =
          static_cast<float>(loss_update.timestamp - begin_time_) / 1000000;
      float y = static_cast<float>(loss_update.new_bitrate) / 1000;
      loss_series.points.emplace_back(x, y);
    }

    TimeSeries delay_series("Delay-based estimate", LINE_STEP_GRAPH);
    for (auto& delay_update : bwe_delay_updates_) {
      float x =
          static_cast<float>(delay_update.timestamp - begin_time_) / 1000000;
      float y = static_cast<float>(delay_update.bitrate_bps) / 1000;
      delay_series.points.emplace_back(x, y);
    }

    TimeSeries created_series("Probe cluster created.", DOT_GRAPH);
    for (auto& cluster : bwe_probe_cluster_created_events_) {
      float x = static_cast<float>(cluster.timestamp - begin_time_) / 1000000;
      float y = static_cast<float>(cluster.bitrate_bps) / 1000;
      created_series.points.emplace_back(x, y);
    }

    TimeSeries result_series("Probing results.", DOT_GRAPH);
    for (auto& result : bwe_probe_result_events_) {
      // bitrate_bps is optional; probes without a measured result are skipped.
      if (result.bitrate_bps) {
        float x = static_cast<float>(result.timestamp - begin_time_) / 1000000;
        float y = static_cast<float>(*result.bitrate_bps) / 1000;
        result_series.points.emplace_back(x, y);
      }
    }
    plot->AppendTimeSeries(std::move(loss_series));
    plot->AppendTimeSeries(std::move(delay_series));
    plot->AppendTimeSeries(std::move(created_series));
    plot->AppendTimeSeries(std::move(result_series));
  }

  // Overlay the incoming REMB over the outgoing bitrate
  // and outgoing REMB over incoming bitrate.
  PacketDirection remb_direction =
      desired_direction == kOutgoingPacket ? kIncomingPacket : kOutgoingPacket;
  TimeSeries remb_series("Remb", LINE_STEP_GRAPH);
  // The multimap orders the REMB packets by timestamp before plotting.
  std::multimap<uint64_t, const LoggedRtcpPacket*> remb_packets;
  for (const auto& kv : rtcp_packets_) {
    if (kv.first.GetDirection() == remb_direction) {
      for (const LoggedRtcpPacket& rtcp_packet : kv.second) {
        if (rtcp_packet.type == kRtcpRemb) {
          remb_packets.insert(
              std::make_pair(rtcp_packet.timestamp, &rtcp_packet));
        }
      }
    }
  }

  for (const auto& kv : remb_packets) {
    const LoggedRtcpPacket* const rtcp = kv.second;
    const rtcp::Remb* const remb = static_cast<rtcp::Remb*>(rtcp->packet.get());
    float x = static_cast<float>(rtcp->timestamp - begin_time_) / 1000000;
    float y = static_cast<float>(remb->bitrate_bps()) / 1000;
    remb_series.points.emplace_back(x, y);
  }
  plot->AppendTimeSeriesIfNotEmpty(std::move(remb_series));

  plot->SetXAxis(0, call_duration_s_, "Time (s)", kLeftMargin, kRightMargin);
  plot->SetSuggestedYAxis(0, 1, "Bitrate (kbps)", kBottomMargin, kTopMargin);
  if (desired_direction == webrtc::PacketDirection::kIncomingPacket) {
    plot->SetTitle("Incoming RTP bitrate");
  } else if (desired_direction == webrtc::PacketDirection::kOutgoingPacket) {
    plot->SetTitle("Outgoing RTP bitrate");
  }
}
1023
1024 // For each SSRC, plot the bandwidth used by that stream.
1025 void EventLogAnalyzer::CreateStreamBitrateGraph(
1026 PacketDirection desired_direction,
1027 Plot* plot) {
1028 for (auto& kv : rtp_packets_) {
1029 StreamId stream_id = kv.first;
1030 const std::vector<LoggedRtpPacket>& packet_stream = kv.second;
1031 // Filter on direction and SSRC.
1032 if (stream_id.GetDirection() != desired_direction ||
1033 !MatchingSsrc(stream_id.GetSsrc(), desired_ssrc_)) {
1034 continue;
1035 }
1036
1037 TimeSeries time_series(GetStreamName(stream_id), LINE_GRAPH);
1038 MovingAverage<LoggedRtpPacket, double>(
1039 [](const LoggedRtpPacket& packet) {
1040 return rtc::Optional<double>(packet.total_length * 8.0 / 1000.0);
1041 },
1042 packet_stream, begin_time_, end_time_, window_duration_, step_,
1043 &time_series);
1044 plot->AppendTimeSeries(std::move(time_series));
1045 }
1046
1047 plot->SetXAxis(0, call_duration_s_, "Time (s)", kLeftMargin, kRightMargin);
1048 plot->SetSuggestedYAxis(0, 1, "Bitrate (kbps)", kBottomMargin, kTopMargin);
1049 if (desired_direction == webrtc::PacketDirection::kIncomingPacket) {
1050 plot->SetTitle("Incoming bitrate per stream");
1051 } else if (desired_direction == webrtc::PacketDirection::kOutgoingPacket) {
1052 plot->SetTitle("Outgoing bitrate per stream");
1053 }
1054 }
1055
// Replays the logged outgoing RTP packets and incoming RTCP feedback through
// a real CongestionController (offline, on a simulated clock) and plots the
// resulting delay-based bitrate estimate together with the acked bitrate.
void EventLogAnalyzer::CreateBweSimulationGraph(Plot* plot) {
  // Index packets by log timestamp (us) so they can be replayed in order.
  std::multimap<uint64_t, const LoggedRtpPacket*> outgoing_rtp;
  std::multimap<uint64_t, const LoggedRtcpPacket*> incoming_rtcp;

  for (const auto& kv : rtp_packets_) {
    if (kv.first.GetDirection() == PacketDirection::kOutgoingPacket) {
      for (const LoggedRtpPacket& rtp_packet : kv.second)
        outgoing_rtp.insert(std::make_pair(rtp_packet.timestamp, &rtp_packet));
    }
  }

  for (const auto& kv : rtcp_packets_) {
    if (kv.first.GetDirection() == PacketDirection::kIncomingPacket) {
      for (const LoggedRtcpPacket& rtcp_packet : kv.second)
        incoming_rtcp.insert(
            std::make_pair(rtcp_packet.timestamp, &rtcp_packet));
    }
  }

  SimulatedClock clock(0);
  BitrateObserver observer;
  RtcEventLogNullImpl null_event_log;
  PacketRouter packet_router;
  CongestionController cc(&clock, &observer, &observer, &null_event_log,
                          &packet_router);
  // TODO(holmer): Log the call config and use that here instead.
  static const uint32_t kDefaultStartBitrateBps = 300000;
  cc.SetBweBitrates(0, kDefaultStartBitrateBps, -1);

  TimeSeries time_series("Delay-based estimate", LINE_DOT_GRAPH);
  TimeSeries acked_time_series("Acked bitrate", LINE_DOT_GRAPH);

  auto rtp_iterator = outgoing_rtp.begin();
  auto rtcp_iterator = incoming_rtcp.begin();

  // Time (us) of the next logged RTP send, or "infinity" when exhausted.
  auto NextRtpTime = [&]() {
    if (rtp_iterator != outgoing_rtp.end())
      return static_cast<int64_t>(rtp_iterator->first);
    return std::numeric_limits<int64_t>::max();
  };

  // Time (us) of the next logged RTCP arrival, or "infinity" when exhausted.
  auto NextRtcpTime = [&]() {
    if (rtcp_iterator != incoming_rtcp.end())
      return static_cast<int64_t>(rtcp_iterator->first);
    return std::numeric_limits<int64_t>::max();
  };

  // Time (us) of the controller's next periodic Process() call; "infinity"
  // once both event streams are exhausted, which terminates the loop below.
  auto NextProcessTime = [&]() {
    if (rtcp_iterator != incoming_rtcp.end() ||
        rtp_iterator != outgoing_rtp.end()) {
      return clock.TimeInMicroseconds() +
             std::max<int64_t>(cc.TimeUntilNextProcess() * 1000, 0);
    }
    return std::numeric_limits<int64_t>::max();
  };

  // Rate estimator for the acked bitrate; 250 ms window, scale 8000
  // (presumably bytes -> bits/s -- see RateStatistics).
  RateStatistics acked_bitrate(250, 8000);

  // Event loop: repeatedly advance the simulated clock to the earliest
  // pending event (RTCP arrival, RTP send, or process tick) and dispatch it.
  int64_t time_us = std::min(NextRtpTime(), NextRtcpTime());
  int64_t last_update_us = 0;
  while (time_us != std::numeric_limits<int64_t>::max()) {
    clock.AdvanceTimeMicroseconds(time_us - clock.TimeInMicroseconds());
    if (clock.TimeInMicroseconds() >= NextRtcpTime()) {
      RTC_DCHECK_EQ(clock.TimeInMicroseconds(), NextRtcpTime());
      const LoggedRtcpPacket& rtcp = *rtcp_iterator->second;
      if (rtcp.type == kRtcpTransportFeedback) {
        cc.OnTransportFeedback(
            *static_cast<rtcp::TransportFeedback*>(rtcp.packet.get()));
        std::vector<PacketFeedback> feedback = cc.GetTransportFeedbackVector();
        SortPacketFeedbackVector(&feedback);
        rtc::Optional<uint32_t> bitrate_bps;
        if (!feedback.empty()) {
          for (const PacketFeedback& packet : feedback)
            acked_bitrate.Update(packet.payload_size, packet.arrival_time_ms);
          bitrate_bps = acked_bitrate.Rate(feedback.back().arrival_time_ms);
        }
        // Plot 0 when no rate estimate is available yet.
        uint32_t y = 0;
        if (bitrate_bps)
          y = *bitrate_bps / 1000;
        float x = static_cast<float>(clock.TimeInMicroseconds() - begin_time_) /
                  1000000;
        acked_time_series.points.emplace_back(x, y);
      }
      ++rtcp_iterator;
    }
    if (clock.TimeInMicroseconds() >= NextRtpTime()) {
      RTC_DCHECK_EQ(clock.TimeInMicroseconds(), NextRtpTime());
      const LoggedRtpPacket& rtp = *rtp_iterator->second;
      if (rtp.header.extension.hasTransportSequenceNumber) {
        // NOTE(review): this DCHECK is redundant inside the enclosing if.
        RTC_DCHECK(rtp.header.extension.hasTransportSequenceNumber);
        cc.AddPacket(rtp.header.ssrc,
                     rtp.header.extension.transportSequenceNumber,
                     rtp.total_length, PacedPacketInfo());
        rtc::SentPacket sent_packet(
            rtp.header.extension.transportSequenceNumber, rtp.timestamp / 1000);
        cc.OnSentPacket(sent_packet);
      }
      ++rtp_iterator;
    }
    if (clock.TimeInMicroseconds() >= NextProcessTime()) {
      RTC_DCHECK_EQ(clock.TimeInMicroseconds(), NextProcessTime());
      cc.Process();
    }
    // Record a point whenever the estimate changes, and at least once per
    // simulated second.
    if (observer.GetAndResetBitrateUpdated() ||
        time_us - last_update_us >= 1e6) {
      uint32_t y = observer.last_bitrate_bps() / 1000;
      float x = static_cast<float>(clock.TimeInMicroseconds() - begin_time_) /
                1000000;
      time_series.points.emplace_back(x, y);
      last_update_us = time_us;
    }
    time_us = std::min({NextRtpTime(), NextRtcpTime(), NextProcessTime()});
  }
  // Add the data set to the plot.
  plot->AppendTimeSeries(std::move(time_series));
  plot->AppendTimeSeries(std::move(acked_time_series));

  plot->SetXAxis(0, call_duration_s_, "Time (s)", kLeftMargin, kRightMargin);
  plot->SetSuggestedYAxis(0, 10, "Bitrate (kbps)", kBottomMargin, kTopMargin);
  plot->SetTitle("Simulated BWE behavior");
}
1177
// Replays outgoing RTP and incoming transport feedback through a
// TransportFeedbackAdapter and plots, for each acked packet, the one-way
// delay (arrival minus send time) relative to the minimum delay observed
// during the call.
void EventLogAnalyzer::CreateNetworkDelayFeedbackGraph(Plot* plot) {
  // Index packets by log timestamp (us) so they can be replayed in order.
  std::multimap<uint64_t, const LoggedRtpPacket*> outgoing_rtp;
  std::multimap<uint64_t, const LoggedRtcpPacket*> incoming_rtcp;

  for (const auto& kv : rtp_packets_) {
    if (kv.first.GetDirection() == PacketDirection::kOutgoingPacket) {
      for (const LoggedRtpPacket& rtp_packet : kv.second)
        outgoing_rtp.insert(std::make_pair(rtp_packet.timestamp, &rtp_packet));
    }
  }

  for (const auto& kv : rtcp_packets_) {
    if (kv.first.GetDirection() == PacketDirection::kIncomingPacket) {
      for (const LoggedRtcpPacket& rtcp_packet : kv.second)
        incoming_rtcp.insert(
            std::make_pair(rtcp_packet.timestamp, &rtcp_packet));
    }
  }

  SimulatedClock clock(0);
  TransportFeedbackAdapter feedback_adapter(&clock);

  TimeSeries time_series("Network Delay Change", LINE_DOT_GRAPH);
  // Running minimum of the per-packet delays; subtracted from every point
  // at the end (see the comment near the bottom).
  int64_t estimated_base_delay_ms = std::numeric_limits<int64_t>::max();

  auto rtp_iterator = outgoing_rtp.begin();
  auto rtcp_iterator = incoming_rtcp.begin();

  // Time (us) of the next logged RTP send, or "infinity" when exhausted.
  auto NextRtpTime = [&]() {
    if (rtp_iterator != outgoing_rtp.end())
      return static_cast<int64_t>(rtp_iterator->first);
    return std::numeric_limits<int64_t>::max();
  };

  // Time (us) of the next logged RTCP arrival, or "infinity" when exhausted.
  auto NextRtcpTime = [&]() {
    if (rtcp_iterator != incoming_rtcp.end())
      return static_cast<int64_t>(rtcp_iterator->first);
    return std::numeric_limits<int64_t>::max();
  };

  // Event loop: advance the simulated clock to the earliest pending event
  // and feed it into the feedback adapter.
  int64_t time_us = std::min(NextRtpTime(), NextRtcpTime());
  while (time_us != std::numeric_limits<int64_t>::max()) {
    clock.AdvanceTimeMicroseconds(time_us - clock.TimeInMicroseconds());
    if (clock.TimeInMicroseconds() >= NextRtcpTime()) {
      RTC_DCHECK_EQ(clock.TimeInMicroseconds(), NextRtcpTime());
      const LoggedRtcpPacket& rtcp = *rtcp_iterator->second;
      if (rtcp.type == kRtcpTransportFeedback) {
        feedback_adapter.OnTransportFeedback(
            *static_cast<rtcp::TransportFeedback*>(rtcp.packet.get()));
        std::vector<PacketFeedback> feedback =
            feedback_adapter.GetTransportFeedbackVector();
        SortPacketFeedbackVector(&feedback);
        for (const PacketFeedback& packet : feedback) {
          // One-way delay for this packet, in ms.
          int64_t y = packet.arrival_time_ms - packet.send_time_ms;
          float x =
              static_cast<float>(clock.TimeInMicroseconds() - begin_time_) /
              1000000;
          estimated_base_delay_ms = std::min(y, estimated_base_delay_ms);
          time_series.points.emplace_back(x, y);
        }
      }
      ++rtcp_iterator;
    }
    if (clock.TimeInMicroseconds() >= NextRtpTime()) {
      RTC_DCHECK_EQ(clock.TimeInMicroseconds(), NextRtpTime());
      const LoggedRtpPacket& rtp = *rtp_iterator->second;
      if (rtp.header.extension.hasTransportSequenceNumber) {
        // NOTE(review): this DCHECK is redundant inside the enclosing if.
        RTC_DCHECK(rtp.header.extension.hasTransportSequenceNumber);
        feedback_adapter.AddPacket(rtp.header.ssrc,
                                   rtp.header.extension.transportSequenceNumber,
                                   rtp.total_length, PacedPacketInfo());
        feedback_adapter.OnSentPacket(
            rtp.header.extension.transportSequenceNumber, rtp.timestamp / 1000);
      }
      ++rtp_iterator;
    }
    time_us = std::min(NextRtpTime(), NextRtcpTime());
  }
  // We assume that the base network delay (w/o queues) is the min delay
  // observed during the call.
  for (TimeSeriesPoint& point : time_series.points)
    point.y -= estimated_base_delay_ms;
  // Add the data set to the plot.
  plot->AppendTimeSeries(std::move(time_series));

  plot->SetXAxis(0, call_duration_s_, "Time (s)", kLeftMargin, kRightMargin);
  plot->SetSuggestedYAxis(0, 10, "Delay (ms)", kBottomMargin, kTopMargin);
  plot->SetTitle("Network Delay Change.");
}
1267
1268 std::vector<std::pair<int64_t, int64_t>> EventLogAnalyzer::GetFrameTimestamps()
1269 const {
1270 std::vector<std::pair<int64_t, int64_t>> timestamps;
1271 size_t largest_stream_size = 0;
1272 const std::vector<LoggedRtpPacket>* largest_video_stream = nullptr;
1273 // Find the incoming video stream with the most number of packets that is
1274 // not rtx.
1275 for (const auto& kv : rtp_packets_) {
1276 if (kv.first.GetDirection() == kIncomingPacket &&
1277 video_ssrcs_.find(kv.first) != video_ssrcs_.end() &&
1278 rtx_ssrcs_.find(kv.first) == rtx_ssrcs_.end() &&
1279 kv.second.size() > largest_stream_size) {
1280 largest_stream_size = kv.second.size();
1281 largest_video_stream = &kv.second;
1282 }
1283 }
1284 if (largest_video_stream == nullptr) {
1285 for (auto& packet : *largest_video_stream) {
1286 if (packet.header.markerBit) {
1287 int64_t capture_ms = packet.header.timestamp / 90.0;
1288 int64_t arrival_ms = packet.timestamp / 1000.0;
1289 timestamps.push_back(std::make_pair(capture_ms, arrival_ms));
1290 }
1291 }
1292 }
1293 return timestamps;
1294 }
1295
1296 void EventLogAnalyzer::CreateTimestampGraph(Plot* plot) {
1297 for (const auto& kv : rtp_packets_) {
1298 const std::vector<LoggedRtpPacket>& rtp_packets = kv.second;
1299 StreamId stream_id = kv.first;
1300
1301 {
1302 TimeSeries timestamp_data(GetStreamName(stream_id) + " capture-time",
1303 LINE_DOT_GRAPH);
1304 for (LoggedRtpPacket packet : rtp_packets) {
1305 float x = static_cast<float>(packet.timestamp - begin_time_) / 1000000;
1306 float y = packet.header.timestamp;
1307 timestamp_data.points.emplace_back(x, y);
1308 }
1309 plot->AppendTimeSeries(std::move(timestamp_data));
1310 }
1311
1312 {
1313 auto kv = rtcp_packets_.find(stream_id);
1314 if (kv != rtcp_packets_.end()) {
1315 const auto& packets = kv->second;
1316 TimeSeries timestamp_data(
1317 GetStreamName(stream_id) + " rtcp capture-time", LINE_DOT_GRAPH);
1318 for (const LoggedRtcpPacket& rtcp : packets) {
1319 if (rtcp.type != kRtcpSr)
1320 continue;
1321 rtcp::SenderReport* sr;
1322 sr = static_cast<rtcp::SenderReport*>(rtcp.packet.get());
1323 float x = static_cast<float>(rtcp.timestamp - begin_time_) / 1000000;
1324 float y = sr->rtp_timestamp();
1325 timestamp_data.points.emplace_back(x, y);
1326 }
1327 plot->AppendTimeSeries(std::move(timestamp_data));
1328 }
1329 }
1330 }
1331
1332 plot->SetXAxis(0, call_duration_s_, "Time (s)", kLeftMargin, kRightMargin);
1333 plot->SetSuggestedYAxis(0, 1, "Timestamp (90khz)", kBottomMargin, kTopMargin);
1334 plot->SetTitle("Timestamps");
1335 }
1336
1337 void EventLogAnalyzer::CreateAudioEncoderTargetBitrateGraph(Plot* plot) {
1338 TimeSeries time_series("Audio encoder target bitrate", LINE_DOT_GRAPH);
1339 ProcessPoints<AudioNetworkAdaptationEvent>(
1340 [](const AudioNetworkAdaptationEvent& ana_event) -> rtc::Optional<float> {
1341 if (ana_event.config.bitrate_bps)
1342 return rtc::Optional<float>(
1343 static_cast<float>(*ana_event.config.bitrate_bps));
1344 return rtc::Optional<float>();
1345 },
1346 audio_network_adaptation_events_, begin_time_, &time_series);
1347 plot->AppendTimeSeries(std::move(time_series));
1348 plot->SetXAxis(0, call_duration_s_, "Time (s)", kLeftMargin, kRightMargin);
1349 plot->SetSuggestedYAxis(0, 1, "Bitrate (bps)", kBottomMargin, kTopMargin);
1350 plot->SetTitle("Reported audio encoder target bitrate");
1351 }
1352
1353 void EventLogAnalyzer::CreateAudioEncoderFrameLengthGraph(Plot* plot) {
1354 TimeSeries time_series("Audio encoder frame length", LINE_DOT_GRAPH);
1355 ProcessPoints<AudioNetworkAdaptationEvent>(
1356 [](const AudioNetworkAdaptationEvent& ana_event) {
1357 if (ana_event.config.frame_length_ms)
1358 return rtc::Optional<float>(
1359 static_cast<float>(*ana_event.config.frame_length_ms));
1360 return rtc::Optional<float>();
1361 },
1362 audio_network_adaptation_events_, begin_time_, &time_series);
1363 plot->AppendTimeSeries(std::move(time_series));
1364 plot->SetXAxis(0, call_duration_s_, "Time (s)", kLeftMargin, kRightMargin);
1365 plot->SetSuggestedYAxis(0, 1, "Frame length (ms)", kBottomMargin, kTopMargin);
1366 plot->SetTitle("Reported audio encoder frame length");
1367 }
1368
1369 void EventLogAnalyzer::CreateAudioEncoderUplinkPacketLossFractionGraph(
1370 Plot* plot) {
1371 TimeSeries time_series("Audio encoder uplink packet loss fraction",
1372 LINE_DOT_GRAPH);
1373 ProcessPoints<AudioNetworkAdaptationEvent>(
1374 [](const AudioNetworkAdaptationEvent& ana_event) {
1375 if (ana_event.config.uplink_packet_loss_fraction)
1376 return rtc::Optional<float>(static_cast<float>(
1377 *ana_event.config.uplink_packet_loss_fraction));
1378 return rtc::Optional<float>();
1379 },
1380 audio_network_adaptation_events_, begin_time_, &time_series);
1381 plot->AppendTimeSeries(std::move(time_series));
1382 plot->SetXAxis(0, call_duration_s_, "Time (s)", kLeftMargin, kRightMargin);
1383 plot->SetSuggestedYAxis(0, 10, "Percent lost packets", kBottomMargin,
1384 kTopMargin);
1385 plot->SetTitle("Reported audio encoder lost packets");
1386 }
1387
1388 void EventLogAnalyzer::CreateAudioEncoderEnableFecGraph(Plot* plot) {
1389 TimeSeries time_series("Audio encoder FEC", LINE_DOT_GRAPH);
1390 ProcessPoints<AudioNetworkAdaptationEvent>(
1391 [](const AudioNetworkAdaptationEvent& ana_event) {
1392 if (ana_event.config.enable_fec)
1393 return rtc::Optional<float>(
1394 static_cast<float>(*ana_event.config.enable_fec));
1395 return rtc::Optional<float>();
1396 },
1397 audio_network_adaptation_events_, begin_time_, &time_series);
1398 plot->AppendTimeSeries(std::move(time_series));
1399 plot->SetXAxis(0, call_duration_s_, "Time (s)", kLeftMargin, kRightMargin);
1400 plot->SetSuggestedYAxis(0, 1, "FEC (false/true)", kBottomMargin, kTopMargin);
1401 plot->SetTitle("Reported audio encoder FEC");
1402 }
1403
1404 void EventLogAnalyzer::CreateAudioEncoderEnableDtxGraph(Plot* plot) {
1405 TimeSeries time_series("Audio encoder DTX", LINE_DOT_GRAPH);
1406 ProcessPoints<AudioNetworkAdaptationEvent>(
1407 [](const AudioNetworkAdaptationEvent& ana_event) {
1408 if (ana_event.config.enable_dtx)
1409 return rtc::Optional<float>(
1410 static_cast<float>(*ana_event.config.enable_dtx));
1411 return rtc::Optional<float>();
1412 },
1413 audio_network_adaptation_events_, begin_time_, &time_series);
1414 plot->AppendTimeSeries(std::move(time_series));
1415 plot->SetXAxis(0, call_duration_s_, "Time (s)", kLeftMargin, kRightMargin);
1416 plot->SetSuggestedYAxis(0, 1, "DTX (false/true)", kBottomMargin, kTopMargin);
1417 plot->SetTitle("Reported audio encoder DTX");
1418 }
1419
1420 void EventLogAnalyzer::CreateAudioEncoderNumChannelsGraph(Plot* plot) {
1421 TimeSeries time_series("Audio encoder number of channels", LINE_DOT_GRAPH);
1422 ProcessPoints<AudioNetworkAdaptationEvent>(
1423 [](const AudioNetworkAdaptationEvent& ana_event) {
1424 if (ana_event.config.num_channels)
1425 return rtc::Optional<float>(
1426 static_cast<float>(*ana_event.config.num_channels));
1427 return rtc::Optional<float>();
1428 },
1429 audio_network_adaptation_events_, begin_time_, &time_series);
1430 plot->AppendTimeSeries(std::move(time_series));
1431 plot->SetXAxis(0, call_duration_s_, "Time (s)", kLeftMargin, kRightMargin);
1432 plot->SetSuggestedYAxis(0, 1, "Number of channels (1 (mono)/2 (stereo))",
1433 kBottomMargin, kTopMargin);
1434 plot->SetTitle("Reported audio encoder number of channels");
1435 }
1436
1437 class NetEqStreamInput : public test::NetEqInput {
1438 public:
1439 // Does not take any ownership, and all pointers must refer to valid objects
1440 // that outlive the one constructed.
1441 NetEqStreamInput(const std::vector<LoggedRtpPacket>* packet_stream,
1442 const std::vector<uint64_t>* output_events_us,
1443 rtc::Optional<uint64_t> end_time_us)
1444 : packet_stream_(*packet_stream),
1445 packet_stream_it_(packet_stream_.begin()),
1446 output_events_us_it_(output_events_us->begin()),
1447 output_events_us_end_(output_events_us->end()),
1448 end_time_us_(end_time_us) {
1449 RTC_DCHECK(packet_stream);
1450 RTC_DCHECK(output_events_us);
1451 }
1452
1453 rtc::Optional<int64_t> NextPacketTime() const override {
1454 if (packet_stream_it_ == packet_stream_.end()) {
1455 return rtc::Optional<int64_t>();
1456 }
1457 if (end_time_us_ && packet_stream_it_->timestamp > *end_time_us_) {
1458 return rtc::Optional<int64_t>();
1459 }
1460 // Convert from us to ms.
1461 return rtc::Optional<int64_t>(packet_stream_it_->timestamp / 1000);
1462 }
1463
1464 rtc::Optional<int64_t> NextOutputEventTime() const override {
1465 if (output_events_us_it_ == output_events_us_end_) {
1466 return rtc::Optional<int64_t>();
1467 }
1468 if (end_time_us_ && *output_events_us_it_ > *end_time_us_) {
1469 return rtc::Optional<int64_t>();
1470 }
1471 // Convert from us to ms.
1472 return rtc::Optional<int64_t>(
1473 rtc::checked_cast<int64_t>(*output_events_us_it_ / 1000));
1474 }
1475
1476 std::unique_ptr<PacketData> PopPacket() override {
1477 if (packet_stream_it_ == packet_stream_.end()) {
1478 return std::unique_ptr<PacketData>();
1479 }
1480 std::unique_ptr<PacketData> packet_data(new PacketData());
1481 packet_data->header = packet_stream_it_->header;
1482 // Convert from us to ms.
1483 packet_data->time_ms = packet_stream_it_->timestamp / 1000.0;
1484
1485 // This is a header-only "dummy" packet. Set the payload to all zeros, with
1486 // length according to the virtual length.
1487 packet_data->payload.SetSize(packet_stream_it_->total_length);
1488 std::fill_n(packet_data->payload.data(), packet_data->payload.size(), 0);
1489
1490 ++packet_stream_it_;
1491 return packet_data;
1492 }
1493
1494 void AdvanceOutputEvent() override {
1495 if (output_events_us_it_ != output_events_us_end_) {
1496 ++output_events_us_it_;
1497 }
1498 }
1499
1500 bool ended() const override { return !NextEventTime(); }
1501
1502 rtc::Optional<RTPHeader> NextHeader() const override {
1503 if (packet_stream_it_ == packet_stream_.end()) {
1504 return rtc::Optional<RTPHeader>();
1505 }
1506 return rtc::Optional<RTPHeader>(packet_stream_it_->header);
1507 }
1508
1509 private:
1510 const std::vector<LoggedRtpPacket>& packet_stream_;
1511 std::vector<LoggedRtpPacket>::const_iterator packet_stream_it_;
1512 std::vector<uint64_t>::const_iterator output_events_us_it_;
1513 const std::vector<uint64_t>::const_iterator output_events_us_end_;
1514 const rtc::Optional<uint64_t> end_time_us_;
1515 };
1516
namespace {
// Creates a NetEq test object and all necessary input and output helpers. Runs
// the test and returns the NetEqDelayAnalyzer object that was used to
// instrument the test.
std::unique_ptr<test::NetEqDelayAnalyzer> CreateNetEqTestAndRun(
    const std::vector<LoggedRtpPacket>* packet_stream,
    const std::vector<uint64_t>* output_events_us,
    rtc::Optional<uint64_t> end_time_us,
    const std::string& replacement_file_name,
    int file_sample_rate_hz) {
  std::unique_ptr<test::NetEqInput> input(
      new NetEqStreamInput(packet_stream, output_events_us, end_time_us));

  // All logged payloads are rewritten to this single payload type, which is
  // mapped to the file-reading "replacement decoder" registered below.
  constexpr int kReplacementPt = 127;
  std::set<uint8_t> cn_types;
  std::set<uint8_t> forbidden_types;
  input.reset(new test::NetEqReplacementInput(std::move(input), kReplacementPt,
                                              cn_types, forbidden_types));

  NetEq::Config config;
  config.max_packets_in_buffer = 200;
  config.enable_fast_accelerate = true;

  // The decoded audio is discarded; only the delay instrumentation matters.
  std::unique_ptr<test::VoidAudioSink> output(new test::VoidAudioSink());

  test::NetEqTest::DecoderMap codecs;

  // Create a "replacement decoder" that produces the decoded audio by reading
  // from a file rather than from the encoded payloads.
  std::unique_ptr<test::ResampleInputAudioFile> replacement_file(
      new test::ResampleInputAudioFile(replacement_file_name,
                                       file_sample_rate_hz));
  replacement_file->set_output_rate_hz(48000);
  std::unique_ptr<AudioDecoder> replacement_decoder(
      new test::FakeDecodeFromFile(std::move(replacement_file), 48000, false));
  test::NetEqTest::ExtDecoderMap ext_codecs;
  ext_codecs[kReplacementPt] = {replacement_decoder.get(),
                                NetEqDecoder::kDecoderArbitrary,
                                "replacement codec"};

  // The delay analyzer observes both packet insertions and audio pulls; it
  // is returned to the caller for plotting.
  std::unique_ptr<test::NetEqDelayAnalyzer> delay_cb(
      new test::NetEqDelayAnalyzer);
  test::DefaultNetEqTestErrorCallback error_cb;
  test::NetEqTest::Callbacks callbacks;
  callbacks.error_callback = &error_cb;
  callbacks.post_insert_packet = delay_cb.get();
  callbacks.get_audio_callback = delay_cb.get();

  test::NetEqTest test(config, codecs, ext_codecs, std::move(input),
                       std::move(output), callbacks);
  test.Run();
  return delay_cb;
}
}  // namespace
1571
1572 // Plots the jitter buffer delay profile. This will plot only for the first
1573 // incoming audio SSRC. If the stream contains more than one incoming audio
1574 // SSRC, all but the first will be ignored.
1575 void EventLogAnalyzer::CreateAudioJitterBufferGraph(
1576 const std::string& replacement_file_name,
1577 int file_sample_rate_hz,
1578 Plot* plot) {
1579 const auto& incoming_audio_kv = std::find_if(
1580 rtp_packets_.begin(), rtp_packets_.end(),
1581 [this](std::pair<StreamId, std::vector<LoggedRtpPacket>> kv) {
1582 return kv.first.GetDirection() == kIncomingPacket &&
1583 this->IsAudioSsrc(kv.first);
1584 });
1585 if (incoming_audio_kv == rtp_packets_.end()) {
1586 // No incoming audio stream found.
1587 return;
1588 }
1589
1590 const uint32_t ssrc = incoming_audio_kv->first.GetSsrc();
1591
1592 std::map<uint32_t, std::vector<uint64_t>>::const_iterator output_events_it =
1593 audio_playout_events_.find(ssrc);
1594 if (output_events_it == audio_playout_events_.end()) {
1595 // Could not find output events with SSRC matching the input audio stream.
1596 // Using the first available stream of output events.
1597 output_events_it = audio_playout_events_.cbegin();
1598 }
1599
1600 rtc::Optional<uint64_t> end_time_us =
1601 log_segments_.empty()
1602 ? rtc::Optional<uint64_t>()
1603 : rtc::Optional<uint64_t>(log_segments_.front().second);
1604
1605 auto delay_cb = CreateNetEqTestAndRun(
1606 &incoming_audio_kv->second, &output_events_it->second, end_time_us,
1607 replacement_file_name, file_sample_rate_hz);
1608
1609 std::vector<float> send_times_s;
1610 std::vector<float> arrival_delay_ms;
1611 std::vector<float> corrected_arrival_delay_ms;
1612 std::vector<rtc::Optional<float>> playout_delay_ms;
1613 std::vector<rtc::Optional<float>> target_delay_ms;
1614 delay_cb->CreateGraphs(&send_times_s, &arrival_delay_ms,
1615 &corrected_arrival_delay_ms, &playout_delay_ms,
1616 &target_delay_ms);
1617 RTC_DCHECK_EQ(send_times_s.size(), arrival_delay_ms.size());
1618 RTC_DCHECK_EQ(send_times_s.size(), corrected_arrival_delay_ms.size());
1619 RTC_DCHECK_EQ(send_times_s.size(), playout_delay_ms.size());
1620 RTC_DCHECK_EQ(send_times_s.size(), target_delay_ms.size());
1621
1622 std::map<StreamId, TimeSeries> time_series_packet_arrival;
1623 std::map<StreamId, TimeSeries> time_series_relative_packet_arrival;
1624 std::map<StreamId, TimeSeries> time_series_play_time;
1625 std::map<StreamId, TimeSeries> time_series_target_time;
1626 float min_y_axis = 0.f;
1627 float max_y_axis = 0.f;
1628 const StreamId stream_id = incoming_audio_kv->first;
1629 for (size_t i = 0; i < send_times_s.size(); ++i) {
1630 time_series_packet_arrival[stream_id].points.emplace_back(
1631 TimeSeriesPoint(send_times_s[i], arrival_delay_ms[i]));
1632 time_series_relative_packet_arrival[stream_id].points.emplace_back(
1633 TimeSeriesPoint(send_times_s[i], corrected_arrival_delay_ms[i]));
1634 min_y_axis = std::min(min_y_axis, corrected_arrival_delay_ms[i]);
1635 max_y_axis = std::max(max_y_axis, corrected_arrival_delay_ms[i]);
1636 if (playout_delay_ms[i]) {
1637 time_series_play_time[stream_id].points.emplace_back(
1638 TimeSeriesPoint(send_times_s[i], *playout_delay_ms[i]));
1639 min_y_axis = std::min(min_y_axis, *playout_delay_ms[i]);
1640 max_y_axis = std::max(max_y_axis, *playout_delay_ms[i]);
1641 }
1642 if (target_delay_ms[i]) {
1643 time_series_target_time[stream_id].points.emplace_back(
1644 TimeSeriesPoint(send_times_s[i], *target_delay_ms[i]));
1645 min_y_axis = std::min(min_y_axis, *target_delay_ms[i]);
1646 max_y_axis = std::max(max_y_axis, *target_delay_ms[i]);
1647 }
1648 }
1649
1650 // This code is adapted for a single stream. The creation of the streams above
1651 // guarantee that no more than one steam is included. If multiple streams are
1652 // to be plotted, they should likely be given distinct labels below.
1653 RTC_DCHECK_EQ(time_series_relative_packet_arrival.size(), 1);
1654 for (auto& series : time_series_relative_packet_arrival) {
1655 series.second.label = "Relative packet arrival delay";
1656 series.second.style = LINE_GRAPH;
1657 plot->AppendTimeSeries(std::move(series.second));
1658 }
1659 RTC_DCHECK_EQ(time_series_play_time.size(), 1);
1660 for (auto& series : time_series_play_time) {
1661 series.second.label = "Playout delay";
1662 series.second.style = LINE_GRAPH;
1663 plot->AppendTimeSeries(std::move(series.second));
1664 }
1665 RTC_DCHECK_EQ(time_series_target_time.size(), 1);
1666 for (auto& series : time_series_target_time) {
1667 series.second.label = "Target delay";
1668 series.second.style = LINE_DOT_GRAPH;
1669 plot->AppendTimeSeries(std::move(series.second));
1670 }
1671
1672 plot->SetXAxis(0, call_duration_s_, "Time (s)", kLeftMargin, kRightMargin);
1673 plot->SetYAxis(min_y_axis, max_y_axis, "Relative delay (ms)", kBottomMargin,
1674 kTopMargin);
1675 plot->SetTitle("NetEq timing");
1676 }
1677 } // namespace plotting
1678 } // namespace webrtc
OLDNEW
« no previous file with comments | « webrtc/tools/event_log_visualizer/analyzer.h ('k') | webrtc/tools/event_log_visualizer/chart.proto » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698