Chromium Code Reviews

Unified Diff: webrtc/tools/py_event_log_analyzer/rtp_analyzer.py

Issue 2812273002: Fix lint errors to enable stricter PyLint rules (Closed)
Patch Set: Rebased (created 3 years, 8 months ago)
 # Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
 #
 # Use of this source code is governed by a BSD-style license
 # that can be found in the LICENSE file in the root of the source
 # tree. An additional intellectual property rights grant can be found
 # in the file PATENTS. All contributing project authors may
 # be found in the AUTHORS file in the root of the source tree.

 """Displays statistics and plots graphs from RTC protobuf dump."""

(...skipping 24 matching lines...)

     Computes percentages of number of packets and packet sizes by
     SSRC.

     Args:
       data_points: list of pb_parse.DataPoints on which statistics are
         calculated.

     """

     self.data_points = data_points
-    self.ssrc_frequencies = misc.normalize_counter(
+    self.ssrc_frequencies = misc.NormalizeCounter(
         collections.Counter([pt.ssrc for pt in self.data_points]))
-    self.ssrc_size_table = misc.ssrc_normalized_size_table(self.data_points)
+    self.ssrc_size_table = misc.SsrcNormalizedSizeTable(self.data_points)
     self.bandwidth_kbps = None
     self.smooth_bw_kbps = None

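For context, misc.NormalizeCounter (renamed in this patch from misc.normalize_counter) and misc.SsrcNormalizedSizeTable live in the companion misc module; this file only updates the call sites. Below is a minimal sketch of the kind of normalization assumed here, dividing each count by the total so the values sum to 1; the function name in the sketch is hypothetical and not part of the patch.

    import collections

    def normalize_counter_sketch(counter):
      # Hypothetical stand-in for misc.NormalizeCounter: scale each count by
      # the total so the resulting fractions sum to 1.0.
      total = sum(counter.values())
      return {key: count / float(total) for key, count in counter.items()}

    # Three packets on SSRC 0xAB and one on 0xCD -> {0xAB: 0.75, 0xCD: 0.25}.
    print(normalize_counter_sketch(collections.Counter([0xAB, 0xAB, 0xAB, 0xCD])))
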
-  def print_header_statistics(self):
+  def PrintHeaderStatistics(self):
     print("{:>6}{:>14}{:>14}{:>6}{:>6}{:>3}{:>11}".format(
         "SeqNo", "TimeStamp", "SendTime", "Size", "PT", "M", "SSRC"))
     for point in self.data_points:
       print("{:>6}{:>14}{:>14}{:>6}{:>6}{:>3}{:>11}".format(
           point.sequence_number, point.timestamp,
           int(point.arrival_timestamp_ms), point.size, point.payload_type,
           point.marker_bit, "0x{:x}".format(point.ssrc)))

-  def print_ssrc_info(self, ssrc_id, ssrc):
+  def PrintSsrcInfo(self, ssrc_id, ssrc):
     """Prints packet and size statistics for a given SSRC.

     Args:
       ssrc_id: textual identifier of SSRC printed beside statistics for it.
       ssrc: SSRC by which to filter data and display statistics
     """
     filtered_ssrc = [point for point in self.data_points if point.ssrc
                      == ssrc]
-    payloads = misc.normalize_counter(
+    payloads = misc.NormalizeCounter(
         collections.Counter([point.payload_type for point in
                              filtered_ssrc]))

     payload_info = "payload type(s): {}".format(
         ", ".join(str(payload) for payload in payloads))
     print("{} 0x{:x} {}, {:.2f}% packets, {:.2f}% data".format(
         ssrc_id, ssrc, payload_info, self.ssrc_frequencies[ssrc] * 100,
         self.ssrc_size_table[ssrc] * 100))
     print(" packet sizes:")
     (bin_counts, bin_bounds) = numpy.histogram([point.size for point in
                                                 filtered_ssrc], bins=5,
                                                 density=False)
     bin_proportions = bin_counts / sum(bin_counts)
     print("\n".join([
         " {:.1f} - {:.1f}: {:.2f}%".format(bin_bounds[i], bin_bounds[i + 1],
                                            bin_proportions[i] * 100)
         for i in range(len(bin_proportions))
     ]))

-  def choose_ssrc(self):
+  def ChooseSsrc(self):
     """Queries user for SSRC."""

     if len(self.ssrc_frequencies) == 1:
       chosen_ssrc = self.ssrc_frequencies[0][-1]
-      self.print_ssrc_info("", chosen_ssrc)
+      self.PrintSsrcInfo("", chosen_ssrc)
       return chosen_ssrc

-    ssrc_is_incoming = misc.ssrc_directions(self.data_points)
+    ssrc_is_incoming = misc.SsrcDirections(self.data_points)
     incoming = [ssrc for ssrc in ssrc_is_incoming if ssrc_is_incoming[ssrc]]
     outgoing = [ssrc for ssrc in ssrc_is_incoming if not ssrc_is_incoming[ssrc]]

     print("\nIncoming:\n")
     for (i, ssrc) in enumerate(incoming):
-      self.print_ssrc_info(i, ssrc)
+      self.PrintSsrcInfo(i, ssrc)

     print("\nOutgoing:\n")
     for (i, ssrc) in enumerate(outgoing):
-      self.print_ssrc_info(i + len(incoming), ssrc)
+      self.PrintSsrcInfo(i + len(incoming), ssrc)

     while True:
       chosen_index = int(misc.get_input("choose one> "))
       if 0 <= chosen_index < len(self.ssrc_frequencies):
         return (incoming + outgoing)[chosen_index]
       else:
         print("Invalid index!")

-  def filter_ssrc(self, chosen_ssrc):
+  def FilterSsrc(self, chosen_ssrc):
     """Filters and wraps data points.

     Removes data points with `ssrc != chosen_ssrc`. Unwraps sequence
     numbers and timestamps for the chosen selection.
     """
     self.data_points = [point for point in self.data_points if
                         point.ssrc == chosen_ssrc]
-    unwrapped_sequence_numbers = misc.unwrap(
+    unwrapped_sequence_numbers = misc.Unwrap(
         [point.sequence_number for point in self.data_points], 2**16 - 1)
     for (data_point, sequence_number) in zip(self.data_points,
                                              unwrapped_sequence_numbers):
       data_point.sequence_number = sequence_number

-    unwrapped_timestamps = misc.unwrap([point.timestamp for point in
+    unwrapped_timestamps = misc.Unwrap([point.timestamp for point in
                                         self.data_points], 2**32 - 1)

     for (data_point, timestamp) in zip(self.data_points,
                                        unwrapped_timestamps):
       data_point.timestamp = timestamp

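FilterSsrc relies on misc.Unwrap (renamed from misc.unwrap) to turn sequence numbers that wrap at 2**16 and RTP timestamps that wrap at 2**32 into monotonically increasing values. The following is only a rough sketch of that idea, assuming shortest-path unwrapping; the real helper is defined in misc.py and may treat its modulus argument differently (the call sites above pass 2**16 - 1 and 2**32 - 1).

    def unwrap_sketch(values, mod):
      # Hypothetical stand-in for misc.Unwrap: whenever consecutive raw values
      # jump by more than half the ring, assume the counter wrapped and add or
      # subtract one full period.
      result = []
      offset = 0
      for prev, cur in zip([None] + values[:-1], values):
        if prev is not None:
          delta = cur - prev
          if delta < -mod // 2:
            offset += mod    # wrapped forwards past the maximum value
          elif delta > mod // 2:
            offset -= mod    # apparent backwards wrap (e.g. reordering)
        result.append(cur + offset)
      return result

    # 16-bit sequence numbers wrapping from 65534 to 0:
    print(unwrap_sketch([65533, 65534, 0, 1], 2**16))  # [65533, 65534, 65536, 65537]
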
-  def print_sequence_number_statistics(self):
+  def PrintSequenceNumberStatistics(self):
     seq_no_set = set(point.sequence_number for point in
                      self.data_points)
     missing_sequence_numbers = max(seq_no_set) - min(seq_no_set) + (
         1 - len(seq_no_set))
     print("Missing sequence numbers: {} out of {} ({:.2f}%)".format(
         missing_sequence_numbers,
         len(seq_no_set),
         100 * missing_sequence_numbers / len(seq_no_set)
     ))
     print("Duplicated packets: {}".format(len(self.data_points) -
                                           len(seq_no_set)))
     print("Reordered packets: {}".format(
-        misc.count_reordered([point.sequence_number for point in
+        misc.CountReordered([point.sequence_number for point in
                               self.data_points])))

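A small worked example of the bookkeeping above, with made-up sequence numbers: out of five received packets covering the range 5..9, one number is missing and one is duplicated.

    seq_nos = [5, 6, 6, 9, 8]   # hypothetical, already unwrapped
    seq_no_set = set(seq_nos)   # {5, 6, 8, 9}
    missing = max(seq_no_set) - min(seq_no_set) + (1 - len(seq_no_set))
    duplicated = len(seq_nos) - len(seq_no_set)
    print("{} missing, {} duplicated".format(missing, duplicated))
    # 1 missing, 1 duplicated (sequence number 7 was never seen)
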
-  def estimate_frequency(self, always_query_sample_rate):
+  def EstimateFrequency(self, always_query_sample_rate):
     """Estimates frequency and updates data.

     Guesses the most probable frequency by looking at changes in
     timestamps (RFC 3550 section 5.1), calculates clock drifts and
     sending time of packets. Updates `self.data_points` with changes
     in delay and send time.
     """
     delta_timestamp = (self.data_points[-1].timestamp -
                        self.data_points[0].timestamp)
     delta_arr_timestamp = float((self.data_points[-1].arrival_timestamp_ms -
(...skipping 12 matching lines...)
       print ("Frequency could not be guessed.", end=" ")
       freq = int(misc.get_input("Input frequency (in kHz)> "))
     else:
       print("Guessed frequency: {}kHz".format(freq))

     for point in self.data_points:
       point.real_send_time_ms = (point.timestamp -
                                  self.data_points[0].timestamp) / freq
       point.delay = point.arrival_timestamp_ms - point.real_send_time_ms

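The actual guessing logic sits in the lines elided above, but the core ratio is visible here: RTP timestamp ticks divided by elapsed arrival time in milliseconds gives ticks per millisecond, i.e. the clock rate in kHz, which the elided code presumably snaps to a standard RTP rate or, failing that, asks the user for. A worked example with made-up numbers:

    delta_timestamp = 480480        # RTP timestamp ticks between first and last packet
    delta_arr_timestamp = 10010.0   # elapsed arrival time in milliseconds
    print(delta_timestamp / delta_arr_timestamp)  # 48.0, i.e. roughly a 48 kHz clock
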
-  def print_duration_statistics(self):
+  def PrintDurationStatistics(self):
     """Prints delay, clock drift and bitrate statistics."""

     min_delay = min(point.delay for point in self.data_points)

     for point in self.data_points:
       point.absdelay = point.delay - min_delay

     stream_duration_sender = self.data_points[-1].real_send_time_ms / 1000
     print("Stream duration at sender: {:.1f} seconds".format(
         stream_duration_sender
(...skipping 11 matching lines...)
         100 * (stream_duration_receiver / stream_duration_sender - 1)
     ))

     total_size = sum(point.size for point in self.data_points) * 8 / 1000
     print("Send average bitrate: {:.2f} kbps".format(
         total_size / stream_duration_sender))

     print("Receive average bitrate: {:.2f} kbps".format(
         total_size / stream_duration_receiver))

-  def remove_reordered(self):
+  def RemoveReordered(self):
     last = self.data_points[0]
     data_points_ordered = [last]
     for point in self.data_points[1:]:
       if point.sequence_number > last.sequence_number and (
           point.real_send_time_ms > last.real_send_time_ms):
         data_points_ordered.append(point)
         last = point
     self.data_points = data_points_ordered

-  def compute_bandwidth(self):
+  def ComputeBandwidth(self):
     """Computes bandwidth averaged over several consecutive packets.

     The number of consecutive packets used in the average is
     BANDWIDTH_SMOOTHING_WINDOW_SIZE. Averaging is done with
     numpy.correlate.
     """
     start_ms = self.data_points[0].real_send_time_ms
     stop_ms = self.data_points[-1].real_send_time_ms
     (self.bandwidth_kbps, _) = numpy.histogram(
         [point.real_send_time_ms for point in self.data_points],
         bins=numpy.arange(start_ms, stop_ms,
                           RTPStatistics.PLOT_RESOLUTION_MS),
         weights=[point.size * 8 / RTPStatistics.PLOT_RESOLUTION_MS
                  for point in self.data_points]
     )
     correlate_filter = (numpy.ones(
         RTPStatistics.BANDWIDTH_SMOOTHING_WINDOW_SIZE) /
                         RTPStatistics.BANDWIDTH_SMOOTHING_WINDOW_SIZE)
     self.smooth_bw_kbps = numpy.correlate(self.bandwidth_kbps, correlate_filter)

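The smoothing step is a plain moving average: correlating the per-bin rates with a boxcar filter of ones(N)/N averages N consecutive bins, and numpy.correlate's default 'valid' mode returns len(input) - N + 1 samples, which is why PlotStatistics below truncates the time axis to len(self.smooth_bw_kbps). A small illustration with made-up numbers:

    import numpy

    rates = numpy.array([100.0, 300.0, 200.0, 400.0])  # kbps per plot bin
    window = numpy.ones(2) / 2                          # 2-bin boxcar filter
    print(numpy.correlate(rates, window))               # [200. 250. 300.]
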
-  def plot_statistics(self):
+  def PlotStatistics(self):
     """Plots changes in delay and average bandwidth."""

     start_ms = self.data_points[0].real_send_time_ms
     stop_ms = self.data_points[-1].real_send_time_ms
     time_axis = numpy.arange(start_ms / 1000, stop_ms / 1000,
                              RTPStatistics.PLOT_RESOLUTION_MS / 1000)

-    delay = calculate_delay(start_ms, stop_ms,
+    delay = CalculateDelay(start_ms, stop_ms,
                             RTPStatistics.PLOT_RESOLUTION_MS,
                             self.data_points)

     plt.figure(1)
     plt.plot(time_axis, delay[:len(time_axis)])
     plt.xlabel("Send time [s]")
     plt.ylabel("Relative transport delay [ms]")

     plt.figure(2)
     plt.plot(time_axis[:len(self.smooth_bw_kbps)], self.smooth_bw_kbps)
     plt.xlabel("Send time [s]")
     plt.ylabel("Bandwidth [kbps]")

     plt.show()


-def calculate_delay(start, stop, step, points):
+def CalculateDelay(start, stop, step, points):
   """Quantizes the time coordinates for the delay.

   Quantizes points by rounding the timestamps downwards to the nearest
   point in the time sequence start, start+step, start+2*step... Takes
   the average of the delays of points rounded to the same. Returns
   masked array, in which time points with no value are masked.

   """
   grouped_delays = [[] for _ in numpy.arange(start, stop + step, step)]
   rounded_value_index = lambda x: int((x - start) / step)
(...skipping 23 matching lines...)
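The remainder of CalculateDelay is elided above; going by the docstring, each point's delay is appended to the bucket selected by rounded_value_index and the buckets are then averaged, with empty buckets masked out. The following sketch covers that last step only, under that assumption; bucket_means_sketch is a hypothetical name and not part of the patch.

    import numpy

    def bucket_means_sketch(grouped_delays):
      # Average each bucket of delays; mark empty buckets as masked so they
      # leave gaps in the plot instead of plotting as zero.
      means = [numpy.mean(group) if group else numpy.nan
               for group in grouped_delays]
      return numpy.ma.masked_invalid(means)

    print(bucket_means_sketch([[5.0, 7.0], [], [4.0]]))  # [6.0 -- 4.0]
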

   if len(args) < 1:
     parser.print_help()
     sys.exit(0)

   input_file = args[0]

   if options.working_directory and not os.path.isabs(input_file):
     input_file = os.path.join(options.working_directory, input_file)

-  data_points = pb_parse.parse_protobuf(input_file)
+  data_points = pb_parse.ParseProtobuf(input_file)
   rtp_stats = RTPStatistics(data_points)

   if options.dump_header_to_stdout:
     print("Printing header info to stdout.", file=sys.stderr)
-    rtp_stats.print_header_statistics()
+    rtp_stats.PrintHeaderStatistics()
     sys.exit(0)

-  chosen_ssrc = rtp_stats.choose_ssrc()
+  chosen_ssrc = rtp_stats.ChooseSsrc()
   print("Chosen SSRC: 0X{:X}".format(chosen_ssrc))

-  rtp_stats.filter_ssrc(chosen_ssrc)
+  rtp_stats.FilterSsrc(chosen_ssrc)

   print("Statistics:")
-  rtp_stats.print_sequence_number_statistics()
-  rtp_stats.estimate_frequency(options.query_sample_rate)
-  rtp_stats.print_duration_statistics()
-  rtp_stats.remove_reordered()
-  rtp_stats.compute_bandwidth()
-  rtp_stats.plot_statistics()
+  rtp_stats.PrintSequenceNumberStatistics()
+  rtp_stats.EstimateFrequency(options.query_sample_rate)
+  rtp_stats.PrintDurationStatistics()
+  rtp_stats.RemoveReordered()
+  rtp_stats.ComputeBandwidth()
+  rtp_stats.PlotStatistics()

 if __name__ == "__main__":
   main()
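
The option parsing in main is elided above, but the visible code shows that the tool takes the path to an RTC event log protobuf dump as its first positional argument, optionally resolved against a working directory, and then walks through the statistics and plots shown in the diff. A typical invocation might look like this (the file path is hypothetical):

    python rtp_analyzer.py /path/to/rtc_event_log.pb

The script prompts for an SSRC when several streams are present and for a sample rate when the clock frequency cannot be guessed, and finally opens the matplotlib delay and bandwidth plots.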