Chromium Code Reviews

Unified diff: tracing/tracing/metrics/media_metric.html

Issue 3020433002: Finish migrating media metrics to TBMv2 (Closed)
Patch Set: Remove an unnecessary if (created 3 years, 2 months ago)
 <!DOCTYPE html>
 <!--
 Copyright 2017 The Chromium Authors. All rights reserved.
 Use of this source code is governed by a BSD-style license that can be
 found in the LICENSE file.
 -->
 
 <!--
 media_metrics uses Chrome trace events to calculate metrics about video
 and audio playback. It is meant to be used for pages with a <video> or
 <audio> element. It is used by videostack-eng@google.com team for
 regression testing.
 
 This metric currently supports the following measurement:
 * time_to_video_play calculates how long after a video is requested to
   start playing before the video actually starts. If time_to_video_play
   regresses, then users will click to play videos and then have
   to wait longer before the videos start actually playing.
 * time_to_audio_play is similar to time_to_video_play, but measures the
   time delay before audio starts playing.
-
-More measurements are expected to be added in the near future, such as:
-* buffering_time
-* seek_time
-* dropped_frame_count
+* buffering_time calculates the difference between the actual play time of
+  media vs its expected play time. Ideally the two should be the same.
+  If actual play time is significantly longer than expected play time,
+  it indicates that there were stalls during the play for buffering or
+  some other reasons.
+* dropped_frame_count reports the number of video frames that were dropped.
+  Ideally this should be 0. If a large number of frames are dropped, the
+  video play will not be smooth.
+* seek_time calculates how long after a user requests a seek operation
+  before the seek completes and the media starts playing at the new
+  location.
 
 Please inform crouleau@chromium.org and johnchen@chromium.org about
 changes to this file.
 -->
 
 <link rel="import" href="/tracing/metrics/metric_registry.html">
 <link rel="import" href="/tracing/model/helpers/chrome_model_helper.html">
 <link rel="import" href="/tracing/value/histogram.html">
 
 <script>
 'use strict';
 
 tr.exportTo('tr.metrics', function() {
   function mediaMetric(histograms, model) {
-    let playStart;
-    let timeToAudioPlay;
-    let timeToVideoPlay;
-
     const chromeHelper = model.getOrCreateHelper(
         tr.model.helpers.ChromeModelHelper);
     if (chromeHelper === undefined) return;
 
     for (const rendererHelper of Object.values(chromeHelper.rendererHelpers)) {
       // Find the threads we're interested in, and if a needed thread
       // is missing, no need to look further in this process.
       const mainThread = rendererHelper.mainThread;
       if (mainThread === undefined) continue;
 
       const compositorThread = rendererHelper.compositorThread;
       const audioThread =
           rendererHelper.process.findAtMostOneThreadNamed('AudioOutputDevice');
       if (compositorThread === undefined && audioThread === undefined) continue;
 
-      // Look for the media player DoLoad event on main thread.
-      for (const event of mainThread.getDescendantEvents()) {
-        if (event.title === 'WebMediaPlayerImpl::DoLoad') {
-          // TODO(johnchen@chromium.org): Support multiple audio/video
-          // elements per page. Currently, we only support a single
-          // audio or video element, so we can store the start time in
-          // a simple variable, and exit the loop.
-          if (playStart !== undefined) {
-            throw new Error(
-                'Loading multiple audio/video elements not yet supported');
-          }
-          playStart = event.start;
-          break;
-        }
-      }
+      const playStart = getPlayStart(mainThread);
       if (playStart === undefined) continue;
 
-      // Look for video render event.
-      if (compositorThread !== undefined) {
-        for (const event of compositorThread.getDescendantEvents()) {
-          if (event.title === 'VideoRendererImpl::Render') {
-            timeToVideoPlay = event.start - playStart;
-            break;
-          }
-        }
-      }
-
-      // Look for audio render event.
-      if (audioThread !== undefined) {
-        for (const event of audioThread.getDescendantEvents()) {
-          if (event.title === 'AudioRendererImpl::Render') {
-            timeToAudioPlay = event.start - playStart;
-            break;
-          }
-        }
-      }
-      if (timeToVideoPlay !== undefined) break;
-      if (timeToAudioPlay !== undefined) break;
-    }
-
-    if (timeToVideoPlay !== undefined) {
-      histograms.createHistogram('time_to_video_play',
-          tr.b.Unit.byName.timeDurationInMs_smallerIsBetter, timeToVideoPlay);
-    }
-    if (timeToAudioPlay !== undefined) {
-      histograms.createHistogram('time_to_audio_play',
-          tr.b.Unit.byName.timeDurationInMs_smallerIsBetter, timeToAudioPlay);
+      const timeToVideoPlay = compositorThread === undefined ? undefined :
+          getTimeToVideoPlay(compositorThread, playStart);
+      const timeToAudioPlay = audioThread === undefined ? undefined :
+          getTimeToAudioPlay(audioThread, playStart);
+
+      if (timeToVideoPlay === undefined && timeToAudioPlay === undefined) {
+        continue;
+      }
+
+      const droppedFrameCount = timeToVideoPlay === undefined ? undefined :
+          getDroppedFrameCount(compositorThread);
+      const seekTimes = timeToVideoPlay === undefined ? new Map() :
+          getSeekTimes(mainThread);
+      const bufferingTime = seekTimes.size !== 0 ? undefined :
+          getBufferingTime(mainThread, playStart, timeToVideoPlay,
+              timeToAudioPlay);
+
+      if (timeToVideoPlay !== undefined) {
+        histograms.createHistogram('time_to_video_play',
+            tr.b.Unit.byName.timeDurationInMs_smallerIsBetter,
+            timeToVideoPlay);
+      }
+      if (timeToAudioPlay !== undefined) {
+        histograms.createHistogram('time_to_audio_play',
+            tr.b.Unit.byName.timeDurationInMs_smallerIsBetter,
+            timeToAudioPlay);
+      }
+      if (droppedFrameCount !== undefined) {
+        histograms.createHistogram('dropped_frame_count',
+            tr.b.Unit.byName.count_smallerIsBetter, droppedFrameCount);
+      }
+      for (const [key, value] of seekTimes.entries()) {
+        histograms.createHistogram('seek_time_' + key,
+            tr.b.Unit.byName.timeDurationInMs_smallerIsBetter, value);
+      }
+      if (bufferingTime !== undefined) {
+        histograms.createHistogram('buffering_time',
+            tr.b.Unit.byName.timeDurationInMs_smallerIsBetter, bufferingTime);
+      }
     }
   }
 
+  function getPlayStart(mainThread) {
+    let playStart;
+    for (const event of mainThread.sliceGroup.getDescendantEvents()) {
+      if (event.title === 'WebMediaPlayerImpl::DoLoad') {
+        // TODO(johnchen@chromium.org): Support multiple audio/video
+        // elements per page. Currently, we only support a single
+        // audio or video element, so we can store the start time in
+        // a simple variable.
+        if (playStart !== undefined) {
+          throw new Error(
+              'Loading multiple audio/video elements not yet supported');
+        }
+        playStart = event.start;
+      }
+    }
+    return playStart;
+  }
+
+  function getTimeToVideoPlay(compositorThread, playStart) {
+    for (const event of compositorThread.sliceGroup.getDescendantEvents()) {
+      if (event.title === 'VideoRendererImpl::Render') {
+        return event.start - playStart;
+      }
+    }
+    return undefined;
+  }
+
+  function getTimeToAudioPlay(audioThread, playStart) {
+    for (const event of audioThread.sliceGroup.getDescendantEvents()) {
+      if (event.title === 'AudioRendererImpl::Render') {
+        return event.start - playStart;
+      }
+    }
+    return undefined;
+  }
+
+  function getSeekTimes(mainThread) {
+    // We support multiple seeks per page, as long as they seek to different
+    // target time. Thus the following two variables are maps instead of simple
+    // variables. The key of the maps is event.args.target, which is a numerical
+    // value indicating the target location of the seek, in unit of seconds.
+    // For example, with a seek to 5 seconds mark, event.args.target === 5.
+    const seekStartTimes = new Map();
+    const seekTimes = new Map();
+    for (const event of mainThread.sliceGroup.getDescendantEvents()) {
+      if (event.title === 'WebMediaPlayerImpl::DoSeek') {
+        seekStartTimes.set(event.args.target, event.start);
+      } else if (event.title === 'WebMediaPlayerImpl::OnPipelineSeeked') {
+        const startTime = seekStartTimes.get(event.args.target);
+        if (startTime !== undefined) {
+          seekTimes.set(event.args.target, event.start - startTime);
+          seekStartTimes.delete(event.args.target);
+        }
+      }
+    }
+    return seekTimes;
+  }
+
+  function getBufferingTime(mainThread, playStart, timeToVideoPlay,
+      timeToAudioPlay) {
+    let playEnd;
+    let duration;
+    for (const event of mainThread.sliceGroup.getDescendantEvents()) {
+      if (event.title === 'WebMediaPlayerImpl::OnEnded') {
+        // TODO(johnchen@chromium.org): Support multiple audio/video
+        // elements per page. Currently, we only support a single
+        // audio or video element, so we can store the end time in
+        // a simple variable.
+        if (playEnd !== undefined) {
+          throw new Error(
+              'Multiple media ended events not yet supported');
+        }
+        playEnd = event.start;
+        duration = 1000 * event.args.duration; // seconds to milliseconds
+      }
+    }
+    if (playEnd === undefined) return undefined;
+    let bufferingTime = playEnd - playStart - duration;
+    if (timeToVideoPlay !== undefined) {
+      bufferingTime -= timeToVideoPlay;
+    } else {
+      bufferingTime -= timeToAudioPlay;
+    }
+    return bufferingTime;
+  }
+
+  function getDroppedFrameCount(compositorThread) {
+    let droppedFrameCount = 0;
+    for (const event of compositorThread.sliceGroup.getDescendantEvents()) {
+      if (event.title === 'VideoFramesDropped') {
+        droppedFrameCount += event.args.count;
+      }
+    }
+    return droppedFrameCount;
+  }
+
   tr.metrics.MetricRegistry.register(mediaMetric);
 
   return {
     mediaMetric,
   };
 });
 </script>
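
The buffering_time measurement described in the file comment reduces to a simple subtraction inside getBufferingTime. The sketch below restates that arithmetic with made-up numbers; the helper function and the values are illustrative only, not part of the patch.

// Illustrative sketch (not part of the patch) of the arithmetic performed by
// getBufferingTime. Trace timestamps are in milliseconds; the OnEnded event
// carries the media duration in seconds, hence the 1000x conversion.
function bufferingTimeSketch(playStartMs, playEndMs, durationSeconds,
    timeToPlayMs) {
  const expectedPlayTimeMs = 1000 * durationSeconds;  // seconds -> milliseconds
  const actualPlayTimeMs = playEndMs - playStartMs - timeToPlayMs;
  return actualPlayTimeMs - expectedPlayTimeMs;  // time lost to stalls
}

// Example: DoLoad at t=2000ms, OnEnded at t=13500ms for a 10s clip whose
// first frame rendered 700ms after the load request:
// (13500 - 2000 - 700) - 10000 = 800ms spent buffering.
bufferingTimeSketch(2000, 13500, 10, 700);  // => 800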
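
The seek_time histograms are keyed by the seek target: getSeekTimes pairs each WebMediaPlayerImpl::DoSeek with the matching WebMediaPlayerImpl::OnPipelineSeeked via event.args.target (the target position in seconds), and the metric then emits one histogram per target. A small sketch with invented values:

// Invented values: a seek to the 5s mark that completed in 230ms and a seek
// to the 30s mark that completed in 410ms, as getSeekTimes would return them.
const seekTimes = new Map([[5, 230], [30, 410]]);
for (const [target, durationMs] of seekTimes.entries()) {
  // The metric produces histograms named 'seek_time_5' and 'seek_time_30'.
  console.log('seek_time_' + target, durationMs);
}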
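
Finally, a rough sketch of how a TBMv2 metric like this is typically exercised against a synthetic trace model. The real coverage lives in media_metric_test.html (linked above); the test utilities and thread names used here (tr.c.TestUtils, CrRendererMain, Compositor) follow the usual catapult conventions, but treat the details as assumptions rather than a copy of that test.

<link rel="import" href="/tracing/core/test_utils.html">
<link rel="import" href="/tracing/metrics/media_metric.html">
<link rel="import" href="/tracing/value/histogram_set.html">

<script>
'use strict';

// Hypothetical test sketch; see media_metric_test.html for the real tests.
tr.b.unittest.testSuite(function() {
  test('mediaMetric_timeToVideoPlay', function() {
    // Build a model with one renderer process: a DoLoad slice on the main
    // thread and the first Render slice on the compositor thread 200ms later.
    const model = tr.c.TestUtils.newModel(function(model) {
      const rendererProcess = model.getOrCreateProcess(1);
      const mainThread = rendererProcess.getOrCreateThread(2);
      mainThread.name = 'CrRendererMain';
      mainThread.sliceGroup.pushSlice(tr.c.TestUtils.newSliceEx({
        title: 'WebMediaPlayerImpl::DoLoad', start: 100, duration: 2}));
      const compositorThread = rendererProcess.getOrCreateThread(3);
      compositorThread.name = 'Compositor';
      compositorThread.sliceGroup.pushSlice(tr.c.TestUtils.newSliceEx({
        title: 'VideoRendererImpl::Render', start: 300, duration: 2}));
    });

    const histograms = new tr.v.HistogramSet();
    tr.metrics.mediaMetric(histograms, model);

    // time_to_video_play = 300 - 100 = 200 ms.
    const hist = histograms.getHistogramNamed('time_to_video_play');
    assert.strictEqual(hist.average, 200);
  });
});
</script>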
