Add units to metrics definitions
Switch from having separate METRICS and METRIC_DESCRIPTIONS objects in a perf module to a single METRICS object, so that the perf module can define the units for each metric. Besides improving the output in the web interface, the units give a hint about how to pick one value out of multiple runs: on the assumption that "noise" on the system only increases run times, we want the smallest of several time values but the largest of several rate values.

https://bugzilla.gnome.org/show_bug.cgi?id=618189
parent 52a68eb24a
commit 20d579e7d8
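To make the selection rule in the commit message concrete, the sketch below applies it in Python; pick_best, TIME_UNITS, and the sample numbers are hypothetical and not part of this change:

# Rough sketch of the selection rule: assuming system noise only inflates
# measurements, keep the smallest of several time values and the largest
# of several rate values.
TIME_UNITS = {'s', 'ms', 'us'}

def pick_best(values, units):
    """Pick a representative value from repeated runs, based on the units."""
    if '/' in units and units.split('/')[-1].strip() == 's':
        # A rate such as 'frames / s': noise lowers it, so take the maximum.
        return max(values)
    if units in TIME_UNITS:
        # A plain time measurement: noise raises it, so take the minimum.
        return min(values)
    # Default: treat the metric like a time or size and take the minimum.
    return min(values)

print(pick_best([41000, 38500, 39200], 'us'))       # -> 38500
print(pick_best([57.0, 60.1, 59.3], 'frames / s'))  # -> 60.1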
@@ -8,16 +8,25 @@ const Scripting = imports.ui.scripting;
 // someone should be able to get an idea of how well the shell is performing
 // on a particular system.
 
-let METRIC_DESCRIPTIONS = {
-    overviewLatencyFirst: "Time to first frame after triggering overview, first time",
-    overviewFramesFirst: "Frames displayed when going to overview, first time",
-    overviewLatencySubsequent: "Time to first frame after triggering overview, second time",
-    overviewFramesSubsequent: "Frames displayed when going to overview, second time",
-    usedAfterOverview: "Malloc'ed bytes after the overview is shown once",
-    leakedAfterOverview: "Additional malloc'ed bytes the second time the overview is shown"
-};
-
 let METRICS = {
+    overviewLatencyFirst:
+    { description: "Time to first frame after triggering overview, first time",
+      units: "us" },
+    overviewFramesFirst:
+    { description: "Frames displayed when going to overview, first time",
+      units: "frames" },
+    overviewLatencySubsequent:
+    { description: "Time to first frame after triggering overview, second time",
+      units: "us"},
+    overviewFramesSubsequent:
+    { description: "Frames displayed when going to overview, second time",
+      units: "frames" },
+    usedAfterOverview:
+    { description: "Malloc'ed bytes after the overview is shown once",
+      units: "B" },
+    leakedAfterOverview:
+    { description: "Additional malloc'ed bytes the second time the overview is shown",
+      units: "B" }
 };
 
 function run() {
@@ -63,19 +72,19 @@ function script_overviewShowDone(time) {
     overviewShowCount++;
 
     if (overviewShowCount == 1) {
-        METRICS.overviewLatencyFirst = overviewLatency;
-        METRICS.overviewFramesFirst = overviewFrames;
+        METRICS.overviewLatencyFirst.value = overviewLatency;
+        METRICS.overviewFramesFirst.value = overviewFrames;
     } else {
-        METRICS.overviewLatencySubsequent = overviewLatency;
-        METRICS.overviewFramesSubsequent = overviewFrames;
+        METRICS.overviewLatencySubsequent.value = overviewLatency;
+        METRICS.overviewFramesSubsequent.value = overviewFrames;
     }
 }
 
 function script_afterShowHide(time) {
     if (overviewShowCount == 1) {
-        METRICS.usedAfterOverview = mallocUsedSize;
+        METRICS.usedAfterOverview.value = mallocUsedSize;
     } else {
-        METRICS.leakedAfterOverview = mallocUsedSize - METRICS.usedAfterOverview;
+        METRICS.leakedAfterOverview.value = mallocUsedSize - METRICS.usedAfterOverview.value;
     }
 }
 
@@ -151,8 +151,7 @@ function _collect(scriptModule, outputFile) {
     Shell.write_string_to_stream(out, ',\n"metrics":\n[ ');
     let first = true;
     for (let name in scriptModule.METRICS) {
-        let value = scriptModule.METRICS[name];
-        let description = scriptModule.METRIC_DESCRIPTIONS[name];
+        let metric = scriptModule.METRICS[name];
 
         if (!first)
            Shell.write_string_to_stream(out, ',\n  ');
@@ -160,8 +159,9 @@ function _collect(scriptModule, outputFile) {
 
        Shell.write_string_to_stream(out,
            '{ "name": ' + JSON.stringify(name) + ',\n' +
-            '  "description": ' + JSON.stringify(description) + ',\n' +
-            '  "value": ' + JSON.stringify(value) + ' }');
+            '  "description": ' + JSON.stringify(metric.description) + ',\n' +
+            '  "units": ' + JSON.stringify(metric.units) + ',\n' +
+            '  "value": ' + JSON.stringify(metric.value) + ' }');
     }
     Shell.write_string_to_stream(out, ' ]');
 
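With this change, each entry in the emitted "metrics" array would look roughly like the following (the value shown is made up for illustration):

{ "name": "overviewLatencyFirst",
  "description": "Time to first frame after triggering overview, first time",
  "units": "us",
  "value": 38500 }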
@@ -208,9 +208,18 @@ function _collect(scriptModule, outputFile) {
  * be called.
  *
  * The event handler and finish functions are expected to fill in
- * metrics to an object within the module called METRICS. The module
- * should also have an object called METRIC_DESCRIPTIONS with
- * descriptions for each metric that will be written into METRIC.
+ * metrics to an object within the module called METRICS. Each
+ * property of this object represents an individual metric. The
+ * name of the property is the name of the metric, the value
+ * of the property is an object with the following properties:
+ *
+ *  description: human readable description of the metric
+ *  units: a string representing the units of the metric. It has
+ *    the form '<unit> <unit> ... / <unit> / <unit> ...'. Certain
+ *    unit values are recognized: s, ms, us, B, KiB, MiB. Other
+ *    values can appear but are uninterpreted. Examples: 's',
+ *    '/ s', 'frames', 'frames / s', 'MiB / s / frame'
+ *  value: computed value of the metric
  *
  * The resulting metrics will be written to @outputFile as JSON, or,
  * if @outputFile is not provided, logged.
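The units grammar documented in the comment above could be split roughly as follows; parse_units and KNOWN_UNITS are a hypothetical sketch, not code from the shell:

# Hypothetical helper illustrating the documented units grammar; units
# outside the recognized set (e.g. 'frame') are passed through uninterpreted.
KNOWN_UNITS = {'s', 'ms', 'us', 'B', 'KiB', 'MiB'}

def parse_units(units):
    """Split a units string into numerator units, divisor units, and any
    units that fall outside the recognized set."""
    parts = [p.strip() for p in units.split('/')]
    numerator = parts[0].split()
    divisors = parts[1:]
    unknown = [u for u in numerator + divisors if u and u not in KNOWN_UNITS]
    return numerator, divisors, unknown

print(parse_units('MiB / s / frame'))  # -> (['MiB'], ['s', 'frame'], ['frame'])
print(parse_units('/ s'))              # -> ([], ['s'], [])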
@@ -325,6 +325,7 @@ def run_performance_test():
         if not name in metric_summaries:
             summary = {}
             summary['description'] = metric['description']
+            summary['units'] = metric['units']
             summary['values'] = []
             metric_summaries[name] = summary
         else: