Add units to metrics definitions

Switch from having separate METRICS and METRIC_DESCRIPTIONS objects
in a perf module to a single METRICS object, so that the perf module
can define the units for each metric.

In addition to improving the output in the web interface, the units
give a hint about how to choose among values from different runs.
In particular, on the assumption that "noise" on the system will only
increase run times, for time values we want to pick the smallest
value, while for "rate" values we want to pick the largest.
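A minimal sketch of that selection rule, in Python like the test
runner changed below (the helper name and the set of time units are
assumptions for illustration, not part of this commit):

    # Assume "noise" only inflates times: take the minimum for plain
    # time units and the maximum for rates (unit strings containing '/').
    TIME_UNITS = {'s', 'ms', 'us'}

    def pick_representative(units, values):
        if '/' in units:           # a rate such as 'frames / s'
            return max(values)
        if units in TIME_UNITS:    # a plain time such as 'us'
            return min(values)
        return values[0]           # other units are uninterpreted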

https://bugzilla.gnome.org/show_bug.cgi?id=618189
Owen W. Taylor 2010-05-17 14:04:09 -04:00
parent 52a68eb24a
commit 20d579e7d8
3 changed files with 41 additions and 22 deletions


@@ -8,16 +8,25 @@ const Scripting = imports.ui.scripting;
 // someone should be able to get an idea of how well the shell is performing
 // on a particular system.
-let METRIC_DESCRIPTIONS = {
-    overviewLatencyFirst: "Time to first frame after triggering overview, first time",
-    overviewFramesFirst: "Frames displayed when going to overview, first time",
-    overviewLatencySubsequent: "Time to first frame after triggering overview, second time",
-    overviewFramesSubsequent: "Frames displayed when going to overview, second time",
-    usedAfterOverview: "Malloc'ed bytes after the overview is shown once",
-    leakedAfterOverview: "Additional malloc'ed bytes the second time the overview is shown"
-};
+let METRICS = {
+    overviewLatencyFirst:
+    { description: "Time to first frame after triggering overview, first time",
+      units: "us" },
+    overviewFramesFirst:
+    { description: "Frames displayed when going to overview, first time",
+      units: "frames" },
+    overviewLatencySubsequent:
+    { description: "Time to first frame after triggering overview, second time",
+      units: "us" },
+    overviewFramesSubsequent:
+    { description: "Frames displayed when going to overview, second time",
+      units: "frames" },
+    usedAfterOverview:
+    { description: "Malloc'ed bytes after the overview is shown once",
+      units: "B" },
+    leakedAfterOverview:
+    { description: "Additional malloc'ed bytes the second time the overview is shown",
+      units: "B" }
+};
 
 function run() {
function run() {
@@ -63,19 +72,19 @@ function script_overviewShowDone(time) {
     overviewShowCount++;
     if (overviewShowCount == 1) {
-        METRICS.overviewLatencyFirst = overviewLatency;
-        METRICS.overviewFramesFirst = overviewFrames;
+        METRICS.overviewLatencyFirst.value = overviewLatency;
+        METRICS.overviewFramesFirst.value = overviewFrames;
     } else {
-        METRICS.overviewLatencySubsequent = overviewLatency;
-        METRICS.overviewFramesSubsequent = overviewFrames;
+        METRICS.overviewLatencySubsequent.value = overviewLatency;
+        METRICS.overviewFramesSubsequent.value = overviewFrames;
     }
 }
 
 function script_afterShowHide(time) {
     if (overviewShowCount == 1) {
-        METRICS.usedAfterOverview = mallocUsedSize;
+        METRICS.usedAfterOverview.value = mallocUsedSize;
     } else {
-        METRICS.leakedAfterOverview = mallocUsedSize - METRICS.usedAfterOverview;
+        METRICS.leakedAfterOverview.value = mallocUsedSize - METRICS.usedAfterOverview.value;
     }
 }


@@ -151,8 +151,7 @@ function _collect(scriptModule, outputFile) {
     Shell.write_string_to_stream(out, ',\n"metrics":\n[ ');
     let first = true;
     for (let name in scriptModule.METRICS) {
-        let value = scriptModule.METRICS[name];
-        let description = scriptModule.METRIC_DESCRIPTIONS[name];
+        let metric = scriptModule.METRICS[name];
 
         if (!first)
             Shell.write_string_to_stream(out, ',\n  ');
@@ -160,8 +159,9 @@ function _collect(scriptModule, outputFile) {
 
         Shell.write_string_to_stream(out,
                                      '{ "name": ' + JSON.stringify(name) + ',\n' +
-                                     '  "description": ' + JSON.stringify(description) + ',\n' +
-                                     '  "value": ' + JSON.stringify(value) + ' }');
+                                     '  "description": ' + JSON.stringify(metric.description) + ',\n' +
+                                     '  "units": ' + JSON.stringify(metric.units) + ',\n' +
+                                     '  "value": ' + JSON.stringify(metric.value) + ' }');
     }
 
     Shell.write_string_to_stream(out, ' ]');
@@ -208,9 +208,18 @@ function _collect(scriptModule, outputFile) {
  * be called.
  *
  * The event handler and finish functions are expected to fill in
- * metrics to an object within the module called METRICS. The module
- * should also have an object called METRIC_DESCRIPTIONS with
- * descriptions for each metric that will be written into METRIC.
+ * metrics to an object within the module called METRICS. Each
+ * property of this object represents an individual metric. The
+ * name of the property is the name of the metric, and the value
+ * of the property is an object with the following properties:
+ *
+ *  description: human-readable description of the metric
+ *  units: a string representing the units of the metric. It has
+ *    the form '<unit> <unit> ... / <unit> / <unit> ...'. Certain
+ *    unit values are recognized: s, ms, us, B, KiB, MiB. Other
+ *    values can appear but are uninterpreted. Examples: 's',
+ *    '/ s', 'frames', 'frames / s', 'MiB / s / frame'
+ *  value: computed value of the metric
  *
  * The resulting metrics will be written to @outputFile as JSON, or,
  * if @outputFile is not provided, logged.
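With this change, each entry that _collect() writes into the "metrics"
array of the JSON output looks roughly as follows; the numeric value is
illustrative, not taken from a real run:

    { "name": "overviewLatencyFirst",
      "description": "Time to first frame after triggering overview, first time",
      "units": "us",
      "value": 215034 }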


@@ -325,6 +325,7 @@ def run_performance_test():
         if not name in metric_summaries:
             summary = {}
             summary['description'] = metric['description']
+            summary['units'] = metric['units']
             summary['values'] = []
             metric_summaries[name] = summary
         else:
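After a couple of runs, an entry in metric_summaries would look
something like this (the numbers are illustrative); the stored units
are what let a later step choose the smallest or largest of the
collected values, as sketched under the commit message above:

    metric_summaries['overviewLatencyFirst'] = {
        'description': 'Time to first frame after triggering overview, first time',
        'units': 'us',
        'values': [215034, 198512],
    }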