Add a facility for automated performance measurement

We want to be able to summarize the behavior of the shell's
performance in a series of "metrics", like the latency between
clicking on the Activities button and seeing a response.

This patch adds the ability to create a script under perf/
in a special format that automates a series of actions in the
shell, writing events to the performance log, then collects
statistics as the log is replayed and turns them into a set
of metrics.

The script is then executed by running gnome-shell
--perf=<script>.
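
For example, to run the 'core' script added below and log its
metrics:

    gnome-shell --perf=core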

The 'core' script which is added here will be the primary
performance measurement script that we use for shell performance
regression testing; right now it has a couple of placeholder
metrics.

https://bugzilla.gnome.org/show_bug.cgi?id=618189
Author: Owen W. Taylor
Date:   2010-05-09 13:42:35 -04:00
commit 98c2247c1b
parent a97b8c1947

4 changed files with 283 additions and 0 deletions

js/perf/core.js (new file)

@@ -0,0 +1,67 @@
/* -*- mode: js2; js2-basic-offset: 4; indent-tabs-mode: nil -*- */
const Main = imports.ui.main;
const Scripting = imports.ui.scripting;
// This performance script measures the most important (core) performance
// metrics for the shell. By looking at the output metrics of this script
// someone should be able to get an idea of how well the shell is performing
// on a particular system.
let METRIC_DESCRIPTIONS = {
overviewLatencyFirst: "Time to first frame after triggering overview, first time",
overviewFramesFirst: "Frames displayed when going to overview, first time",
overviewLatencySubsequent: "Time to first frame after triggering overview, second time",
overviewFramesSubsequent: "Frames displayed when going to overview, second time"
};
let METRICS = {
};
function run() {
Scripting.defineScriptEvent("overviewShowStart", "Starting to show the overview");
Scripting.defineScriptEvent("overviewShowDone", "Overview finished showing");
yield Scripting.sleep(1000);
yield Scripting.waitLeisure();
for (let i = 0; i < 2; i++) {
Scripting.scriptEvent('overviewShowStart');
Main.overview.show();
yield Scripting.waitLeisure();
Scripting.scriptEvent('overviewShowDone');
Main.overview.hide();
yield Scripting.waitLeisure();
}
}
let showingOverview = false;
let overviewShowStart;
let overviewFrames;
let overviewLatency;
function script_overviewShowStart(time) {
showingOverview = true;
overviewShowStart = time;
overviewFrames = 0;
}
function script_overviewShowDone(time) {
showingOverview = false;
if (!('overviewLatencyFirst' in METRICS)) {
METRICS.overviewLatencyFirst = overviewLatency;
METRICS.overviewFramesFirst = overviewFrames;
} else {
METRICS.overviewLatencySubsequent = overviewLatency;
METRICS.overviewFramesSubsequent = overviewFrames;
}
}
function clutter_stagePaintDone(time) {
if (showingOverview) {
if (overviewFrames == 0)
overviewLatency = time - overviewShowStart;
overviewFrames++;
}
}
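
To illustrate the handler-naming convention above (a handler called
foo_bar receives replayed 'foo.bar' events), here is a hedged sketch of
how a hide-latency metric could be added to this script. The event
names and the metric key are hypothetical, not part of this commit:

// Hypothetical additions: run() would define and emit
// script.overviewHideStart / script.overviewHideDone around
// Main.overview.hide(), and METRIC_DESCRIPTIONS would gain an
// 'overviewHideLatency' entry.
let overviewHideStart;

function script_overviewHideStart(time) {
    overviewHideStart = time;
}

function script_overviewHideDone(time) {
    // Handlers fill in METRICS; runPerfScript() reports them afterwards.
    METRICS.overviewHideLatency = time - overviewHideStart;
}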

js/ui/main.js

@@ -29,6 +29,7 @@ const RunDialog = imports.ui.runDialog;
const LookingGlass = imports.ui.lookingGlass;
const NotificationDaemon = imports.ui.notificationDaemon;
const WindowAttentionHandler = imports.ui.windowAttentionHandler;
const Scripting = imports.ui.scripting;
const ShellDBus = imports.ui.shellDBus;
const TelepathyClient = imports.ui.telepathyClient;
const WindowManager = imports.ui.windowManager;
@@ -180,6 +181,13 @@ function start() {
log('GNOME Shell started at ' + _startDate);
Mainloop.idle_add(_removeUnusedWorkspaces);
let perfModuleName = GLib.getenv("SHELL_PERF_MODULE");
if (perfModuleName) {
let perfOutput = GLib.getenv("SHELL_PERF_OUTPUT");
let module = eval('imports.perf.' + perfModuleName + ';');
Scripting.runPerfScript(module, perfOutput);
}
}
/**
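
Note that main.js reads both settings from the environment. The
launcher change below sets SHELL_PERF_MODULE from --perf, but nothing
sets SHELL_PERF_OUTPUT, so to get JSON output instead of logged metrics
you would export it yourself, e.g. (with a hypothetical output path):

    SHELL_PERF_OUTPUT=/tmp/metrics.json gnome-shell --perf=core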

js/ui/scripting.js (new file)

@@ -0,0 +1,203 @@
/* -*- mode: js2; js2-basic-offset: 4; indent-tabs-mode: nil -*- */
const GLib = imports.gi.GLib;
const Mainloop = imports.mainloop;
const Meta = imports.gi.Meta;
const Shell = imports.gi.Shell;
// This module provides functionality for driving the shell user interface
// in an automated fashion. The primary current use case for this is
// automated performance testing (see runPerfScript()), but it could
// be applied to other forms of automation, such as testing for
// correctness as well.
//
// When scripting an automated test we want to make a series of calls
// in a linear fashion, but we also want to be able to let the main
// loop run so actions can finish. For this reason we write the script
// as a generator function that yields when it wants to let the main
// loop run.
//
// yield Scripting.sleep(1000);
// main.overview.show();
// yield Scripting.waitLeisure();
//
// While it isn't important to the person writing the script, the actual
// yielded result is a function that the caller uses to provide the
// callback for resuming the script.
/**
* sleep:
* @milliseconds: number of milliseconds to wait
*
* Used within an automation script to pause the execution of the
* current script for the specified amount of time. Use as
* 'yield Scripting.sleep(500);'
*/
function sleep(milliseconds) {
let cb;
Mainloop.timeout_add(milliseconds, function() {
if (cb)
cb();
return false;
});
return function(callback) {
cb = callback;
};
}
/**
* waitLeisure:
*
* Used within an automation script to pause the execution of the
* current script until the shell is completely idle. Use as
* 'yield Scripting.waitLeisure();'
*/
function waitLeisure() {
let cb;
global.run_at_leisure(function() {
if (cb)
cb();
});
return function(callback) {
cb = callback;
};
}
/**
* defineScriptEvent
* @name: The event will be called script.<name>
* @description: Short human-readable description of the event
*
* Convenience function to define a zero-argument performance event
* within the 'script' namespace that is reserved for events defined locally
* within a performance automation script
*/
function defineScriptEvent(name, description) {
Shell.PerfLog.get_default().define_event("script." + name,
description,
"");
}
/**
* scriptEvent
* @name: Name registered with defineScriptEvent()
*
* Convenience function to record a script-local performance event
* previously defined with defineScriptEvent
*/
function scriptEvent(name) {
Shell.PerfLog.get_default().event("script." + name);
}
function _step(g, finish, onError) {
try {
let waitFunction = g.next();
waitFunction(function() {
_step(g, finish, onError);
});
} catch (err if err instanceof StopIteration) {
if (finish)
finish();
} catch (err) {
if (onError)
onError(err);
}
}
function _collect(scriptModule, outputFile) {
let eventHandlers = {};
for (let f in scriptModule) {
let m = /([A-Za-z]+)_([A-Za-z]+)/.exec(f);
if (m)
eventHandlers[m[1] + "." + m[2]] = scriptModule[f];
}
Shell.PerfLog.get_default().replay(
function(time, eventName, signature, arg) {
if (eventName in eventHandlers)
eventHandlers[eventName](time, arg);
});
if ('finish' in scriptModule)
scriptModule.finish();
if (outputFile) {
let result = {};
for (let metric in scriptModule.METRICS) {
result[metric] = {
value: scriptModule.METRICS[metric],
description: scriptModule.METRIC_DESCRIPTIONS[metric]
};
}
let contents = JSON.stringify(result);
GLib.file_set_contents(outputFile, contents, contents.length);
} else {
let metrics = [];
for (let metric in scriptModule.METRICS)
metrics.push(metric);
metrics.sort();
print ('------------------------------------------------------------');
for (let i = 0; i < metrics.length; i++) {
let metric = metrics[i];
print ('# ' + scriptModule.METRIC_DESCRIPTIONS[metric]);
print (metric + ': ' + scriptModule.METRICS[metric]);
}
print ('------------------------------------------------------------');
}
}
/**
* runPerfScript
* @scriptModule: module object with run and finish functions
* and event handlers
* @outputFile: optional filename to write the collected metrics to
*
* Runs a script for automated collection of performance data. The
* script is defined as a Javascript module with specified contents.
*
* First the run() function within the module will be called as a
* generator to automate a series of actions. These actions will
* trigger performance events and the script can also record its
* own performance events.
*
* Then the recorded event log is replayed using handler functions
* within the module. The handler for the event 'foo.bar' is called
* foo_bar().
*
* Finally if the module has a function called finish(), that will
* be called.
*
* The event handler and finish functions are expected to fill in
* metrics to an object within the module called METRICS. The module
* should also have an object called METRIC_DESCRIPTIONS with
* descriptions for each metric that will be written into METRICS.
*
* The resulting metrics will be written to @outputFile as JSON, or,
* if @outputFile is not provided, logged.
*
* After running the script and collecting statistics from the
* event log, GNOME Shell will exit.
**/
function runPerfScript(scriptModule, outputFile) {
Shell.PerfLog.get_default().set_enabled(true);
let g = scriptModule.run();
_step(g,
function() {
_collect(scriptModule, outputFile);
Meta.exit(Meta.ExitCode.SUCCESS);
},
function(err) {
log("Script failed: " + err + "\n" + err.stack);
Meta.exit(Meta.ExitCode.ERROR);
});
}
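
The yield contract documented at the top of this file is open-ended:
anything that returns a "store the callback, invoke it later" function
can serve as a wait primitive. As a hedged sketch (not part of this
commit), a script could define its own primitive in the same shape, for
example one that resumes when a GObject signal is emitted; the helper
name and the signal in the usage comment are hypothetical:

// Hypothetical wait primitive following the same contract as sleep()
// and waitLeisure(): hand back a function that records the resume
// callback, then invoke that callback when the awaited signal fires.
function waitSignal(object, signalName) {
    let cb;
    let id = object.connect(signalName, function() {
        object.disconnect(id);
        if (cb)
            cb();
    });
    return function(callback) {
        cb = callback;
    };
}

// Used from a script exactly like the built-ins:
//     yield waitSignal(someActor, 'destroy');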

src/gnome-shell.in

@@ -197,6 +197,9 @@ def start_shell():
    # need to force indirect rendering for NVIDIA.
    env['LIBGL_ALWAYS_INDIRECT'] = '1'

    if options.perf is not None:
        env['SHELL_PERF_MODULE'] = options.perf

    if options.debug:
        debug_command = options.debug_command.split()
        args = list(debug_command)
@@ -256,6 +259,8 @@ parser.add_option("", "--debug-command", metavar="COMMAND",
                  help="Command to use for debugging (defaults to 'gdb --args')")
parser.add_option("-v", "--verbose", action="store_true")
parser.add_option("", "--sync", action="store_true")
parser.add_option("", "--perf", metavar="PERF_MODULE",
                  help="Specify the name of a performance module to run")
parser.add_option("", "--xephyr", action="store_true",
                  help="Run a debugging instance inside Xephyr")
parser.add_option("", "--geometry", metavar="GEOMETRY",