e37a3fa7e6
Currently, running the perf tool results in no window manager running afterwards, which makes it hard for the user to get the results from a terminal and generally does not make it easy to run the tool to gather numbers. So restore the shell after the test has completed. https://bugzilla.gnome.org/show_bug.cgi?id=724870
335 lines
10 KiB
Plaintext
335 lines
10 KiB
Plaintext
#!@PYTHON@
|
|
# -*- mode: Python; indent-tabs-mode: nil; -*-
|
|
|
|
import datetime
|
|
from gi.repository import GLib, GObject, Gio
|
|
try:
|
|
import json
|
|
except ImportError:
|
|
import simplejson as json
|
|
import optparse
|
|
import os
|
|
import re
|
|
import subprocess
|
|
import sys
|
|
import tempfile
|
|
import base64
|
|
from ConfigParser import RawConfigParser
|
|
import hashlib
|
|
import hmac
|
|
import httplib
|
|
import urlparse
|
|
import urllib
|
|
|
|
def show_version(option, opt_str, value, parser):
|
|
print "GNOME Shell Performance Test @VERSION@"
|
|
sys.exit()
|
|
|
|
def wait_for_dbus_name(wait_name):
|
|
loop = GLib.MainLoop()
|
|
|
|
def on_name_appeared(connection, name, new_owner, *args):
|
|
if not (name == wait_name and new_owner != ''):
|
|
return
|
|
loop.quit()
|
|
return
|
|
|
|
watch_id = Gio.bus_watch_name(Gio.BusType.SESSION,
|
|
wait_name,
|
|
Gio.BusNameWatcherFlags.NONE,
|
|
on_name_appeared,
|
|
None)
|
|
|
|
def on_timeout():
|
|
print "\nFailed to start %s: timed out" % (wait_name,)
|
|
sys.exit(1)
|
|
GLib.timeout_add_seconds(7, on_timeout)
|
|
|
|
loop.run()
|
|
Gio.bus_unwatch_name(watch_id)
|
|
|
|
# Well-known D-Bus identity of gnome-shell-perf-helper, the small helper
# program that puts test windows on screen for the performance scripts.
PERF_HELPER_NAME = "org.gnome.Shell.PerfHelper"
PERF_HELPER_IFACE = "org.gnome.Shell.PerfHelper"
PERF_HELPER_PATH = "/org/gnome/Shell/PerfHelper"
|
def start_perf_helper():
    """Launch gnome-shell-perf-helper and wait until it is ready.

    The helper is spawned asynchronously; we then block until its
    well-known name appears on the session bus (exiting on timeout),
    so callers can talk to it immediately afterwards.
    """
    # The helper's location is substituted in at build time; the previous
    # computation of this script's own directory was unused.
    perf_helper_path = "@libexecdir@/gnome-shell-perf-helper"

    subprocess.Popen([perf_helper_path])
    wait_for_dbus_name(PERF_HELPER_NAME)
|
def stop_perf_helper():
    # Ask the running perf helper to shut itself down by calling the
    # Exit() method on its D-Bus interface.
    session_bus = Gio.bus_get_sync(Gio.BusType.SESSION, None)

    helper = Gio.DBusProxy.new_sync(session_bus,
                                    Gio.DBusProxyFlags.NONE,
                                    None,
                                    PERF_HELPER_NAME,
                                    PERF_HELPER_PATH,
                                    PERF_HELPER_IFACE,
                                    None)
    helper.Exit()
|
def start_shell(perf_output=None):
    # Launch the gnome-shell binary that sits next to this script, with
    # the environment variables that switch it into performance-test mode.
    env = dict(os.environ)
    env['SHELL_PERF_MODULE'] = options.perf
    # Restrict window management to the perf helper's windows so stray
    # application windows don't perturb the measurements.
    env['MUTTER_WM_CLASS_FILTER'] = 'Gnome-shell-perf-helper'

    if perf_output is not None:
        env['SHELL_PERF_OUTPUT'] = perf_output

    self_dir = os.path.dirname(os.path.abspath(sys.argv[0]))
    args = [os.path.join(self_dir, 'gnome-shell')]
    if options.replace:
        args.append('--replace')

    return subprocess.Popen(args, env=env)
|
def run_shell(perf_output=None):
    # Start the shell and block until it exits.  We deliberately do no
    # supervision beyond wait(): in particular the shell is not killed on
    # KeyboardInterrupt, since we expect to share its process group and
    # the signal therefore reaches it as well.
    process = start_shell(perf_output=perf_output)
    process.wait()
    return process.returncode == 0
|
def restore_shell():
    # After a test run the perf-mode shell has exited, leaving the user
    # without a window manager.  Fork: the child re-execs a regular
    # gnome-shell --replace (with the perf window-class filter lifted),
    # while the parent exits so the terminal gets its prompt back.
    if os.fork() == 0:
        os.environ.pop("MUTTER_WM_CLASS_FILTER", None)
        os.execlp("gnome-shell", "gnome-shell", "--replace")
    else:
        sys.exit(0)
|
def upload_performance_report(report_text):
    """Upload a JSON performance report to the configured server.

    Reads [upload] url/name/key from ~/.config/gnome-shell/perf.ini
    (honoring XDG_CONFIG_HOME) and POSTs report_text, authenticated with
    an HMAC-SHA1 signature over the normalized upload URL and the body.
    Exits the process on configuration or URL errors; prints the server's
    response status when done.
    """
    # Resolve the config directory, falling back to ~/.config per XDG.
    try:
        config_home = os.environ['XDG_CONFIG_HOME']
    except KeyError:
        config_home = None

    if not config_home:
        config_home = os.path.expanduser("~/.config")

    config_file = os.path.join(config_home, "gnome-shell/perf.ini")

    try:
        config = RawConfigParser()
        f = open(config_file)
        config.readfp(f)
        f.close()

        base_url = config.get('upload', 'url')
        system_name = config.get('upload', 'name')
        secret_key = config.get('upload', 'key')
    except Exception, e:
        # Covers both a missing/unreadable file and missing keys.
        print "Can't read upload configuration from %s: %s" % (config_file, str(e))
        sys.exit(1)

    # Determine host, port and upload URL from provided data, we're
    # a bit extra-careful about normalization since the URL is part
    # of the signature.

    split = urlparse.urlsplit(base_url)
    scheme = split[0].lower()
    netloc = split[1]
    base_path = split[2]

    # Split an optional ":port" suffix off the network location.
    m = re.match(r'^(.*?)(?::(\d+))?$', netloc)
    if m.group(2):
        host, port = m.group(1), int(m.group(2))
    else:
        host, port = m.group(1), None

    if scheme != "http":
        print "'%s' is not a HTTP URL" % base_url
        sys.exit(1)

    if port is None:
        port = 80

    # Strip a trailing slash so equivalent base URLs normalize the same.
    if base_path.endswith('/'):
        base_path = base_path[:-1]

    # Omit the default HTTP port so equivalent URLs sign identically.
    if port == 80:
        normalized_base = "%s://%s%s" % (scheme, host, base_path)
    else:
        normalized_base = "%s://%s:%d%s" % (scheme, host, port, base_path)

    upload_url = normalized_base + '/system/%s/upload' % system_name
    upload_path = urlparse.urlsplit(upload_url)[2] # path portion

    # Create signature based on upload URL and the report data.  The
    # signed string is the method, the normalized URL and a trailing
    # "&&" (presumably an empty-parameter placeholder the server also
    # uses when verifying), followed by the report body itself.

    signature_data = 'POST&' + upload_url + "&&"
    h = hmac.new(secret_key, digestmod=hashlib.sha1)
    h.update(signature_data)
    h.update(report_text)
    # Percent-encode the base64 digest; '~' is additionally left bare.
    signature = urllib.quote(base64.b64encode(h.digest()), "~")

    headers = {
        'User-Agent': 'gnome-shell-performance-tool/@VERSION@',
        'Content-Type': 'application/json',
        'X-Shell-Signature': 'HMAC-SHA1 ' + signature
    };

    connection = httplib.HTTPConnection(host, port)
    connection.request('POST', upload_path, report_text, headers)
    response = connection.getresponse()

    if response.status == 200:
        print "Performance report upload succeeded"
    else:
        print "Performance report upload failed with status %d" % response.status
        print response.read()
|
def run_performance_test():
    """Run the selected performance module and summarize the results.

    Runs the shell options.perf_iters times (plus one discarded warmup
    run when --perf-warmup is set), collecting each run's JSON output.
    Depending on the flags the summary is written to a file, uploaded,
    or printed to stdout.  Returns True on success, False if a shell run
    exited abnormally.  Reads the module-level `options`.
    """
    iters = options.perf_iters
    if options.perf_warmup:
        # One extra iteration whose metrics are skipped below (i == 0).
        iters += 1

    logs = []
    metric_summaries = {}

    start_perf_helper()

    for i in xrange(0, iters):
        # We create an empty temporary file that the shell will overwrite
        # with the contents.
        handle, output_file = tempfile.mkstemp(".json", "gnome-shell-perf.")
        os.close(handle)

        # Run the performance test and collect the output as JSON
        normal_exit = False
        try:
            normal_exit = run_shell(perf_output=output_file)
        except:
            # Don't leave the helper running before re-raising.
            stop_perf_helper()
            raise
        finally:
            # On abnormal exit (or exception) the output is untrusted.
            if not normal_exit:
                os.remove(output_file)

        if not normal_exit:
            stop_perf_helper()
            return False

        try:
            f = open(output_file)
            output = json.load(f)
            f.close()
        except:
            stop_perf_helper()
            raise
        finally:
            # The per-run file has served its purpose either way.
            os.remove(output_file)

        # Grab the event definitions and monitor layout the first time around
        if i == 0:
            events = output['events']
            monitors = output['monitors']

        # Skip the warmup iteration's metrics and log entirely.
        if options.perf_warmup and i == 0:
            continue

        # Accumulate each metric's value across iterations.
        for metric in output['metrics']:
            name = metric['name']
            if not name in metric_summaries:
                summary = {}
                summary['description'] = metric['description']
                summary['units'] = metric['units']
                summary['values'] = []
                metric_summaries[name] = summary
            else:
                summary = metric_summaries[name]

            summary['values'].append(metric['value'])

        logs.append(output['log'])

    stop_perf_helper()

    if options.perf_output or options.perf_upload:
        # Write a complete report, formatted as JSON. The Javascript/C code that
        # generates the individual reports we are summarizing here is very careful
        # to format them nicely, but we just dump out a compressed no-whitespace
        # version here for simplicity. Using json.dump(indent=0) doesn't really
        # improve the readability of the output much.
        report = {
            'date': datetime.datetime.utcnow().isoformat() + 'Z',
            'events': events,
            'monitors': monitors,
            'metrics': metric_summaries,
            'logs': logs
        }

        # Add the Git revision if available
        self_dir = os.path.dirname(os.path.abspath(sys.argv[0]))
        if os.path.exists(os.path.join(self_dir, 'gnome-shell-jhbuild.in')):
            # The marker file means we're running uninstalled from a
            # source checkout, so the parent directory may be a git tree.
            top_dir = os.path.dirname(self_dir)
            git_dir = os.path.join(top_dir, '.git')
            if os.path.exists(git_dir):
                env = dict(os.environ)
                env['GIT_DIR'] = git_dir
                revision = subprocess.Popen(['git', 'rev-parse', 'HEAD'],
                                            env=env,
                                            stdout=subprocess.PIPE).communicate()[0].strip()
                report['revision'] = revision

        if options.perf_output:
            f = open(options.perf_output, 'w')
            json.dump(report, f)
            f.close()

        if options.perf_upload:
            upload_performance_report(json.dumps(report))
    else:
        # Write a human readable summary
        print '------------------------------------------------------------';
        for metric in sorted(metric_summaries.keys()):
            summary = metric_summaries[metric]
            print "#", summary['description']
            print metric, ", ".join((str(x) for x in summary['values']))
        print '------------------------------------------------------------';

    return True
|
# Main program

# Command-line handling; the resulting `options` object is a module-level
# global read by start_shell() and run_performance_test().
parser = optparse.OptionParser()
parser.add_option("", "--perf", metavar="PERF_MODULE",
                  help="Specify the name of a performance module to run",
                  default="core")
parser.add_option("", "--perf-iters", type="int", metavar="ITERS",
                  help="Numbers of iterations of performance module to run",
                  default=1)
parser.add_option("", "--perf-warmup", action="store_true",
                  help="Run a dry run before performance tests")
parser.add_option("", "--perf-output", metavar="OUTPUT_FILE",
                  help="Output file to write performance report")
parser.add_option("", "--perf-upload", action="store_true",
                  help="Upload performance report to server")
parser.add_option("", "--version", action="callback", callback=show_version,
                  help="Display version and exit")

parser.add_option("-r", "--replace", action="store_true",
                  help="Replace the running window manager")

options, args = parser.parse_args()

# This tool takes no positional arguments.
if args:
    parser.print_usage()
    sys.exit(1)

# On a clean run, hand the session back to a normal gnome-shell so the
# user isn't left without a window manager (bgo#724870).
normal_exit = run_performance_test()
if normal_exit:
    restore_shell()
else:
    sys.exit(1)