[gnome-shell] jhbuild wrapper: move performance measurement to a separate tool

commit 47afd87e84cba746bac15b8af6c663f019e45ae0
Author: Giovanni Campagna <gcampagna src gnome org>
Date:   Thu Apr 12 19:43:08 2012 +0200

    jhbuild wrapper: move performance measurement to a separate tool
    
    Introduce a new gnome-shell-perf-tool, which can be used instead
    of the old gnome-shell-jhbuild wrapper to gather data about gnome-shell
    performance and submit it to shell-perf.gnome.org. This runs the
    shell with no extra setup beyond the WM_CLASS filter, so it can
    be used for a jhbuild setup or for an installed shell.
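    
    A sketch of the intended usage, based on the options this patch adds
    (the module name "core" and the output filename are only illustrative):
    
        gnome-shell-perf-tool --perf=core --perf-iters=3 --perf-warmup \
                              --perf-output=perf-report.json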

 .gitignore                   |    1 +
 src/Makefile.am              |    9 +-
 src/gnome-shell-jhbuild.in   |  260 +----------------------------------
 src/gnome-shell-perf-tool.in |  310 ++++++++++++++++++++++++++++++++++++++++++
 4 files changed, 324 insertions(+), 256 deletions(-)
---
diff --git a/.gitignore b/.gitignore
index f6dc70b..860fc42 100644
--- a/.gitignore
+++ b/.gitignore
@@ -68,6 +68,7 @@ src/gnome-shell-extension-prefs
 src/gnome-shell-hotplug-sniffer
 src/gnome-shell-jhbuild
 src/gnome-shell-perf-helper
+src/gnome-shell-perf-tool
 src/gnome-shell-real
 src/hotplug-sniffer/org.gnome.Shell.HotplugSniffer.service
 src/run-js-test
diff --git a/src/Makefile.am b/src/Makefile.am
index 95f1a41..ca058e5 100644
--- a/src/Makefile.am
+++ b/src/Makefile.am
@@ -27,8 +27,10 @@ CLEANFILES += $(service_DATA)
 
 CLEANFILES += $(gir_DATA) $(typelib_DATA)
 
-bin_SCRIPTS += gnome-shell-extension-tool gnome-shell-extension-prefs
-EXTRA_DIST += gnome-shell-extension-tool.in gnome-shell-extension-prefs.in
+bin_SCRIPTS += gnome-shell-extension-tool gnome-shell-extension-prefs \
+	       gnome-shell-perf-tool
+EXTRA_DIST += gnome-shell-extension-tool.in gnome-shell-extension-prefs.in \
+	      gnome-shell-perf-tool.in
 bin_PROGRAMS = gnome-shell-real
 
 if USE_JHBUILD_WRAPPER_SCRIPT
@@ -71,6 +73,9 @@ gnome-shell-extension-tool: gnome-shell-extension-tool.in Makefile
 gnome-shell-extension-prefs: gnome-shell-extension-prefs.in Makefile
 	$(AM_V_GEN) sed $(generated_script_substitutions) $< > $@.tmp && mv $@.tmp $@ && chmod a+x $@
 
+gnome-shell-perf-tool: gnome-shell-perf-tool.in Makefile
+	$(AM_V_GEN) sed $(generated_script_substitutions) $< > $@.tmp && mv $@.tmp $@ && chmod a+x $@
+
 CLEANFILES += gnome-shell $(bin_SCRIPTS)
 
 include Makefile-st.am
diff --git a/src/gnome-shell-jhbuild.in b/src/gnome-shell-jhbuild.in
index a346f91..03b8679 100755
--- a/src/gnome-shell-jhbuild.in
+++ b/src/gnome-shell-jhbuild.in
@@ -168,31 +168,7 @@ def start_dconf_await_service():
         sys.exit(1)
 
     wait_for_dbus_name (DCONF_NAME)
-
-PERF_HELPER_NAME = "org.gnome.Shell.PerfHelper"
-PERF_HELPER_IFACE = "org.gnome.Shell.PerfHelper"
-PERF_HELPER_PATH = "/org/gnome/Shell/PerfHelper"
-
-def start_perf_helper():
-    get_bus_iface() # connect to NameOwnerChanged signal
-
-    self_dir = os.path.dirname(os.path.abspath(sys.argv[0]))
-    running_from_source_tree = os.path.exists(os.path.join(self_dir, 'gnome-shell-jhbuild.in'))
-
-    if running_from_source_tree:
-        perf_helper_path = os.path.join(self_dir, "gnome-shell-perf-helper")
-    else:
-        perf_helper_path = "@libexecdir@/gnome-shell-perf-helper"
-
-    subprocess.Popen([perf_helper_path])
-    wait_for_dbus_name (PERF_HELPER_NAME)
-
-def stop_perf_helper():
-    bus = get_bus()
-    proxy = bus.get_object(PERF_HELPER_NAME, PERF_HELPER_PATH)
-    proxy.Exit(dbus_interface=PERF_HELPER_IFACE)
-
-def start_shell(perf_output=None):
+def start_shell():
     self_dir = os.path.dirname(os.path.abspath(sys.argv[0]))
     if os.path.exists(os.path.join(self_dir, 'gnome-shell-jhbuild.in')):
         running_from_source_tree = True
@@ -235,12 +211,6 @@ def start_shell(perf_output=None):
     if os.path.exists(jhbuild_gconf_source):
         env['GCONF_DEFAULT_SOURCE_PATH'] = jhbuild_gconf_source
 
-    if options.perf is not None:
-        env['SHELL_PERF_MODULE'] = options.perf
-        env['MUTTER_WM_CLASS_FILTER'] = 'Gnome-shell-perf-helper'
-
-    if perf_output is not None:
-        env['SHELL_PERF_OUTPUT'] = perf_output
 
     args = []
     if options.debug:
@@ -271,7 +241,7 @@ def ensure_desktop_infrastructure_state():
     _killall('notification-daemon')
     _killall('notify-osd')
 
-def run_shell(perf_output=None):
+def run_shell():
     if options.debug:
         # Record initial terminal state so we can reset it to that
         # later, in case we kill gdb at a bad time
@@ -283,7 +253,7 @@ def run_shell(perf_output=None):
         print "Starting shell"
 
     try:
-        shell = start_shell(perf_output=perf_output)
+        shell = start_shell()
 
         # Wait for shell to exit
         if options.verbose:
@@ -317,206 +287,6 @@ def run_shell(perf_output=None):
 
     return normal_exit
 
-def upload_performance_report(report_text):
-    # Local imports to avoid impacting gnome-shell startup time
-    import base64
-    from ConfigParser import RawConfigParser
-    import hashlib
-    import hmac
-    import httplib
-    import urlparse
-    import urllib
-
-    try:
-        config_home = os.environ['XDG_CONFIG_HOME']
-    except KeyError:
-        config_home = None
-
-    if not config_home:
-        config_home = os.path.expanduser("~/.config")
-
-    config_file = os.path.join(config_home, "gnome-shell/perf.ini")
-
-    try:
-        config = RawConfigParser()
-        f = open(config_file)
-        config.readfp(f)
-        f.close()
-
-        base_url = config.get('upload', 'url')
-        system_name = config.get('upload', 'name')
-        secret_key = config.get('upload', 'key')
-    except Exception, e:
-        print "Can't read upload configuration from %s: %s" % (config_file, str(e))
-        sys.exit(1)
-
-    # Determine host, port and upload URL from provided data, we're
-    # a bit extra-careful about normalization since the URL is part
-    # of the signature.
-
-    split = urlparse.urlsplit(base_url)
-    scheme = split[0].lower()
-    netloc = split[1]
-    base_path = split[2]
-
-    m = re.match(r'^(.*?)(?::(\d+))?$', netloc)
-    if m.group(2):
-        host, port = m.group(1), int(m.group(2))
-    else:
-        host, port = m.group(1), None
-
-    if scheme != "http":
-        print "'%s' is not a HTTP URL" % base_url
-        sys.exit(1)
-
-    if port is None:
-        port = 80
-
-    if base_path.endswith('/'):
-        base_path = base_path[:-1]
-
-    if port == 80:
-        normalized_base = "%s://%s%s" % (scheme, host, base_path)
-    else:
-        normalized_base = "%s://%s:%d%s" % (scheme, host, port, base_path)
-
-    upload_url = normalized_base + '/system/%s/upload' % system_name
-    upload_path = urlparse.urlsplit(upload_url)[2] # path portion
-
-    # Create signature based on upload URL and the report data
-
-    signature_data = 'POST&' + upload_url + "&&"
-    h = hmac.new(secret_key, digestmod=hashlib.sha1)
-    h.update(signature_data)
-    h.update(report_text)
-    signature = urllib.quote(base64.b64encode(h.digest()), "~")
-
-    headers = {
-        'User-Agent': 'gnome-shell',
-        'Content-Type': 'application/json',
-        'X-Shell-Signature': 'HMAC-SHA1 ' + signature
-    };
-
-    connection = httplib.HTTPConnection(host, port)
-    connection.request('POST', upload_path, report_text, headers)
-    response = connection.getresponse()
-
-    if response.status == 200:
-        print "Performance report upload succeeded"
-    else:
-        print "Performance report upload failed with status %d" % response.status
-        print response.read()
-
-def run_performance_test():
-    iters = options.perf_iters
-    if options.perf_warmup:
-        iters += 1
-
-    logs = []
-    metric_summaries = {}
-
-    start_perf_helper()
-
-    for i in xrange(0, iters):
-        # We create an empty temporary file that the shell will overwrite
-        # with the contents.
-        handle, output_file = tempfile.mkstemp(".json", "gnome-shell-perf.")
-        os.close(handle)
-
-        # Run the performance test and collect the output as JSON
-        normal_exit = False
-        try:
-            normal_exit = run_shell(perf_output=output_file)
-        except:
-            stop_perf_helper()
-            raise
-        finally:
-            if not normal_exit:
-                os.remove(output_file)
-
-        if not normal_exit:
-            stop_perf_helper()
-            return False
-
-        try:
-            f = open(output_file)
-            output = json.load(f)
-            f.close()
-        except:
-            stop_perf_helper()
-            raise
-        finally:
-            os.remove(output_file)
-
-        # Grab the event definitions and monitor layout the first time around
-        if i == 0:
-            events = output['events']
-            monitors = output['monitors']
-
-        if options.perf_warmup and i == 0:
-            continue
-
-        for metric in output['metrics']:
-            name = metric['name']
-            if not name in metric_summaries:
-                summary = {}
-                summary['description'] = metric['description']
-                summary['units'] = metric['units']
-                summary['values'] = []
-                metric_summaries[name] = summary
-            else:
-                summary = metric_summaries[name]
-
-            summary['values'].append(metric['value'])
-
-        logs.append(output['log'])
-
-    stop_perf_helper()
-
-    if options.perf_output or options.perf_upload:
-        # Write a complete report, formatted as JSON. The Javascript/C code that
-        # generates the individual reports we are summarizing here is very careful
-        # to format them nicely, but we just dump out a compressed no-whitespace
-        # version here for simplicity. Using json.dump(indent=0) doesn't real
-        # improve the readability of the output much.
-        report = {
-            'date': datetime.datetime.utcnow().isoformat() + 'Z',
-            'events': events,
-            'monitors': monitors,
-            'metrics': metric_summaries,
-            'logs': logs
-        }
-
-        # Add the Git revision if available
-        self_dir = os.path.dirname(os.path.abspath(sys.argv[0]))
-        if os.path.exists(os.path.join(self_dir, 'gnome-shell-jhbuild.in')):
-            top_dir = os.path.dirname(self_dir)
-            git_dir = os.path.join(top_dir, '.git')
-            if os.path.exists(git_dir):
-                env = dict(os.environ)
-                env['GIT_DIR'] = git_dir
-                revision = subprocess.Popen(['git', 'rev-parse', 'HEAD'],
-                                            env=env,
-                                            stdout=subprocess.PIPE).communicate()[0].strip()
-                report['revision'] = revision
-
-        if options.perf_output:
-            f = open(options.perf_output, 'w')
-            json.dump(report, f)
-            f.close()
-
-        if options.perf_upload:
-            upload_performance_report(json.dumps(report))
-    else:
-        # Write a human readable summary
-        print '------------------------------------------------------------';
-        for metric in sorted(metric_summaries.keys()):
-            summary = metric_summaries[metric]
-            print "#", summary['description']
-            print metric, ", ".join((str(x) for x in summary['values']))
-        print '------------------------------------------------------------';
-
-    return True
 
 def restore_gnome():
     # Do imports lazily to save time and memory
@@ -570,17 +340,6 @@ parser.add_option("", "--debug-command", metavar="COMMAND",
                   help="Command to use for debugging (defaults to 'gdb --args')")
 parser.add_option("-v", "--verbose", action="store_true")
 parser.add_option("", "--sync", action="store_true")
-parser.add_option("", "--perf", metavar="PERF_MODULE",
-		  help="Specify the name of a performance module to run")
-parser.add_option("", "--perf-iters", type="int", metavar="ITERS",
-		  help="Numbers of iterations of performance module to run",
-                  default=1)
-parser.add_option("", "--perf-warmup", action="store_true",
-		  help="Run a dry run before performance tests")
-parser.add_option("", "--perf-output", metavar="OUTPUT_FILE",
-		  help="Output file to write performance report")
-parser.add_option("", "--perf-upload", action="store_true",
-		  help="Upload performance report to server")
 parser.add_option("", "--version", action="callback", callback=show_version,
                   help="Display version and exit")
 
@@ -590,10 +349,6 @@ if args:
     parser.print_usage()
     sys.exit(1)
 
-if options.perf and json is None:
-    print 'The Python simplejson module is required for performance tests'
-    sys.exit(1)
-
 # Handle ssh logins
 if 'DISPLAY' not in os.environ:
     running_env = get_running_session_environs()
@@ -610,13 +365,10 @@ elif options.debug:
 normal_exit = False
 
 try:
-    if options.perf:
-        normal_exit = run_performance_test()
-    else:
-        ensure_desktop_infrastructure_state()
-        normal_exit = run_shell()
+    ensure_desktop_infrastructure_state()
+    normal_exit = run_shell()
 finally:
-    if options.replace and (options.perf or not normal_exit):
+    if options.replace and not normal_exit:
         restore_gnome()
 
 if normal_exit:
diff --git a/src/gnome-shell-perf-tool.in b/src/gnome-shell-perf-tool.in
new file mode 100644
index 0000000..1bdd46c
--- /dev/null
+++ b/src/gnome-shell-perf-tool.in
@@ -0,0 +1,310 @@
+#!@PYTHON@
+# -*- mode: Python; indent-tabs-mode: nil; -*-
+
+import datetime
+from gi.repository import GLib, GObject, Gio
+try:
+    import json
+except ImportError:
+    import simplejson as json
+import optparse
+import os
+import re
+import subprocess
+import sys
+import tempfile
+import base64
+from ConfigParser import RawConfigParser
+import hashlib
+import hmac
+import httplib
+import urlparse
+import urllib
+
+def show_version(option, opt_str, value, parser):
+    print "GNOME Shell Performance Test @VERSION@"
+    sys.exit()
+
+def wait_for_dbus_name(wait_name):
+    loop = GLib.MainLoop()
+
+    def on_name_appeared(connection, name, name_owner, *args):
+        if not (name == wait_name and name_owner != ''):
+            return
+        loop.quit()
+        return
+
+    watch_id = Gio.bus_watch_name(Gio.BusType.SESSION,
+                                  wait_name,
+                                  Gio.BusNameWatcherFlags.NONE,
+                                  on_name_appeared,
+                                  None)
+
+    def on_timeout():
+        print "\nFailed to start %s: timed out" % (wait_name,)
+        sys.exit(1)
+    GLib.timeout_add_seconds(7, on_timeout)
+
+    loop.run()
+    Gio.bus_unwatch_name(watch_id)
+
+PERF_HELPER_NAME = "org.gnome.Shell.PerfHelper"
+PERF_HELPER_IFACE = "org.gnome.Shell.PerfHelper"
+PERF_HELPER_PATH = "/org/gnome/Shell/PerfHelper"
+
+def start_perf_helper():
+    self_dir = os.path.dirname(os.path.abspath(sys.argv[0]))
+    perf_helper_path = "@libexecdir@/gnome-shell-perf-helper"
+
+    subprocess.Popen([perf_helper_path])
+    wait_for_dbus_name (PERF_HELPER_NAME)
+
+def stop_perf_helper():
+    # Synchronously create a proxy for the perf helper on the session bus
+    proxy = Gio.DBusProxy.new_for_bus_sync(Gio.BusType.SESSION,
+                                           Gio.DBusProxyFlags.NONE,
+                                           None,
+                                           PERF_HELPER_NAME,
+                                           PERF_HELPER_PATH,
+                                           PERF_HELPER_IFACE,
+                                           None)
+    proxy.Exit()
+
+def start_shell(extra_args, perf_output=None):
+    # Set up environment
+    env = dict(os.environ)
+    env['SHELL_PERF_MODULE'] = options.perf
+    env['MUTTER_WM_CLASS_FILTER'] = 'Gnome-shell-perf-helper'
+
+    if perf_output is not None:
+        env['SHELL_PERF_OUTPUT'] = perf_output
+
+    self_dir = os.path.dirname(os.path.abspath(sys.argv[0]))
+    args = [os.path.join(self_dir, 'gnome-shell')]
+    # pass on any additional arguments
+    args += extra_args
+
+    return subprocess.Popen(args, env=env)
+
+def run_shell(args, perf_output=None):
+    # We do no additional supervision of gnome-shell beyond waiting for
+    # it to exit; in particular, we don't kill the shell upon receiving
+    # a KeyboardInterrupt, as we expect to be in the same process group.
+    shell = start_shell(args, perf_output=perf_output)
+    shell.wait()
+    return shell.returncode == 0
+
+def upload_performance_report(report_text):
+    try:
+        config_home = os.environ['XDG_CONFIG_HOME']
+    except KeyError:
+        config_home = None
+
+    if not config_home:
+        config_home = os.path.expanduser("~/.config")
+
+    config_file = os.path.join(config_home, "gnome-shell/perf.ini")
+
+    try:
+        config = RawConfigParser()
+        f = open(config_file)
+        config.readfp(f)
+        f.close()
+
+        base_url = config.get('upload', 'url')
+        system_name = config.get('upload', 'name')
+        secret_key = config.get('upload', 'key')
+    except Exception, e:
+        print "Can't read upload configuration from %s: %s" % (config_file, str(e))
+        sys.exit(1)
+
+    # Determine host, port and upload URL from provided data, we're
+    # a bit extra-careful about normalization since the URL is part
+    # of the signature.
+
+    split = urlparse.urlsplit(base_url)
+    scheme = split[0].lower()
+    netloc = split[1]
+    base_path = split[2]
+
+    m = re.match(r'^(.*?)(?::(\d+))?$', netloc)
+    if m.group(2):
+        host, port = m.group(1), int(m.group(2))
+    else:
+        host, port = m.group(1), None
+
+    if scheme != "http":
+        print "'%s' is not a HTTP URL" % base_url
+        sys.exit(1)
+
+    if port is None:
+        port = 80
+
+    if base_path.endswith('/'):
+        base_path = base_path[:-1]
+
+    if port == 80:
+        normalized_base = "%s://%s%s" % (scheme, host, base_path)
+    else:
+        normalized_base = "%s://%s:%d%s" % (scheme, host, port, base_path)
+
+    upload_url = normalized_base + '/system/%s/upload' % system_name
+    upload_path = urlparse.urlsplit(upload_url)[2] # path portion
+
+    # Create signature based on upload URL and the report data
+
+    signature_data = 'POST&' + upload_url + "&&"
+    h = hmac.new(secret_key, digestmod=hashlib.sha1)
+    h.update(signature_data)
+    h.update(report_text)
+    signature = urllib.quote(base64.b64encode(h.digest()), "~")
+
+    headers = {
+        'User-Agent': 'gnome-shell-performance-tool/@VERSION@',
+        'Content-Type': 'application/json',
+        'X-Shell-Signature': 'HMAC-SHA1 ' + signature
+    };
+
+    connection = httplib.HTTPConnection(host, port)
+    connection.request('POST', upload_path, report_text, headers)
+    response = connection.getresponse()
+
+    if response.status == 200:
+        print "Performance report upload succeeded"
+    else:
+        print "Performance report upload failed with status %d" % response.status
+        print response.read()
+
+def run_performance_test(args):
+    iters = options.perf_iters
+    if options.perf_warmup:
+        iters += 1
+
+    logs = []
+    metric_summaries = {}
+
+    start_perf_helper()
+
+    for i in xrange(0, iters):
+        # We create an empty temporary file that the shell will overwrite
+        # with the contents.
+        handle, output_file = tempfile.mkstemp(".json", "gnome-shell-perf.")
+        os.close(handle)
+
+        # Run the performance test and collect the output as JSON
+        normal_exit = False
+        try:
+            normal_exit = run_shell(args, perf_output=output_file)
+        except:
+            stop_perf_helper()
+            raise
+        finally:
+            if not normal_exit:
+                os.remove(output_file)
+
+        if not normal_exit:
+            stop_perf_helper()
+            return False
+
+        try:
+            f = open(output_file)
+            output = json.load(f)
+            f.close()
+        except:
+            stop_perf_helper()
+            raise
+        finally:
+            os.remove(output_file)
+
+        # Grab the event definitions and monitor layout the first time around
+        if i == 0:
+            events = output['events']
+            monitors = output['monitors']
+
+        if options.perf_warmup and i == 0:
+            continue
+
+        for metric in output['metrics']:
+            name = metric['name']
+            if not name in metric_summaries:
+                summary = {}
+                summary['description'] = metric['description']
+                summary['units'] = metric['units']
+                summary['values'] = []
+                metric_summaries[name] = summary
+            else:
+                summary = metric_summaries[name]
+
+            summary['values'].append(metric['value'])
+
+        logs.append(output['log'])
+
+    stop_perf_helper()
+
+    if options.perf_output or options.perf_upload:
+        # Write a complete report, formatted as JSON. The Javascript/C code that
+        # generates the individual reports we are summarizing here is very careful
+        # to format them nicely, but we just dump out a compressed no-whitespace
+        # version here for simplicity. Using json.dump(indent=0) doesn't really
+        # improve the readability of the output much.
+        report = {
+            'date': datetime.datetime.utcnow().isoformat() + 'Z',
+            'events': events,
+            'monitors': monitors,
+            'metrics': metric_summaries,
+            'logs': logs
+        }
+
+        # Add the Git revision if available
+        self_dir = os.path.dirname(os.path.abspath(sys.argv[0]))
+        if os.path.exists(os.path.join(self_dir, 'gnome-shell-jhbuild.in')):
+            top_dir = os.path.dirname(self_dir)
+            git_dir = os.path.join(top_dir, '.git')
+            if os.path.exists(git_dir):
+                env = dict(os.environ)
+                env['GIT_DIR'] = git_dir
+                revision = subprocess.Popen(['git', 'rev-parse', 'HEAD'],
+                                            env=env,
+                                            stdout=subprocess.PIPE).communicate()[0].strip()
+                report['revision'] = revision
+
+        if options.perf_output:
+            f = open(options.perf_output, 'w')
+            json.dump(report, f)
+            f.close()
+
+        if options.perf_upload:
+            upload_performance_report(json.dumps(report))
+    else:
+        # Write a human readable summary
+        print '------------------------------------------------------------';
+        for metric in sorted(metric_summaries.keys()):
+            summary = metric_summaries[metric]
+            print "#", summary['description']
+            print metric, ", ".join((str(x) for x in summary['values']))
+        print '------------------------------------------------------------';
+
+    return True
+
+# Main program
+
+parser = optparse.OptionParser()
+parser.add_option("", "--perf", metavar="PERF_MODULE",
+		  help="Specify the name of a performance module to run")
+parser.add_option("", "--perf-iters", type="int", metavar="ITERS",
+		  help="Number of iterations of the performance module to run",
+                  default=1)
+parser.add_option("", "--perf-warmup", action="store_true",
+		  help="Run a dry run before performance tests")
+parser.add_option("", "--perf-output", metavar="OUTPUT_FILE",
+		  help="Output file to write performance report")
+parser.add_option("", "--perf-upload", action="store_true",
+		  help="Upload performance report to server")
+parser.add_option("", "--version", action="callback", callback=show_version,
+                  help="Display version and exit")
+
+options, args = parser.parse_args()
+
+normal_exit = run_performance_test(args)
+if normal_exit:
+    sys.exit(0)
+else:
+    sys.exit(1)

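For reference, the --perf-upload path in the new tool reads its settings from
$XDG_CONFIG_HOME/gnome-shell/perf.ini (falling back to ~/.config/gnome-shell/perf.ini).
A minimal sketch of that file, with placeholder values; the URL must be plain
HTTP, and the system name and secret key are whatever the reporting server
issued for this machine:

    [upload]
    url = http://shell-perf.gnome.org
    name = example-machine
    key = placeholder-secret-key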