comparison contrib/benchmarks/__init__.py @ 30406:cff0f5926797

perf: add asv benchmarks

Airspeed Velocity (ASV) is a Python framework for benchmarking Python
packages over their lifetime. The results are displayed in an interactive
web frontend.

Add ASV benchmarks for Mercurial that use the contrib/perf.py extension
and can be run against multiple reference repositories. The benchmark
suite currently includes the revsets from contrib/base-revsets.txt with
variants, plus perftags, perfstatus, perfmanifest and perfheads.

Installation requires asv>=0.2, python-hglib and virtualenv.

This is part of the PerformanceTrackingSuitePlan:
https://www.mercurial-scm.org/wiki/PerformanceTrackingSuitePlan
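A minimal sketch of what an ASV configuration like the contrib/asv.conf.json
referenced above might contain (illustrative only, not the actual file from
this series; the keys are standard ASV settings, the values are assumptions):

    {
        "version": 1,
        "project": "mercurial",
        "project_url": "https://www.mercurial-scm.org/",
        // path to the benchmarked repository, relative to this config file
        "repo": "..",
        // run the suite on both long-lived branches
        "branches": ["default", "stable"],
        // benchmarks run in isolated environments (hence the virtualenv
        // requirement noted above)
        "environment_type": "virtualenv",
        "benchmark_dir": "benchmarks"
    }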
author Philippe Pepiot <philippe.pepiot@logilab.fr>
date Thu, 29 Sep 2016 10:16:34 +0200
parents 30405:e77e8876886f
children d83ca854fa21
# __init__.py - asv benchmark suite
#
# Copyright 2016 Logilab SA <contact@logilab.fr>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

'''ASV (https://asv.readthedocs.io) benchmark suite

Benchmarks are parameterized against reference repositories found in the
directory pointed to by the REPOS_DIR environment variable.

Invocation example:

    $ export REPOS_DIR=~/hgperf/repos
    # run suite on given revision
    $ asv --config contrib/asv.conf.json run REV
    # run suite on new changesets found in the stable and default branches
    $ asv --config contrib/asv.conf.json run NEW
    # display a comparative table of benchmark results between two given
    # revisions
    $ asv --config contrib/asv.conf.json compare REV1 REV2
    # compute regression detection and generate the ASV static website
    $ asv --config contrib/asv.conf.json publish
    # serve the static website
    $ asv --config contrib/asv.conf.json preview
'''

from __future__ import absolute_import

import functools
import os
import re

from mercurial import (
    extensions,
    hg,
    ui as uimod,
)

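# basedir is the root of the Mercurial source tree, two levels up from
# contrib/benchmarks/ where this file lives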
basedir = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                       os.path.pardir, os.path.pardir))
reposdir = os.environ['REPOS_DIR']
reposnames = [name for name in os.listdir(reposdir)
              if os.path.isdir(os.path.join(reposdir, name, ".hg"))]
if not reposnames:
    raise ValueError("No repositories found in $REPOS_DIR")
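# contrib/perf.py commands print a timing line of the following shape
# (the numbers here are made up for illustration):
#   ! wall 0.004013 comb 0.020000 user 0.010000 sys 0.010000 (best of 100)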
outputre = re.compile(r'! wall (\d+\.\d+) comb \d+\.\d+ user \d+\.\d+ sys '
                      r'\d+\.\d+ \(best of \d+\)')

def runperfcommand(reponame, command, *args, **kwargs):
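    # run with an isolated configuration: honor ASVHGRCPATH if set,
    # otherwise ignore the user's and system's hgrc files entirely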
    os.environ["HGRCPATH"] = os.environ.get("ASVHGRCPATH", "")
    ui = uimod.ui()
    repo = hg.repository(ui, os.path.join(reposdir, reponame))
    perfext = extensions.load(ui, 'perfext',
                              os.path.join(basedir, 'contrib', 'perf.py'))
    cmd = getattr(perfext, command)
    ui.pushbuffer()
    cmd(ui, repo, *args, **kwargs)
    output = ui.popbuffer()
    match = outputre.search(output)
    if not match:
        raise ValueError("Invalid output {0}".format(output))
    return float(match.group(1))

def perfbench(repos=reposnames, name=None, params=None):
67 """decorator to declare ASV benchmark based on contrib/perf.py extension
68
69 An ASV benchmark is a python function with the given attributes:
70
71 __name__: should start with track_, time_ or mem_ to be collected by ASV
72 params and param_name: parameter matrix to display multiple graphs on the
73 same page.
74 pretty_name: If defined it's displayed in web-ui instead of __name__
75 (useful for revsets)
76 the module name is prepended to the benchmark name and displayed as
77 "category" in webui.
78
79 Benchmarks are automatically parameterized with repositories found in the
80 REPOS_DIR environment variable.
81
82 `params` is the param matrix in the form of a list of tuple
83 (param_name, [value0, value1])
84
85 For example [(x, [a, b]), (y, [c, d])] declare benchmarks for
86 (a, c), (a, d), (b, c) and (b, d).
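
    A hypothetical declaration over such a matrix (the names here are
    illustrative, not part of the suite):

        @perfbench(params=[("x", ["a", "b"]), ("y", ["c", "d"])])
        def track_example(perf, x, y):
            return perf("perfstatus")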
87 """
88 params = list(params or [])
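    # the repository is always the first parameter axis: every benchmark
    # runs once per reference repository found in REPOS_DIR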
    params.insert(0, ("repo", repos))

    def decorator(func):
        @functools.wraps(func)
        def wrapped(repo, *args):
            def perf(command, *a, **kw):
                return runperfcommand(repo, command, *a, **kw)
            return func(perf, *args)

        wrapped.params = [p[1] for p in params]
        wrapped.param_names = [p[0] for p in params]
        wrapped.pretty_name = name
        return wrapped
    return decorator
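
# A minimal sketch of how a benchmark module might use this decorator (the
# import form and function body are illustrative; the commands named in the
# commit message, e.g. perftags, are defined in contrib/perf.py):
#
#     from . import perfbench
#
#     @perfbench()
#     def track_tags(perf):
#         return perf("perftags")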