tests/check-perf-code.py
author Kyle Lippincott <spectral@google.com>
Fri, 13 Dec 2019 14:40:52 -0800
changeset 43891 7eb6a2680ae6
parent 43076 2372284d9457
child 45830 c102b704edb5
permissions -rwxr-xr-x
dirstate: when calling rebuild(), avoid some N^2 codepaths

I had a user repo with 200k files in it. Calling `hg debugrebuilddirstate`
took tens of minutes (I didn't wait for it). In that situation,
changedfiles==allfiles, and both are lists. This meant that we had to run an
average of 100k comparisons, for each of 200k files, just to check whether a
file needed to have normallookup called (it always did), or drop.

While it's probably not a huge issue, in my very awkward synthetic benchmark
I wrote (not using a benchmark library or anything), I was seeing some
slowdowns for small-changedfiles and very-large-allfiles invocations, with an
inflection somewhere around 10 items in changedfiles (regardless of the size
of allfiles); above 10 items in changedfiles, the new code appears to always
be faster. For the case of 50k files in changedfiles and the same items in
allfiles, I'm seeing differences of 15s of just running comparisons vs.
0.003793s. I haven't bothered to run a comparison of 200k items in
changedfiles and allfiles. :)

Differential Revision: https://phab.mercurial-scm.org/D7665
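
The gist of that change, as a minimal sketch (hypothetical names, not the
actual dirstate code): building a set from changedfiles once replaces a
per-file list scan, so the membership checks drop from
O(len(allfiles) * len(changedfiles)) to O(len(allfiles)) on average.

    def classify(allfiles, changedfiles):
        changedset = set(changedfiles)  # built once: O(len(changedfiles))
        lookup, drop = [], []
        for f in allfiles:
            if f in changedset:  # O(1) average, vs an O(n) list scan
                lookup.append(f)  # stand-in for dirstate.normallookup(f)
            else:
                drop.append(f)  # stand-in for dirstate.drop(f)
        return lookup, drop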

#!/usr/bin/env python
#
# check-perf-code - (historical) portability checker for contrib/perf.py

from __future__ import absolute_import

import os
import sys

# write static check patterns here
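# (each entry is a (regexp, message) pair; following check-code.py's
# convention, the structure is [errors, warnings])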
perfpypats = [
    [
        (
            r'(branchmap|repoview|repoviewutil)\.subsettable',
            "use getbranchmapsubsettable() for early Mercurial",
        ),
        (
            r'\.(vfs|svfs|opener|sopener)',
            "use getvfs()/getsvfs() for early Mercurial",
        ),
        (
            r'ui\.configint',
            "use getint() instead of ui.configint() for early Mercurial",
        ),
    ],
    # warnings
    [],
]


def modulewhitelist(names):
    replacement = [
        ('.py', ''),
        ('.c', ''),  # trim suffixes
        ('mercurial/', ''),  # trim "mercurial/" path prefix
    ]
    ignored = {'__init__'}
    modules = {}

    # convert each file name to a module name, and count appearances
    for name in names:
        name = name.strip()
        for old, new in replacement:
            name = name.replace(old, new)
        if name not in ignored:
            modules[name] = modules.get(name, 0) + 1

    # collect the module names that appear multiple times
    whitelist = []
    for name, count in modules.items():
        if count > 1:
            whitelist.append(name)

    return whitelist
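
# Illustration (hypothetical input, not part of this script's operation):
# given the concatenated "hg files" output of two revisions on stdin,
#
#     mercurial/util.py
#     mercurial/util.py
#     mercurial/dirstate.py
#
# modulewhitelist() returns ['util']: only module names seen at more than
# one revision survive, so 'dirstate' (seen once) is dropped, as is
# '__init__' (always ignored).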


if __name__ == "__main__":
    # the concatenated output of "hg files" at multiple revisions is
    # expected on stdin
    whitelist = modulewhitelist(sys.stdin)
    assert whitelist, "module whitelist is empty"

    # build the module whitelist check from the file names given at runtime
    perfpypats[0].append(
        # for simplicity, this pattern assumes that modules are imported
        # from the "mercurial" package in the parenthesized style below
        #
        #    from mercurial import (
        #        foo,
        #        bar,
        #        baz
        #    )
        (
            (
                r'from mercurial import [(][a-z0-9, \n#]*\n(?! *%s,|^[ #]*\n|[)])'
                % ',| *'.join(whitelist)
            ),
            "import newer module separately in try clause for early Mercurial",
        )
    )
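
    # for example, with whitelist == ['foo', 'bar'], the pattern above flags
    #
    #     from mercurial import (
    #         foo,
    #         baz,
    #     )
    #
    # because the line naming 'baz' is neither a whitelisted module followed
    # by a comma, nor a blank/comment-only line, nor the closing parenthesis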

    # import contrib/check-code.py as checkcode
    assert 'RUNTESTDIR' in os.environ, "use check-perf-code.py in *.t script"
    contribpath = os.path.join(os.environ['RUNTESTDIR'], '..', 'contrib')
    sys.path.insert(0, contribpath)
    checkcode = __import__('check-code')

    # register a perf.py-specific entry in check-code.py's "checks" list
    checkcode.checks.append(
        ('perf.py', r'contrib/perf.py$', '', checkcode.pyfilters, perfpypats)
    )
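
    # the 5-tuple mirrors the existing entries in check-code.py's checks
    # list: (check name, regexp matched against file names, regexp matched
    # against file content as an alternative selector ('' here, so only the
    # file name matters), filters, and the [errors, warnings] pattern lists)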

    sys.exit(checkcode.main())