view tests/autodiff.py @ 31397:8f5ed8fa39f8

perf: perform a garbage collection before each iteration

Currently, no explicit garbage collection is performed when running the microbenchmarks in `hg perf`. I think this is wrong because garbage collection can have a significant impact on execution times. And, if gc is triggered via the default heuristics, it will fire effectively randomly during subsequent benchmark iterations due to the variable amount of garbage left over from previous runs. Running a gc before invoking the measured function will help ensure state is more consistent across all iterations.
author Gregory Szorc <gregory.szorc@gmail.com>
date Mon, 13 Mar 2017 18:16:42 -0700
parents 3b517f2a3989
children 46ba2cdda476
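
The changeset description above can be illustrated with a minimal timing loop: collect garbage before each timed run so leftover objects from earlier iterations cannot trigger a collection in the middle of a measurement. This is only a sketch of the idea; the helper name `bench` and its structure are assumptions for illustration, not the actual contrib/perf.py code.

import gc
import time

def bench(func, iterations=3):
    # Sketch only: force a collection before each timed run so that garbage
    # left over from earlier iterations cannot fire the collector mid-run.
    times = []
    for _ in range(iterations):
        gc.collect()
        start = time.time()
        func()
        times.append(time.time() - start)
    return min(times)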

# Extension dedicated to test patch.diff() upgrade modes

from __future__ import absolute_import

from mercurial import (
    cmdutil,
    error,
    patch,
    scmutil,
)

cmdtable = {}
command = cmdutil.command(cmdtable)

@command('autodiff',
    [('', 'git', '', 'git upgrade mode (yes/no/auto/warn/abort)')],
    '[OPTION]... [FILE]...')
def autodiff(ui, repo, *pats, **opts):
    diffopts = patch.difffeatureopts(ui, opts)
    git = opts.get('git', 'no')
    brokenfiles = set()
    losedatafn = None
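    # Map the --git mode onto diffopts: 'yes'/'no' force git diffs on or off
    # without upgrading; 'auto' silently upgrades to a git diff when a plain
    # diff would lose data; 'warn' keeps the plain diff but records the
    # affected files; 'abort' stops as soon as data would be lost.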
    if git in ('yes', 'no'):
        diffopts.git = git == 'yes'
        diffopts.upgrade = False
    elif git == 'auto':
        diffopts.git = False
        diffopts.upgrade = True
    elif git == 'warn':
        diffopts.git = False
        diffopts.upgrade = True
        def losedatafn(fn=None, **kwargs):
            brokenfiles.add(fn)
            return True
    elif git == 'abort':
        diffopts.git = False
        diffopts.upgrade = True
        def losedatafn(fn=None, **kwargs):
            raise error.Abort('losing data for %s' % fn)
    else:
        raise error.Abort('--git must be yes, no, auto, warn or abort')

    node1, node2 = scmutil.revpair(repo, [])
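    # With no revisions given, revpair() returns the default diff range:
    # the working directory against its first parent.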
    m = scmutil.match(repo[node2], pats, opts)
    it = patch.diff(repo, node1, node2, match=m, opts=diffopts,
                    losedatafn=losedatafn)
    for chunk in it:
        ui.write(chunk)
    for fn in sorted(brokenfiles):
        ui.write(('data lost for: %s\n' % fn))