view contrib/revsetbenchmarks.py @ 22196:23fe278bde43

largefiles: keep largefiles from colliding with normal one during linear merge

Before this patch, linear merging of a modified or newly added largefile
causes an unexpected result, if (1) the largefile collides with a normal
file of the same name in the target revision and (2) the "local" largefile
is chosen, even though branch merging between such revisions doesn't.

The expected result of such a linear merge is:

(1) the (not yet recorded) largefile is kept in the working directory
(2) the largefile is marked as (re-)"added"
(3) the colliding normal file is marked as "removed"

But the actual result is:

(1) the largefile in the working directory is unlinked
(2) the largefile is marked as "normal" (and so treated as "missing")
(3) the dirstate entry for the colliding normal file is just dropped

(1) is very serious, because there is no way to restore temporarily
modified largefiles. (3) prevents the next commit from adding the manifest
with correct "removal of (normal) file" information for the newly created
changeset.

The root cause of this problem is putting "lfile" into "actions['r']" in
the linear-merge case. At linear merging, "actions['r']" causes:

- unlinking the "target file" in the working directory, but "lfile" as the
  "target file" is also the largefile itself in this case
- dropping the dirstate entry for the target file

"actions['f']" (= "forget") does only the latter, which is why this patch
doesn't put "lfile" into it instead of "actions['r']".

This patch newly introduces the action "lfmr" (LargeFiles: Mark as Removed)
to mark the colliding normal file as "removed" without unlinking it.

This patch uses "hg debugdirstate" instead of "hg status" in the test,
because:

- choosing the "local largefile" hides the "removed" status of the "remote
  normal file" in "hg status" output, and
- "hg status" for "large2" in this case has another problem, fixed by the
  subsequent patch
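A minimal, self-contained sketch of the distinction the description draws (a
toy model only: the dict-based dirstate and the apply_action helper are
invented for illustration and are not Mercurial's merge or dirstate API):
'r' unlinks the working-directory file and drops its dirstate entry, 'f'
("forget") only drops the entry, and the new 'lfmr' keeps the file on disk
and merely records it as removed.

    import os

    def apply_action(action, path, dirstate):
        # dirstate is modeled here as a plain dict of {path: status}
        if action == 'r':
            # remove: unlink the file and drop its dirstate entry
            os.unlink(path)
            dirstate.pop(path, None)
        elif action == 'f':
            # forget: drop the dirstate entry but leave the file on disk
            dirstate.pop(path, None)
        elif action == 'lfmr':
            # mark as removed: keep the file, record "removed" ('r') status
            dirstate[path] = 'r'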
author FUJIWARA Katsunori <foozy@lares.dti.ne.jp>
date Fri, 15 Aug 2014 20:28:51 +0900
parents d5cef58d8ec8
children 2143d794e960

#!/usr/bin/env python

# Measure the performance of a list of revsets against multiple revisions
# defined by the first parameter. Check out each revision in turn and run
# perfrevset with every revset in the list to benchmark its performance.
#
# - The first argument is a revset of Mercurial's own repo to run against.
# - The revsets to benchmark are read from the file given with the
#   --file/-f option; if that option is omitted, they are read from
#   standard input.
#
# You should run this from the root of your mercurial repository.
#
# This script also does one run of the current version of mercurial installed
# to compare performance.
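#
# Example (the revision range and file name are only illustrative):
#
#   ./contrib/revsetbenchmarks.py '2000::2010' --file revsets.txt
#
# checks out revisions 2000 to 2010 one by one and times every revset
# listed in revsets.txt with each checked-out version.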

import sys
import os
from subprocess import check_call, Popen, CalledProcessError, STDOUT, PIPE
# cannot use argparse, it is only available in Python 2.7 and later
from optparse import OptionParser

def check_output(*args, **kwargs):
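    """minimal replacement for subprocess.check_output (Python >= 2.7 only)"""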
    kwargs.setdefault('stderr', PIPE)
    kwargs.setdefault('stdout', PIPE)
    proc = Popen(*args, **kwargs)
    output, error = proc.communicate()
    if proc.returncode != 0:
        raise CalledProcessError(proc.returncode, ' '.join(args[0]))
    return output

def update(rev):
    """update the repo to a revision"""
    try:
        check_call(['hg', 'update', '--quiet', '--check', str(rev)])
    except CalledProcessError, exc:
        print >> sys.stderr, 'update to revision %s failed, aborting' % rev
        sys.exit(exc.returncode)

def perf(revset, target=None):
    """run benchmark for this very revset"""
    try:
        cmd = ['./hg',
               '--config',
               'extensions.perf='
               + os.path.join(contribdir, 'perf.py'),
               'perfrevset',
               revset]
        if target is not None:
            cmd.append('-R')
            cmd.append(target)
        output = check_output(cmd, stderr=STDOUT)
        output = output.lstrip('!') # remove useless ! in this context
        return output.strip()
    except CalledProcessError, exc:
        print >> sys.stderr, 'abort: cannot run revset benchmark'
        sys.exit(exc.returncode)

def printrevision(rev):
    """print data about a revision"""
    sys.stdout.write("Revision: ")
    sys.stdout.flush()
    check_call(['hg', 'log', '--rev', str(rev), '--template',
               '{desc|firstline}\n'])

def getrevs(spec):
    """get the list of rev matched by a revset"""
    try:
        out = check_output(['hg', 'log', '--template={rev}\n', '--rev', spec])
    except CalledProcessError, exc:
        print >> sys.stderr, "abort: can't get revisions from %s" % spec
        sys.exit(exc.returncode)
    return [r for r in out.split() if r]


parser = OptionParser(usage="usage: %prog [options] <revs>")
parser.add_option("-f", "--file",
                  help="read revset from FILE", metavar="FILE")
parser.add_option("-R", "--repo",
                  help="run benchmark on REPO", metavar="REPO")

(options, args) = parser.parse_args()

if len(args) < 1:
    parser.print_help()
    sys.exit(255)

# the directory where both this script and the perf.py extension live.
contribdir = os.path.dirname(__file__)

target_rev = args[0]

revsetsfile = sys.stdin
if options.file:
    revsetsfile = open(options.file)

revsets = [l.strip() for l in revsetsfile if l.strip()]

print "Revsets to benchmark"
print "----------------------------"

for idx, rset in enumerate(revsets):
    print "%i) %s" % (idx, rset)

print "----------------------------"
print


revs = getrevs(target_rev)

results = []
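# update the working copy to each selected revision of Mercurial and time
# every revset with that revision's perfrevset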
for r in revs:
    print "----------------------------"
    printrevision(r)
    print "----------------------------"
    update(r)
    res = []
    results.append(res)
    for idx, rset in enumerate(revsets):
        data = perf(rset, target=options.repo)
        res.append(data)
        print "%i)" % idx, data
        sys.stdout.flush()
    print "----------------------------"


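# summary: list the benchmarked revisions, then the timings grouped by revset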
print """

Result by revset
================
"""

print 'Revision:', revs
for idx, rev in enumerate(revs):
    sys.stdout.write('%i) ' % idx)
    sys.stdout.flush()
    printrevision(rev)

print
print

for ridx, rset in enumerate(revsets):

    print "revset #%i: %s" % (ridx, rset)
    for idx, data in enumerate(results):
        print '%i) %s' % (idx, data[ridx])
    print