view contrib/perf-utils/discovery-helper.sh @ 48178:f12a19d03d2c


#!/bin/bash
#
# produces two repositories that share a common subset and each miss a
# different subset
#
#   $ discovery-helper.sh REPO NBHEADS DEPTH
#
# The goal is to produce two repositories with some common part and some
# exclusive part on each side. Given a source repository REPO, the script
# produces two repositories, REPO-<NBHEADS>h-<DEPTH>d-left and
# REPO-<NBHEADS>h-<DEPTH>d-right.
#
# Each repository will be missing the revisions exclusive to NBHEADS of the
# repository's topological heads: those heads, and the revisions exclusive to
# them (up to DEPTH levels deep), are stripped from it.
#
# The "left" repository will use the NBHEADS first heads (sorted by
# description). The "right" use the last NBHEADS one.
#
# To find out how many topological heads a repo has, use:
#
#   $ hg heads -t -T '{rev}\n' | wc -l
#
# Example:
#
#  The `pypy-2018-09-01` repository has 192 heads. To produce two repositories
#  with 92 common heads and ~50 exclusive heads on each side, run:
#
#    $ ./discovery-helper.sh pypy-2018-09-01 50 10
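#
#  This will create the repositories `pypy-2018-09-01-50h-10d-left` and
#  `pypy-2018-09-01-50h-10d-right` alongside the source repository.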

set -euo pipefail
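# -e: abort on command failure, -u: treat unset variables as errors,
# -o pipefail: a pipeline fails if any command in it fails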

printusage () {
     echo "usage: `basename $0` REPO NBHEADS DEPTH [left|right]" >&2
}
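
# Exit code 64 is EX_USAGE ("command line usage error") from sysexits.h.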

if [ $# -lt 3 ]; then
    printusage
    exit 64
fi

repo="$1"
shift

nbheads="$1"
shift

depth="$1"
shift

doleft=1
doright=1
if [ $# -gt 1 ]; then
    printusage
    exit 64
elif [ $# -eq 1 ]; then
    if [ "$1" == "left" ]; then
        doleft=1
        doright=0
    elif [ "$1" == "right" ]; then
        doleft=0
        doright=1
    else
        printusage
        exit 64
    fi
fi

leftrepo="${repo}-${nbheads}h-${depth}d-left"
rightrepo="${repo}-${nbheads}h-${depth}d-right"

left="first(sort(heads(all()), 'desc'), $nbheads)"
right="last(sort(heads(all()), 'desc'), $nbheads)"

leftsubset="ancestors($left, $depth) and only($left, heads(all() - $left))"
rightsubset="ancestors($right, $depth) and only($right, heads(all() - $right))"
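# Each subset is the set of revisions that will be stripped from that side:
# ancestors of the selected heads (limited to DEPTH generations) that are not
# ancestors of any of the remaining heads, i.e. the revisions exclusive to
# those heads.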

echo '### creating left/right repositories with missing changesets:'
if [ $doleft -eq 1 ]; then
    echo "# left  revset: \"${leftsubset}\""
fi
if [ $doright -eq 1 ]; then
    echo "# right revset: \"${rightsubset}\""
fi

buildone() {
    side="$1"
    dest="$2"
    revset="$3"
    echo "### building $side repository: $dest"
    if [ -e "$dest" ]; then
        echo "destination repo already exists: $dest" >&2
        exit 1
    fi
    echo '# cloning'
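    # Prefer a copy-on-write copy, which is nearly instantaneous on
    # filesystems with reflink support (e.g. btrfs or XFS); if that fails,
    # fall back to a regular clone without a working copy.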
    if ! cp --recursive --reflink=always "${repo}" "${dest}"; then
        hg clone --noupdate "${repo}" "${dest}"
    fi
    echo "# stripping \"${revset}\""
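    # "--config extensions.strip=" enables the strip extension for this one
    # command only; --no-backup skips writing a backup bundle of the stripped
    # changesets.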
    hg -R "${dest}" --config extensions.strip= strip --rev "$revset" --no-backup
}

if [ $doleft -eq 1 ]; then
    buildone left "$leftrepo" "$leftsubset"
fi

if [ $doright -eq 1 ]; then
    buildone right "$rightrepo" "$rightsubset"
fi