view tests/generate-working-copy-states.py @ 39270:37e56607cbb9

lfs: add a progress bar when searching for blobs to upload

The search itself can take an extreme amount of time if there are a lot of
revisions involved.  I've got a local repo that took 6 minutes to push 1850
commits, and 60% of that time was spent here (there are ~70K files):

  \ 58.1%  wrapper.py:    extractpointers  line 297:   pointers = extractpointers(...
  | 57.7%  wrapper.py:    pointersfromctx  line 352:   for p in pointersfromctx(ct...
  | 57.4%  wrapper.py:    pointerfromctx   line 397:   p = pointerfromctx(ctx, f, ...
  \ 38.7%  context.py:    __contains__     line 368:   if f not in ctx:
  | 38.7%  util.py:       __get__          line 82:    return key in self._manifest
  | 38.7%  context.py:    _manifest        line 1416:  result = self.func(obj)
  | 38.7%  manifest.py:   read             line 472:   return self._manifestctx.re...
  \ 25.6%  revlog.py:     revision         line 1562:  text = rl.revision(self._node)
  \ 12.8%  revlog.py:     _chunks          line 2217:  bins = self._chunks(chain, ...
  | 12.0%  revlog.py:     decompress       line 2112:  ladd(decomp(buffer(data, ch...
  \  7.8%  revlog.py:     checkhash        line 2232:  self.checkhash(text, node, ...
  |  7.8%  revlog.py:     hash             line 2315:  if node != self.hash(text, ...
  |  7.8%  revlog.py:     hash             line 2242:  return hash(text, p1, p2)
  \ 12.0%  manifest.py:   __init__         line 1565:  self._data = manifestdict(t...
  \ 16.8%  context.py:    filenode         line 378:   if not _islfs(fctx.filelog(...
  | 15.7%  util.py:       __get__          line 706:   return self._filelog
  | 14.8%  context.py:    _filelog         line 1416:  result = self.func(obj)
  | 14.8%  localrepo.py:  file             line 629:   return self._repo.file(self...
  | 14.8%  filelog.py:    __init__         line 1134:  return filelog.filelog(self...
  | 14.5%  revlog.py:     __init__         line 24:    censorable=True)
author Matt Harbison <matt_harbison@yahoo.com>
date Fri, 24 Aug 2018 17:45:46 -0400
parents ed46d48453e8
children 2372284d9457
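The change described above lives in hgext/lfs/wrapper.py, not in this test
file.  As a minimal sketch of the technique only, the per-changeset pointer
search can be wrapped in a progress bar; this assumes Mercurial's
ui.makeprogress() API (the actual changeset may use a different progress
API), and findpointers() is a hypothetical stand-in for the pointersfromctx()
helper named in the profile.

    def extractpointers(repo, revs):
        """Collect the LFS pointers touched by 'revs', reporting progress."""
        pointers = {}
        progress = repo.ui.makeprogress(b'lfs search', unit=b'changesets',
                                        total=len(revs))
        for rev in revs:
            # findpointers() is hypothetical; it stands in for scanning one
            # changectx with pointersfromctx().
            for p in findpointers(repo[rev]):
                pointers[p.oid()] = p
            progress.increment()
        progress.complete()
        return sorted(pointers.values(), key=lambda p: p.oid())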

# Helper script used for generating history and working copy files and content.
# The file's name corresponds to its history. The number of changesets can
# be specified on the command line. With 2 changesets, files with names like
# content1_content2_content1-untracked are generated. The first two filename
# segments describe the contents in the two changesets. The third segment
# ("content1-untracked") describes the state in the working copy, i.e.
# the file has content "content1" and is untracked (since it was previously
# tracked, it has been forgotten).
#
# This script generates the filenames and their content, but it's up to the
# caller to tell hg about the state.
#
# There are two subcommands:
#   filelist <numchangesets>
#   state <numchangesets> (<changeset>|wc)
#
# Typical usage:
#
# $ python $TESTDIR/generate-working-copy-states.py state 2 1
# $ hg addremove --similarity 0
# $ hg commit -m 'first'
#
# $ python $TESTDIR/generate-working-copy-states.py state 2 1
# $ hg addremove --similarity 0
# $ hg commit -m 'second'
#
# $ python $TESTDIR/generate-working-copy-states.py state 2 wc
# $ hg addremove --similarity 0
# $ hg forget *_*_*-untracked
# $ rm *_*_missing-*
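#
# For reference, the "filelist" subcommand just prints one generated filename
# per line.  With 2 changesets the sorted output begins and ends like this
# (illustrative excerpt, not the full list):
#
# $ python $TESTDIR/generate-working-copy-states.py filelist 2
# content1_content1_content1-tracked
# content1_content1_content1-untracked
# ...
# missing_missing_missing-tracked
# missing_missing_missing-untracked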

from __future__ import absolute_import, print_function

import os
import sys

# Generates pairs of (filename, contents), where 'contents' is a list
# describing the file's content at each revision (or in the working copy).
# At each revision, it is either None or the file's actual content. When not
# None, it may be either new content or the same content as an earlier
# revision, so all of (modified,clean,added,removed) can be tested.
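# For example (illustrative), generatestates(1, []) yields pairs such as:
#   (b'content1_content1-tracked', [b'content1', b'content1'])
#   (b'content1_missing-untracked', [b'content1', None])
#   (b'missing_content2-tracked', [None, b'content2'])
# that is, one pair per combination of changeset content, working-copy
# content and tracked/untracked state.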
def generatestates(maxchangesets, parentcontents):
    depth = len(parentcontents)
    if depth == maxchangesets + 1:
        for tracked in (b'untracked', b'tracked'):
            filename = b"_".join([(content is None and b'missing' or content)
                                for content in parentcontents]) + b"-" + tracked
            yield (filename, parentcontents)
    else:
        for content in ({None, b'content' + (b"%d" % (depth + 1))} |
                      set(parentcontents)):
            for combination in generatestates(maxchangesets,
                                              parentcontents + [content]):
                yield combination

# retrieve the command line arguments
target = sys.argv[1]
maxchangesets = int(sys.argv[2])
if target == 'state':
    depth = sys.argv[3]

# sort to make sure we have stable output
combinations = sorted(generatestates(maxchangesets, []))

# compute file content
content = []
for filename, states in combinations:
    if target == 'filelist':
        print(filename.decode('ascii'))
    elif target == 'state':
        if depth == 'wc':
            # Make sure there is content so the file gets written and can be
            # tracked. It will be deleted outside of this script.
            content.append((filename, states[maxchangesets] or b'TOBEDELETED'))
        else:
            content.append((filename, states[int(depth) - 1]))
    else:
        print("unknown target:", target, file=sys.stderr)
        sys.exit(1)
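
# For instance (illustrative), with "state 2 1" the filename
# 'content1_content2_content1-untracked' is paired with b'content1' (its
# content in the first changeset), while 'missing_content2_content1-tracked'
# is paired with None and is removed below if it exists.  With "state 2 wc",
# a working-copy state of None becomes b'TOBEDELETED' so that the file is
# written out and can later be forgotten and deleted by the caller.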

# write actual content
for filename, data in content:
    if data is not None:
        f = open(filename, 'wb')
        f.write(data + b'\n')
        f.close()
    elif os.path.exists(filename):
        os.remove(filename)