changeset 13337:c86db7b1681e
merge with stable
author:    Matt Mackall <mpm@selenic.com>
date:      Tue, 01 Feb 2011 17:52:25 -0600
parents:   60792fa3c1a8 (diff), 5fc7c84ed9b0 (current diff)
children:  8f5c865b7b4a
diffstat:  128 files changed, 4940 insertions(+), 2635 deletions(-)
--- a/Makefile	Tue Feb 01 17:30:13 2011 -0600
+++ b/Makefile	Tue Feb 01 17:52:25 2011 -0600
@@ -45,7 +45,7 @@
 clean:
 	-$(PYTHON) setup.py clean --all # ignore errors from this command
 	find . \( -name '*.py[cdo]' -o -name '*.so' \) -exec rm -f '{}' ';'
-	rm -f MANIFEST mercurial/__version__.py tests/*.err
+	rm -f MANIFEST tests/*.err
 	rm -rf build mercurial/locale
 	$(MAKE) -C doc clean
--- a/contrib/bash_completion	Tue Feb 01 17:30:13 2011 -0600
+++ b/contrib/bash_completion	Tue Feb 01 17:52:25 2011 -0600
@@ -241,7 +241,7 @@
         _hg_tags
         _hg_branches
         ;;
-    commit)
+    commit|record)
         _hg_status "mar"
         ;;
     remove)
--- a/contrib/check-code.py	Tue Feb 01 17:30:13 2011 -0600
+++ b/contrib/check-code.py	Tue Feb 01 17:52:25 2011 -0600
@@ -8,6 +8,7 @@
 # GNU General Public License version 2 or any later version.
 
 import re, glob, os, sys
+import keyword
 import optparse
 
 def repquote(m):
@@ -64,6 +65,7 @@
   ('^([^"\']|("[^"]*")|(\'[^\']*\'))*\\^', "^ must be quoted"),
   (r'^source\b', "don't use 'source', use '.'"),
   (r'touch -d', "don't use 'touch -d', use 'touch -t' instead"),
+  (r'ls\s+[^|-]+\s+-', "options to 'ls' must come before filenames"),
 ]
 
 testfilters = [
@@ -117,8 +119,8 @@
   (r'^\s*(if|while|def|class|except|try)\s[^[]*:\s*[^\]#\s]+',
    "linebreak after :"),
   (r'class\s[^(]:', "old-style class, use class foo(object)"),
-  (r'^\s+del\(', "del isn't a function"),
-  (r'^\s+except\(', "except isn't a function"),
+  (r'\b(%s)\(' % '|'.join(keyword.kwlist),
+   "Python keyword is not a function"),
   (r',]', "unneeded trailing ',' in list"),
 #  (r'class\s[A-Z][^\(]*\((?!Exception)',
 #   "don't capitalize non-exception classes"),
@@ -127,11 +129,15 @@
   (r'[\x80-\xff]', "non-ASCII character literal"),
   (r'("\')\.format\(', "str.format() not available in Python 2.4"),
   (r'^\s*with\s+', "with not available in Python 2.4"),
+  (r'^\s*except.* as .*:', "except as not available in Python 2.4"),
+  (r'^\s*os\.path\.relpath', "relpath not available in Python 2.4"),
   (r'(?<!def)\s+(any|all|format)\(',
    "any/all/format not available in Python 2.4"),
   (r'(?<!def)\s+(callable)\(',
    "callable not available in Python 3, use hasattr(f, '__call__')"),
   (r'if\s.*\selse', "if ... else form not available in Python 2.4"),
+  (r'^\s*(%s)\s\s' % '|'.join(keyword.kwlist),
+   "gratuitous whitespace after Python keyword"),
   (r'([\(\[]\s\S)|(\S\s[\)\]])', "gratuitous whitespace in () or []"),
 #  (r'\s\s=', "gratuitous whitespace before ="),
   (r'[^>< ](\+=|-=|!=|<>|<=|>=|<<=|>>=)\S',
@@ -145,6 +151,9 @@
   (r'raise Exception', "don't raise generic exceptions"),
   (r'ui\.(status|progress|write|note|warn)\([\'\"]x',
    "warning: unwrapped ui message"),
+  (r' is\s+(not\s+)?["\'0-9-]', "object comparison with literal"),
+  (r' [=!]=\s+(True|False|None)',
+   "comparison with singleton, use 'is' or 'is not' instead"),
 ]
 
 pyfilters = [
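The two keyword-driven rules added above both build a single alternation from ``keyword.kwlist``. A standalone sketch of that technique (illustrative only, not part of the changeset)::

  import keyword
  import re

  # Flag any Python keyword written like a function call, e.g. "del(x)".
  kwcall = re.compile(r'\b(%s)\(' % '|'.join(keyword.kwlist))

  for line in ("del(files[0])", "del files[0]", "if(x): pass"):
      if kwcall.search(line):
          print("%s: Python keyword is not a function" % line)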
--- a/contrib/perf.py	Tue Feb 01 17:30:13 2011 -0600
+++ b/contrib/perf.py	Tue Feb 01 17:52:25 2011 -0600
@@ -80,11 +80,12 @@
     timer(d)
 
 def perfindex(ui, repo):
-    import mercurial.changelog
+    import mercurial.revlog
+    mercurial.revlog._prereadsize = 2**24 # disable lazy parser in old hg
+    n = repo["tip"].node()
     def d():
-        t = repo.changelog.tip()
-        repo.changelog = mercurial.changelog.changelog(repo.sopener)
-        repo.changelog._loadindexmap()
+        repo.invalidate()
+        repo[n]
     timer(d)
 
 def perfstartup(ui, repo):
--- a/doc/Makefile	Tue Feb 01 17:30:13 2011 -0600
+++ b/doc/Makefile	Tue Feb 01 17:52:25 2011 -0600
@@ -1,11 +1,13 @@
 SOURCES=$(wildcard *.[0-9].txt)
 MAN=$(SOURCES:%.txt=%)
 HTML=$(SOURCES:%.txt=%.html)
-GENDOC=gendoc.py ../mercurial/commands.py ../mercurial/help.py ../mercurial/help/*.txt
+GENDOC=gendoc.py ../mercurial/commands.py ../mercurial/help.py \
+	../mercurial/help/*.txt ../hgext/*.py ../hgext/*/__init__.py
 PREFIX=/usr/local
 MANDIR=$(PREFIX)/share/man
 INSTALL=install -c -m 644
 PYTHON=python
+RSTARGS=
 
 export LANGUAGE=C
 export LC_ALL=C
@@ -24,11 +26,11 @@
 	mv $@.tmp $@
 
 %: %.txt common.txt
-	$(PYTHON) runrst hgmanpage --halt warning \
+	$(PYTHON) runrst hgmanpage $(RSTARGS) --halt warning \
 	  --strip-elements-with-class htmlonly $*.txt $*
 
 %.html: %.txt common.txt
-	$(PYTHON) runrst html --halt warning \
+	$(PYTHON) runrst html $(RSTARGS) --halt warning \
 	  --link-stylesheet --stylesheet-path style.css $*.txt $*.html
 
 MANIFEST: man html
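The new ``RSTARGS`` variable is empty by default; it exists so extra arguments can be passed through to the docutils-based ``runrst`` calls from the make command line, for example (``--traceback`` is just an illustrative docutils option)::

  make html RSTARGS=--traceback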
--- a/doc/gendoc.py	Tue Feb 01 17:30:13 2011 -0600
+++ b/doc/gendoc.py	Tue Feb 01 17:52:25 2011 -0600
@@ -40,7 +40,7 @@
         if longopt:
             allopts.append("--%s" % longopt)
         desc += default and _(" (default: %s)") % default or ""
-        yield(", ".join(allopts), desc)
+        yield (", ".join(allopts), desc)
 
 def get_cmd(cmd, cmdtable):
     d = {}
--- a/doc/hgrc.5.txt	Tue Feb 01 17:30:13 2011 -0600
+++ b/doc/hgrc.5.txt	Tue Feb 01 17:52:25 2011 -0600
@@ -727,8 +727,8 @@
 ``port``
   Optional. Port to connect to on mail server. Default: 25.
 ``tls``
-  Optional. Whether to connect to mail server using TLS. True or
-  False. Default: False.
+  Optional. Method to enable TLS when connecting to mail server: starttls,
+  smtps or none. Default: none.
 ``username``
   Optional. User name for authenticating with the SMTP server.
   Default: none.
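With the new value names, a typical ``[smtp]`` block for a STARTTLS-capable server might look like this (host, port and username are placeholders)::

  [smtp]
  host = smtp.example.com
  port = 587
  tls = starttls
  username = alice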
--- a/hgext/bookmarks.py	Tue Feb 01 17:30:13 2011 -0600
+++ b/hgext/bookmarks.py	Tue Feb 01 17:52:25 2011 -0600
@@ -31,7 +31,7 @@
 from mercurial.i18n import _
 from mercurial.node import nullid, nullrev, bin, hex, short
 from mercurial import util, commands, repair, extensions, pushkey, hg, url
-from mercurial import revset
+from mercurial import revset, encoding
 import os
 
 def write(repo):
@@ -57,7 +57,7 @@
     try:
         file = repo.opener('bookmarks', 'w', atomictemp=True)
         for refspec, node in refs.iteritems():
-            file.write("%s %s\n" % (hex(node), refspec))
+            file.write("%s %s\n" % (hex(node), encoding.fromlocal(refspec)))
         file.rename()
 
         # touch 00changelog.i so hgweb reloads bookmarks (no lock needed)
@@ -142,7 +142,7 @@
         write(repo)
         return
 
-    if mark != None:
+    if mark is not None:
         if "\n" in mark:
             raise util.Abort(_("bookmark name cannot contain newlines"))
         mark = mark.strip()
@@ -238,6 +238,7 @@
             bookmarks = {}
             for line in self.opener('bookmarks'):
                 sha, refspec = line.strip().split(' ', 1)
+                refspec = encoding.tolocal(refspec)
                 bookmarks[refspec] = self.changelog.lookup(sha)
         except:
             pass
@@ -342,7 +343,7 @@
         rb = remote.listkeys('bookmarks')
         for k in rb.keys():
             if k in self._bookmarks:
-                nr, nl = rb[k], self._bookmarks[k]
+                nr, nl = rb[k], hex(self._bookmarks[k])
                 if nr in self:
                     cr = self[nr]
                     cl = self[nl]
@@ -357,14 +358,12 @@
         return result
 
     def addchangegroup(self, *args, **kwargs):
-        parents = self.dirstate.parents()
-
         result = super(bookmark_repo, self).addchangegroup(*args, **kwargs)
         if result > 1:
             # We have more heads than before
             return result
         node = self.changelog.tip()
-
+        parents = self.dirstate.parents()
         self._bookmarksupdate(parents, node)
         return result
--- a/hgext/convert/__init__.py Tue Feb 01 17:30:13 2011 -0600 +++ b/hgext/convert/__init__.py Tue Feb 01 17:52:25 2011 -0600 @@ -59,10 +59,10 @@ --sourcesort try to preserve source revisions order, only supported by Mercurial sources. - If <REVMAP> isn't given, it will be put in a default location - (<dest>/.hg/shamap by default). The <REVMAP> is a simple text file - that maps each source commit ID to the destination ID for that - revision, like so:: + If ``REVMAP`` isn't given, it will be put in a default location + (``<dest>/.hg/shamap`` by default). The ``REVMAP`` is a simple + text file that maps each source commit ID to the destination ID + for that revision, like so:: <source ID> <destination ID> @@ -138,15 +138,19 @@ Mercurial Source '''''''''''''''' - --config convert.hg.ignoreerrors=False (boolean) - ignore integrity errors when reading. Use it to fix Mercurial - repositories with missing revlogs, by converting from and to - Mercurial. - --config convert.hg.saverev=False (boolean) - store original revision ID in changeset (forces target IDs to - change) - --config convert.hg.startrev=0 (hg revision identifier) - convert start revision and its descendants + The Mercurial source recognizes the following configuration + options, which you can set on the command line with ``--config``: + + :convert.hg.ignoreerrors: ignore integrity errors when reading. + Use it to fix Mercurial repositories with missing revlogs, by + converting from and to Mercurial. Default is False. + + :convert.hg.saverev: store original. revision ID in changeset + (forces target IDs to change). It takes and boolean argument + and defaults to False. + + :convert.hg.startrev: convert start revision and its descendants. + It takes a hg revision identifier and defaults to 0. CVS Source '''''''''' @@ -154,42 +158,46 @@ CVS source will use a sandbox (i.e. a checked-out copy) from CVS to indicate the starting point of what will be converted. Direct access to the repository files is not needed, unless of course the - repository is :local:. The conversion uses the top level directory - in the sandbox to find the CVS repository, and then uses CVS rlog - commands to find files to convert. This means that unless a - filemap is given, all files under the starting directory will be + repository is ``:local:``. The conversion uses the top level + directory in the sandbox to find the CVS repository, and then uses + CVS rlog commands to find files to convert. This means that unless + a filemap is given, all files under the starting directory will be converted, and that any directory reorganization in the CVS sandbox is ignored. - The options shown are the defaults. + The following options can be used with ``--config``: + + :convert.cvsps.cache: Set to False to disable remote log caching, + for testing and debugging purposes. Default is True. + + :convert.cvsps.fuzz: Specify the maximum time (in seconds) that is + allowed between commits with identical user and log message in + a single changeset. When very large files were checked in as + part of a changeset then the default may not be long enough. + The default is 60. - --config convert.cvsps.cache=True (boolean) - Set to False to disable remote log caching, for testing and - debugging purposes. - --config convert.cvsps.fuzz=60 (integer) - Specify the maximum time (in seconds) that is allowed between - commits with identical user and log message in a single - changeset. When very large files were checked in as part of a - changeset then the default may not be long enough. 
- --config convert.cvsps.mergeto='{{mergetobranch ([-\\w]+)}}' - Specify a regular expression to which commit log messages are - matched. If a match occurs, then the conversion process will - insert a dummy revision merging the branch on which this log - message occurs to the branch indicated in the regex. - --config convert.cvsps.mergefrom='{{mergefrombranch ([-\\w]+)}}' - Specify a regular expression to which commit log messages are - matched. If a match occurs, then the conversion process will - add the most recent revision on the branch indicated in the - regex as the second parent of the changeset. - --config hook.cvslog - Specify a Python function to be called at the end of gathering - the CVS log. The function is passed a list with the log entries, - and can modify the entries in-place, or add or delete them. - --config hook.cvschangesets - Specify a Python function to be called after the changesets - are calculated from the the CVS log. The function is passed - a list with the changeset entries, and can modify the changesets - in-place, or add or delete them. + :convert.cvsps.mergeto: Specify a regular expression to which + commit log messages are matched. If a match occurs, then the + conversion process will insert a dummy revision merging the + branch on which this log message occurs to the branch + indicated in the regex. Default is ``{{mergetobranch + ([-\\w]+)}}`` + + :convert.cvsps.mergefrom: Specify a regular expression to which + commit log messages are matched. If a match occurs, then the + conversion process will add the most recent revision on the + branch indicated in the regex as the second parent of the + changeset. Default is ``{{mergefrombranch ([-\\w]+)}}`` + + :hook.cvslog: Specify a Python function to be called at the end of + gathering the CVS log. The function is passed a list with the + log entries, and can modify the entries in-place, or add or + delete them. + + :hook.cvschangesets: Specify a Python function to be called after + the changesets are calculated from the the CVS log. The + function is passed a list with the changeset entries, and can + modify the changesets in-place, or add or delete them. An additional "debugcvsps" Mercurial command allows the builtin changeset merging code to be run without doing a conversion. Its @@ -200,29 +208,33 @@ ''''''''''''''''' Subversion source detects classical trunk/branches/tags layouts. - By default, the supplied "svn://repo/path/" source URL is - converted as a single branch. If "svn://repo/path/trunk" exists it - replaces the default branch. If "svn://repo/path/branches" exists, - its subdirectories are listed as possible branches. If - "svn://repo/path/tags" exists, it is looked for tags referencing - converted branches. Default "trunk", "branches" and "tags" values - can be overridden with following options. Set them to paths + By default, the supplied ``svn://repo/path/`` source URL is + converted as a single branch. If ``svn://repo/path/trunk`` exists + it replaces the default branch. If ``svn://repo/path/branches`` + exists, its subdirectories are listed as possible branches. If + ``svn://repo/path/tags`` exists, it is looked for tags referencing + converted branches. Default ``trunk``, ``branches`` and ``tags`` + values can be overridden with following options. Set them to paths relative to the source URL, or leave them blank to disable auto detection. 
- --config convert.svn.branches=branches (directory name) - specify the directory containing branches - --config convert.svn.tags=tags (directory name) - specify the directory containing tags - --config convert.svn.trunk=trunk (directory name) - specify the name of the trunk branch + The following options can be set with ``--config``: + + :convert.svn.branches: specify the directory containing branches. + The defaults is ``branches``. + + :convert.svn.tags: specify the directory containing tags. The + default is ``tags``. + + :convert.svn.trunk: specify the name of the trunk branch The + defauls is ``trunk``. Source history can be retrieved starting at a specific revision, instead of being integrally converted. Only single branch conversions are supported. - --config convert.svn.startrev=0 (svn revision number) - specify start Subversion revision. + :convert.svn.startrev: specify start Subversion revision number. + The default is 0. Perforce Source ''''''''''''''' @@ -232,24 +244,27 @@ source to a flat Mercurial repository, ignoring labels, branches and integrations. Note that when a depot path is given you then usually should specify a target directory, because otherwise the - target may be named ...-hg. + target may be named ``...-hg``. It is possible to limit the amount of source history to be - converted by specifying an initial Perforce revision. + converted by specifying an initial Perforce revision: - --config convert.p4.startrev=0 (perforce changelist number) - specify initial Perforce revision. + :convert.p4.startrev: specify initial Perforce revision, a + Perforce changelist number). Mercurial Destination ''''''''''''''''''''' - --config convert.hg.clonebranches=False (boolean) - dispatch source branches in separate clones. - --config convert.hg.tagsbranch=default (branch name) - tag revisions branch name - --config convert.hg.usebranchnames=True (boolean) - preserve branch names + The following options are supported: + + :convert.hg.clonebranches: dispatch source branches in separate + clones. The default is False. + :convert.hg.tagsbranch: branch name for tag revisions, defaults to + ``default``. + + :convert.hg.usebranchnames: preserve branch names. The default is + True """ return convcmd.convert(ui, src, dest, revmapfile, **opts)
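In their new documented form these settings are still ordinary ``--config`` overrides on the command line; for example (repository paths are placeholders)::

  hg convert --config convert.hg.saverev=True orig-repo converted-repo
  hg convert --config convert.svn.trunk=stable svn://repo/path/ svn-hg-repo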
--- a/hgext/convert/subversion.py	Tue Feb 01 17:30:13 2011 -0600
+++ b/hgext/convert/subversion.py	Tue Feb 01 17:52:25 2011 -0600
@@ -914,7 +914,7 @@
     arg = encodeargs(args)
     hgexe = util.hgexecutable()
     cmd = '%s debugsvnlog' % util.shellquote(hgexe)
-    stdin, stdout = util.popen2(cmd)
+    stdin, stdout = util.popen2(util.quotecommand(cmd))
     stdin.write(arg)
     try:
         stdin.close()
--- a/hgext/eol.py	Tue Feb 01 17:30:13 2011 -0600
+++ b/hgext/eol.py	Tue Feb 01 17:52:25 2011 -0600
@@ -67,6 +67,11 @@
 Such files are normally not touched under the assumption that they
 have mixed EOLs on purpose.
 
+The extension provides ``cleverencode:`` and ``cleverdecode:`` filters
+like the deprecated win32text extension does. This means that you can
+disable win32text and enable eol and your filters will still work. You
+only need to these filters until you have prepared a ``.hgeol`` file.
+
 The ``win32text.forbid*`` hooks provided by the win32text extension
 have been unified into a single hook named ``eol.hook``. The hook will
 lookup the expected line endings from the ``.hgeol`` file, which means
@@ -115,6 +120,9 @@
     'to-lf': tolf,
     'to-crlf': tocrlf,
     'is-binary': isbinary,
+    # The following provide backwards compatibility with win32text
+    'cleverencode:': tolf,
+    'cleverdecode:': tocrlf
 }
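For a setup that used win32text's ``cleverencode:``/``cleverdecode:`` filters, the transition described above amounts to switching which extension is enabled while leaving the filter rules alone; a sketch of such an hgrc (the ``**`` pattern is the usual win32text example, adjust as needed)::

  [extensions]
  win32text = !
  eol =

  [encode]
  ** = cleverencode:

  [decode]
  ** = cleverdecode: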
--- a/hgext/hgk.py	Tue Feb 01 17:30:13 2011 -0600
+++ b/hgext/hgk.py	Tue Feb 01 17:52:25 2011 -0600
@@ -181,14 +181,14 @@
             if i + x >= count:
                 l[chunk - x:] = [0] * (chunk - x)
                 break
-            if full != None:
+            if full is not None:
                 l[x] = repo[i + x]
                 l[x].changeset() # force reading
             else:
                 l[x] = 1
         for x in xrange(chunk - 1, -1, -1):
             if l[x] != 0:
-                yield (i + x, full != None and l[x] or None)
+                yield (i + x, full is not None and l[x] or None)
         if i == 0:
             break
--- a/hgext/keyword.py Tue Feb 01 17:30:13 2011 -0600 +++ b/hgext/keyword.py Tue Feb 01 17:52:25 2011 -0600 @@ -70,9 +70,8 @@ replaced with customized keywords and templates. Again, run :hg:`kwdemo` to control the results of your configuration changes. -Before changing/disabling active keywords, run :hg:`kwshrink` to avoid -the risk of inadvertently storing expanded keywords in the change -history. +Before changing/disabling active keywords, you must run :hg:`kwshrink` +to avoid storing expanded keywords in the change history. To force expansion after enabling it, or a configuration change, run :hg:`kwexpand`. @@ -101,6 +100,14 @@ # names of extensions using dorecord recordextensions = 'record' +colortable = { + 'kwfiles.enabled': 'green bold', + 'kwfiles.deleted': 'cyan bold underline', + 'kwfiles.enabledunknown': 'green', + 'kwfiles.ignored': 'bold', + 'kwfiles.ignoredunknown': 'none' +} + # date like in cvs' $Date utcdate = lambda x: util.datestr((x[0], 0), '%Y/%m/%d %H:%M:%S') # date like in svn's $Date @@ -111,7 +118,6 @@ # make keyword tools accessible kwtools = {'templater': None, 'hgcmd': ''} - def _defaultkwmaps(ui): '''Returns default keywordmaps according to keywordset configuration.''' templates = { @@ -170,14 +176,25 @@ for k, v in kwmaps) else: self.templates = _defaultkwmaps(self.ui) - escaped = '|'.join(map(re.escape, self.templates.keys())) - self.re_kw = re.compile(r'\$(%s)\$' % escaped) - self.re_kwexp = re.compile(r'\$(%s): [^$\n\r]*? \$' % escaped) - templatefilters.filters.update({'utcdate': utcdate, 'svnisodate': svnisodate, 'svnutcdate': svnutcdate}) + @util.propertycache + def escape(self): + '''Returns bar-separated and escaped keywords.''' + return '|'.join(map(re.escape, self.templates.keys())) + + @util.propertycache + def rekw(self): + '''Returns regex for unexpanded keywords.''' + return re.compile(r'\$(%s)\$' % self.escape) + + @util.propertycache + def rekwexp(self): + '''Returns regex for expanded keywords.''' + return re.compile(r'\$(%s): [^$\n\r]*? 
\$' % self.escape) + def substitute(self, data, path, ctx, subfunc): '''Replaces keywords in data with expanded template.''' def kwsub(mobj): @@ -191,11 +208,15 @@ return '$%s: %s $' % (kw, ekw) return subfunc(kwsub, data) + def linkctx(self, path, fileid): + '''Similar to filelog.linkrev, but returns a changectx.''' + return self.repo.filectx(path, fileid=fileid).changectx() + def expand(self, path, node, data): '''Returns data with keywords expanded.''' if not self.restrict and self.match(path) and not util.binary(data): - ctx = self.repo.filectx(path, fileid=node).changectx() - return self.substitute(data, path, ctx, self.re_kw.sub) + ctx = self.linkctx(path, node) + return self.substitute(data, path, ctx, self.rekw.sub) return data def iskwfile(self, cand, ctx): @@ -212,8 +233,8 @@ kwcmd = self.restrict and lookup # kwexpand/kwshrink if self.restrict or expand and lookup: mf = ctx.manifest() - fctx = ctx - subn = (self.restrict or rekw) and self.re_kw.subn or self.re_kwexp.subn + lctx = ctx + re_kw = (self.restrict or rekw) and self.rekw or self.rekwexp msg = (expand and _('overwriting %s expanding keywords\n') or _('overwriting %s shrinking keywords\n')) for f in candidates: @@ -225,12 +246,12 @@ continue if expand: if lookup: - fctx = self.repo.filectx(f, fileid=mf[f]).changectx() - data, found = self.substitute(data, f, fctx, subn) + lctx = self.linkctx(f, mf[f]) + data, found = self.substitute(data, f, lctx, re_kw.subn) elif self.restrict: - found = self.re_kw.search(data) + found = re_kw.search(data) else: - data, found = _shrinktext(data, subn) + data, found = _shrinktext(data, re_kw.subn) if found: self.ui.note(msg % f) self.repo.wwrite(f, data, ctx.flags(f)) @@ -242,7 +263,7 @@ def shrink(self, fname, text): '''Returns text with all keyword substitutions removed.''' if self.match(fname) and not util.binary(text): - return _shrinktext(text, self.re_kwexp.sub) + return _shrinktext(text, self.rekwexp.sub) return text def shrinklines(self, fname, lines): @@ -250,7 +271,7 @@ if self.match(fname): text = ''.join(lines) if not util.binary(text): - return _shrinktext(text, self.re_kwexp.sub).splitlines(True) + return _shrinktext(text, self.rekwexp.sub).splitlines(True) return lines def wread(self, fname, data): @@ -334,6 +355,9 @@ ui.note(_('creating temporary repository at %s\n') % tmpdir) repo = localrepo.localrepository(ui, tmpdir, True) ui.setconfig('keyword', fn, '') + svn = ui.configbool('keywordset', 'svn') + # explicitly set keywordset for demo output + ui.setconfig('keywordset', 'svn', svn) uikwmaps = ui.configitems('keywordmaps') if args or opts.get('rcfile'): @@ -341,7 +365,10 @@ if uikwmaps: ui.status(_('\textending current template maps\n')) if opts.get('default') or not uikwmaps: - ui.status(_('\toverriding default template maps\n')) + if svn: + ui.status(_('\toverriding default svn keywordset\n')) + else: + ui.status(_('\toverriding default cvs keywordset\n')) if opts.get('rcfile'): ui.readconfig(opts.get('rcfile')) if args: @@ -353,7 +380,10 @@ ui.readconfig(repo.join('hgrc')) kwmaps = dict(ui.configitems('keywordmaps')) elif opts.get('default'): - ui.status(_('\n\tconfiguration using default keyword template maps\n')) + if svn: + ui.status(_('\n\tconfiguration using default svn keywordset\n')) + else: + ui.status(_('\n\tconfiguration using default cvs keywordset\n')) kwmaps = _defaultkwmaps(ui) if uikwmaps: ui.status(_('\tdisabling current template maps\n')) @@ -367,6 +397,7 @@ reposetup(ui, repo) ui.write('[extensions]\nkeyword =\n') demoitems('keyword', 
ui.configitems('keyword')) + demoitems('keywordset', ui.configitems('keywordset')) demoitems('keywordmaps', kwmaps.iteritems()) keywords = '$' + '$\n$'.join(sorted(kwmaps.keys())) + '$\n' repo.wopener(fn, 'w').write(keywords) @@ -424,24 +455,26 @@ files = sorted(modified + added + clean) wctx = repo[None] kwfiles = kwt.iskwfile(files, wctx) + kwdeleted = kwt.iskwfile(deleted, wctx) kwunknown = kwt.iskwfile(unknown, wctx) if not opts.get('ignore') or opts.get('all'): - showfiles = kwfiles, kwunknown + showfiles = kwfiles, kwdeleted, kwunknown else: - showfiles = [], [] + showfiles = [], [], [] if opts.get('all') or opts.get('ignore'): showfiles += ([f for f in files if f not in kwfiles], [f for f in unknown if f not in kwunknown]) - for char, filenames in zip('KkIi', showfiles): + kwlabels = 'enabled deleted enabledunknown ignored ignoredunknown'.split() + kwstates = zip('K!kIi', showfiles, kwlabels) + for char, filenames, kwstate in kwstates: fmt = (opts.get('all') or ui.verbose) and '%s %%s\n' % char or '%s\n' for f in filenames: - ui.write(fmt % repo.pathto(f, cwd)) + ui.write(fmt % repo.pathto(f, cwd), label='kwfiles.' + kwstate) def shrink(ui, repo, *pats, **opts): '''revert expanded keywords in the working directory - Run before changing/disabling active keywords or if you experience - problems with :hg:`import` or :hg:`merge`. + Must be run before changing/disabling active keywords. kwshrink refuses to run if given files contain local changes. ''' @@ -603,8 +636,6 @@ finally: wlock.release() - repo.__class__ = kwrepo - def kwfilectx_cmp(orig, self, fctx): # keyword affects data size, comparing wdir and filelog size does # not make sense @@ -628,6 +659,8 @@ except KeyError: pass + repo.__class__ = kwrepo + cmdtable = { 'kwdemo': (demo,
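The new ``escape``/``rekw``/``rekwexp`` attributes above rely on ``util.propertycache`` so the keyword regexps are only compiled the first time they are needed. A minimal sketch of that caching pattern (not the actual mercurial.util implementation)::

  import re

  class propertycache(object):
      '''compute the value once, then store it on the instance so the
      descriptor is bypassed on later lookups'''
      def __init__(self, func):
          self.func = func
          self.name = func.__name__
      def __get__(self, obj, objtype=None):
          value = self.func(obj)
          obj.__dict__[self.name] = value   # shadows the descriptor
          return value

  class templater(object):
      def __init__(self, templates):
          self.templates = templates
      @propertycache
      def rekw(self):
          # built lazily, exactly once per instance
          return re.compile(r'\$(%s)\$' % '|'.join(map(re.escape, self.templates)))

  t = templater({'Id': '', 'Date': ''})
  print(t.rekw.pattern)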
--- a/hgext/mq.py Tue Feb 01 17:30:13 2011 -0600 +++ b/hgext/mq.py Tue Feb 01 17:52:25 2011 -0600 @@ -86,6 +86,8 @@ parent = None format = None subject = None + branch = None + nodeid = None diffstart = 0 for line in file(pf): @@ -106,6 +108,10 @@ date = line[7:] elif line.startswith("# Parent "): parent = line[9:] + elif line.startswith("# Branch "): + branch = line[9:] + elif line.startswith("# Node ID "): + nodeid = line[10:] elif not line.startswith("# ") and line: message.append(line) format = None @@ -134,6 +140,9 @@ eatdiff(message) eatdiff(comments) + # Remember the exact starting line of the patch diffs before consuming + # empty lines, for external use by TortoiseHg and others + self.diffstartline = len(comments) eatempty(message) eatempty(comments) @@ -147,6 +156,9 @@ self.user = user self.date = date self.parent = parent + # nodeid and branch are for external use by TortoiseHg and others + self.nodeid = nodeid + self.branch = branch self.haspatch = diffstart > 1 self.plainmode = plainmode @@ -793,6 +805,19 @@ return top, patch return None, None + def check_substate(self, repo): + '''return list of subrepos at a different revision than substate. + Abort if any subrepos have uncommitted changes.''' + inclsubs = [] + wctx = repo[None] + for s in wctx.substate: + if wctx.sub(s).dirty(True): + raise util.Abort( + _("uncommitted changes in subrepository %s") % s) + elif wctx.sub(s).dirty(): + inclsubs.append(s) + return inclsubs + def check_localchanges(self, repo, force=False, refresh=True): m, a, r, d = repo.status()[:4] if (m or a or r or d) and not force: @@ -826,16 +851,23 @@ % patchfn) else: raise util.Abort(_('patch "%s" already exists') % patchfn) + + inclsubs = self.check_substate(repo) + if inclsubs: + inclsubs.append('.hgsubstate') if opts.get('include') or opts.get('exclude') or pats: + if inclsubs: + pats = list(pats or []) + inclsubs match = cmdutil.match(repo, pats, opts) # detect missing files in pats def badfn(f, msg): - raise util.Abort('%s: %s' % (f, msg)) + if f != '.hgsubstate': # .hgsubstate is auto-created + raise util.Abort('%s: %s' % (f, msg)) match.bad = badfn m, a, r, d = repo.status(match=match)[:4] else: m, a, r, d = self.check_localchanges(repo, force=True) - match = cmdutil.matchfiles(repo, m + a + r) + match = cmdutil.matchfiles(repo, m + a + r + inclsubs) if len(repo[None].parents()) > 1: raise util.Abort(_('cannot manage merge changesets')) commitfiles = m + a + r @@ -1006,7 +1038,7 @@ raise util.Abort(_("patch %s not in series") % patch) def push(self, repo, patch=None, force=False, list=False, - mergeq=None, all=False, move=False): + mergeq=None, all=False, move=False, exact=False): diffopts = self.diffopts() wlock = repo.wlock() try: @@ -1015,7 +1047,7 @@ heads += ls if not heads: heads = [nullid] - if repo.dirstate.parents()[0] not in heads: + if repo.dirstate.parents()[0] not in heads and not exact: self.ui.status(_("(working directory not at a head)\n")) if not self.series: @@ -1062,9 +1094,21 @@ if not force: self.check_localchanges(repo) + if exact: + if move: + raise util.Abort(_("cannot use --exact and --move together")) + if self.applied: + raise util.Abort(_("cannot push --exact with applied patches")) + root = self.series[start] + target = patchheader(self.join(root), self.plainmode).parent + if not target: + raise util.Abort(_("%s does not have a parent recorded" % root)) + if not repo[target] == repo['.']: + hg.update(repo, target) + if move: if not patch: - raise util.Abort(_("please specify the patch to move")) + raise 
util.Abort(_("please specify the patch to move")) for i, rpn in enumerate(self.full_series[start:]): # strip markers for patch guards if self.guard_re.split(rpn, 1)[0] == patch: @@ -1102,7 +1146,7 @@ for f in all_files: if f not in repo.dirstate: try: - util.unlink(repo.wjoin(f)) + util.unlinkpath(repo.wjoin(f)) except OSError, inst: if inst.errno != errno.ENOENT: raise @@ -1196,7 +1240,7 @@ raise util.Abort(_("deletions found between repo revs")) for f in a: try: - util.unlink(repo.wjoin(f)) + util.unlinkpath(repo.wjoin(f)) except OSError, e: if e.errno != errno.ENOENT: raise @@ -1247,6 +1291,8 @@ if repo.changelog.heads(top) != [top]: raise util.Abort(_("cannot refresh a revision with children")) + inclsubs = self.check_substate(repo) + cparents = repo.changelog.parents(top) patchparent = self.qparents(repo, top) ph = patchheader(self.join(patchfn), self.plainmode) @@ -1270,10 +1316,10 @@ # and then commit. # # this should really read: - # mm, dd, aa, aa2 = repo.status(tip, patchparent)[:4] + # mm, dd, aa = repo.status(top, patchparent)[:3] # but we do it backwards to take advantage of manifest/chlog # caching against the next repo.status call - mm, aa, dd, aa2 = repo.status(patchparent, top)[:4] + mm, aa, dd = repo.status(patchparent, top)[:3] changes = repo.changelog.read(top) man = repo.manifest.read(changes[0]) aaa = aa[:] @@ -1289,49 +1335,43 @@ else: match = cmdutil.matchall(repo) m, a, r, d = repo.status(match=match)[:4] + mm = set(mm) + aa = set(aa) + dd = set(dd) # we might end up with files that were added between # qtip and the dirstate parent, but then changed in the # local dirstate. in this case, we want them to only # show up in the added section for x in m: - if x == '.hgsub' or x == '.hgsubstate': - self.ui.warn(_('warning: not refreshing %s\n') % x) - continue if x not in aa: - mm.append(x) + mm.add(x) # we might end up with files added by the local dirstate that # were deleted by the patch. In this case, they should only # show up in the changed section. 
for x in a: - if x == '.hgsub' or x == '.hgsubstate': - self.ui.warn(_('warning: not adding %s\n') % x) - continue if x in dd: - del dd[dd.index(x)] - mm.append(x) + dd.remove(x) + mm.add(x) else: - aa.append(x) + aa.add(x) # make sure any files deleted in the local dirstate # are not in the add or change column of the patch forget = [] for x in d + r: - if x == '.hgsub' or x == '.hgsubstate': - self.ui.warn(_('warning: not removing %s\n') % x) - continue if x in aa: - del aa[aa.index(x)] + aa.remove(x) forget.append(x) continue - elif x in mm: - del mm[mm.index(x)] - dd.append(x) - - m = list(set(mm)) - r = list(set(dd)) - a = list(set(aa)) + else: + mm.discard(x) + dd.add(x) + + m = list(mm) + r = list(dd) + a = list(aa) c = [filter(matchfn, l) for l in (m, a, r)] - match = cmdutil.matchfiles(repo, set(c[0] + c[1] + c[2])) + match = cmdutil.matchfiles(repo, set(c[0] + c[1] + c[2] + inclsubs)) chunks = patch.diff(repo, patchparent, match=match, changes=c, opts=diffopts) for chunk in chunks: @@ -1529,7 +1569,7 @@ l = line.rstrip() l = l[10:].split(' ') qpp = [bin(x) for x in l] - elif datastart != None: + elif datastart is not None: l = line.rstrip() n, name = l.split(':', 1) if n: @@ -2344,7 +2384,8 @@ mergeq = queue(ui, repo.join(""), newpath) ui.warn(_("merging with queue at: %s\n") % mergeq.path) ret = q.push(repo, patch, force=opts.get('force'), list=opts.get('list'), - mergeq=mergeq, all=opts.get('all'), move=opts.get('move')) + mergeq=mergeq, all=opts.get('all'), move=opts.get('move'), + exact=opts.get('exact')) return ret def pop(ui, repo, patch=None, **opts): @@ -2878,7 +2919,7 @@ return super(mqrepo, self).commit(text, user, date, match, force, editor, extra) - def push(self, remote, force=False, revs=None, newbranch=False): + def checkpush(self, force, revs): if self.mq.applied and not force: haspatches = True if revs: @@ -2889,7 +2930,7 @@ haspatches = bool([n for n in revs if n in applied]) if haspatches: raise util.Abort(_('source has mq patches applied')) - return super(mqrepo, self).push(remote, force, revs, newbranch) + super(mqrepo, self).checkpush(force, revs) def _findtags(self): '''augment tags from base class with patch tags''' @@ -2901,7 +2942,7 @@ mqtags = [(patch.node, patch.name) for patch in q.applied] - if mqtags[-1][0] not in self.changelog.nodemap: + if mqtags[-1][0] not in self: self.ui.warn(_('mq status file refers to unknown node %s\n') % short(mqtags[-1][0])) return result @@ -2926,7 +2967,7 @@ cl = self.changelog qbasenode = q.applied[0].node - if qbasenode not in cl.nodemap: + if qbasenode not in self: self.ui.warn(_('mq status file refers to unknown node %s\n') % short(qbasenode)) return super(mqrepo, self)._branchtags(partial, lrev) @@ -3120,6 +3161,7 @@ "^qpush": (push, [('f', 'force', None, _('apply on top of local changes')), + ('e', 'exact', None, _('apply the target patch to its recorded parent')), ('l', 'list', None, _('list patch name in commit text')), ('a', 'all', None, _('apply all patches')), ('m', 'merge', None, _('merge from another queue (DEPRECATED)')),
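The new ``qpush --exact`` flag added above is used with no patches applied; it first updates the working directory to the parent recorded in the patch header, then applies the patch::

  hg qpush --exact          # update to the patch's recorded parent, then apply it

As the checks in the diff show, it refuses to run when patches are already applied or when combined with ``--move``, and it aborts if the patch has no recorded parent.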
--- a/hgext/patchbomb.py	Tue Feb 01 17:30:13 2011 -0600
+++ b/hgext/patchbomb.py	Tue Feb 01 17:52:25 2011 -0600
@@ -193,6 +193,9 @@
     PAGER environment variable is set, your pager will be fired up once
     for each patchbomb message, so you can verify everything is alright.
 
+    In case email sending fails, you will find a backup of your series
+    introductory message in ``.hg/last-email.txt``.
+
     Examples::
 
       hg email -r 3000             # send patch 3000 only
@@ -309,6 +312,10 @@
             ui.write(_('\nWrite the introductory message for the '
                        'patch series.\n\n'))
             body = ui.edit(body, sender)
+            # Save serie description in case sendmail fails
+            msgfile = repo.opener('last-email.txt', 'wb')
+            msgfile.write(body)
+            msgfile.close()
         return body
 
     def getpatchmsgs(patches, patchnames=None):
--- a/hgext/progress.py Tue Feb 01 17:30:13 2011 -0600 +++ b/hgext/progress.py Tue Feb 01 17:52:25 2011 -0600 @@ -28,7 +28,7 @@ [progress] delay = 3 # number of seconds (float) before showing the progress bar refresh = 0.1 # time in seconds between refreshes of the progress bar - format = topic bar number # format of the progress bar + format = topic bar number estimate # format of the progress bar width = <none> # if set, the maximum width of the progress information # (that is, min(width, term width) will be used) clear-complete = True # clear the progress bar after it's done @@ -36,15 +36,17 @@ assume-tty = False # if true, ALWAYS show a progress bar, unless # disable is given -Valid entries for the format field are topic, bar, number, unit, and -item. item defaults to the last 20 characters of the item, but this -can be changed by adding either ``-<num>`` which would take the last -num characters, or ``+<num>`` for the first num characters. +Valid entries for the format field are topic, bar, number, unit, +estimate, and item. item defaults to the last 20 characters of the +item, but this can be changed by adding either ``-<num>`` which would +take the last num characters, or ``+<num>`` for the first num +characters. """ import sys import time +from mercurial.i18n import _ from mercurial import util def spacejoin(*args): @@ -54,6 +56,43 @@ return (getattr(sys.stderr, 'isatty', None) and (sys.stderr.isatty() or ui.configbool('progress', 'assume-tty'))) +def fmtremaining(seconds): + if seconds < 60: + # i18n: format XX seconds as "XXs" + return _("%02ds") % (seconds) + minutes = seconds // 60 + if minutes < 60: + seconds -= minutes * 60 + # i18n: format X minutes and YY seconds as "XmYYs" + return _("%dm%02ds") % (minutes, seconds) + # we're going to ignore seconds in this case + minutes += 1 + hours = minutes // 60 + minutes -= hours * 60 + if hours < 30: + # i18n: format X hours and YY minutes as "XhYYm" + return _("%dh%02dm") % (hours, minutes) + # we're going to ignore minutes in this case + hours += 1 + days = hours // 24 + hours -= days * 24 + if days < 15: + # i18n: format X days and YY hours as "XdYYh" + return _("%dd%02dh") % (days, hours) + # we're going to ignore hours in this case + days += 1 + weeks = days // 7 + days -= weeks * 7 + if weeks < 55: + # i18n: format X weeks and YY days as "XwYYd" + return _("%dw%02dd") % (weeks, days) + # we're going to ignore days and treat a year as 52 weeks + weeks += 1 + years = weeks // 52 + weeks -= years * 52 + # i18n: format X years and YY weeks as "XyYYw" + return _("%dy%02dw") % (years, weeks) + class progbar(object): def __init__(self, ui): self.ui = ui @@ -61,6 +100,9 @@ def resetstate(self): self.topics = [] + self.topicstates = {} + self.starttimes = {} + self.startvals = {} self.printed = False self.lastprint = time.time() + float(self.ui.config( 'progress', 'delay', default=3)) @@ -69,9 +111,9 @@ 'progress', 'refresh', default=0.1)) self.order = self.ui.configlist( 'progress', 'format', - default=['topic', 'bar', 'number']) + default=['topic', 'bar', 'number', 'estimate']) - def show(self, topic, pos, item, unit, total): + def show(self, now, topic, pos, item, unit, total): if not shouldprint(self.ui): return termwidth = self.width() @@ -108,10 +150,12 @@ needprogress = True elif indicator == 'unit' and unit: add = unit + elif indicator == 'estimate': + add = self.estimate(topic, pos, total, now) if not needprogress: head = spacejoin(head, add) else: - tail = spacejoin(add, tail) + tail = spacejoin(tail, add) if needprogress: 
used = 0 if head: @@ -159,19 +203,44 @@ tw = self.ui.termwidth() return min(int(self.ui.config('progress', 'width', default=tw)), tw) + def estimate(self, topic, pos, total, now): + if total is None: + return '' + initialpos = self.startvals[topic] + target = total - initialpos + delta = pos - initialpos + if delta > 0: + elapsed = now - self.starttimes[topic] + if elapsed > float( + self.ui.config('progress', 'estimate', default=2)): + seconds = (elapsed * (target - delta)) // delta + 1 + return fmtremaining(seconds) + return '' + def progress(self, topic, pos, item='', unit='', total=None): + now = time.time() if pos is None: - if self.topics and self.topics[-1] == topic and self.printed: + self.starttimes.pop(topic, None) + self.startvals.pop(topic, None) + self.topicstates.pop(topic, None) + # reset the progress bar if this is the outermost topic + if self.topics and self.topics[0] == topic and self.printed: self.complete() self.resetstate() + # truncate the list of topics assuming all topics within + # this one are also closed + if topic in self.topics: + self.topics = self.topics[:self.topics.index(topic)] else: if topic not in self.topics: + self.starttimes[topic] = now + self.startvals[topic] = pos self.topics.append(topic) - now = time.time() - if (now - self.lastprint >= self.refresh - and topic == self.topics[-1]): + self.topicstates[topic] = pos, item, unit, total + if now - self.lastprint >= self.refresh and self.topics: self.lastprint = now - self.show(topic, pos, item, unit, total) + current = self.topics[-1] + self.show(now, topic, *self.topicstates[topic]) def uisetup(ui): class progressui(ui.__class__):
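Distilled from ``progbar.estimate`` above, the remaining-time figure simply scales the elapsed time by the amount of work left (a standalone sketch, not the extension code itself)::

  def remaining(startval, pos, total, starttime, now):
      target = total - startval
      delta = pos - startval
      if delta <= 0:
          return None                      # no progress yet, no estimate
      elapsed = now - starttime
      return (elapsed * (target - delta)) // delta + 1   # seconds

  # 40 of 100 items done after 20 seconds -> about 31 seconds to go
  print(remaining(0, 40, 100, 0.0, 20.0))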
--- a/hgext/rebase.py	Tue Feb 01 17:30:13 2011 -0600
+++ b/hgext/rebase.py	Tue Feb 01 17:52:25 2011 -0600
@@ -215,7 +215,7 @@
             clearstatus(repo)
             ui.note(_("rebase completed\n"))
             if os.path.exists(repo.sjoin('undo')):
-                util.unlink(repo.sjoin('undo'))
+                util.unlinkpath(repo.sjoin('undo'))
             if skipped:
                 ui.note(_("%d revisions have been skipped\n") % len(skipped))
     finally:
@@ -393,7 +393,7 @@
 def clearstatus(repo):
     'Remove the status files'
     if os.path.exists(repo.join("rebasestate")):
-        util.unlink(repo.join("rebasestate"))
+        util.unlinkpath(repo.join("rebasestate"))
 
 def restorestatus(repo):
     'Restore a previously stored status'
--- a/hgext/record.py Tue Feb 01 17:30:13 2011 -0600 +++ b/hgext/record.py Tue Feb 01 17:52:25 2011 -0600 @@ -10,7 +10,7 @@ from mercurial.i18n import gettext, _ from mercurial import cmdutil, commands, extensions, hg, mdiff, patch from mercurial import util -import copy, cStringIO, errno, os, re, tempfile +import copy, cStringIO, errno, os, re, shutil, tempfile lines_re = re.compile(r'@@ -(\d+),(\d+) \+(\d+),(\d+) @@\s*(.*)') @@ -42,7 +42,7 @@ line = lr.readline() if not line: break - if line.startswith('diff --git a/'): + if line.startswith('diff --git a/') or line.startswith('diff -r '): def notheader(line): s = line.split(None, 1) return not s or s[0] not in ('---', 'diff') @@ -70,7 +70,8 @@ XXX shoudn't we move this to mercurial/patch.py ? """ - diff_re = re.compile('diff --git a/(.*) b/(.*)$') + diffgit_re = re.compile('diff --git a/(.*) b/(.*)$') + diff_re = re.compile('diff -r .* (.*)$') allhunks_re = re.compile('(?:index|new file|deleted file) ') pretty_re = re.compile('(?:new file|deleted file) ') special_re = re.compile('(?:index|new|deleted|copy|rename) ') @@ -80,9 +81,7 @@ self.hunks = [] def binary(self): - for h in self.header: - if h.startswith('index '): - return True + return util.any(h.startswith('index ') for h in self.header) def pretty(self, fp): for h in self.header: @@ -105,15 +104,17 @@ fp.write(''.join(self.header)) def allhunks(self): - for h in self.header: - if self.allhunks_re.match(h): - return True + return util.any(self.allhunks_re.match(h) for h in self.header) def files(self): - fromfile, tofile = self.diff_re.match(self.header[0]).groups() - if fromfile == tofile: - return [fromfile] - return [fromfile, tofile] + match = self.diffgit_re.match(self.header[0]) + if match: + fromfile, tofile = match.groups() + if fromfile == tofile: + return [fromfile] + return [fromfile, tofile] + else: + return self.diff_re.match(self.header[0]).groups() def filename(self): return self.files()[-1] @@ -122,9 +123,7 @@ return '<header %s>' % (' '.join(map(repr, self.files()))) def special(self): - for h in self.header: - if self.special_re.match(h): - return True + return util.any(self.special_re.match(h) for h in self.header) def countchanges(hunk): """hunk -> (n+,n-)""" @@ -173,7 +172,7 @@ return '<hunk %r@%d>' % (self.filename(), self.fromline) def parsepatch(fp): - """patch -> [] of hunks """ + """patch -> [] of headers -> [] of hunks """ class parser(object): """patch parsing state machine""" def __init__(self): @@ -184,7 +183,7 @@ self.context = [] self.before = [] self.hunk = [] - self.stream = [] + self.headers = [] def addrange(self, limits): fromstart, fromend, tostart, toend, proc = limits @@ -197,7 +196,6 @@ h = hunk(self.header, self.fromline, self.toline, self.proc, self.before, self.hunk, context) self.header.hunks.append(h) - self.stream.append(h) self.fromline += len(self.before) + h.removed self.toline += len(self.before) + h.added self.before = [] @@ -214,12 +212,12 @@ def newfile(self, hdr): self.addcontext([]) h = header(hdr) - self.stream.append(h) + self.headers.append(h) self.header = h def finished(self): self.addcontext([]) - return self.stream + return self.headers transitions = { 'file': {'context': addcontext, @@ -248,27 +246,10 @@ state = newstate return p.finished() -def filterpatch(ui, chunks): +def filterpatch(ui, headers): """Interactively filter patch chunks into applied-only chunks""" - chunks = list(chunks) - chunks.reverse() - seen = set() - def consumefile(): - """fetch next portion from chunks until a 'header' is seen - NB: header == 
new-file mark - """ - consumed = [] - while chunks: - if isinstance(chunks[-1], header): - break - else: - consumed.append(chunks.pop()) - return consumed - resp_all = [None] # this two are changed from inside prompt, - resp_file = [None] # so can't be usual variables - applied = {} # 'filename' -> [] of chunks - def prompt(query): + def prompt(skipfile, skipall, query): """prompt query, and process base inputs - y/n for the rest of file @@ -276,13 +257,12 @@ - ? (help) - q (quit) - Returns True/False and sets reps_all and resp_file as - appropriate. + Return True/False and possibly updated skipfile and skipall. """ - if resp_all[0] is not None: - return resp_all[0] - if resp_file[0] is not None: - return resp_file[0] + if skipall is not None: + return skipall, skipfile, skipall + if skipfile is not None: + return skipfile, skipfile, skipall while True: resps = _('[Ynsfdaq?]') choices = (_('&Yes, record this change'), @@ -307,47 +287,48 @@ elif r == 1: # no ret = False elif r == 2: # Skip - ret = resp_file[0] = False + ret = skipfile = False elif r == 3: # file (Record remaining) - ret = resp_file[0] = True + ret = skipfile = True elif r == 4: # done, skip remaining - ret = resp_all[0] = False + ret = skipall = False elif r == 5: # all - ret = resp_all[0] = True + ret = skipall = True elif r == 6: # quit raise util.Abort(_('user quit')) - return ret - pos, total = 0, len(chunks) - 1 - while chunks: - pos = total - len(chunks) + 1 - chunk = chunks.pop() - if isinstance(chunk, header): - # new-file mark - resp_file = [None] - fixoffset = 0 - hdr = ''.join(chunk.header) - if hdr in seen: - consumefile() - continue - seen.add(hdr) - if resp_all[0] is None: + return ret, skipfile, skipall + + seen = set() + applied = {} # 'filename' -> [] of chunks + skipfile, skipall = None, None + pos, total = 1, sum(len(h.hunks) for h in headers) + for h in headers: + pos += len(h.hunks) + skipfile = None + fixoffset = 0 + hdr = ''.join(h.header) + if hdr in seen: + continue + seen.add(hdr) + if skipall is None: + h.pretty(ui) + msg = (_('examine changes to %s?') % + _(' and ').join(map(repr, h.files()))) + r, skipfile, skipall = prompt(skipfile, skipall, msg) + if not r: + continue + applied[h.filename()] = [h] + if h.allhunks(): + applied[h.filename()] += h.hunks + continue + for i, chunk in enumerate(h.hunks): + if skipfile is None and skipall is None: chunk.pretty(ui) - r = prompt(_('examine changes to %s?') % - _(' and ').join(map(repr, chunk.files()))) - if r: - applied[chunk.filename()] = [chunk] - if chunk.allhunks(): - applied[chunk.filename()] += consumefile() - else: - consumefile() - else: - # new hunk - if resp_file[0] is None and resp_all[0] is None: - chunk.pretty(ui) - r = total == 1 and prompt(_('record this change to %r?') % - chunk.filename()) \ - or prompt(_('record change %d/%d to %r?') % - (pos, total, chunk.filename())) + msg = (total == 1 + and (_('record this change to %r?') % chunk.filename()) + or (_('record change %d/%d to %r?') % + (pos - len(h.hunks) + i, total, chunk.filename()))) + r, skipfile, skipall = prompt(skipfile, skipall, msg) if r: if fixoffset: chunk = copy.copy(chunk) @@ -403,8 +384,6 @@ def committomq(ui, repo, *pats, **opts): mq.new(ui, repo, patch, *pats, **opts) - opts = opts.copy() - opts['force'] = True # always 'qnew -f' dorecord(ui, repo, committomq, *pats, **opts) @@ -415,21 +394,22 @@ def recordfunc(ui, repo, message, match, opts): """This is generic record driver. 
- Its job is to interactively filter local changes, and accordingly - prepare working dir into a state, where the job can be delegated to - non-interactive commit command such as 'commit' or 'qrefresh'. + Its job is to interactively filter local changes, and + accordingly prepare working directory into a state in which the + job can be delegated to a non-interactive commit command such as + 'commit' or 'qrefresh'. - After the actual job is done by non-interactive command, working dir - state is restored to original. + After the actual job is done by non-interactive command, the + working directory is restored to its original state. - In the end we'll record interesting changes, and everything else will be - left in place, so the user can continue his work. + In the end we'll record interesting changes, and everything else + will be left in place, so the user can continue working. """ merge = len(repo[None].parents()) > 1 if merge: raise util.Abort(_('cannot partially commit a merge ' - '(use hg commit instead)')) + '(use "hg commit" instead)')) changes = repo.status(match=match)[:3] diffopts = mdiff.diffopts(git=True, nodates=True) @@ -475,6 +455,7 @@ os.close(fd) ui.debug('backup %r as %r\n' % (f, tmpname)) util.copyfile(repo.wjoin(f), tmpname) + shutil.copystat(repo.wjoin(f), tmpname) backups[f] = tmpname fp = cStringIO.StringIO() @@ -502,11 +483,13 @@ raise util.Abort(str(err)) del fp - # 4. We prepared working directory according to filtered patch. - # Now is the time to delegate the job to commit/qrefresh or the like! + # 4. We prepared working directory according to filtered + # patch. Now is the time to delegate the job to + # commit/qrefresh or the like! - # it is important to first chdir to repo root -- we'll call a - # highlevel command with list of pathnames relative to repo root + # it is important to first chdir to repo root -- we'll call + # a highlevel command with list of pathnames relative to + # repo root cwd = os.getcwd() os.chdir(repo.root) try: @@ -521,6 +504,14 @@ for realname, tmpname in backups.iteritems(): ui.debug('restoring %r to %r\n' % (tmpname, realname)) util.copyfile(tmpname, repo.wjoin(realname)) + # Our calls to copystat() here and above are a + # hack to trick any editors that have f open that + # we haven't modified them. + # + # Also note that this racy as an editor could + # notice the file's mtime before we've finished + # writing it. + shutil.copystat(tmpname, repo.wjoin(realname)) os.unlink(tmpname) os.rmdir(backupdir) except OSError: @@ -540,11 +531,7 @@ cmdtable = { "record": - (record, - - # add commit options - commands.table['^commit|ci'][1], - + (record, commands.table['^commit|ci'][1], # same options as commit _('hg record [OPTION]... [FILE]...')), } @@ -557,11 +544,7 @@ qcmdtable = { "qrecord": - (qrecord, - - # add qnew options, except '--force' - [opt for opt in mq.cmdtable['^qnew'][1] if opt[1] != 'force'], - + (qrecord, mq.cmdtable['^qnew'][1], # same options as qnew _('hg qrecord [OPTION]... PATCH [FILE]...')), }
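The rewritten ``filterpatch``/``prompt`` pair above threads the "skip rest of this file" and "skip everything" answers through return values instead of mutating shared one-element lists. A runnable sketch of that control flow (``answer`` stands in for the interactive reply the real extension obtains via the ui)::

  def prompt(skipfile, skipall, answer):
      # earlier blanket answers short-circuit later questions
      if skipall is not None:
          return skipall, skipfile, skipall
      if skipfile is not None:
          return skipfile, skipfile, skipall
      if answer == 's':                 # skip remaining changes to this file
          return False, False, skipall
      if answer == 'f':                 # record remaining changes to this file
          return True, True, skipall
      if answer == 'd':                 # done, skip everything that is left
          return False, skipfile, False
      if answer == 'a':                 # record everything that is left
          return True, skipfile, True
      return answer == 'y', skipfile, skipall

  skipfile = skipall = None
  for reply in 'y', 'f', 'n':           # the 'n' is ignored once 'f' was given
      use, skipfile, skipall = prompt(skipfile, skipall, reply)
      print(use, skipfile, skipall)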
--- a/hgext/transplant.py	Tue Feb 01 17:30:13 2011 -0600
+++ b/hgext/transplant.py	Tue Feb 01 17:52:25 2011 -0600
@@ -401,7 +401,7 @@
 def hasnode(repo, node):
     try:
-        return repo.changelog.rev(node) != None
+        return repo.changelog.rev(node) is not None
     except error.RevlogError:
         return False
--- a/i18n/da.po Tue Feb 01 17:30:13 2011 -0600 +++ b/i18n/da.po Tue Feb 01 17:52:25 2011 -0600 @@ -17,8 +17,8 @@ msgstr "" "Project-Id-Version: Mercurial\n" "Report-Msgid-Bugs-To: <mercurial-devel@selenic.com>\n" -"POT-Creation-Date: 2010-12-10 12:44+0100\n" -"PO-Revision-Date: 2010-12-10 12:46+0100\n" +"POT-Creation-Date: 2011-01-04 12:03+0100\n" +"PO-Revision-Date: 2011-01-04 12:15+0100\n" "Last-Translator: <mg@lazybytes.net>\n" "Language-Team: Danish\n" "Language: Danish\n" @@ -1142,10 +1142,10 @@ msgstr "" msgid "" -" If <REVMAP> isn't given, it will be put in a default location\n" -" (<dest>/.hg/shamap by default). The <REVMAP> is a simple text file\n" -" that maps each source commit ID to the destination ID for that\n" -" revision, like so::" +" If ``REVMAP`` isn't given, it will be put in a default location\n" +" (``<dest>/.hg/shamap`` by default). The ``REVMAP`` is a simple\n" +" text file that maps each source commit ID to the destination ID\n" +" for that revision, like so::" msgstr "" msgid " <source ID> <destination ID>" @@ -1251,15 +1251,25 @@ msgstr "" msgid "" -" --config convert.hg.ignoreerrors=False (boolean)\n" -" ignore integrity errors when reading. Use it to fix Mercurial\n" -" repositories with missing revlogs, by converting from and to\n" -" Mercurial.\n" -" --config convert.hg.saverev=False (boolean)\n" -" store original revision ID in changeset (forces target IDs to\n" -" change)\n" -" --config convert.hg.startrev=0 (hg revision identifier)\n" -" convert start revision and its descendants" +" The Mercurial source recognizes the following configuration\n" +" options, which you can set on the command line with ``--config``:" +msgstr "" + +msgid "" +" :convert.hg.ignoreerrors: ignore integrity errors when reading.\n" +" Use it to fix Mercurial repositories with missing revlogs, by\n" +" converting from and to Mercurial. Default is False." +msgstr "" + +msgid "" +" :convert.hg.saverev: store original. revision ID in changeset\n" +" (forces target IDs to change). It takes and boolean argument\n" +" and defaults to False." +msgstr "" + +msgid "" +" :convert.hg.startrev: convert start revision and its descendants.\n" +" It takes a hg revision identifier and defaults to 0." msgstr "" msgid "" @@ -1271,45 +1281,59 @@ " CVS source will use a sandbox (i.e. a checked-out copy) from CVS\n" " to indicate the starting point of what will be converted. Direct\n" " access to the repository files is not needed, unless of course the\n" -" repository is :local:. The conversion uses the top level directory\n" -" in the sandbox to find the CVS repository, and then uses CVS rlog\n" -" commands to find files to convert. This means that unless a\n" -" filemap is given, all files under the starting directory will be\n" +" repository is ``:local:``. The conversion uses the top level\n" +" directory in the sandbox to find the CVS repository, and then uses\n" +" CVS rlog commands to find files to convert. This means that unless\n" +" a filemap is given, all files under the starting directory will be\n" " converted, and that any directory reorganization in the CVS\n" " sandbox is ignored." msgstr "" -msgid " The options shown are the defaults." -msgstr "" - -msgid "" -" --config convert.cvsps.cache=True (boolean)\n" -" Set to False to disable remote log caching, for testing and\n" -" debugging purposes.\n" -" --config convert.cvsps.fuzz=60 (integer)\n" -" Specify the maximum time (in seconds) that is allowed between\n" -" commits with identical user and log message in a single\n" -" changeset. 
When very large files were checked in as part of a\n" -" changeset then the default may not be long enough.\n" -" --config convert.cvsps.mergeto='{{mergetobranch ([-\\w]+)}}'\n" -" Specify a regular expression to which commit log messages are\n" -" matched. If a match occurs, then the conversion process will\n" -" insert a dummy revision merging the branch on which this log\n" -" message occurs to the branch indicated in the regex.\n" -" --config convert.cvsps.mergefrom='{{mergefrombranch ([-\\w]+)}}'\n" -" Specify a regular expression to which commit log messages are\n" -" matched. If a match occurs, then the conversion process will\n" -" add the most recent revision on the branch indicated in the\n" -" regex as the second parent of the changeset.\n" -" --config hook.cvslog\n" -" Specify a Python function to be called at the end of gathering\n" -" the CVS log. The function is passed a list with the log entries,\n" -" and can modify the entries in-place, or add or delete them.\n" -" --config hook.cvschangesets\n" -" Specify a Python function to be called after the changesets\n" -" are calculated from the the CVS log. The function is passed\n" -" a list with the changeset entries, and can modify the changesets\n" -" in-place, or add or delete them." +msgid " The following options can be used with ``--config``:" +msgstr "" + +msgid "" +" :convert.cvsps.cache: Set to False to disable remote log caching,\n" +" for testing and debugging purposes. Default is True." +msgstr "" + +msgid "" +" :convert.cvsps.fuzz: Specify the maximum time (in seconds) that is\n" +" allowed between commits with identical user and log message in\n" +" a single changeset. When very large files were checked in as\n" +" part of a changeset then the default may not be long enough.\n" +" The default is 60." +msgstr "" + +msgid "" +" :convert.cvsps.mergeto: Specify a regular expression to which\n" +" commit log messages are matched. If a match occurs, then the\n" +" conversion process will insert a dummy revision merging the\n" +" branch on which this log message occurs to the branch\n" +" indicated in the regex. Default is ``{{mergetobranch\n" +" ([-\\w]+)}}``" +msgstr "" + +msgid "" +" :convert.cvsps.mergefrom: Specify a regular expression to which\n" +" commit log messages are matched. If a match occurs, then the\n" +" conversion process will add the most recent revision on the\n" +" branch indicated in the regex as the second parent of the\n" +" changeset. Default is ``{{mergefrombranch ([-\\w]+)}}``" +msgstr "" + +msgid "" +" :hook.cvslog: Specify a Python function to be called at the end of\n" +" gathering the CVS log. The function is passed a list with the\n" +" log entries, and can modify the entries in-place, or add or\n" +" delete them." +msgstr "" + +msgid "" +" :hook.cvschangesets: Specify a Python function to be called after\n" +" the changesets are calculated from the the CVS log. The\n" +" function is passed a list with the changeset entries, and can\n" +" modify the changesets in-place, or add or delete them." msgstr "" msgid "" @@ -1326,24 +1350,33 @@ msgid "" " Subversion source detects classical trunk/branches/tags layouts.\n" -" By default, the supplied \"svn://repo/path/\" source URL is\n" -" converted as a single branch. If \"svn://repo/path/trunk\" exists it\n" -" replaces the default branch. If \"svn://repo/path/branches\" exists,\n" -" its subdirectories are listed as possible branches. If\n" -" \"svn://repo/path/tags\" exists, it is looked for tags referencing\n" -" converted branches. 
Default \"trunk\", \"branches\" and \"tags\" values\n" -" can be overridden with following options. Set them to paths\n" +" By default, the supplied ``svn://repo/path/`` source URL is\n" +" converted as a single branch. If ``svn://repo/path/trunk`` exists\n" +" it replaces the default branch. If ``svn://repo/path/branches``\n" +" exists, its subdirectories are listed as possible branches. If\n" +" ``svn://repo/path/tags`` exists, it is looked for tags referencing\n" +" converted branches. Default ``trunk``, ``branches`` and ``tags``\n" +" values can be overridden with following options. Set them to paths\n" " relative to the source URL, or leave them blank to disable auto\n" " detection." msgstr "" -msgid "" -" --config convert.svn.branches=branches (directory name)\n" -" specify the directory containing branches\n" -" --config convert.svn.tags=tags (directory name)\n" -" specify the directory containing tags\n" -" --config convert.svn.trunk=trunk (directory name)\n" -" specify the name of the trunk branch" +msgid " The following options can be set with ``--config``:" +msgstr "" + +msgid "" +" :convert.svn.branches: specify the directory containing branches.\n" +" The defaults is ``branches``." +msgstr "" + +msgid "" +" :convert.svn.tags: specify the directory containing tags. The\n" +" default is ``tags``." +msgstr "" + +msgid "" +" :convert.svn.trunk: specify the name of the trunk branch The\n" +" defauls is ``trunk``." msgstr "" msgid "" @@ -1353,8 +1386,8 @@ msgstr "" msgid "" -" --config convert.svn.startrev=0 (svn revision number)\n" -" specify start Subversion revision." +" :convert.svn.startrev: specify start Subversion revision number.\n" +" The default is 0." msgstr "" msgid "" @@ -1368,17 +1401,17 @@ " source to a flat Mercurial repository, ignoring labels, branches\n" " and integrations. Note that when a depot path is given you then\n" " usually should specify a target directory, because otherwise the\n" -" target may be named ...-hg." +" target may be named ``...-hg``." msgstr "" msgid "" " It is possible to limit the amount of source history to be\n" -" converted by specifying an initial Perforce revision." -msgstr "" - -msgid "" -" --config convert.p4.startrev=0 (perforce changelist number)\n" -" specify initial Perforce revision." +" converted by specifying an initial Perforce revision:" +msgstr "" + +msgid "" +" :convert.p4.startrev: specify initial Perforce revision, a\n" +" Perforce changelist number)." msgstr "" msgid "" @@ -1386,17 +1419,24 @@ " '''''''''''''''''''''" msgstr "" -msgid "" -" --config convert.hg.clonebranches=False (boolean)\n" -" dispatch source branches in separate clones.\n" -" --config convert.hg.tagsbranch=default (branch name)\n" -" tag revisions branch name\n" -" --config convert.hg.usebranchnames=True (boolean)\n" -" preserve branch names" -msgstr "" - -msgid " " -msgstr " " +msgid " The following options are supported:" +msgstr "" + +msgid "" +" :convert.hg.clonebranches: dispatch source branches in separate\n" +" clones. The default is False." +msgstr "" + +msgid "" +" :convert.hg.tagsbranch: branch name for tag revisions, defaults to\n" +" ``default``." +msgstr "" + +msgid "" +" :convert.hg.usebranchnames: preserve branch names. The default is\n" +" True\n" +" " +msgstr "" msgid "create changeset information from CVS" msgstr "" @@ -1962,32 +2002,35 @@ "``[repository]``." msgid "" -"The ``[patterns]`` section specifies the line endings used in the\n" -"working directory. The format is specified by a file pattern. 
The\n" -"first match is used, so put more specific patterns first. The\n" -"available line endings are ``LF``, ``CRLF``, and ``BIN``." -msgstr "" -"Sektionen ``[patterns]`` angiver hvilken type linieskift der skal\n" -"bruges i arbejdskataloget. Typen er angivet ved et filmønster. Den\n" -"første træffer bliver brugt, så skriv mere specifikke mønstre først.\n" -"De mulige linieskifttyper er ``LF``, ``CRLF`` og ``BIN``." +"The ``[patterns]`` section specifies how line endings should be\n" +"converted between the working copy and the repository. The format is\n" +"specified by a file pattern. The first match is used, so put more\n" +"specific patterns first. The available line endings are ``LF``,\n" +"``CRLF``, and ``BIN``." +msgstr "" +"Sektionen ``[patterns]`` angiver hvordan linieskift skal konverteres\n" +"mellem arbejdskataloget og depotet. Formatet angives med et\n" +"filmønster. Den første træffer bliver brugt, så skriv mere specifikke\n" +"mønstre først. De mulige linieskifttyper er ``LF``, ``CRLF`` og\n" +"``BIN``." msgid "" "Files with the declared format of ``CRLF`` or ``LF`` are always\n" -"checked out in that format and files declared to be binary (``BIN``)\n" -"are left unchanged. Additionally, ``native`` is an alias for the\n" -"platform's default line ending: ``LF`` on Unix (including Mac OS X)\n" -"and ``CRLF`` on Windows. Note that ``BIN`` (do nothing to line\n" -"endings) is Mercurial's default behaviour; it is only needed if you\n" -"need to override a later, more general pattern." -msgstr "" -"Filer deklareret som ``CRLF`` eller ``LF`` bliver altid hentet ud i\n" -"dette format og filer deklareret som binære (``BIN``) bliver ikke\n" -"ændret. Desuden er ``native`` et alias for platforms normale\n" -"linieskift: ``LF`` på Unix (samt Mac OS X) og ``CRLF`` på Windows.\n" -"Bemærk at ``BIN`` (gør ingenting ved linieskift) er Mercurials\n" -"standardopførsel; det er kun nødvendigt at bruge den hvis du skal\n" -"overskrive et senere og mere generelt mønster." +"checked out and stored in the repository in that format and files\n" +"declared to be binary (``BIN``) are left unchanged. Additionally,\n" +"``native`` is an alias for checking out in the platform's default line\n" +"ending: ``LF`` on Unix (including Mac OS X) and ``CRLF`` on\n" +"Windows. Note that ``BIN`` (do nothing to line endings) is Mercurial's\n" +"default behaviour; it is only needed if you need to override a later,\n" +"more general pattern." +msgstr "" +"Filer deklareret som ``CRLF`` eller ``LF`` bliver altid hentet ud og\n" +"gemt i depotet i dette format og filer deklareret som binære (``BIN``)\n" +"bliver ikke ændret. Desuden er ``native`` et alias for platformens\n" +"normale linieskift: ``LF`` på Unix (samt Mac OS X) og ``CRLF`` på\n" +"Windows. Bemærk at ``BIN`` (gør ingenting ved linieskift) er\n" +"Mercurials standardopførsel; det er kun nødvendigt at bruge den hvis\n" +"du skal overskrive et senere og mere generelt mønster." msgid "" "The optional ``[repository]`` section specifies the line endings to\n" @@ -2036,6 +2079,12 @@ " native = LF" msgid "" +".. note::\n" +" The rules will first apply when files are touched in the working\n" +" copy, e.g. by updating to null and back to tip to touch all files." +msgstr "" + +msgid "" "The extension uses an optional ``[eol]`` section in your hgrc file\n" "(not the ``.hgeol`` file) for settings that control the overall\n" "behavior. There are two settings:" @@ -2070,6 +2119,13 @@ " antagelse af at de har miksede linieskift med vilje." 
msgid "" +"The extension provides ``cleverencode:`` and ``cleverdecode:`` filters\n" +"like the deprecated win32text extension does. This means that you can\n" +"disable win32text and enable eol and your filters will still work. You\n" +"only need to these filters until you have prepared a ``.hgeol`` file." +msgstr "" + +msgid "" "The ``win32text.forbid*`` hooks provided by the win32text extension\n" "have been unified into a single hook named ``eol.hook``. The hook will\n" "lookup the expected line endings from the ``.hgeol`` file, which means\n" @@ -3392,6 +3448,10 @@ msgid "working directory revision is not qtip" msgstr "arbejdskatalogets revision er ikke qtip" +#, python-format +msgid "uncommitted changes in subrepository %s" +msgstr "udeponerede ændringer i underdepot %s" + msgid "local changes found, refresh first" msgstr "lokale ændringer fundet, genopfrisk først" @@ -3460,6 +3520,16 @@ msgid "patch series already fully applied\n" msgstr "serien af rettelser er allerede anvendt fuldt ud\n" +msgid "cannot use --exact and --move together" +msgstr "kan ikke bruge --exact og --move sammen" + +msgid "cannot push --exact with applied patches" +msgstr "kan push --exact med anvendte rettelser" + +#, python-format +msgid "%s does not have a parent recorded" +msgstr "%s har ikke gemt nogen forælder" + msgid "please specify the patch to move" msgstr "angiv venligst lappen der skal flyttes" @@ -3508,18 +3578,6 @@ msgid "cannot refresh a revision with children" msgstr "kan ikke genopfriske en revision som har børn" -#, python-format -msgid "warning: not refreshing %s\n" -msgstr "advarsel: genopfrisker ikke %s\n" - -#, python-format -msgid "warning: not adding %s\n" -msgstr "advarsel: tilføjer ikke %s\n" - -#, python-format -msgid "warning: not removing %s\n" -msgstr "advarsel: fjerner ikke %s\n" - msgid "" "refresh interrupted while patch was popped! (revert --all, qpush to " "recover)\n" @@ -4486,6 +4544,9 @@ msgid "apply on top of local changes" msgstr "anvend ovenpå lokale ændringer" +msgid "apply the target patch to its recorded parent" +msgstr "" + msgid "list patch name in commit text" msgstr "" @@ -4987,6 +5048,11 @@ msgstr "" msgid "" +" In case email sending fails, you will find a backup of your series\n" +" introductory message in ``.hg/last-email.txt``." +msgstr "" + +msgid "" " hg email -r 3000 # send patch 3000 only\n" " hg email -r 3000 -r 3001 # send patches 3000 and 3001\n" " hg email -r 3000:3005 # send patches 3000 through 3005\n" @@ -5177,7 +5243,7 @@ " [progress]\n" " delay = 3 # number of seconds (float) before showing the progress bar\n" " refresh = 0.1 # time in seconds between refreshes of the progress bar\n" -" format = topic bar number # format of the progress bar\n" +" format = topic bar number estimate # format of the progress bar\n" " width = <none> # if set, the maximum width of the progress information\n" " # (that is, min(width, term width) will be used)\n" " clear-complete = True # clear the progress bar after it's done\n" @@ -5187,10 +5253,26 @@ msgstr "" msgid "" -"Valid entries for the format field are topic, bar, number, unit, and\n" -"item. item defaults to the last 20 characters of the item, but this\n" -"can be changed by adding either ``-<num>`` which would take the last\n" -"num characters, or ``+<num>`` for the first num characters.\n" +"Valid entries for the format field are topic, bar, number, unit,\n" +"estimate, and item. 
item defaults to the last 20 characters of the\n" +"item, but this can be changed by adding either ``-<num>`` which would\n" +"take the last num characters, or ``+<num>`` for the first num\n" +"characters.\n" +msgstr "" + +#. i18n: format XX seconds as "XXs" +#, python-format +msgid "%02ds" +msgstr "" + +#. i18n: format X minutes and YY seconds as "XmYYs" +#, python-format +msgid "%dm%02ds" +msgstr "" + +#. i18n: format X hours and YY minutes as "XhYYm" +#, python-format +msgid "%dh%02dm" msgstr "" msgid "command to delete untracked files from the working directory" @@ -5382,7 +5464,8 @@ msgstr "ændringer" msgid "unresolved conflicts (see hg resolve, then hg rebase --continue)" -msgstr "uløste konflikter (se først hg resolve og dernæst hg rebase --continue)" +msgstr "" +"uløste konflikter (se først hg resolve og dernæst hg rebase --continue)" #, python-format msgid "no changes, revision %d skipped\n" @@ -5578,8 +5661,8 @@ msgid "running non-interactively, use commit instead" msgstr "kører ikke interaktivt, brug commit i stedet" -msgid "cannot partially commit a merge (use hg commit instead)" -msgstr "kan ikke deponere en sammenføjning partielt (brug i stedet hg commit)" +msgid "cannot partially commit a merge (use \"hg commit\" instead)" +msgstr "kan ikke deponere en sammenføjning partielt (brug i stedet \"hg commit\")" msgid "no changes to record\n" msgstr "ingen ændringer at optage\n" @@ -6176,6 +6259,9 @@ msgid "unknown archive type '%s'" msgstr "ukendt depottype '%s'" +msgid "archiving" +msgstr "arkiverer" + msgid "invalid changegroup" msgstr "ugyldig changegroup" @@ -8502,6 +8588,9 @@ msgid "you can't specify a revision and a date" msgstr "du kan ikke specificeret en revision og en dato" +msgid "uncommitted merge - use \"hg update\", see \"hg help revert\"" +msgstr "" + msgid "no files or directories specified; use --all to revert the whole repo" msgstr "" "ingen filer eller mapper specificeret; brug --all for at føre hele repo'et " @@ -8813,7 +8902,8 @@ msgid "" " Tags are used to name particular revisions of the repository and are\n" " very useful to compare different revisions, to go back to significant\n" -" earlier versions or to mark branch points as releases, etc." +" earlier versions or to mark branch points as releases, etc. Changing\n" +" an existing tag is normally disallowed; use -f/--force to override." msgstr "" msgid "" @@ -8823,10 +8913,18 @@ msgid "" " To facilitate version control, distribution, and merging of tags,\n" -" they are stored as a file named \".hgtags\" which is managed\n" -" similarly to other project files and can be hand-edited if\n" -" necessary. The file '.hg/localtags' is used for local tags (not\n" -" shared among repositories)." +" they are stored as a file named \".hgtags\" which is managed similarly\n" +" to other project files and can be hand-edited if necessary. This\n" +" also means that tagging creates a new commit. The file\n" +" \".hg/localtags\" is used for local tags (not shared among\n" +" repositories)." +msgstr "" + +msgid "" +" Tag commits are usually made at the head of a branch. If the parent\n" +" of the working directory is not a branch head, :hg:`tag` aborts; use\n" +" -f/--force to force the tag commit to be based on a non-head\n" +" changeset." 
msgstr "" msgid "" @@ -8859,6 +8957,12 @@ msgid "tag '%s' already exists (use -f to force)" msgstr "mærkaten '%s' eksisterer allerede (brug -f for at gennemtvinge)" +msgid "uncommitted merge" +msgstr "udeponeret sammenføjning" + +msgid "not at a branch head (use -f to force)" +msgstr "ej ved et grenhoved (brug -f for at gennemtvinge)" + msgid "list repository tags" msgstr "vis depotmærkater" @@ -9752,8 +9856,8 @@ msgid "list the changed files of a revision" msgstr "vis de ændrede filer i en revision" -msgid "replace existing tag" -msgstr "erstat eksisterende mærkat" +msgid "force tag" +msgstr "gennemtving markering" msgid "make the tag local" msgstr "gør mærkaten lokal" @@ -10133,6 +10237,10 @@ msgstr "*** import af udvidelse %s fejlede: %s\n" #, python-format +msgid "warning: error finding commands in %s\n" +msgstr "advarsel: fejl ved søgning efter kommandoer i %s\n" + +#, python-format msgid "couldn't find merge tool %s\n" msgstr "kunne ikke finde sammenføjningsværktøj %s\n" @@ -11514,6 +11622,13 @@ "Regexp pattern matching is anchored at the root of the repository." msgstr "" +msgid "" +"To read name patterns from a file, use ``listfile:`` or ``listfile0:``.\n" +"The latter expects null delimited patterns while the former expects line\n" +"feeds. Each string read from the file is itself treated as a file\n" +"pattern." +msgstr "" + msgid "Plain examples::" msgstr "" @@ -11539,8 +11654,15 @@ msgid "Regexp examples::" msgstr "" -msgid "" -" re:.*\\.c$ any name ending in \".c\", anywhere in the repository\n" +msgid " re:.*\\.c$ any name ending in \".c\", anywhere in the repository" +msgstr "" + +msgid "File examples::" +msgstr "Fillisteeksempler::" + +msgid "" +" listfile:list.txt read list from list.txt with one file pattern per line\n" +" listfile0:list.txt read list from list.txt with null byte delimiters\n" msgstr "" msgid "Mercurial supports several ways to specify individual revisions." @@ -11959,8 +12081,9 @@ msgstr "" msgid "" -":branches: String. The name of the branch on which the changeset was\n" -" committed. Will be empty if the branch name was default." +":branches: List of strings. The name of the branch on which the\n" +" changeset was committed. Will be empty if the branch name was\n" +" default." msgstr "" msgid ":children: List of strings. The children of the changeset." @@ -12344,6 +12467,10 @@ msgstr "(grensammenføjning, glem ikke at deponere)\n" #, python-format +msgid "config file %s not found!" +msgstr "konfigurationsfilen %s blev ikke fundet!" 
+ +#, python-format msgid "error reading %s/.hg/hgrc: %s\n" msgstr "fejl ved læsning af %s/.hg/hgrc: %s\n" @@ -12490,6 +12617,10 @@ msgstr ".hg/sharedpath peger på et ikke-eksisterende katalog %s" #, python-format +msgid "warning: ignoring unknown working parent %s!\n" +msgstr "advarsel: ignorerer ukendt forælder %s til arbejdsbiblioteket!\n" + +#, python-format msgid "%r cannot be used in a tag name" msgstr "%r kan ikke bruges i et mærkatnavnet" @@ -12596,34 +12727,28 @@ msgid "%d changesets found\n" msgstr "fandt %d ændringer\n" -msgid "bundling changes" -msgstr "bundter ændringer" - -msgid "chunks" -msgstr "" - -msgid "bundling manifests" -msgstr "bundter manifester" +msgid "bundling" +msgstr "bundter" + +msgid "manifests" +msgstr "manifester" #, python-format msgid "empty or missing revlog for %s" msgstr "tom eller manglende revlog for %s" -msgid "bundling files" -msgstr "bundter filer" - msgid "adding changesets\n" msgstr "tilføjer ændringer\n" +msgid "chunks" +msgstr "" + msgid "received changelog group is empty" msgstr "modtagen changelog-gruppe er tom" msgid "adding manifests\n" msgstr "tilføjer manifester\n" -msgid "manifests" -msgstr "manifester" - msgid "adding file changes\n" msgstr "tilføjer filændringer\n" @@ -12665,6 +12790,12 @@ msgid "transferred %s in %.1f seconds (%s/sec)\n" msgstr "overførte %s i %.1f sekunder (%s/sek)\n" +msgid "can't use TLS: Python SSL support not installed" +msgstr "kan ikke bruge TLS: Python SSL support er ikke installeret" + +msgid "(using smtps)\n" +msgstr "(bruger smtps)\n" + msgid "smtp.host not configured - cannot send mail" msgstr "" @@ -12672,11 +12803,8 @@ msgid "sending mail: smtp host %s, port %s\n" msgstr "sender mail: smtp host %s, port %s\n" -msgid "can't use TLS: Python SSL support not installed" -msgstr "kan ikke bruge TLS: Python SSL support er ikke installeret" - -msgid "(using tls)\n" -msgstr "(bruger tsl)\n" +msgid "(using starttls)\n" +msgstr "(bruger starttls)\n" #, python-format msgid "(authenticating to mail server as %s)\n" @@ -12717,6 +12845,10 @@ msgstr "ugyldig mønster" #, python-format +msgid "unable to read file list (%s)" +msgstr "kan ikke læse filliste (%s)" + +#, python-format msgid "diff context lines count must be an integer, not %r" msgstr "" @@ -13008,10 +13140,10 @@ #, python-format msgid "can't use %s here" -msgstr "" +msgstr "kan ikke bruge %s her" msgid "can't use a list in this context" -msgstr "" +msgstr "en liste kan ikke bruges i denne konteks" #, python-format msgid "not a function: %s" @@ -13028,7 +13160,7 @@ #. i18n: "id" is a keyword msgid "id requires a string" -msgstr "" +msgstr "id kræver en streng" msgid "" "``rev(number)``\n" @@ -13037,29 +13169,30 @@ #. i18n: "rev" is a keyword msgid "rev requires one argument" -msgstr "" +msgstr "rev kræver et argument" #. i18n: "rev" is a keyword msgid "rev requires a number" -msgstr "" +msgstr "rev kræver et tal" #. i18n: "rev" is a keyword msgid "rev expects a number" msgstr "rev forventer et revisionsnummer" msgid "" -"``p1(set)``\n" -" First parent of changesets in set." -msgstr "" - -msgid "" -"``p2(set)``\n" -" Second parent of changesets in set." -msgstr "" - -msgid "" -"``parents(set)``\n" -" The set of all parents for all changesets in set." +"``p1([set])``\n" +" First parent of changesets in set, or the working directory." +msgstr "" + +msgid "" +"``p2([set])``\n" +" Second parent of changesets in set, or the working directory." 
+msgstr "" + +msgid "" +"``parents([set])``\n" +" The set of all parents for all changesets in set, or the working " +"directory." msgstr "" msgid "" @@ -13322,20 +13455,23 @@ "``tag(name)``\n" " The specified tag by name, or all tagged revisions if no name is given." msgstr "" +"``tag(navn)``\n" +" Den navngivne mærkat eller alle revisioner med en mærkat hvis der\n" +" ikke angives noget navn." #. i18n: "tag" is a keyword msgid "tag takes one or no arguments" -msgstr "" +msgstr "tag tager et eller to argumenter" #. i18n: "tag" is a keyword msgid "the argument to tag must be a string" -msgstr "" +msgstr "argumentet til tag skal være en streng" msgid "can't negate that" msgstr "" msgid "not a symbol" -msgstr "" +msgstr "ikke et symbol" msgid "empty query" msgstr "tomt forespørgsel" @@ -13435,6 +13571,10 @@ msgstr "ukendt underdepottype %s" #, python-format +msgid "archiving (%s)" +msgstr "arkiverer (%s)" + +#, python-format msgid "warning: error \"%s\" in subrepository \"%s\"\n" msgstr "advarsel: fejl \"%s\" i underdepot \"%s\"\n" @@ -13458,6 +13598,39 @@ msgstr "fjerner ikke depotet %s fordi det er ændret.\n" #, python-format +msgid "cloning subrepo %s\n" +msgstr "kloner underdepot %s\n" + +#, python-format +msgid "pulling subrepo %s\n" +msgstr "hiver underdepot %s\n" + +#, python-format +msgid "revision %s does not exist in subrepo %s\n" +msgstr "revision %s findes ikke i underdepot %s\n" + +#, python-format +msgid "checking out detached HEAD in subrepo %s\n" +msgstr "" + +msgid "check out a git branch if you intend to make changes\n" +msgstr "" + +#, python-format +msgid "unrelated git branch checked out in subrepo %s\n" +msgstr "" + +#, python-format +msgid "pushing branch %s of subrepo %s\n" +msgstr "skubber gren %s af underdepot %s\n" + +#, python-format +msgid "" +"no branch checked out in subrepo %s\n" +"cannot push revision %s" +msgstr "" + +#, python-format msgid "%s, line %s: %s\n" msgstr "%s, linie %s: %s\n" @@ -13471,22 +13644,36 @@ msgid ".hg/tags.cache is corrupt, rebuilding it\n" msgstr "" +#, python-format +msgid "unknown method '%s'" +msgstr "ukendt metode '%s'" + +msgid "expected a symbol" +msgstr "forventede et symbol" + +#, python-format +msgid "unknown function '%s'" +msgstr "ukendt funktion '%s'" + +msgid "expected template specifier" +msgstr "" + +#, python-format +msgid "filter %s expects one argument" +msgstr "filter %s kræver et argument" + msgid "unmatched quotes" msgstr "" #, python-format -msgid "error expanding '%s%%%s'" -msgstr "fejl ved ekspansion af '%s%%%s'" - -#, python-format -msgid "unknown filter '%s'" -msgstr "ukendt filter '%s'" - -#, python-format msgid "style not found: %s" msgstr "" #, python-format +msgid "\"%s\" not in template map" +msgstr "\"%s\" er ikke i skabelon-fil" + +#, python-format msgid "template file %s: %s" msgstr "skabelon-fil %s: %s" @@ -13577,6 +13764,9 @@ msgid "ignoring invalid [auth] key '%s'\n" msgstr "ignorerer ugyldig [auth] nøgle '%s'\n" +msgid "kb" +msgstr "" + msgid "certificate checking requires Python 2.6" msgstr "" @@ -13588,10 +13778,15 @@ msgstr "certifikatet er for %s" msgid "no commonName found in certificate" -msgstr "" +msgstr "fandt ikke noget commonName i certifikatet" #, python-format msgid "%s certificate error: %s" +msgstr "%s certifikatfejl: %s" + +#, python-format +msgid "" +"warning: %s certificate not verified (check web.cacerts config setting)\n" msgstr "" #, python-format @@ -13599,6 +13794,10 @@ msgstr "kommandoen '%s' fejlede: %s" #, python-format +msgid "path ends in directory separator: %s" 
+msgstr "" + +#, python-format msgid "path contains illegal component: %s" msgstr "stien indeholder ugyldig komponent: %s" @@ -13686,7 +13885,7 @@ #, python-format msgid "no port number associated with service '%s'" -msgstr "" +msgstr "der er ikke knyttet noget portnummer til servicen '%s'" msgid "cannot verify bundle or remote repos" msgstr "kan ikke verificere bundt eller fjerndepoter" @@ -13743,7 +13942,7 @@ msgstr "duplikeret revision %d (%d)" msgid "abandoned transaction found - run hg recover\n" -msgstr "" +msgstr "fandt efterladt transaktion - kør hg recover\n" #, python-format msgid "repository uses revlog format %d\n" @@ -13777,7 +13976,7 @@ msgstr "krydstjekker filer i ændringer og manifester\n" msgid "crosschecking" -msgstr "" +msgstr "krydstjekker" #, python-format msgid "changeset refers to unknown manifest %s" @@ -13805,7 +14004,7 @@ #, python-format msgid "%s not in manifests" -msgstr "" +msgstr "%s findes ikke i manifestet" #, python-format msgid "unpacked size is %s, %s expected"
--- a/i18n/polib.py Tue Feb 01 17:30:13 2011 -0600 +++ b/i18n/polib.py Tue Feb 01 17:52:25 2011 -0600 @@ -105,7 +105,7 @@ ... finally: ... os.unlink(tmpf) """ - if kwargs.get('autodetect_encoding', True) == True: + if kwargs.get('autodetect_encoding', True): enc = detect_encoding(fpath) else: enc = kwargs.get('encoding', default_encoding) @@ -159,7 +159,7 @@ ... finally: ... os.unlink(tmpf) """ - if kwargs.get('autodetect_encoding', True) == True: + if kwargs.get('autodetect_encoding', True): enc = detect_encoding(fpath, True) else: enc = kwargs.get('encoding', default_encoding)
--- a/mercurial/archival.py Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/archival.py Tue Feb 01 17:52:25 2011 -0600 @@ -8,7 +8,7 @@ from i18n import _ from node import hex import cmdutil -import util +import util, encoding import cStringIO, os, stat, tarfile, time, zipfile import zlib, gzip @@ -245,7 +245,7 @@ if repo.ui.configbool("ui", "archivemeta", True): def metadata(): base = 'repo: %s\nnode: %s\nbranch: %s\n' % ( - repo[0].hex(), hex(node), ctx.branch()) + repo[0].hex(), hex(node), encoding.fromlocal(ctx.branch())) tags = ''.join('tag: %s\n' % t for t in ctx.tags() if repo.tagtype(t) == 'global') @@ -262,13 +262,18 @@ write('.hg_archival.txt', 0644, False, metadata) - for f in ctx: + total = len(ctx.manifest()) + repo.ui.progress(_('archiving'), 0, unit=_('files'), total=total) + for i, f in enumerate(ctx): ff = ctx.flags(f) write(f, 'x' in ff and 0755 or 0644, 'l' in ff, ctx[f].data) + repo.ui.progress(_('archiving'), i + 1, item=f, + unit=_('files'), total=total) + repo.ui.progress(_('archiving'), None) if subrepos: for subpath in ctx.substate: sub = ctx.sub(subpath) - sub.archive(archiver, prefix) + sub.archive(repo.ui, archiver, prefix) archiver.done()
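The archival.py hunk above wraps the file-writing loop in ``ui.progress()`` calls: position 0 before the loop, the running index plus the current item inside it, and ``None`` at the end to clear the bar. A self-contained sketch of that calling pattern, using a stub ui object in place of ``repo.ui``::

    class stubui(object):
        # stand-in for repo.ui, only to show the progress call pattern
        def progress(self, topic, pos, item="", unit="", total=None):
            if pos is None:
                print "%s: done" % topic
            else:
                print "%s: %s %d/%d %s" % (topic, item, pos, total, unit)

    def archivefiles(ui, files, write):
        total = len(files)
        ui.progress('archiving', 0, unit='files', total=total)
        for i, f in enumerate(files):
            write(f)
            ui.progress('archiving', i + 1, item=f, unit='files', total=total)
        ui.progress('archiving', None)  # a position of None clears the bar

    archivefiles(stubui(), ['a.txt', 'b.txt'], lambda f: None)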
--- a/mercurial/bdiff.c Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/bdiff.c Tue Feb 01 17:52:25 2011 -0600 @@ -57,12 +57,10 @@ int pos, len; }; +struct hunk; struct hunk { int a1, a2, b1, b2; -}; - -struct hunklist { - struct hunk *base, *head; + struct hunk *next; }; int splitlines(const char *a, int len, struct line **lr) @@ -223,8 +221,8 @@ return mk + mb; } -static void recurse(struct line *a, struct line *b, struct pos *pos, - int a1, int a2, int b1, int b2, struct hunklist *l) +static struct hunk *recurse(struct line *a, struct line *b, struct pos *pos, + int a1, int a2, int b1, int b2, struct hunk *l) { int i, j, k; @@ -232,51 +230,66 @@ /* find the longest match in this chunk */ k = longest_match(a, b, pos, a1, a2, b1, b2, &i, &j); if (!k) - return; + return l; /* and recurse on the remaining chunks on either side */ - recurse(a, b, pos, a1, i, b1, j, l); - l->head->a1 = i; - l->head->a2 = i + k; - l->head->b1 = j; - l->head->b2 = j + k; - l->head++; - /* tail-recursion didn't happen, so doing equivalent iteration */ + l = recurse(a, b, pos, a1, i, b1, j, l); + if (!l) + return NULL; + + l->next = (struct hunk *)malloc(sizeof(struct hunk)); + if (!l->next) + return NULL; + + l = l->next; + l->a1 = i; + l->a2 = i + k; + l->b1 = j; + l->b2 = j + k; + l->next = NULL; + + /* tail-recursion didn't happen, so do equivalent iteration */ a1 = i + k; b1 = j + k; } } -static struct hunklist diff(struct line *a, int an, struct line *b, int bn) +static int diff(struct line *a, int an, struct line *b, int bn, + struct hunk *base) { - struct hunklist l; struct hunk *curr; struct pos *pos; - int t; + int t, count = 0; /* allocate and fill arrays */ t = equatelines(a, an, b, bn); pos = (struct pos *)calloc(bn ? bn : 1, sizeof(struct pos)); - /* we can't have more matches than lines in the shorter file */ - l.head = l.base = (struct hunk *)malloc(sizeof(struct hunk) * - ((an<bn ? 
an:bn) + 1)); + + if (pos && t) { + /* generate the matching block list */ + + curr = recurse(a, b, pos, 0, an, 0, bn, base); + if (!curr) + return -1; - if (pos && l.base && t) { - /* generate the matching block list */ - recurse(a, b, pos, 0, an, 0, bn, &l); - l.head->a1 = l.head->a2 = an; - l.head->b1 = l.head->b2 = bn; - l.head++; + /* sentinel end hunk */ + curr->next = (struct hunk *)malloc(sizeof(struct hunk)); + if (!curr->next) + return -1; + curr = curr->next; + curr->a1 = curr->a2 = an; + curr->b1 = curr->b2 = bn; + curr->next = NULL; } free(pos); /* normalize the hunk list, try to push each hunk towards the end */ - for (curr = l.base; curr != l.head; curr++) { - struct hunk *next = curr + 1; + for (curr = base->next; curr; curr = curr->next) { + struct hunk *next = curr->next; int shift = 0; - if (next == l.head) + if (!next) break; if (curr->a2 == next->a1) @@ -297,16 +310,26 @@ next->a1 += shift; } - return l; + for (curr = base->next; curr; curr = curr->next) + count++; + return count; +} + +static void freehunks(struct hunk *l) +{ + struct hunk *n; + for (; l; l = n) { + n = l->next; + free(l); + } } static PyObject *blocks(PyObject *self, PyObject *args) { PyObject *sa, *sb, *rl = NULL, *m; struct line *a, *b; - struct hunklist l = {NULL, NULL}; - struct hunk *h; - int an, bn, pos = 0; + struct hunk l, *h; + int an, bn, count, pos = 0; if (!PyArg_ParseTuple(args, "SS:bdiff", &sa, &sb)) return NULL; @@ -317,12 +340,16 @@ if (!a || !b) goto nomem; - l = diff(a, an, b, bn); - rl = PyList_New(l.head - l.base); - if (!l.head || !rl) + l.next = NULL; + count = diff(a, an, b, bn, &l); + if (count < 0) goto nomem; - for (h = l.base; h != l.head; h++) { + rl = PyList_New(count); + if (!rl) + goto nomem; + + for (h = l.next; h; h = h->next) { m = Py_BuildValue("iiii", h->a1, h->a2, h->b1, h->b2); PyList_SetItem(rl, pos, m); pos++; @@ -331,7 +358,7 @@ nomem: free(a); free(b); - free(l.base); + freehunks(l.next); return rl ? rl : PyErr_NoMemory(); } @@ -340,10 +367,9 @@ char *sa, *sb; PyObject *result = NULL; struct line *al, *bl; - struct hunklist l = {NULL, NULL}; - struct hunk *h; + struct hunk l, *h; char encode[12], *rb; - int an, bn, len = 0, la, lb; + int an, bn, len = 0, la, lb, count; if (!PyArg_ParseTuple(args, "s#s#:bdiff", &sa, &la, &sb, &lb)) return NULL; @@ -353,13 +379,14 @@ if (!al || !bl) goto nomem; - l = diff(al, an, bl, bn); - if (!l.head) + l.next = NULL; + count = diff(al, an, bl, bn, &l); + if (count < 0) goto nomem; /* calculate length of output */ la = lb = 0; - for (h = l.base; h != l.head; h++) { + for (h = l.next; h; h = h->next) { if (h->a1 != la || h->b1 != lb) len += 12 + bl[h->b1].l - bl[lb].l; la = h->a2; @@ -375,7 +402,7 @@ rb = PyBytes_AsString(result); la = lb = 0; - for (h = l.base; h != l.head; h++) { + for (h = l.next; h; h = h->next) { if (h->a1 != la || h->b1 != lb) { len = bl[h->b1].l - bl[lb].l; *(uint32_t *)(encode) = htonl(al[la].l - al->l); @@ -392,7 +419,7 @@ nomem: free(al); free(bl); - free(l.base); + freehunks(l.next); return result ? result : PyErr_NoMemory(); }
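The rewritten bdiff.c keeps the same external result - matching blocks reported as ``(a1, a2, b1, b2)`` line ranges - but collects them in a singly linked hunk list ended by a sentinel instead of a preallocated array. As a loose, pure-Python illustration of that block representation only (difflib's algorithm is not bdiff's)::

    import difflib

    a = ['one\n', 'two\n', 'three\n']
    b = ['one\n', 'two point five\n', 'three\n']

    blocks = []
    for m in difflib.SequenceMatcher(None, a, b).get_matching_blocks():
        # convert difflib's (i, j, n) matches into bdiff-style
        # (a1, a2, b1, b2) ranges; the final zero-length match plays the
        # same role as the sentinel end hunk in the C code
        blocks.append((m[0], m[0] + m[2], m[1], m[1] + m[2]))

    print blocks  # [(0, 1, 0, 1), (2, 3, 2, 3), (3, 3, 3, 3)]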
--- a/mercurial/bundlerepo.py Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/bundlerepo.py Tue Feb 01 17:52:25 2011 -0600 @@ -174,7 +174,7 @@ self._url = 'bundle:' + bundlename self.tempfile = None - f = open(bundlename, "rb") + f = util.posixfile(bundlename, "rb") self.bundle = changegroup.readbundle(f, bundlename) if self.bundle.compressed(): fdtemp, temp = tempfile.mkstemp(prefix="hg-bundle-", @@ -192,7 +192,7 @@ finally: fptemp.close() - f = open(self.tempfile, "rb") + f = util.posixfile(self.tempfile, "rb") self.bundle = changegroup.readbundle(f, bundlename) # dict with the mapping 'filename' -> position in the bundle
--- a/mercurial/cmdutil.py Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/cmdutil.py Tue Feb 01 17:52:25 2011 -0600 @@ -147,6 +147,11 @@ # attempt to parse old-style ranges first to deal with # things like old-tag which contain query metacharacters try: + if isinstance(spec, int): + seen.add(spec) + l.append(spec) + continue + if revrangesep in spec: start, end = spec.split(revrangesep, 1) start = revfix(repo, start, 0) @@ -228,7 +233,8 @@ writable = 'w' in mode or 'a' in mode if not pat or pat == '-': - return writable and sys.stdout or sys.stdin + fp = writable and sys.stdout or sys.stdin + return os.fdopen(os.dup(fp.fileno()), mode) if hasattr(pat, 'write') and writable: return pat if hasattr(pat, 'read') and 'r' in mode: @@ -694,6 +700,8 @@ for chunk in patch.diff(repo, prev, node, opts=opts): fp.write(chunk) + fp.flush() + for seqno, rev in enumerate(revs): single(rev, seqno + 1, fp) @@ -796,7 +804,6 @@ branch = ctx.branch() # don't show the default branch name if branch != 'default': - branch = encoding.tolocal(branch) self.ui.write(_("branch: %s\n") % branch, label='log.branch') for tag in self.repo.nodetags(changenode): @@ -1352,8 +1359,7 @@ if ctx.p2(): edittext.append(_("HG: branch merge")) if ctx.branch(): - edittext.append(_("HG: branch '%s'") - % encoding.tolocal(ctx.branch())) + edittext.append(_("HG: branch '%s'") % ctx.branch()) edittext.extend([_("HG: subrepo %s") % s for s in subs]) edittext.extend([_("HG: added %s") % f for f in added]) edittext.extend([_("HG: changed %s") % f for f in modified])
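Two small behaviour changes in the cmdutil.py hunks are easy to miss: exported patches are flushed after each revision, and ``make_file`` now returns a file object over a duplicated descriptor rather than ``sys.stdout``/``sys.stdin`` themselves, so callers may close what they get back without closing the process's standard streams. A minimal sketch of that second idiom (the helper name is made up)::

    import os, sys

    def stdiocopy(mode='wb'):
        # duplicate the underlying descriptor so closing the returned
        # file object leaves the real sys.stdout/sys.stdin untouched
        if 'w' in mode or 'a' in mode:
            fp = sys.stdout
        else:
            fp = sys.stdin
        return os.fdopen(os.dup(fp.fileno()), mode)

    out = stdiocopy('wb')
    out.write('written via a duplicated descriptor\n')
    out.close()                      # sys.stdout itself is still open
    sys.stdout.write('still writable\n')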
--- a/mercurial/commands.py Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/commands.py Tue Feb 01 17:52:25 2011 -0600 @@ -126,7 +126,7 @@ lastfunc = funcmap[-1] funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1]) - ctx = repo[opts.get('rev')] + ctx = cmdutil.revsingle(repo, opts.get('rev')) m = cmdutil.match(repo, pats, opts) follow = not opts.get('no_follow') for abs in ctx.walk(m): @@ -178,7 +178,7 @@ Returns 0 on success. ''' - ctx = repo[opts.get('rev')] + ctx = cmdutil.revsingle(repo, opts.get('rev')) if not ctx: raise util.Abort(_('no working directory: please specify a revision')) node = ctx.node() @@ -243,7 +243,7 @@ opts['date'] = util.parsedate(date) cmdutil.bail_if_changed(repo) - node = repo.lookup(rev) + node = cmdutil.revsingle(repo, rev).node() op1, op2 = repo.dirstate.parents() a = repo.changelog.ancestor(op1, node) @@ -408,7 +408,8 @@ raise util.Abort(_("%s killed") % command) else: transition = "bad" - ctx = repo[rev or '.'] + ctx = cmdutil.revsingle(repo, rev) + rev = None # clear for future iterations state[transition].append(ctx.node()) ui.status(_('Changeset %d:%s: %s\n') % (ctx, ctx, transition)) check_state(state, interactive=False) @@ -487,15 +488,14 @@ repo.dirstate.setbranch(label) ui.status(_('reset working directory to branch %s\n') % label) elif label: - utflabel = encoding.fromlocal(label) - if not opts.get('force') and utflabel in repo.branchtags(): + if not opts.get('force') and label in repo.branchtags(): if label not in [p.branch() for p in repo.parents()]: raise util.Abort(_('a branch of the same name already exists' " (use 'hg update' to switch to it)")) - repo.dirstate.setbranch(utflabel) + repo.dirstate.setbranch(label) ui.status(_('marked working directory as branch %s\n') % label) else: - ui.write("%s\n" % encoding.tolocal(repo.dirstate.branch())) + ui.write("%s\n" % repo.dirstate.branch()) def branches(ui, repo, active=False, closed=False): """list repository named branches @@ -524,9 +524,8 @@ for isactive, node, tag in branches: if (not active) or isactive: - encodedtag = encoding.tolocal(tag) if ui.quiet: - ui.write("%s\n" % encodedtag) + ui.write("%s\n" % tag) else: hn = repo.lookup(node) if isactive: @@ -542,10 +541,10 @@ notice = _(' (inactive)') if tag == repo.dirstate.branch(): label = 'branches.current' - rev = str(node).rjust(31 - encoding.colwidth(encodedtag)) + rev = str(node).rjust(31 - encoding.colwidth(tag)) rev = ui.label('%s:%s' % (rev, hexfunc(hn)), 'log.changeset') - encodedtag = ui.label(encodedtag, label) - ui.write("%s %s%s\n" % (encodedtag, rev, notice)) + tag = ui.label(tag, label) + ui.write("%s %s%s\n" % (tag, rev, notice)) def bundle(ui, repo, fname, dest=None, **opts): """create a changegroup file @@ -572,11 +571,14 @@ Returns 0 on success, 1 if no changes found. """ - revs = opts.get('rev') or None + revs = None + if 'rev' in opts: + revs = cmdutil.revrange(repo, opts['rev']) + if opts.get('all'): base = ['null'] else: - base = opts.get('base') + base = cmdutil.revrange(repo, opts.get('base')) if base: if dest: raise util.Abort(_("--base is incompatible with specifying " @@ -741,7 +743,7 @@ """commit the specified files or all outstanding changes Commit changes to the given files into the repository. Unlike a - centralized RCS, this operation is a local operation. See + centralized SCM, this operation is a local operation. See :hg:`push` for a way to actively distribute your changes. 
If a list of files is omitted, all changes reported by :hg:`status` @@ -1026,7 +1028,7 @@ def debugrebuildstate(ui, repo, rev="tip"): """rebuild the dirstate as it would look like for the given revision""" - ctx = repo[rev] + ctx = cmdutil.revsingle(repo, rev) wlock = repo.wlock() try: repo.dirstate.rebuild(ctx.node(), ctx.manifest()) @@ -1116,7 +1118,7 @@ key, old, new = keyinfo r = target.pushkey(namespace, key, old, new) ui.status(str(r) + '\n') - return not(r) + return not r else: for k, v in target.listkeys(namespace).iteritems(): ui.write("%s\t%s\n" % (k.encode('string-escape'), @@ -1140,12 +1142,12 @@ Returns 0 on success. """ - if not rev2: - rev2 = hex(nullid) + r1 = cmdutil.revsingle(repo, rev1).node() + r2 = cmdutil.revsingle(repo, rev2, 'null').node() wlock = repo.wlock() try: - repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2)) + repo.dirstate.setparents(r1, r2) finally: wlock.release() @@ -1174,9 +1176,8 @@ ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f)) def debugsub(ui, repo, rev=None): - if rev == '': - rev = None - for k, v in sorted(repo[rev].substate.items()): + ctx = cmdutil.revsingle(repo, rev, None) + for k, v in sorted(ctx.substate.items()): ui.write('path %s\n' % k) ui.write(' source %s\n' % v[0]) ui.write(' revision %s\n' % v[1]) @@ -1435,7 +1436,7 @@ def debugrename(ui, repo, file1, *pats, **opts): """dump rename information""" - ctx = repo[opts.get('rev')] + ctx = cmdutil.revsingle(repo, opts.get('rev')) m = cmdutil.match(repo, (file1,) + pats, opts) for abs in ctx.walk(m): fctx = ctx[abs] @@ -1808,10 +1809,9 @@ Returns 0 if matching heads are found, 1 if not. """ - if opts.get('rev'): - start = repo.lookup(opts['rev']) - else: - start = None + start = None + if 'rev' in opts: + start = cmdutil.revsingle(repo, opts['rev'], None).node() if opts.get('topo'): heads = [repo[h] for h in repo.heads(start)] @@ -1828,8 +1828,7 @@ heads += [repo[h] for h in ls if rev(h) in descendants] if branchrevs: - decode, encode = encoding.fromlocal, encoding.tolocal - branches = set(repo[decode(br)].branch() for br in branchrevs) + branches = set(repo[br].branch() for br in branchrevs) heads = [h for h in heads if h.branch() in branches] if not opts.get('closed'): @@ -1842,7 +1841,7 @@ if branchrevs: haveheads = set(h.branch() for h in heads) if branches - haveheads: - headless = ', '.join(encode(b) for b in branches - haveheads) + headless = ', '.join(b for b in branches - haveheads) msg = _('no open branch heads found on branches %s') if opts.get('rev'): msg += _(' (started at %s)' % opts['rev']) @@ -2035,7 +2034,7 @@ 'extensions\n')) def helpextcmd(name): - cmd, ext, mod = extensions.disabledcmd(name, ui.config('ui', 'strict')) + cmd, ext, mod = extensions.disabledcmd(ui, name, ui.config('ui', 'strict')) doc = gettext(mod.__doc__).splitlines()[0] msg = help.listexts(_("'%s' is provided by the following " @@ -2200,14 +2199,14 @@ output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]), (changed) and "+" or "")) else: - ctx = repo[rev] + ctx = cmdutil.revsingle(repo, rev) if default or id: output = [hexfunc(ctx.node())] if num: output.append(str(ctx.rev())) if repo.local() and default and not ui.quiet: - b = encoding.tolocal(ctx.branch()) + b = ctx.branch() if b != 'default': output.append("(%s)" % b) @@ -2217,7 +2216,7 @@ output.append(t) if branch: - output.append(encoding.tolocal(ctx.branch())) + output.append(ctx.branch()) if tags: output.extend(ctx.tags()) @@ -2279,6 +2278,7 @@ d = opts["base"] strip = opts["strip"] wlock = lock = None + 
msgs = [] def tryone(ui, hunk): tmpname, message, user, date, branch, nodeid, p1, p2 = \ @@ -2329,7 +2329,10 @@ finally: files = cmdutil.updatedir(ui, repo, files, similarity=sim / 100.0) - if not opts.get('no_commit'): + if opts.get('no_commit'): + if message: + msgs.append(message) + else: if opts.get('exact'): m = None else: @@ -2378,6 +2381,8 @@ if not haspatch: raise util.Abort(_('no diffs found')) + if msgs: + repo.opener('last-message.txt', 'wb').write('\n* * *\n'.join(msgs)) finally: release(lock, wlock) @@ -2437,7 +2442,7 @@ Returns 0 if a match is found, 1 otherwise. """ end = opts.get('print0') and '\0' or '\n' - rev = opts.get('rev') or None + rev = cmdutil.revsingle(repo, opts.get('rev'), None).node() ret = 1 m = cmdutil.match(repo, pats, opts, default='relglob') @@ -2572,7 +2577,7 @@ node = rev decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '} - ctx = repo[node] + ctx = cmdutil.revsingle(repo, node) for f in ctx: if ui.debugflag: ui.write("%40s " % hex(ctx.manifest()[f])) @@ -2615,7 +2620,7 @@ node = opts.get('rev') if not node: - branch = repo.changectx(None).branch() + branch = repo[None].branch() bheads = repo.branchheads(branch) if len(bheads) > 2: raise util.Abort(_( @@ -2641,6 +2646,8 @@ raise util.Abort(_('working dir not at a head rev - ' 'use "hg update" or merge with an explicit rev')) node = parent == bheads[0] and bheads[-1] or bheads[0] + else: + node = cmdutil.revsingle(repo, node).node() if opts.get('preview'): # find nodes that are ancestors of p2 but not of p1 @@ -2686,11 +2693,8 @@ Returns 0 on success. """ - rev = opts.get('rev') - if rev: - ctx = repo[rev] - else: - ctx = repo[None] + + ctx = cmdutil.revsingle(repo, opts.get('rev'), None) if file_: m = cmdutil.match(repo, (file_,), opts) @@ -3098,15 +3102,16 @@ raise util.Abort(_("you can't specify a revision and a date")) opts["rev"] = cmdutil.finddate(ui, repo, opts["date"]) + parent, p2 = repo.dirstate.parents() + if not opts.get('rev') and p2 != nullid: + raise util.Abort(_('uncommitted merge - ' + 'use "hg update", see "hg help revert"')) + if not pats and not opts.get('all'): raise util.Abort(_('no files or directories specified; ' 'use --all to revert the whole repo')) - parent, p2 = repo.dirstate.parents() - if not opts.get('rev') and p2 != nullid: - raise util.Abort(_('uncommitted merge - please provide a ' - 'specific revision')) - ctx = repo[opts.get('rev')] + ctx = cmdutil.revsingle(repo, opts.get('rev')) node = ctx.node() mf = ctx.manifest() if node == parent: @@ -3245,7 +3250,7 @@ continue audit_path(f) try: - util.unlink(repo.wjoin(f)) + util.unlinkpath(repo.wjoin(f)) except OSError: pass repo.dirstate.remove(f) @@ -3726,7 +3731,7 @@ bheads = repo.branchheads() if not opts.get('force') and bheads and p1 not in bheads: raise util.Abort(_('not at a branch head (use -f to force)')) - r = repo[rev_].node() + r = cmdutil.revsingle(repo, rev_).node() if not message: # we don't translate commit messages
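A recurring change in the commands.py hunks is replacing ad hoc ``repo[rev]`` and ``repo.lookup(rev)`` calls with ``cmdutil.revsingle(repo, rev)``, sometimes with an explicit default such as ``None`` for the working directory. The helper's implementation is not part of this changeset; the toy below only approximates the calling convention visible at the call sites and is not Mercurial's code::

    def revsingle(repo, spec, default='.'):
        # guess at the contract: an empty spec falls back to the
        # default, anything else must resolve to a single changeset
        if spec is None or spec == '':
            spec = default
        return repo[spec]

    # toy stand-in for a repository's changeset lookup
    repo = {'.': 'working parent', None: 'working ctx', 'tip': 'tip ctx'}
    print revsingle(repo, 'tip')        # tip ctx
    print revsingle(repo, None)         # working parent (default '.')
    print revsingle(repo, '', None)     # working ctx (explicit default)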
--- a/mercurial/config.py Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/config.py Tue Feb 01 17:52:25 2011 -0600 @@ -130,7 +130,7 @@ name = m.group(1) if sections and section not in sections: continue - if self.get(section, name) != None: + if self.get(section, name) is not None: del self._data[section][name] continue
--- a/mercurial/context.py Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/context.py Tue Feb 01 17:52:25 2011 -0600 @@ -7,7 +7,7 @@ from node import nullid, nullrev, short, hex from i18n import _ -import ancestor, bdiff, error, util, subrepo, patch +import ancestor, bdiff, error, util, subrepo, patch, encoding import os, errno, stat propertycache = util.propertycache @@ -109,7 +109,7 @@ def description(self): return self._changeset[4] def branch(self): - return self._changeset[5].get("branch") + return encoding.tolocal(self._changeset[5].get("branch")) def extra(self): return self._changeset[5] def tags(self): @@ -179,7 +179,7 @@ """ # deal with workingctxs n2 = c2._node - if n2 == None: + if n2 is None: n2 = c2._parents[0]._node n = self._repo.changelog.ancestor(self._node, n2) return changectx(self._repo, n) @@ -591,9 +591,8 @@ if extra: self._extra = extra.copy() if 'branch' not in self._extra: - branch = self._repo.dirstate.branch() try: - branch = branch.decode('UTF-8').encode('UTF-8') + branch = encoding.fromlocal(self._repo.dirstate.branch()) except UnicodeDecodeError: raise util.Abort(_('branch name not in UTF-8!')) self._extra['branch'] = branch @@ -603,6 +602,9 @@ def __str__(self): return str(self._parents[0]) + "+" + def __repr__(self): + return "<workingctx %s>" % str(self) + def __nonzero__(self): return True @@ -712,7 +714,7 @@ assert self._clean is not None # must call status first return self._clean def branch(self): - return self._extra['branch'] + return encoding.tolocal(self._extra['branch']) def extra(self): return self._extra @@ -827,7 +829,7 @@ if unlink: for f in list: try: - util.unlink(self._repo.wjoin(f)) + util.unlinkpath(self._repo.wjoin(f)) except OSError, inst: if inst.errno != errno.ENOENT: raise @@ -902,6 +904,9 @@ def __str__(self): return "%s@%s" % (self.path(), self._changectx) + def __repr__(self): + return "<workingfilectx %s>" % str(self) + def data(self): return self._repo.wread(self._path) def renamed(self): @@ -1042,7 +1047,7 @@ def clean(self): return self._status[6] def branch(self): - return self._extra['branch'] + return encoding.tolocal(self._extra['branch']) def extra(self): return self._extra def flags(self, f):
--- a/mercurial/demandimport.py Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/demandimport.py Tue Feb 01 17:52:25 2011 -0600 @@ -78,10 +78,10 @@ self._load() setattr(self._module, attr, val) -def _demandimport(name, globals=None, locals=None, fromlist=None, level=None): +def _demandimport(name, globals=None, locals=None, fromlist=None, level=-1): if not locals or name in ignore or fromlist == ('*',): # these cases we can't really delay - if level is None: + if level == -1: return _origimport(name, globals, locals, fromlist) else: return _origimport(name, globals, locals, fromlist, level) @@ -91,7 +91,10 @@ base, rest = name.split('.', 1) # email.__init__ loading email.mime if globals and globals.get('__name__', None) == base: - return _origimport(name, globals, locals, fromlist) + if level != -1: + return _origimport(name, globals, locals, fromlist, level) + else: + return _origimport(name, globals, locals, fromlist) # if a is already demand-loaded, add b to its submodule list if base in locals: if isinstance(locals[base], _demandmod): @@ -99,7 +102,7 @@ return locals[base] return _demandmod(name, globals, locals) else: - if level is not None: + if level != -1: # from . import b,c,d or from .a import b,c,d return _origimport(name, globals, locals, fromlist, level) # from a import b,c,d @@ -111,7 +114,7 @@ mod = getattr(mod, comp) for x in fromlist: # set requested submodules for demand load - if not(hasattr(mod, x)): + if not hasattr(mod, x): setattr(mod, x, _demandmod(x, mod.__dict__, locals)) return mod
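The demandimport change switches the wrapper's ``level`` default from ``None`` to ``-1``, which is the default the builtin ``__import__`` uses on Python 2 (try a relative import, then an absolute one), and forwards it explicitly in every branch. A minimal sketch of wrapping the importer while passing ``level`` through - a tracing hook, not demandimport itself::

    import __builtin__

    _origimport = __builtin__.__import__

    def _tracingimport(name, globals=None, locals=None, fromlist=None, level=-1):
        # forward level unchanged so the wrapped call keeps the
        # builtin's relative-then-absolute semantics
        print 'import %s (fromlist=%r, level=%d)' % (name, fromlist, level)
        return _origimport(name, globals or {}, locals or {}, fromlist or [], level)

    __builtin__.__import__ = _tracingimport
    try:
        import textwrap              # routed through the tracing wrapper
    finally:
        __builtin__.__import__ = _origimport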
--- a/mercurial/dirstate.py Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/dirstate.py Tue Feb 01 17:52:25 2011 -0600 @@ -7,7 +7,7 @@ from node import nullid from i18n import _ -import util, ignore, osutil, parsers +import util, ignore, osutil, parsers, encoding import struct, os, stat, errno import cStringIO @@ -36,7 +36,7 @@ class dirstate(object): - def __init__(self, opener, ui, root): + def __init__(self, opener, ui, root, validate): '''Create a new dirstate object. opener is an open()-like callable that can be used to open the @@ -44,6 +44,7 @@ the dirstate. ''' self._opener = opener + self._validate = validate self._root = root self._rootdir = os.path.join(root, '') self._dirty = False @@ -197,10 +198,10 @@ yield x def parents(self): - return self._pl + return [self._validate(p) for p in self._pl] def branch(self): - return self._branch + return encoding.tolocal(self._branch) def setparents(self, p1, p2=nullid): self._dirty = self._dirtypl = True @@ -209,8 +210,8 @@ def setbranch(self, branch): if branch in ['tip', '.', 'null']: raise util.Abort(_('the name \'%s\' is reserved') % branch) - self._branch = branch - self._opener("branch", "w").write(branch + '\n') + self._branch = encoding.fromlocal(branch) + self._opener("branch", "w").write(self._branch + '\n') def _read(self): self._map = {} @@ -229,7 +230,8 @@ self._pl = p def invalidate(self): - for a in "_map _copymap _foldmap _branch _pl _dirs _ignore".split(): + for a in ("_map", "_copymap", "_foldmap", "_branch", "_pl", "_dirs", + "_ignore"): if a in self.__dict__: delattr(self, a) self._dirty = False
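Besides the branch-name encoding changes, the dirstate constructor now takes a ``validate`` callable and maps every stored parent through it in ``parents()``; elsewhere in this changeset localrepo supplies a validator that warns about an unknown working parent and substitutes the null id. A standalone sketch of that callback pattern, with toy node values::

    NULLID = '\0' * 20

    class toydirstate(object):
        # the consumer injects the policy for parents it does not know
        def __init__(self, parents, validate):
            self._pl = parents
            self._validate = validate

        def parents(self):
            return [self._validate(p) for p in self._pl]

    def makevalidator(known):
        def validate(node):
            if node in known:
                return node
            print 'warning: ignoring unknown working parent %s!' % node.encode('hex')[:12]
            return NULLID
        return validate

    ds = toydirstate(['\x11' * 20, '\x22' * 20],
                     makevalidator(set(['\x11' * 20])))
    print [p.encode('hex')[:12] for p in ds.parents()]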
--- a/mercurial/discovery.py Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/discovery.py Tue Feb 01 17:52:25 2011 -0600 @@ -220,8 +220,6 @@ # - a local outgoing head descended from update # - a remote head that's known locally and not # ancestral to an outgoing head - # - # New named branches cannot be created without --force. # 1. Create set of branches involved in the push. branches = set(repo[n].branch() for n in outg) @@ -280,20 +278,30 @@ # 5. Check for new heads. # If there are more heads after the push than before, a suitable - # warning, depending on unsynced status, is displayed. + # error message, depending on unsynced status, is displayed. + error = None for branch in branches: - if len(newmap[branch]) > len(oldmap[branch]): + newhs = set(newmap[branch]) + oldhs = set(oldmap[branch]) + if len(newhs) > len(oldhs): + if error is None: + if branch: + error = _("push creates new remote heads " + "on branch '%s'!") % branch + else: + error = _("push creates new remote heads!") + if branch in unsynced: + hint = _("you should pull and merge or " + "use push -f to force") + else: + hint = _("did you forget to merge? " + "use push -f to force") if branch: - msg = _("push creates new remote heads " - "on branch '%s'!") % branch - else: - msg = _("push creates new remote heads!") - - if branch in unsynced: - hint = _("you should pull and merge or use push -f to force") - else: - hint = _("did you forget to merge? use push -f to force") - raise util.Abort(msg, hint=hint) + repo.ui.debug("new remote heads on branch '%s'\n" % branch) + for h in (newhs - oldhs): + repo.ui.debug("new remote head %s\n" % short(h)) + if error: + raise util.Abort(error, hint=hint) # 6. Check for unsynced changes on involved branches. if unsynced:
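The reworked check in discovery.py compares the per-branch head sets before and after the push, reports each would-be new head at debug level, and raises a single error whose hint depends on whether the branch is also unsynced. The core comparison, stripped of the repository plumbing, boils down to a set difference per branch::

    def grownbranches(oldmap, newmap):
        # {branch: heads that would be new on the remote} for every
        # branch whose head count increases, mirroring the check above
        grown = {}
        for branch, heads in newmap.iteritems():
            oldheads = set(oldmap.get(branch, ()))
            if len(heads) > len(oldheads):
                grown[branch] = set(heads) - oldheads
        return grown

    oldmap = {'default': ['n1'], 'stable': ['n2', 'n3']}
    newmap = {'default': ['n1', 'n4'], 'stable': ['n2', 'n3']}
    print grownbranches(oldmap, newmap)  # {'default': set(['n4'])}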
--- a/mercurial/encoding.py Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/encoding.py Tue Feb 01 17:52:25 2011 -0600 @@ -48,6 +48,16 @@ encodingmode = os.environ.get("HGENCODINGMODE", "strict") fallbackencoding = 'ISO-8859-1' +class localstr(str): + '''This class allows strings that are unmodified to be + round-tripped to the local encoding and back''' + def __new__(cls, u, l): + s = str.__new__(cls, l) + s._utf8 = u + return s + def __hash__(self): + return hash(self._utf8) # avoid collisions in local string space + def tolocal(s): """ Convert a string from internal UTF-8 to local encoding @@ -57,17 +67,45 @@ other character sets. We attempt to decode everything strictly using UTF-8, then Latin-1, and failing that, we use UTF-8 and replace unknown characters. + + The localstr class is used to cache the known UTF-8 encoding of + strings next to their local representation to allow lossless + round-trip conversion back to UTF-8. + + >>> u = 'foo: \\xc3\\xa4' # utf-8 + >>> l = tolocal(u) + >>> l + 'foo: ?' + >>> fromlocal(l) + 'foo: \\xc3\\xa4' + >>> u2 = 'foo: \\xc3\\xa1' + >>> d = { l: 1, tolocal(u2): 2 } + >>> d # no collision + {'foo: ?': 1, 'foo: ?': 2} + >>> 'foo: ?' in d + False + >>> l1 = 'foo: \\xe4' # historical latin1 fallback + >>> l = tolocal(l1) + >>> l + 'foo: ?' + >>> fromlocal(l) # magically in utf-8 + 'foo: \\xc3\\xa4' """ + for e in ('UTF-8', fallbackencoding): try: u = s.decode(e) # attempt strict decoding - return u.encode(encoding, "replace") + if e == 'UTF-8': + return localstr(s, u.encode(encoding, "replace")) + else: + return localstr(u.encode('UTF-8'), + u.encode(encoding, "replace")) except LookupError, k: raise error.Abort("%s, please check your locale settings" % k) except UnicodeDecodeError: pass u = s.decode("utf-8", "replace") # last ditch - return u.encode(encoding, "replace") + return u.encode(encoding, "replace") # can't round-trip def fromlocal(s): """ @@ -79,6 +117,11 @@ 'replace', which replaces unknown characters with a special Unicode character, and 'ignore', which drops the character. """ + + # can we do a lossless round-trip? + if isinstance(s, localstr): + return s._utf8 + try: return s.decode(encoding, encodingmode).encode("utf-8") except UnicodeDecodeError, inst:
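The new ``localstr`` class is the heart of this change: it stores the original UTF-8 bytes next to the (possibly lossy) local rendering so ``fromlocal()`` can round-trip the value exactly; the doctest in the hunk shows the intended behaviour. A stripped-down standalone version of the same idea, under different names (Mercurial's real implementation also overrides ``__hash__``, as above)::

    class roundtripstr(str):
        # keep the original UTF-8 bytes next to a lossy local form
        def __new__(cls, utf8, local):
            s = str.__new__(cls, local)
            s._utf8 = utf8
            return s

    def tolocal(s, localencoding='ascii'):
        u = s.decode('utf-8')
        return roundtripstr(s, u.encode(localencoding, 'replace'))

    def fromlocal(s):
        if isinstance(s, roundtripstr):
            return s._utf8           # lossless round-trip
        return s

    l = tolocal('caf\xc3\xa9')       # UTF-8 input shown to an ASCII locale
    print l                          # caf?
    print fromlocal(l) == 'caf\xc3\xa9'  # True: the UTF-8 bytes survive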
--- a/mercurial/extensions.py Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/extensions.py Tue Feb 01 17:52:25 2011 -0600 @@ -248,7 +248,7 @@ if name in paths: return _disabledhelp(paths[name]) -def disabledcmd(cmd, strict=False): +def disabledcmd(ui, cmd, strict=False): '''import disabled extensions until cmd is found. returns (cmdname, extname, doc)''' @@ -266,6 +266,10 @@ getattr(mod, 'cmdtable', {}), strict) except (error.AmbiguousCommand, error.UnknownCommand): return + except Exception: + ui.warn(_('warning: error finding commands in %s\n') % path) + ui.traceback() + return for c in aliases: if c.startswith(cmd): cmd = c
--- a/mercurial/filelog.py Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/filelog.py Tue Feb 01 17:52:25 2011 -0600 @@ -7,6 +7,17 @@ import revlog +def _parsemeta(text): + if not text.startswith('\1\n'): + return {} + s = text.index('\1\n', 2) + mt = text[2:s] + m = {} + for l in mt.splitlines(): + k, v = l.split(": ", 1) + m[k] = v + return m + class filelog(revlog.revlog): def __init__(self, opener, path): revlog.revlog.__init__(self, opener, @@ -19,18 +30,6 @@ s = t.index('\1\n', 2) return t[s + 2:] - def _readmeta(self, node): - t = self.revision(node) - if not t.startswith('\1\n'): - return {} - s = t.index('\1\n', 2) - mt = t[2:s] - m = {} - for l in mt.splitlines(): - k, v = l.split(": ", 1) - m[k] = v - return m - def add(self, text, meta, transaction, link, p1=None, p2=None): if meta or text.startswith('\1\n'): mt = ["%s: %s\n" % (k, v) for k, v in sorted(meta.iteritems())] @@ -40,7 +39,8 @@ def renamed(self, node): if self.parents(node)[0] != revlog.nullid: return False - m = self._readmeta(node) + t = self.revision(node) + m = _parsemeta(t) if m and "copy" in m: return (m["copy"], revlog.bin(m["copyrev"])) return False
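``_parsemeta`` reads the optional metadata header that ``filelog.add()`` writes in front of the file data: ``key: value`` lines bracketed by ``\1\n`` markers, which ``renamed()`` consults for the ``copy`` and ``copyrev`` keys. The same parsing logic with a small usage example (duplicated here only so the snippet runs on its own)::

    def parsemeta(text):
        # an optional header of "key: value" lines delimited by "\1\n"
        # markers may precede the stored file data
        if not text.startswith('\1\n'):
            return {}
        s = text.index('\1\n', 2)
        meta = {}
        for l in text[2:s].splitlines():
            k, v = l.split(": ", 1)
            meta[k] = v
        return meta

    revtext = '\1\ncopy: oldname.c\ncopyrev: 0123456789abcdef01234567\n\1\nint main() {}\n'
    print sorted(parsemeta(revtext).items())
    # [('copy', 'oldname.c'), ('copyrev', '0123456789abcdef01234567')]
    print parsemeta('plain data, no metadata header\n')  # {}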
--- a/mercurial/help/patterns.txt Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/help/patterns.txt Tue Feb 01 17:52:25 2011 -0600 @@ -20,6 +20,11 @@ To use a Perl/Python regular expression, start a name with ``re:``. Regexp pattern matching is anchored at the root of the repository. +To read name patterns from a file, use ``listfile:`` or ``listfile0:``. +The latter expects null delimited patterns while the former expects line +feeds. Each string read from the file is itself treated as a file +pattern. + Plain examples:: path:foo/bar a name bar in a directory named foo in the root @@ -39,3 +44,8 @@ Regexp examples:: re:.*\.c$ any name ending in ".c", anywhere in the repository + +File examples:: + + listfile:list.txt read list from list.txt with one file pattern per line + listfile0:list.txt read list from list.txt with null byte delimiters
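The matcher code that implements ``listfile:`` and ``listfile0:`` lives outside this hunk; the sketch below only covers the reading and splitting step the help text describes (line feeds versus NUL bytes), with a temporary file standing in for ``list.txt``::

    import os, tempfile

    def readpatterns(path, nulldelimited=False):
        # listfile: entries are separated by line feeds, listfile0: by
        # NUL bytes; each non-empty entry is itself a file pattern
        data = open(path, 'rb').read()
        if nulldelimited:
            sep = '\0'
        else:
            sep = '\n'
        return [p for p in data.split(sep) if p]

    fd, tmp = tempfile.mkstemp()
    os.write(fd, 'glob:*.c\nre:.*\\.h$\n')
    os.close(fd)
    print readpatterns(tmp)          # ['glob:*.c', 're:.*\\.h$']
    os.unlink(tmp)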
--- a/mercurial/help/urls.txt Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/help/urls.txt Tue Feb 01 17:52:25 2011 -0600 @@ -4,7 +4,7 @@ file://local/filesystem/path[#revision] http://[user[:pass]@]host[:port]/[path][#revision] https://[user[:pass]@]host[:port]/[path][#revision] - ssh://[user[:pass]@]host[:port]/[path][#revision] + ssh://[user@]host[:port]/[path][#revision] Paths in the local filesystem can either point to Mercurial repositories or to bundle files (as created by :hg:`bundle` or :hg:`
--- a/mercurial/hg.py Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/hg.py Tue Feb 01 17:52:25 2011 -0600 @@ -32,24 +32,22 @@ return revs, revs[0] branchmap = repo.branchmap() - def primary(butf8): - if butf8 == '.': + def primary(branch): + if branch == '.': if not lrepo or not lrepo.local(): raise util.Abort(_("dirstate branch not accessible")) - butf8 = lrepo.dirstate.branch() - if butf8 in branchmap: - revs.extend(node.hex(r) for r in reversed(branchmap[butf8])) + branch = lrepo.dirstate.branch() + if branch in branchmap: + revs.extend(node.hex(r) for r in reversed(branchmap[branch])) return True else: return False for branch in branches: - butf8 = encoding.fromlocal(branch) - if not primary(butf8): + if not primary(branch): raise error.RepoLookupError(_("unknown branch '%s'") % branch) if hashbranch: - butf8 = encoding.fromlocal(hashbranch) - if not primary(butf8): + if not primary(hashbranch): revs.append(hashbranch) return revs, revs[0] @@ -365,8 +363,7 @@ except error.RepoLookupError: continue bn = dest_repo[uprev].branch() - dest_repo.ui.status(_("updating to branch %s\n") - % encoding.tolocal(bn)) + dest_repo.ui.status(_("updating to branch %s\n") % bn) _update(dest_repo, uprev) return src_repo, dest_repo @@ -398,7 +395,8 @@ return stats[3] > 0 def merge(repo, node, force=None, remind=True): - """branch merge with node, resolving changes""" + """Branch merge with node, resolving changes. Return true if any + unresolved conflicts.""" stats = mergemod.update(repo, node, True, force, False) _showstats(repo, stats) if stats[3]:
--- a/mercurial/hgweb/webcommands.py Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/hgweb/webcommands.py Tue Feb 01 17:52:25 2011 -0600 @@ -550,7 +550,8 @@ "targetline": targetline, "line": l, "lineid": "l%d" % (lineno + 1), - "linenumber": "% 6d" % (lineno + 1)} + "linenumber": "% 6d" % (lineno + 1), + "revdate": f.date()} return tmpl("fileannotate", file=f,
--- a/mercurial/hook.py Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/hook.py Tue Feb 01 17:52:25 2011 -0600 @@ -92,6 +92,12 @@ for k, v in args.iteritems(): if hasattr(v, '__call__'): v = v() + if isinstance(v, dict): + # make the dictionary element order stable across Python + # implementations + v = ('{' + + ', '.join('%r: %r' % i for i in sorted(v.iteritems())) + + '}') env['HG_' + k.upper()] = v if repo:
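The hook.py hunk above stabilizes HG_* environment variables whose value is a dictionary: the items are rendered with their keys sorted, so the resulting string is identical across Python implementations whose dict iteration order differs. A standalone sketch of the same idea (stable_dict_repr is an illustrative name, not Mercurial's code):

    def stable_dict_repr(d):
        # render a dict as "{k1: v1, k2: v2}" with keys sorted, so the
        # string does not depend on how the dict iterates internally
        items = ', '.join('%r: %r' % (k, v) for k, v in sorted(d.items()))
        return '{' + items + '}'

    env_value = stable_dict_repr({'new': 'true', 'old': 'false'})
    # -> "{'new': 'true', 'old': 'false'}" regardless of insertion order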
--- a/mercurial/httprepo.py Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/httprepo.py Tue Feb 01 17:52:25 2011 -0600 @@ -160,7 +160,7 @@ break tempname = changegroup.writebundle(cg, None, type) - fp = url.httpsendfile(tempname, "rb") + fp = url.httpsendfile(self.ui, tempname, "rb") headers = {'Content-Type': 'application/mercurial-0.1'} try:
--- a/mercurial/localrepo.py Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/localrepo.py Tue Feb 01 17:52:25 2011 -0600 @@ -105,7 +105,7 @@ self._tags = None self._tagtypes = None - self._branchcache = None # in UTF-8 + self._branchcache = None self._branchcachetip = None self.nodetagscache = None self.filterpats = {} @@ -178,7 +178,19 @@ @propertycache def dirstate(self): - return dirstate.dirstate(self.opener, self.ui, self.root) + warned = [0] + def validate(node): + try: + r = self.changelog.rev(node) + return node + except error.LookupError: + if not warned[0]: + warned[0] = True + self.ui.warn(_("warning: ignoring unknown" + " working parent %s!\n") % short(node)) + return nullid + + return dirstate.dirstate(self.opener, self.ui, self.root, validate) def __getitem__(self, changeid): if changeid is None: @@ -424,11 +436,10 @@ bt[bn] = tip return bt - def _readbranchcache(self): partial = {} try: - f = self.opener("branchheads.cache") + f = self.opener(os.path.join("cache", "branchheads")) lines = f.read().split('\n') f.close() except (IOError, OSError): @@ -444,7 +455,8 @@ if not l: continue node, label = l.split(" ", 1) - partial.setdefault(label.strip(), []).append(bin(node)) + label = encoding.tolocal(label.strip()) + partial.setdefault(label, []).append(bin(node)) except KeyboardInterrupt: raise except Exception, inst: @@ -455,11 +467,12 @@ def _writebranchcache(self, branches, tip, tiprev): try: - f = self.opener("branchheads.cache", "w", atomictemp=True) + f = self.opener(os.path.join("cache", "branchheads"), "w", + atomictemp=True) f.write("%s %s\n" % (hex(tip), tiprev)) for label, nodes in branches.iteritems(): for node in nodes: - f.write("%s %s\n" % (hex(node), label)) + f.write("%s %s\n" % (hex(node), encoding.fromlocal(label))) f.rename() except (IOError, OSError): pass @@ -618,10 +631,6 @@ def wwrite(self, filename, data, flags): data = self._filter(self._decodefilterpats, filename, data) - try: - os.unlink(self.wjoin(filename)) - except OSError: - pass if 'l' in flags: self.wopener.symlink(data, filename) else: @@ -648,7 +657,8 @@ except IOError: ds = "" self.opener("journal.dirstate", "w").write(ds) - self.opener("journal.branch", "w").write(self.dirstate.branch()) + self.opener("journal.branch", "w").write( + encoding.fromlocal(self.dirstate.branch())) self.opener("journal.desc", "w").write("%d\n%s\n" % (len(self), desc)) renames = [(self.sjoin("journal"), self.sjoin("undo")), @@ -706,7 +716,7 @@ except IOError: self.ui.warn(_("Named branch could not be reset, " "current branch still is: %s\n") - % encoding.tolocal(self.dirstate.branch())) + % self.dirstate.branch()) self.invalidate() self.dirstate.invalidate() self.destroyed() @@ -724,7 +734,7 @@ self._branchcachetip = None def invalidate(self): - for a in "changelog manifest".split(): + for a in ("changelog", "manifest"): if a in self.__dict__: delattr(self, a) self.invalidatecaches() @@ -1208,8 +1218,7 @@ def heads(self, start=None): heads = self.changelog.heads(start) # sort the output in rev descending order - heads = [(-self.changelog.rev(h), h) for h in heads] - return [n for (r, n) in sorted(heads)] + return sorted(heads, key=self.changelog.rev, reverse=True) def branchheads(self, branch=None, start=None, closed=False): '''return a (possibly filtered) list of heads for the given branch @@ -1296,6 +1305,13 @@ finally: lock.release() + def checkpush(self, force, revs): + """Extensions can override this function if additional checks have + to be performed before pushing, or call it if they override push + command. 
+ """ + pass + def push(self, remote, force=False, revs=None, newbranch=False): '''Push outgoing changesets (limited by revs) from the current repository to remote. Return an integer: @@ -1312,6 +1328,7 @@ # unbundle assumes local user cannot lock remote repo (new ssh # servers, http servers). + self.checkpush(force, revs) lock = None unbundle = remote.capable('unbundle') if not unbundle: @@ -1404,9 +1421,6 @@ # Nor do we know which filenodes are missing. msng_filenode_set = {} - junk = mnfst.index[len(mnfst) - 1] # Get around a bug in lazyindex - junk = None - # A changeset always belongs to itself, so the changenode lookup # function for a changenode is identity. def identity(x): @@ -1494,8 +1508,13 @@ group = cl.group(msng_cl_lst, identity, collect) for cnt, chnk in enumerate(group): yield chnk - self.ui.progress(_('bundling changes'), cnt, unit=_('chunks')) - self.ui.progress(_('bundling changes'), None) + # revlog.group yields three entries per node, so + # dividing by 3 gives an approximation of how many + # nodes have been processed. + self.ui.progress(_('bundling'), cnt / 3, + unit=_('changesets')) + changecount = cnt / 3 + self.ui.progress(_('bundling'), None) prune(mnfst, msng_mnfst_set) add_extra_nodes(1, msng_mnfst_set) @@ -1507,10 +1526,17 @@ group = mnfst.group(msng_mnfst_lst, lambda mnode: msng_mnfst_set[mnode], filenode_collector(changedfiles)) + efiles = {} for cnt, chnk in enumerate(group): + if cnt % 3 == 1: + mnode = chnk[:20] + efiles.update(mnfst.readdelta(mnode)) yield chnk - self.ui.progress(_('bundling manifests'), cnt, unit=_('chunks')) - self.ui.progress(_('bundling manifests'), None) + # see above comment for why we divide by 3 + self.ui.progress(_('bundling'), cnt / 3, + unit=_('manifests'), total=changecount) + self.ui.progress(_('bundling'), None) + efiles = len(efiles) # These are no longer needed, dereference and toss the memory for # them. @@ -1524,8 +1550,7 @@ msng_filenode_set.setdefault(fname, {}) changedfiles.add(fname) # Go through all our files in order sorted by name. - cnt = 0 - for fname in sorted(changedfiles): + for idx, fname in enumerate(sorted(changedfiles)): filerevlog = self.file(fname) if not len(filerevlog): raise util.Abort(_("empty or missing revlog for %s") % fname) @@ -1548,13 +1573,16 @@ group = filerevlog.group(nodeiter, lambda fnode: missingfnodes[fnode]) for chnk in group: + # even though we print the same progress on + # most loop iterations, put the progress call + # here so that time estimates (if any) can be updated self.ui.progress( - _('bundling files'), cnt, item=fname, unit=_('chunks')) - cnt += 1 + _('bundling'), idx, item=fname, + unit=_('files'), total=efiles) yield chnk # Signal that no more groups are left. yield changegroup.closechunk() - self.ui.progress(_('bundling files'), None) + self.ui.progress(_('bundling'), None) if msng_cl_lst: self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source) @@ -1602,20 +1630,30 @@ collect = changegroup.collector(cl, mmfs, changedfiles) for cnt, chnk in enumerate(cl.group(nodes, identity, collect)): - self.ui.progress(_('bundling changes'), cnt, unit=_('chunks')) + # revlog.group yields three entries per node, so + # dividing by 3 gives an approximation of how many + # nodes have been processed. 
+ self.ui.progress(_('bundling'), cnt / 3, unit=_('changesets')) yield chnk - self.ui.progress(_('bundling changes'), None) + changecount = cnt / 3 + self.ui.progress(_('bundling'), None) mnfst = self.manifest nodeiter = gennodelst(mnfst) + efiles = {} for cnt, chnk in enumerate(mnfst.group(nodeiter, lookuplinkrev_func(mnfst))): - self.ui.progress(_('bundling manifests'), cnt, unit=_('chunks')) + if cnt % 3 == 1: + mnode = chnk[:20] + efiles.update(mnfst.readdelta(mnode)) + # see above comment for why we divide by 3 + self.ui.progress(_('bundling'), cnt / 3, + unit=_('manifests'), total=changecount) yield chnk - self.ui.progress(_('bundling manifests'), None) + efiles = len(efiles) + self.ui.progress(_('bundling'), None) - cnt = 0 - for fname in sorted(changedfiles): + for idx, fname in enumerate(sorted(changedfiles)): filerevlog = self.file(fname) if not len(filerevlog): raise util.Abort(_("empty or missing revlog for %s") % fname) @@ -1627,10 +1665,10 @@ lookup = lookuplinkrev_func(filerevlog) for chnk in filerevlog.group(nodeiter, lookup): self.ui.progress( - _('bundling files'), cnt, item=fname, unit=_('chunks')) - cnt += 1 + _('bundling'), idx, item=fname, + total=efiles, unit=_('files')) yield chnk - self.ui.progress(_('bundling files'), None) + self.ui.progress(_('bundling'), None) yield changegroup.closechunk() @@ -1643,6 +1681,8 @@ """Add the changegroup returned by source.read() to this repo. srctype is a string like 'push', 'pull', or 'unbundle'. url is the URL of the repo where this changegroup is coming from. + If lock is not None, the function takes ownership of the lock + and releases it after the changegroup is added. Return an integer summarizing the change to this repo: - nothing changed or no source: 0
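Two details in the localrepo.py changes above are worth spelling out. The dirstate property now installs a validate callback that maps an unknown working-directory parent to the null revision (warning once) instead of failing later. And the old per-revlog progress topics ('bundling changes', 'bundling manifests', 'bundling files') are folded into a single 'bundling' topic; since revlog.group() yields three chunks per node, the chunk counter divided by three approximates the number of nodes processed. A minimal sketch of that progress bookkeeping, assuming only a ui-like object with the progress(topic, pos, unit=..., total=...) method the patch itself calls:

    def progressgroup(ui, chunks, unit, total=None):
        # revlog.group() emits three chunks per node, so cnt // 3
        # approximates how many nodes have been bundled so far
        for cnt, chunk in enumerate(chunks):
            ui.progress('bundling', cnt // 3, unit=unit, total=total)
            yield chunk
        ui.progress('bundling', None)  # a position of None closes the topic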
--- a/mercurial/lock.py Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/lock.py Tue Feb 01 17:52:25 2011 -0600 @@ -113,7 +113,7 @@ # held, or can race and break valid lock. try: l = lock(self.f + '.break', timeout=0) - os.unlink(self.f) + util.unlink(self.f) l.release() except error.LockError: return locker @@ -126,7 +126,7 @@ if self.releasefn: self.releasefn() try: - os.unlink(self.f) + util.unlink(self.f) except OSError: pass
--- a/mercurial/mail.py Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/mail.py Tue Feb 01 17:52:25 2011 -0600 @@ -33,7 +33,17 @@ def _smtp(ui): '''build an smtp connection and return a function to send mail''' local_hostname = ui.config('smtp', 'local_hostname') - s = smtplib.SMTP(local_hostname=local_hostname) + tls = ui.config('smtp', 'tls', 'none') + # backward compatible: when tls = true, we use starttls. + starttls = tls == 'starttls' or util.parsebool(tls) + smtps = tls == 'smtps' + if (starttls or smtps) and not hasattr(socket, 'ssl'): + raise util.Abort(_("can't use TLS: Python SSL support not installed")) + if smtps: + ui.note(_('(using smtps)\n')) + s = smtplib.SMTP_SSL(local_hostname=local_hostname) + else: + s = smtplib.SMTP(local_hostname=local_hostname) mailhost = ui.config('smtp', 'host') if not mailhost: raise util.Abort(_('smtp.host not configured - cannot send mail')) @@ -41,11 +51,8 @@ ui.note(_('sending mail: smtp host %s, port %s\n') % (mailhost, mailport)) s.connect(host=mailhost, port=mailport) - if ui.configbool('smtp', 'tls'): - if not hasattr(socket, 'ssl'): - raise util.Abort(_("can't use TLS: Python SSL support " - "not installed")) - ui.note(_('(using tls)\n')) + if starttls: + ui.note(_('(using starttls)\n')) s.ehlo() s.starttls() s.ehlo()
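The mail.py hunk above turns smtp.tls from a boolean into a three-way setting: 'starttls' upgrades a plain connection, 'smtps' connects over SSL from the start, and 'none' disables both, with old true/false values still treated as starttls via util.parsebool. A rough standalone sketch of the connection setup (make_smtp is an illustrative name, and the legacy-boolean check only approximates util.parsebool):

    import smtplib

    def make_smtp(tls_setting, host, port, local_hostname=None):
        # 'starttls' upgrades a plain session after EHLO; 'smtps'
        # opens an SSL connection from the start; a legacy boolean
        # "true" value keeps the old starttls behaviour
        legacy_true = tls_setting.lower() in ('1', 'yes', 'true', 'on')
        starttls = tls_setting == 'starttls' or legacy_true
        smtps = tls_setting == 'smtps'
        if smtps:
            s = smtplib.SMTP_SSL(local_hostname=local_hostname)
        else:
            s = smtplib.SMTP(local_hostname=local_hostname)
        s.connect(host=host, port=port)
        if starttls:
            s.ehlo()
            s.starttls()
            s.ehlo()
        return s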
--- a/mercurial/manifest.py Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/manifest.py Tue Feb 01 17:52:25 2011 -0600 @@ -171,19 +171,19 @@ raise AssertionError( _("failed to remove %s from manifest") % f) l = "" - if dstart != None and dstart <= start and dend >= start: + if dstart is not None and dstart <= start and dend >= start: if dend < end: dend = end if l: dline.append(l) else: - if dstart != None: + if dstart is not None: delta.append([dstart, dend, "".join(dline)]) dstart = start dend = end dline = [l] - if dstart != None: + if dstart is not None: delta.append([dstart, dend, "".join(dline)]) # apply the delta to the addlist, and get a delta for addrevision cachedelta = (self.rev(p1), addlistdelta(addlist, delta))
--- a/mercurial/match.py Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/match.py Tue Feb 01 17:52:25 2011 -0600 @@ -161,7 +161,8 @@ actual pattern.""" if ':' in pat: kind, val = pat.split(':', 1) - if kind in ('re', 'glob', 'path', 'relglob', 'relpath', 'relre'): + if kind in ('re', 'glob', 'path', 'relglob', 'relpath', 'relre', + 'listfile', 'listfile0'): return kind, val return default, pat @@ -270,6 +271,15 @@ name = util.canonpath(root, cwd, name, auditor) elif kind in ('relglob', 'path'): name = util.normpath(name) + elif kind in ('listfile', 'listfile0'): + delimiter = kind == 'listfile0' and '\0' or '\n' + try: + files = open(name, 'r').read().split(delimiter) + files = [f for f in files if f] + except EnvironmentError: + raise util.Abort(_("unable to read file list (%s)") % name) + pats += _normalize(files, default, root, cwd, auditor) + continue pats.append((kind, name)) return pats
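The match.py hunk above implements listfile:/listfile0: by reading the named file, splitting on the chosen delimiter, dropping empty entries, and feeding the results back through pattern normalization. A self-contained sketch of just the read-and-split step (readpatternfile is an illustrative name; the recursion into normalization and the Abort-style error handling are omitted):

    def readpatternfile(name, kind):
        # 'listfile0:' uses NUL-separated entries (e.g. from find -print0),
        # 'listfile:' uses one pattern per line; empty entries are dropped
        delimiter = '\0' if kind == 'listfile0' else '\n'
        with open(name, 'r') as fp:
            data = fp.read()
        return [pat for pat in data.split(delimiter) if pat]

    # each returned string is then treated as a pattern in its own
    # right, so an entry such as "glob:**.py" keeps its prefix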
--- a/mercurial/merge.py Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/merge.py Tue Feb 01 17:52:25 2011 -0600 @@ -255,6 +255,9 @@ wctx is the working copy context mctx is the context to be merged into the working copy actx is the context of the common ancestor + + Return a tuple of counts (updated, merged, removed, unresolved) that + describes how many files were affected by the update. """ updated, merged, removed, unresolved = 0, 0, 0, 0 @@ -309,7 +312,7 @@ if f == '.hgsubstate': # subrepo states need updating subrepo.submerge(repo, wctx, mctx, wctx, overwrite) try: - util.unlink(repo.wjoin(f)) + util.unlinkpath(repo.wjoin(f)) except OSError, inst: if inst.errno != errno.ENOENT: repo.ui.warn(_("update failed to remove %s: %s!\n") % @@ -347,7 +350,7 @@ repo.ui.note(_("moving %s to %s\n") % (f, fd)) t = wctx.filectx(f).data() repo.wwrite(fd, t, flags) - util.unlink(repo.wjoin(f)) + util.unlinkpath(repo.wjoin(f)) if f2: repo.ui.note(_("getting %s to %s\n") % (f2, fd)) t = mctx.filectx(f2).data() @@ -462,6 +465,8 @@ use 'hg update -C' to discard changes) 3 = abort: uncommitted local changes 4 = incompatible options (checked in commands.py) + + Return the same tuple as applyupdates(). """ onode = node @@ -524,7 +529,7 @@ action += manifestmerge(repo, wc, p2, pa, overwrite, partial) ### apply phase - if not branchmerge: # just jump to the new rev + if not branchmerge or fastforward: # just jump to the new rev fp1, fp2, xp1, xp2 = fp2, nullid, xp2, '' if not partial: repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2) @@ -533,7 +538,7 @@ if not partial: repo.dirstate.setparents(fp1, fp2) - recordupdates(repo, action, branchmerge) + recordupdates(repo, action, branchmerge and not fastforward) if not branchmerge and not fastforward: repo.dirstate.setbranch(p2.branch()) finally:
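The merge.py docstrings added above make the return convention explicit: applyupdates() and update() return an (updated, merged, removed, unresolved) tuple, which hg.merge() reports before returning whether conflicts remain. A tiny sketch of a caller interpreting that tuple (reportmerge and the exact message wording are illustrative; ui stands for any object with a status() method):

    def reportmerge(ui, stats):
        # stats is the (updated, merged, removed, unresolved) tuple
        # that merge.update() returns; a non-zero last field means
        # conflicts are left for the user to resolve
        ui.status("%d files updated, %d files merged, "
                  "%d files removed, %d files unresolved\n" % tuple(stats))
        return stats[3] > 0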
--- a/mercurial/minirst.py Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/minirst.py Tue Feb 01 17:52:25 2011 -0600 @@ -14,27 +14,8 @@ are just indented blocks that look like they are nested. This relies on the user to keep the right indentation for the blocks. -It only supports a small subset of reStructuredText: - -- sections - -- paragraphs - -- literal blocks - -- definition lists - -- specific admonitions - -- bullet lists (items must start with '-') - -- enumerated lists (no autonumbering) - -- field lists (colons cannot be escaped) - -- option lists (supports only long options without arguments) - -- inline literals (no other inline markup is not recognized) +Remember to update http://mercurial.selenic.com/wiki/HelpStyleGuide +when adding support for new constructs. """ import re, sys @@ -118,7 +99,8 @@ return blocks _bulletre = re.compile(r'(-|[0-9A-Za-z]+\.|\(?[0-9A-Za-z]+\)|\|) ') -_optionre = re.compile(r'^(--[a-z-]+)((?:[ =][a-zA-Z][\w-]*)? +)(.*)$') +_optionre = re.compile(r'^(-([a-zA-Z0-9]), )?(--[a-z0-9-]+)' + r'((.*) +)(.*)$') _fieldre = re.compile(r':(?![: ])([^:]*)(?<! ):[ ]+(.*)') _definitionre = re.compile(r'[^ ]') @@ -192,6 +174,42 @@ return blocks +def updateoptionlists(blocks): + i = 0 + while i < len(blocks): + if blocks[i]['type'] != 'option': + i += 1 + continue + + optstrwidth = 0 + j = i + while j < len(blocks) and blocks[j]['type'] == 'option': + m = _optionre.match(blocks[j]['lines'][0]) + + shortoption = m.group(2) + group3 = m.group(3) + longoption = group3[2:].strip() + desc = m.group(6).strip() + longoptionarg = m.group(5).strip() + blocks[j]['lines'][0] = desc + + noshortop = '' + if not shortoption: + noshortop = ' ' + + opt = "%s%s" % (shortoption and "-%s " % shortoption or '', + ("%s--%s %s") % (noshortop, longoption, + longoptionarg)) + opt = opt.rstrip() + blocks[j]['optstr'] = opt + optstrwidth = max(optstrwidth, encoding.colwidth(opt)) + j += 1 + + for block in blocks[i:j]: + block['optstrwidth'] = optstrwidth + i = j + 1 + return blocks + def prunecontainers(blocks, keep): """Prune unwanted containers. @@ -297,8 +315,11 @@ i = 0 while i < len(blocks): b = blocks[i] - if b['type'] == 'paragraph' and b['lines'][0].startswith('.. '): + if b['type'] == 'paragraph' and (b['lines'][0].startswith('.. 
') or + b['lines'] == ['..']): del blocks[i] + if i < len(blocks) and blocks[i]['type'] == 'margin': + del blocks[i] else: i += 1 return blocks @@ -338,6 +359,17 @@ 'tip': _('Tip:'), 'warning': _('Warning!')} +def formatoption(block, width): + desc = ' '.join(map(str.strip, block['lines'])) + colwidth = encoding.colwidth(block['optstr']) + usablewidth = width - 1 + hanging = block['optstrwidth'] + initindent = '%s%s ' % (block['optstr'], ' ' * ((hanging - colwidth))) + hangindent = ' ' * (encoding.colwidth(initindent) + 1) + return ' %s' % (util.wrap(desc, usablewidth, + initindent=initindent, + hangindent=hangindent)) + def formatblock(block, width): """Format a block according to width.""" if width <= 0: @@ -394,9 +426,7 @@ key = key.ljust(_fieldwidth) block['lines'][0] = key + block['lines'][0] elif block['type'] == 'option': - m = _optionre.match(block['lines'][0]) - option, arg, rest = m.groups() - subindent = indent + (len(option) + len(arg)) * ' ' + return formatoption(block, width) text = ' '.join(map(str.strip, block['lines'])) return util.wrap(text, width=width, @@ -416,8 +446,9 @@ blocks = hgrole(blocks) blocks = splitparagraphs(blocks) blocks = updatefieldlists(blocks) + blocks = updateoptionlists(blocks) + blocks = addmargins(blocks) blocks = prunecomments(blocks) - blocks = addmargins(blocks) blocks = findadmonitions(blocks) text = '\n'.join(formatblock(b, width) for b in blocks) if keep is None: @@ -443,8 +474,9 @@ blocks = debug(inlineliterals, blocks) blocks = debug(splitparagraphs, blocks) blocks = debug(updatefieldlists, blocks) + blocks = debug(updateoptionlists, blocks) blocks = debug(findsections, blocks) + blocks = debug(addmargins, blocks) blocks = debug(prunecomments, blocks) - blocks = debug(addmargins, blocks) blocks = debug(findadmonitions, blocks) print '\n'.join(formatblock(b, 30) for b in blocks)
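The minirst.py rework above replaces per-line option formatting with a two-pass approach: updateoptionlists() first measures the widest rendered option string in each run of option blocks, then formatoption() wraps every description with a hanging indent aligned to that width. A standalone approximation of the alignment idea using textwrap (it ignores the multi-byte width handling that encoding.colwidth provides and is not hg's formatoption):

    import textwrap

    def formatoptions(options, width=70):
        # options: list of (optstr, description) pairs, e.g.
        # [('-n --dry-run', 'do not perform actions'), ...]
        optwidth = max(len(opt) for opt, _desc in options)
        lines = []
        for opt, desc in options:
            # pad every option string to the widest one in the group,
            # then wrap the description with a matching hanging indent
            initindent = ' %-*s  ' % (optwidth, opt)
            hangindent = ' ' * len(initindent)
            lines.append(textwrap.fill(desc, width=width,
                                       initial_indent=initindent,
                                       subsequent_indent=hangindent))
        return '\n'.join(lines)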
--- a/mercurial/osutil.c Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/osutil.c Tue Feb 01 17:52:25 2011 -0600 @@ -436,7 +436,14 @@ } else flags = _O_TEXT; - if (plus) { + if (m0 == 'r' && !plus) { + flags |= _O_RDONLY; + access = GENERIC_READ; + } else { + /* + work around http://support.microsoft.com/kb/899149 and + set _O_RDWR for 'w' and 'a', even if mode has no '+' + */ flags |= _O_RDWR; access = GENERIC_READ | GENERIC_WRITE; fpmode[fppos++] = '+'; @@ -446,25 +453,13 @@ switch (m0) { case 'r': creation = OPEN_EXISTING; - if (!plus) { - flags |= _O_RDONLY; - access = GENERIC_READ; - } break; case 'w': creation = CREATE_ALWAYS; - if (!plus) { - access = GENERIC_WRITE; - flags |= _O_WRONLY; - } break; case 'a': creation = OPEN_ALWAYS; flags |= _O_APPEND; - if (!plus) { - flags |= _O_WRONLY; - access = GENERIC_WRITE; - } break; default: PyErr_Format(PyExc_ValueError,
--- a/mercurial/parser.py Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/parser.py Tue Feb 01 17:52:25 2011 -0600 @@ -22,6 +22,7 @@ self._tokenizer = tokenizer self._elements = elements self._methods = methods + self.current = None def _advance(self): 'advance the tokenizer' t = self.current @@ -76,7 +77,7 @@ def parse(self, message): 'generate a parse tree from a message' self._iter = self._tokenizer(message) - self.current = self._iter.next() + self._advance() return self._parse() def eval(self, tree): 'recursively evaluate a parse tree using node methods'
--- a/mercurial/parsers.c Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/parsers.c Tue Feb 01 17:52:25 2011 -0600 @@ -244,41 +244,6 @@ const char nullid[20]; const int nullrev = -1; -/* create an index tuple, insert into the nodemap */ -static PyObject * _build_idx_entry(PyObject *nodemap, int n, uint64_t offset_flags, - int comp_len, int uncomp_len, int base_rev, - int link_rev, int parent_1, int parent_2, - const char *c_node_id) -{ - int err; - PyObject *entry, *node_id, *n_obj; - - node_id = PyBytes_FromStringAndSize(c_node_id, 20); - n_obj = PyInt_FromLong(n); - - if (!node_id || !n_obj) - err = -1; - else - err = PyDict_SetItem(nodemap, node_id, n_obj); - - Py_XDECREF(n_obj); - if (err) - goto error_dealloc; - - entry = Py_BuildValue("LiiiiiiN", offset_flags, comp_len, - uncomp_len, base_rev, link_rev, - parent_1, parent_2, node_id); - if (!entry) - goto error_dealloc; - PyObject_GC_UnTrack(entry); /* don't waste time with this */ - - return entry; - -error_dealloc: - Py_XDECREF(node_id); - return NULL; -} - /* RevlogNG format (all in big endian, data may be inlined): * 6 bytes: offset * 2 bytes: flags @@ -290,8 +255,8 @@ * 4 bytes: parent 2 revision * 32 bytes: nodeid (only 20 bytes used) */ -static int _parse_index_ng (const char *data, int size, int inlined, - PyObject *index, PyObject *nodemap) +static int _parse_index_ng(const char *data, int size, int inlined, + PyObject *index) { PyObject *entry; int n = 0, err; @@ -321,13 +286,15 @@ parent_2 = ntohl(*((uint32_t *)(decode + 28))); c_node_id = decode + 32; - entry = _build_idx_entry(nodemap, n, offset_flags, - comp_len, uncomp_len, base_rev, - link_rev, parent_1, parent_2, - c_node_id); + entry = Py_BuildValue("Liiiiiis#", offset_flags, comp_len, + uncomp_len, base_rev, link_rev, + parent_1, parent_2, c_node_id, 20); + if (!entry) return 0; + PyObject_GC_UnTrack(entry); /* don't waste time with this */ + if (inlined) { err = PyList_Append(index, entry); Py_DECREF(entry); @@ -348,12 +315,14 @@ return 0; } - /* create the nullid/nullrev entry in the nodemap and the - * magic nullid entry in the index at [-1] */ - entry = _build_idx_entry(nodemap, - nullrev, 0, 0, 0, -1, -1, -1, -1, nullid); + /* create the magic nullid entry in the index at [-1] */ + entry = Py_BuildValue("Liiiiiis#", (uint64_t)0, 0, 0, -1, -1, -1, -1, nullid, 20); + if (!entry) return 0; + + PyObject_GC_UnTrack(entry); /* don't waste time with this */ + if (inlined) { err = PyList_Append(index, entry); Py_DECREF(entry); @@ -366,17 +335,16 @@ } /* This function parses a index file and returns a Python tuple of the - * following format: (index, nodemap, cache) + * following format: (index, cache) * * index: a list of tuples containing the RevlogNG records - * nodemap: a dict mapping node ids to indices in the index list * cache: if data is inlined, a tuple (index_file_content, 0) else None */ -static PyObject *parse_index(PyObject *self, PyObject *args) +static PyObject *parse_index2(PyObject *self, PyObject *args) { const char *data; int size, inlined; - PyObject *rval = NULL, *index = NULL, *nodemap = NULL, *cache = NULL; + PyObject *rval = NULL, *index = NULL, *cache = NULL; PyObject *data_obj = NULL, *inlined_obj; if (!PyArg_ParseTuple(args, "s#O", &data, &size, &inlined_obj)) @@ -384,16 +352,12 @@ inlined = inlined_obj && PyObject_IsTrue(inlined_obj); /* If no data is inlined, we know the size of the index list in - * advance: size divided by size of one one revlog record (64 bytes) - * plus one for the nullid */ + * advance: size divided by the size of one 
revlog record (64 bytes) + * plus one for nullid */ index = inlined ? PyList_New(0) : PyList_New(size / 64 + 1); if (!index) goto quit; - nodemap = PyDict_New(); - if (!nodemap) - goto quit; - /* set up the cache return value */ if (inlined) { /* Note that the reference to data_obj is only borrowed */ @@ -406,18 +370,17 @@ Py_INCREF(Py_None); } - /* actually populate the index and the nodemap with data */ - if (!_parse_index_ng (data, size, inlined, index, nodemap)) + /* actually populate the index with data */ + if (!_parse_index_ng(data, size, inlined, index)) goto quit; - rval = Py_BuildValue("NNN", index, nodemap, cache); + rval = Py_BuildValue("NN", index, cache); if (!rval) goto quit; return rval; quit: Py_XDECREF(index); - Py_XDECREF(nodemap); Py_XDECREF(cache); Py_XDECREF(rval); return NULL; @@ -429,7 +392,7 @@ static PyMethodDef methods[] = { {"parse_manifest", parse_manifest, METH_VARARGS, "parse a manifest\n"}, {"parse_dirstate", parse_dirstate, METH_VARARGS, "parse a dirstate\n"}, - {"parse_index", parse_index, METH_VARARGS, "parse a revlog index\n"}, + {"parse_index2", parse_index2, METH_VARARGS, "parse a revlog index\n"}, {NULL, NULL} };
--- a/mercurial/patch.py Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/patch.py Tue Feb 01 17:52:25 2011 -0600 @@ -6,7 +6,7 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. -import cStringIO, email.Parser, os, re +import cStringIO, email.Parser, os, errno, re import tempfile, zlib from i18n import _ @@ -429,10 +429,16 @@ # Ensure supplied data ends in fname, being a regular file or # a symlink. cmdutil.updatedir will -too magically- take care # of setting it to the proper type afterwards. + st_mode = None islink = os.path.islink(fname) if islink: fp = cStringIO.StringIO() else: + try: + st_mode = os.lstat(fname).st_mode & 0777 + except OSError, e: + if e.errno != errno.ENOENT: + raise fp = self.opener(fname, 'w') try: if self.eolmode == 'auto': @@ -451,6 +457,8 @@ fp.writelines(lines) if islink: self.opener.symlink(fp.getvalue(), fname) + if st_mode is not None: + os.chmod(fname, st_mode) finally: fp.close() @@ -976,7 +984,7 @@ fp.seek(pos) return gitpatches -def iterhunks(ui, fp, sourcefile=None): +def iterhunks(ui, fp): """Read a patch and yield the following events: - ("file", afile, bfile, firsthunk): select a new target file. - ("hunk", hunk): a new hunk is ready to be applied, follows a @@ -997,10 +1005,6 @@ BFILE = 1 context = None lr = linereader(fp) - # gitworkdone is True if a git operation (copy, rename, ...) was - # performed already for the current file. Useful when the file - # section may have no hunk. - gitworkdone = False while True: newfile = newgitfile = False @@ -1012,7 +1016,7 @@ current_hunk.fix_newline() yield 'hunk', current_hunk current_hunk = None - if ((sourcefile or state == BFILE) and ((not context and x[0] == '@') or + if (state == BFILE and ((not context and x[0] == '@') or ((context is not False) and x.startswith('***************')))): if context is None and x.startswith('***************'): context = True @@ -1034,7 +1038,6 @@ elif x.startswith('diff --git'): # check for git diff, scanning the whole patch file if needed m = gitre.match(x) - gitworkdone = False if m: afile, bfile = m.group(1, 2) if not git: @@ -1049,7 +1052,6 @@ if gp and (gp.op in ('COPY', 'DELETE', 'RENAME', 'ADD') or gp.mode): afile = bfile - gitworkdone = True newgitfile = True elif x.startswith('---'): # check for a unified diff @@ -1077,9 +1079,6 @@ afile = parsefilename(x) bfile = parsefilename(l2) - if newfile: - gitworkdone = False - if newgitfile or newfile: emitfile = True state = BFILE @@ -1091,7 +1090,7 @@ raise PatchError(_("malformed patch %s %s") % (afile, current_hunk.desc)) -def applydiff(ui, fp, changed, strip=1, sourcefile=None, eolmode='strict'): +def applydiff(ui, fp, changed, strip=1, eolmode='strict'): """Reads a patch from fp and tries to apply it. The dict 'changed' is filled in with all of the filenames changed @@ -1105,13 +1104,10 @@ Callers probably want to call 'cmdutil.updatedir' after this to apply certain categories of changes not done by this function. 
""" - return _applydiff( - ui, fp, patchfile, copyfile, - changed, strip=strip, sourcefile=sourcefile, eolmode=eolmode) + return _applydiff(ui, fp, patchfile, copyfile, changed, strip=strip, + eolmode=eolmode) - -def _applydiff(ui, fp, patcher, copyfn, changed, strip=1, - sourcefile=None, eolmode='strict'): +def _applydiff(ui, fp, patcher, copyfn, changed, strip=1, eolmode='strict'): rejects = 0 err = 0 current_file = None @@ -1126,7 +1122,7 @@ current_file.write_rej() return len(current_file.rej) - for state, values in iterhunks(ui, fp, sourcefile): + for state, values in iterhunks(ui, fp): if state == 'hunk': if not current_file: continue @@ -1139,14 +1135,10 @@ rejects += closefile() afile, bfile, first_hunk = values try: - if sourcefile: - current_file = patcher(ui, sourcefile, opener, - eolmode=eolmode) - else: - current_file, missing = selectfile(afile, bfile, - first_hunk, strip) - current_file = patcher(ui, current_file, opener, - missing=missing, eolmode=eolmode) + current_file, missing = selectfile(afile, bfile, + first_hunk, strip) + current_file = patcher(ui, current_file, opener, + missing=missing, eolmode=eolmode) except PatchError, err: ui.warn(str(err) + '\n') current_file = None
--- a/mercurial/posix.py Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/posix.py Tue Feb 01 17:52:25 2011 -0600 @@ -13,6 +13,7 @@ nulldev = '/dev/null' normpath = os.path.normpath samestat = os.path.samestat +unlink = os.unlink rename = os.rename expandglobs = False
--- a/mercurial/pure/parsers.py Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/pure/parsers.py Tue Feb 01 17:52:25 2011 -0600 @@ -24,7 +24,7 @@ else: mfdict[f] = bin(n) -def parse_index(data, inline): +def parse_index2(data, inline): def gettype(q): return int(q & 0xFFFF) @@ -36,16 +36,14 @@ s = struct.calcsize(indexformatng) index = [] cache = None - nodemap = {nullid: nullrev} n = off = 0 - # if we're not using lazymap, always read the whole index + l = len(data) - s append = index.append if inline: cache = (0, data) while off <= l: e = _unpack(indexformatng, data[off:off + s]) - nodemap[e[7]] = n append(e) n += 1 if e[1] < 0: @@ -54,7 +52,6 @@ else: while off <= l: e = _unpack(indexformatng, data[off:off + s]) - nodemap[e[7]] = n append(e) n += 1 off += s @@ -67,7 +64,7 @@ # add the magic null revision at -1 index.append((0, 0, 0, -1, -1, -1, -1, nullid)) - return index, nodemap, cache + return index, cache def parse_dirstate(dmap, copymap, st): parents = [st[:20], st[20: 40]]
--- a/mercurial/revlog.py Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/revlog.py Tue Feb 01 17:52:25 2011 -0600 @@ -38,11 +38,9 @@ REVIDX_PUNCHED_FLAG = 2 REVIDX_KNOWN_FLAGS = REVIDX_PUNCHED_FLAG | REVIDX_PARENTDELTA -# amount of data read unconditionally, should be >= 4 -# when not inline: threshold for using lazy index -_prereadsize = 1048576 # max size of revlog with inline data _maxinline = 131072 +_chunksize = 1048576 RevlogError = error.RevlogError LookupError = error.LookupError @@ -121,209 +119,6 @@ return bin[1:] raise RevlogError(_("unknown compression type %r") % t) -class lazyparser(object): - """ - this class avoids the need to parse the entirety of large indices - """ - - # lazyparser is not safe to use on windows if win32 extensions not - # available. it keeps file handle open, which make it not possible - # to break hardlinks on local cloned repos. - - def __init__(self, dataf): - try: - size = util.fstat(dataf).st_size - except AttributeError: - size = 0 - self.dataf = dataf - self.s = struct.calcsize(indexformatng) - self.datasize = size - self.l = size // self.s - self.index = [None] * self.l - self.map = {nullid: nullrev} - self.allmap = 0 - self.all = 0 - self.mapfind_count = 0 - - def loadmap(self): - """ - during a commit, we need to make sure the rev being added is - not a duplicate. This requires loading the entire index, - which is fairly slow. loadmap can load up just the node map, - which takes much less time. - """ - if self.allmap: - return - end = self.datasize - self.allmap = 1 - cur = 0 - count = 0 - blocksize = self.s * 256 - self.dataf.seek(0) - while cur < end: - data = self.dataf.read(blocksize) - off = 0 - for x in xrange(256): - n = data[off + ngshaoffset:off + ngshaoffset + 20] - self.map[n] = count - count += 1 - if count >= self.l: - break - off += self.s - cur += blocksize - - def loadblock(self, blockstart, blocksize, data=None): - if self.all: - return - if data is None: - self.dataf.seek(blockstart) - if blockstart + blocksize > self.datasize: - # the revlog may have grown since we've started running, - # but we don't have space in self.index for more entries. - # limit blocksize so that we don't get too much data. - blocksize = max(self.datasize - blockstart, 0) - data = self.dataf.read(blocksize) - lend = len(data) // self.s - i = blockstart // self.s - off = 0 - # lazyindex supports __delitem__ - if lend > len(self.index) - i: - lend = len(self.index) - i - for x in xrange(lend): - if self.index[i + x] is None: - b = data[off : off + self.s] - self.index[i + x] = b - n = b[ngshaoffset:ngshaoffset + 20] - self.map[n] = i + x - off += self.s - - def findnode(self, node): - """search backwards through the index file for a specific node""" - if self.allmap: - return None - - # hg log will cause many many searches for the manifest - # nodes. After we get called a few times, just load the whole - # thing. 
- if self.mapfind_count > 8: - self.loadmap() - if node in self.map: - return node - return None - self.mapfind_count += 1 - last = self.l - 1 - while self.index[last] != None: - if last == 0: - self.all = 1 - self.allmap = 1 - return None - last -= 1 - end = (last + 1) * self.s - blocksize = self.s * 256 - while end >= 0: - start = max(end - blocksize, 0) - self.dataf.seek(start) - data = self.dataf.read(end - start) - findend = end - start - while True: - # we're searching backwards, so we have to make sure - # we don't find a changeset where this node is a parent - off = data.find(node, 0, findend) - findend = off - if off >= 0: - i = off / self.s - off = i * self.s - n = data[off + ngshaoffset:off + ngshaoffset + 20] - if n == node: - self.map[n] = i + start / self.s - return node - else: - break - end -= blocksize - return None - - def loadindex(self, i=None, end=None): - if self.all: - return - all = False - if i is None: - blockstart = 0 - blocksize = (65536 / self.s) * self.s - end = self.datasize - all = True - else: - if end: - blockstart = i * self.s - end = end * self.s - blocksize = end - blockstart - else: - blockstart = (i & ~1023) * self.s - blocksize = self.s * 1024 - end = blockstart + blocksize - while blockstart < end: - self.loadblock(blockstart, blocksize) - blockstart += blocksize - if all: - self.all = True - -class lazyindex(object): - """a lazy version of the index array""" - def __init__(self, parser): - self.p = parser - def __len__(self): - return len(self.p.index) - def load(self, pos): - if pos < 0: - pos += len(self.p.index) - self.p.loadindex(pos) - return self.p.index[pos] - def __getitem__(self, pos): - return _unpack(indexformatng, self.p.index[pos] or self.load(pos)) - def __setitem__(self, pos, item): - self.p.index[pos] = _pack(indexformatng, *item) - def __delitem__(self, pos): - del self.p.index[pos] - def insert(self, pos, e): - self.p.index.insert(pos, _pack(indexformatng, *e)) - def append(self, e): - self.p.index.append(_pack(indexformatng, *e)) - -class lazymap(object): - """a lazy version of the node map""" - def __init__(self, parser): - self.p = parser - def load(self, key): - n = self.p.findnode(key) - if n is None: - raise KeyError(key) - def __contains__(self, key): - if key in self.p.map: - return True - self.p.loadmap() - return key in self.p.map - def __iter__(self): - yield nullid - for i, ret in enumerate(self.p.index): - if not ret: - self.p.loadindex(i) - ret = self.p.index[i] - if isinstance(ret, str): - ret = _unpack(indexformatng, ret) - yield ret[7] - def __getitem__(self, key): - try: - return self.p.map[key] - except KeyError: - try: - self.load(key) - return self.p.map[key] - except KeyError: - raise KeyError("node " + hex(key)) - def __setitem__(self, key, val): - self.p.map[key] = val - def __delitem__(self, key): - del self.p.map[key] - indexformatv0 = ">4l20s20s20s" v0shaoffset = 56 @@ -331,13 +126,11 @@ def __init__(self): self.size = struct.calcsize(indexformatv0) - def parseindex(self, fp, data, inline): + def parseindex(self, data, inline): s = self.size index = [] nodemap = {nullid: nullrev} n = off = 0 - if len(data) == _prereadsize: - data += fp.read() # read the rest l = len(data) while off + s <= l: cur = data[off:off + s] @@ -350,6 +143,9 @@ nodemap[e[6]] = n n += 1 + # add the magic null revision at -1 + index.append((0, 0, 0, -1, -1, -1, -1, nullid)) + return index, nodemap, None def packentry(self, entry, node, version, rev): @@ -377,24 +173,10 @@ def __init__(self): self.size = 
struct.calcsize(indexformatng) - def parseindex(self, fp, data, inline): - if len(data) == _prereadsize: - if util.openhardlinks() and not inline: - # big index, let's parse it on demand - parser = lazyparser(fp) - index = lazyindex(parser) - nodemap = lazymap(parser) - e = list(index[0]) - type = gettype(e[0]) - e[0] = offset_type(0, type) - index[0] = e - return index, nodemap, None - else: - data += fp.read() - + def parseindex(self, data, inline): # call the C implementation to parse the index data - index, nodemap, cache = parsers.parse_index(data, inline) - return index, nodemap, cache + index, cache = parsers.parse_index2(data, inline) + return index, None, cache def packentry(self, entry, node, version, rev): p = _pack(indexformatng, *entry) @@ -439,10 +221,12 @@ self.opener = opener self._cache = None self._chunkcache = (0, '') - self.nodemap = {nullid: nullrev} self.index = [] self._shallowroot = shallowroot self._parentdelta = 0 + self._pcache = {} + self._nodecache = {nullid: nullrev} + self._nodepos = None v = REVLOG_DEFAULT_VERSION if hasattr(opener, 'options') and 'defversion' in opener.options: @@ -458,10 +242,7 @@ i = '' try: f = self.opener(self.indexfile) - if "nonlazy" in getattr(self.opener, 'options', {}): - i = f.read() - else: - i = f.read(_prereadsize) + i = f.read() if len(i) > 0: v = struct.unpack(versionformat, i[:4])[0] except IOError, inst: @@ -486,37 +267,15 @@ self._io = revlogio() if self.version == REVLOGV0: self._io = revlogoldio() - if i: - try: - d = self._io.parseindex(f, i, self._inline) - except (ValueError, IndexError): - raise RevlogError(_("index %s is corrupted") % (self.indexfile)) - self.index, self.nodemap, self._chunkcache = d - if not self._chunkcache: - self._chunkclear() - - # add the magic null revision at -1 (if it hasn't been done already) - if (self.index == [] or isinstance(self.index, lazyindex) or - self.index[-1][7] != nullid) : - self.index.append((0, 0, 0, -1, -1, -1, -1, nullid)) - - def _loadindex(self, start, end): - """load a block of indexes all at once from the lazy parser""" - if isinstance(self.index, lazyindex): - self.index.p.loadindex(start, end) - - def _loadindexmap(self): - """loads both the map and the index from the lazy parser""" - if isinstance(self.index, lazyindex): - p = self.index.p - p.loadindex() - self.nodemap = p.map - - def _loadmap(self): - """loads the map from the lazy parser""" - if isinstance(self.nodemap, lazymap): - self.nodemap.p.loadmap() - self.nodemap = self.nodemap.p.map + try: + d = self._io.parseindex(i, self._inline) + except (ValueError, IndexError): + raise RevlogError(_("index %s is corrupted") % (self.indexfile)) + self.index, nodemap, self._chunkcache = d + if nodemap is not None: + self.nodemap = self._nodecache = nodemap + if not self._chunkcache: + self._chunkclear() def tip(self): return self.node(len(self.index) - 2) @@ -525,11 +284,29 @@ def __iter__(self): for i in xrange(len(self)): yield i + + @util.propertycache + def nodemap(self): + n = self.rev(self.node(0)) + return self._nodecache + def rev(self, node): try: - return self.nodemap[node] + return self._nodecache[node] except KeyError: + n = self._nodecache + i = self.index + p = self._nodepos + if p is None: + p = len(i) - 2 + for r in xrange(p, -1, -1): + v = i[r][7] + n[v] = r + if v == node: + self._nodepos = r - 1 + return r raise LookupError(node, self.indexfile, _('no node')) + def node(self, rev): return self.index[rev][7] def linkrev(self, rev): @@ -937,15 +714,19 @@ pass def _partialmatch(self, id): + if id in 
self._pcache: + return self._pcache[id] + if len(id) < 40: try: # hex(node)[:...] l = len(id) // 2 # grab an even number of digits - bin_id = bin(id[:l * 2]) - nl = [n for n in self.nodemap if n[:l] == bin_id] + prefix = bin(id[:l * 2]) + nl = [e[7] for e in self.index if e[7].startswith(prefix)] nl = [n for n in nl if hex(n).startswith(id)] if len(nl) > 0: if len(nl) == 1: + self._pcache[id] = nl[0] return nl[0] raise LookupError(id, self.indexfile, _('ambiguous identifier')) @@ -978,7 +759,7 @@ def _addchunk(self, offset, data): o, d = self._chunkcache # try to add to existing cache - if o + len(d) == offset and len(d) + len(data) < _prereadsize: + if o + len(d) == offset and len(d) + len(data) < _chunksize: self._chunkcache = o, d + data else: self._chunkcache = offset, data @@ -1060,7 +841,6 @@ (self.flags(rev) & ~REVIDX_KNOWN_FLAGS)) # build delta chain - self._loadindex(base, rev + 1) chain = [] index = self.index # for performance iterrev = rev @@ -1088,13 +868,18 @@ bins = [self._chunk(r) for r in chain] text = mdiff.patches(text, bins) + + text = self._checkhash(text, node, rev) + + self._cache = (node, rev, text) + return text + + def _checkhash(self, text, node, rev): p1, p2 = self.parents(node) if (node != hash(text, p1, p2) and not (self.flags(rev) & REVIDX_PUNCHED_FLAG)): raise RevlogError(_("integrity check failed on %s:%d") % (self.indexfile, rev)) - - self._cache = (node, rev, text) return text def checkinlinesize(self, tr, fp=None): @@ -1408,9 +1193,6 @@ if len(self) == 0: return - if isinstance(self.index, lazyindex): - self._loadindexmap() - for rev in self: if self.index[rev][4] >= minlink: break
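With the lazy index classes removed, revlog.rev() now backfills an ordinary dict on demand: on a cache miss it scans the index backwards from where the previous scan stopped, recording every node it passes, so repeated lookups touch each entry at most once overall. A standalone sketch of that backfill strategy over a plain list of node ids (NodeIndex and its attribute names are illustrative, not the revlog API, and there is no null-revision sentinel here):

    class NodeIndex(object):
        def __init__(self, nodes):
            # nodes: list where position i holds the node id of revision i
            self._nodes = nodes
            self._cache = {}          # node -> revision, filled lazily
            self._searchpos = None    # where the last backward scan stopped

        def rev(self, node):
            try:
                return self._cache[node]
            except KeyError:
                start = self._searchpos
                if start is None:
                    start = len(self._nodes) - 1
                # scan backwards, remembering every node seen on the way,
                # so later lookups of older nodes start where we left off
                for r in range(start, -1, -1):
                    seen = self._nodes[r]
                    self._cache[seen] = r
                    if seen == node:
                        self._searchpos = r - 1
                        return r
                raise LookupError(node)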
--- a/mercurial/revset.py Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/revset.py Tue Feb 01 17:52:25 2011 -0600 @@ -202,9 +202,13 @@ return [r for r in subset if r == l] def p1(repo, subset, x): - """``p1(set)`` - First parent of changesets in set. + """``p1([set])`` + First parent of changesets in set, or the working directory. """ + if x is None: + p = repo[x].parents()[0].rev() + return [r for r in subset if r == p] + ps = set() cl = repo.changelog for r in getset(repo, range(len(repo)), x): @@ -212,9 +216,17 @@ return [r for r in subset if r in ps] def p2(repo, subset, x): - """``p2(set)`` - Second parent of changesets in set. + """``p2([set])`` + Second parent of changesets in set, or the working directory. """ + if x is None: + ps = repo[x].parents() + try: + p = ps[1].rev() + return [r for r in subset if r == p] + except IndexError: + return [] + ps = set() cl = repo.changelog for r in getset(repo, range(len(repo)), x): @@ -222,9 +234,13 @@ return [r for r in subset if r in ps] def parents(repo, subset, x): - """``parents(set)`` - The set of all parents for all changesets in set. + """``parents([set])`` + The set of all parents for all changesets in set, or the working directory. """ + if x is None: + ps = tuple(p.rev() for p in repo[x].parents()) + return [r for r in subset if r in ps] + ps = set() cl = repo.changelog for r in getset(repo, range(len(repo)), x): @@ -699,7 +715,7 @@ } def optimize(x, small): - if x == None: + if x is None: return 0, x smallbonus = 1
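With the revset change above, p1(), p2() and parents() may be called without an argument, in which case they refer to the working directory, so for instance hg log -r "p1()" shows the first parent of the working directory. The no-argument branch resolves them through the working context; a minimal sketch of that resolution, assuming only a localrepo-like object where repo[None] is the working directory context:

    def workingparents(repo):
        # repo[None] is the working directory context; after a merge it
        # has two parents, otherwise only the first parent is present
        return [p.rev() for p in repo[None].parents()]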
--- a/mercurial/sshrepo.py Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/sshrepo.py Tue Feb 01 17:52:25 2011 -0600 @@ -91,10 +91,11 @@ size = util.fstat(self.pipee).st_size if size == 0: break - l = self.pipee.readline() - if not l: + s = self.pipee.read(size) + if not s: break - self.ui.status(_("remote: "), l) + for l in s.splitlines(): + self.ui.status(_("remote: "), l, '\n') def _abort(self, exception): self.cleanup()
--- a/mercurial/statichttprepo.py Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/statichttprepo.py Tue Feb 01 17:52:25 2011 -0600 @@ -77,7 +77,6 @@ return httprangereader(f, urlopener) return o - opener.options = {'nonlazy': 1} return opener class statichttprepository(localrepo.localrepository):
--- a/mercurial/subrepo.py Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/subrepo.py Tue Feb 01 17:52:25 2011 -0600 @@ -6,7 +6,7 @@ # GNU General Public License version 2 or any later version. import errno, os, re, xml.dom.minidom, shutil, urlparse, posixpath -import stat, subprocess +import stat, subprocess, tarfile from i18n import _ import config, util, node, error, cmdutil hg = None @@ -172,6 +172,8 @@ def subrelpath(sub): """return path to this subrepo as seen from outermost repo""" + if hasattr(sub, '_relpath'): + return sub._relpath if not hasattr(sub, '_repo'): return sub._path return reporelpath(sub._repo) @@ -236,9 +238,10 @@ class abstractsubrepo(object): - def dirty(self): - """returns true if the dirstate of the subrepo does not match - current stored state + def dirty(self, ignoreupdate=False): + """returns true if the dirstate of the subrepo is dirty or does not + match current stored state. If ignoreupdate is true, only check + whether the subrepo has uncommitted changes in its dirstate. """ raise NotImplementedError @@ -304,13 +307,21 @@ """return file flags""" return '' - def archive(self, archiver, prefix): - for name in self.files(): + def archive(self, ui, archiver, prefix): + files = self.files() + total = len(files) + relpath = subrelpath(self) + ui.progress(_('archiving (%s)') % relpath, 0, + unit=_('files'), total=total) + for i, name in enumerate(files): flags = self.fileflags(name) mode = 'x' in flags and 0755 or 0644 symlink = 'l' in flags archiver.addfile(os.path.join(prefix, self._path, name), mode, symlink, self.filedata(name)) + ui.progress(_('archiving (%s)') % relpath, i + 1, + unit=_('files'), total=total) + ui.progress(_('archiving (%s)') % relpath, None) class hgsubrepo(abstractsubrepo): @@ -373,21 +384,22 @@ self._repo.ui.warn(_('warning: error "%s" in subrepository "%s"\n') % (inst, subrelpath(self))) - def archive(self, archiver, prefix): - abstractsubrepo.archive(self, archiver, prefix) + def archive(self, ui, archiver, prefix): + abstractsubrepo.archive(self, ui, archiver, prefix) rev = self._state[1] ctx = self._repo[rev] for subpath in ctx.substate: s = subrepo(ctx, subpath) - s.archive(archiver, os.path.join(prefix, self._path)) + s.archive(ui, archiver, os.path.join(prefix, self._path)) - def dirty(self): + def dirty(self, ignoreupdate=False): r = self._state[1] - if r == '': + if r == '' and not ignoreupdate: # no state recorded return True w = self._repo[None] - if w.p1() != self._repo[r]: # version checked out change + if w.p1() != self._repo[r] and not ignoreupdate: + # different version checked out return True return w.dirty() # working directory changed @@ -484,13 +496,10 @@ def _svncommand(self, commands, filename=''): path = os.path.join(self._ctx._repo.origroot, self._path, filename) cmd = ['svn'] + commands + [path] - cmd = [util.shellquote(arg) for arg in cmd] - cmd = util.quotecommand(' '.join(cmd)) env = dict(os.environ) # Avoid localized output, preserve current locale for everything else. 
env['LC_MESSAGES'] = 'C' - p = subprocess.Popen(cmd, shell=True, bufsize=-1, - close_fds=util.closefds, + p = subprocess.Popen(cmd, bufsize=-1, close_fds=util.closefds, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True, env=env) stdout, stderr = p.communicate() @@ -543,9 +552,10 @@ return True, True return bool(changes), False - def dirty(self): - if self._state[1] in self._wcrevs() and not self._wcchanged()[0]: - return False + def dirty(self, ignoreupdate=False): + if not self._wcchanged()[0]: + if self._state[1] in self._wcrevs() or ignoreupdate: + return False return True def commit(self, text, user, date): @@ -616,7 +626,336 @@ return self._svncommand(['cat'], name) +class gitsubrepo(abstractsubrepo): + def __init__(self, ctx, path, state): + # TODO add git version check. + self._state = state + self._ctx = ctx + self._path = path + self._relpath = os.path.join(reporelpath(ctx._repo), path) + self._abspath = ctx._repo.wjoin(path) + self._ui = ctx._repo.ui + + def _gitcommand(self, commands, env=None, stream=False): + return self._gitdir(commands, env=env, stream=stream)[0] + + def _gitdir(self, commands, env=None, stream=False): + return self._gitnodir(commands, env=env, stream=stream, + cwd=self._abspath) + + def _gitnodir(self, commands, env=None, stream=False, cwd=None): + """Calls the git command + + The methods tries to call the git command. versions previor to 1.6.0 + are not supported and very probably fail. + """ + self._ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands))) + # unless ui.quiet is set, print git's stderr, + # which is mostly progress and useful info + errpipe = None + if self._ui.quiet: + errpipe = open(os.devnull, 'w') + p = subprocess.Popen(['git'] + commands, bufsize=-1, cwd=cwd, env=env, + close_fds=util.closefds, + stdout=subprocess.PIPE, stderr=errpipe) + if stream: + return p.stdout, None + + retdata = p.stdout.read().strip() + # wait for the child to exit to avoid race condition. 
+ p.wait() + + if p.returncode != 0 and p.returncode != 1: + # there are certain error codes that are ok + command = commands[0] + if command in ('cat-file', 'symbolic-ref'): + return retdata, p.returncode + # for all others, abort + raise util.Abort('git %s error %d in %s' % + (command, p.returncode, self._relpath)) + + return retdata, p.returncode + + def _gitstate(self): + return self._gitcommand(['rev-parse', 'HEAD']) + + def _gitcurrentbranch(self): + current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet']) + if err: + current = None + return current + + def _githavelocally(self, revision): + out, code = self._gitdir(['cat-file', '-e', revision]) + return code == 0 + + def _gitisancestor(self, r1, r2): + base = self._gitcommand(['merge-base', r1, r2]) + return base == r1 + + def _gitbranchmap(self): + '''returns 2 things: + a map from git branch to revision + a map from revision to branches''' + branch2rev = {} + rev2branch = {} + + out = self._gitcommand(['for-each-ref', '--format', + '%(objectname) %(refname)']) + for line in out.split('\n'): + revision, ref = line.split(' ') + if ref.startswith('refs/tags/'): + continue + if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'): + continue # ignore remote/HEAD redirects + branch2rev[ref] = revision + rev2branch.setdefault(revision, []).append(ref) + return branch2rev, rev2branch + + def _gittracking(self, branches): + 'return map of remote branch to local tracking branch' + # assumes no more than one local tracking branch for each remote + tracking = {} + for b in branches: + if b.startswith('refs/remotes/'): + continue + remote = self._gitcommand(['config', 'branch.%s.remote' % b]) + if remote: + ref = self._gitcommand(['config', 'branch.%s.merge' % b]) + tracking['refs/remotes/%s/%s' % + (remote, ref.split('/', 2)[2])] = b + return tracking + + def _fetch(self, source, revision): + if not os.path.exists(os.path.join(self._abspath, '.git')): + self._ui.status(_('cloning subrepo %s\n') % self._relpath) + self._gitnodir(['clone', source, self._abspath]) + if self._githavelocally(revision): + return + self._ui.status(_('pulling subrepo %s\n') % self._relpath) + # first try from origin + self._gitcommand(['fetch']) + if self._githavelocally(revision): + return + # then try from known subrepo source + self._gitcommand(['fetch', source]) + if not self._githavelocally(revision): + raise util.Abort(_("revision %s does not exist in subrepo %s\n") % + (revision, self._relpath)) + + def dirty(self, ignoreupdate=False): + if not ignoreupdate and self._state[1] != self._gitstate(): + # different version checked out + return True + # check for staged changes or modified files; ignore untracked files + out, code = self._gitdir(['diff-index', '--quiet', 'HEAD']) + return code == 1 + + def get(self, state, overwrite=False): + source, revision, kind = state + self._fetch(source, revision) + # if the repo was set to be bare, unbare it + if self._gitcommand(['config', '--bool', 'core.bare']) == 'true': + self._gitcommand(['config', 'core.bare', 'false']) + if self._gitstate() == revision: + self._gitcommand(['reset', '--hard', 'HEAD']) + return + elif self._gitstate() == revision: + if overwrite: + # first reset the index to unmark new files for commit, because + # reset --hard will otherwise throw away files added for commit, + # not just unmark them. 
+ self._gitcommand(['reset', 'HEAD']) + self._gitcommand(['reset', '--hard', 'HEAD']) + return + branch2rev, rev2branch = self._gitbranchmap() + + def checkout(args): + cmd = ['checkout'] + if overwrite: + # first reset the index to unmark new files for commit, because + # the -f option will otherwise throw away files added for + # commit, not just unmark them. + self._gitcommand(['reset', 'HEAD']) + cmd.append('-f') + self._gitcommand(cmd + args) + + def rawcheckout(): + # no branch to checkout, check it out with no branch + self._ui.warn(_('checking out detached HEAD in subrepo %s\n') % + self._relpath) + self._ui.warn(_('check out a git branch if you intend ' + 'to make changes\n')) + checkout(['-q', revision]) + + if revision not in rev2branch: + rawcheckout() + return + branches = rev2branch[revision] + firstlocalbranch = None + for b in branches: + if b == 'refs/heads/master': + # master trumps all other branches + checkout(['refs/heads/master']) + return + if not firstlocalbranch and not b.startswith('refs/remotes/'): + firstlocalbranch = b + if firstlocalbranch: + checkout([firstlocalbranch]) + return + + tracking = self._gittracking(branch2rev.keys()) + # choose a remote branch already tracked if possible + remote = branches[0] + if remote not in tracking: + for b in branches: + if b in tracking: + remote = b + break + + if remote not in tracking: + # create a new local tracking branch + local = remote.split('/', 2)[2] + checkout(['-b', local, remote]) + elif self._gitisancestor(branch2rev[tracking[remote]], remote): + # When updating to a tracked remote branch, + # if the local tracking branch is downstream of it, + # a normal `git pull` would have performed a "fast-forward merge" + # which is equivalent to updating the local branch to the remote. + # Since we are only looking at branching at update, we need to + # detect this situation and perform this action lazily. 
+ if tracking[remote] != self._gitcurrentbranch(): + checkout([tracking[remote]]) + self._gitcommand(['merge', '--ff', remote]) + else: + # a real merge would be required, just checkout the revision + rawcheckout() + + def commit(self, text, user, date): + cmd = ['commit', '-a', '-m', text] + env = os.environ.copy() + if user: + cmd += ['--author', user] + if date: + # git's date parser silently ignores when seconds < 1e9 + # convert to ISO8601 + env['GIT_AUTHOR_DATE'] = util.datestr(date, + '%Y-%m-%dT%H:%M:%S %1%2') + self._gitcommand(cmd, env=env) + # make sure commit works otherwise HEAD might not exist under certain + # circumstances + return self._gitstate() + + def merge(self, state): + source, revision, kind = state + self._fetch(source, revision) + base = self._gitcommand(['merge-base', revision, self._state[1]]) + if base == revision: + self.get(state) # fast forward merge + elif base != self._state[1]: + self._gitcommand(['merge', '--no-commit', revision]) + + def push(self, force): + # if a branch in origin contains the revision, nothing to do + branch2rev, rev2branch = self._gitbranchmap() + if self._state[1] in rev2branch: + for b in rev2branch[self._state[1]]: + if b.startswith('refs/remotes/origin/'): + return True + for b, revision in branch2rev.iteritems(): + if b.startswith('refs/remotes/origin/'): + if self._gitisancestor(self._state[1], revision): + return True + # otherwise, try to push the currently checked out branch + cmd = ['push'] + if force: + cmd.append('--force') + + current = self._gitcurrentbranch() + if current: + # determine if the current branch is even useful + if not self._gitisancestor(self._state[1], current): + self._ui.warn(_('unrelated git branch checked out ' + 'in subrepo %s\n') % self._relpath) + return False + self._ui.status(_('pushing branch %s of subrepo %s\n') % + (current.split('/', 2)[2], self._relpath)) + self._gitcommand(cmd + ['origin', current]) + return True + else: + self._ui.warn(_('no branch checked out in subrepo %s\n' + 'cannot push revision %s') % + (self._relpath, self._state[1])) + return False + + def remove(self): + if self.dirty(): + self._ui.warn(_('not removing repo %s because ' + 'it has changes.\n') % self._relpath) + return + # we can't fully delete the repository as it may contain + # local-only history + self._ui.note(_('removing subrepo %s\n') % self._relpath) + self._gitcommand(['config', 'core.bare', 'true']) + for f in os.listdir(self._abspath): + if f == '.git': + continue + path = os.path.join(self._abspath, f) + if os.path.isdir(path) and not os.path.islink(path): + shutil.rmtree(path) + else: + os.remove(path) + + def archive(self, ui, archiver, prefix): + source, revision = self._state + self._fetch(source, revision) + + # Parse git's native archive command. + # This should be much faster than manually traversing the trees + # and objects with many subprocess calls. 
+ tarstream = self._gitcommand(['archive', revision], stream=True) + tar = tarfile.open(fileobj=tarstream, mode='r|') + relpath = subrelpath(self) + ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files')) + for i, info in enumerate(tar): + if info.isdir(): + continue + if info.issym(): + data = info.linkname + else: + data = tar.extractfile(info).read() + archiver.addfile(os.path.join(prefix, self._path, info.name), + info.mode, info.issym(), data) + ui.progress(_('archiving (%s)') % relpath, i + 1, + unit=_('files')) + ui.progress(_('archiving (%s)') % relpath, None) + + + def status(self, rev2, **opts): + rev1 = self._state[1] + modified, added, removed = [], [], [] + if rev2: + command = ['diff-tree', rev1, rev2] + else: + command = ['diff-index', rev1] + out = self._gitcommand(command) + for line in out.split('\n'): + tab = line.find('\t') + if tab == -1: + continue + status, f = line[tab - 1], line[tab + 1:] + if status == 'M': + modified.append(f) + elif status == 'A': + added.append(f) + elif status == 'D': + removed.append(f) + + deleted = unknown = ignored = clean = [] + return modified, added, removed, deleted, unknown, ignored, clean + types = { 'hg': hgsubrepo, 'svn': svnsubrepo, + 'git': gitsubrepo, }
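Most of the branch handling above reduces to a single question, answered with git merge-base: is revision A an ancestor of revision B? Below is a standalone sketch of that check using plain subprocess calls; the helper names are ours, and the subrepo class goes through its _gitcommand wrapper instead.

    import subprocess

    def gitoutput(repopath, args):
        # run a git command in the subrepo working copy and return its stdout
        p = subprocess.Popen(['git'] + args, cwd=repopath,
                             stdout=subprocess.PIPE)
        return p.communicate()[0].strip()

    def gitisancestor(repopath, r1, r2):
        # r1 is an ancestor of r2 iff 'git merge-base r1 r2' resolves to r1
        return (gitoutput(repopath, ['merge-base', r1, r2]) ==
                gitoutput(repopath, ['rev-parse', r1]))

get() relies on this test to recognise a fast-forward of the local tracking branch, and push() uses it to see whether origin already contains the recorded revision.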
--- a/mercurial/tags.py Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/tags.py Tue Feb 01 17:52:25 2011 -0600 @@ -12,6 +12,7 @@ from node import nullid, bin, hex, short from i18n import _ +import os.path import encoding import error @@ -99,9 +100,6 @@ except TypeError: warn(_("node '%s' is not well formed") % nodehex) continue - if nodebin not in repo.changelog.nodemap: - # silently ignore as pull -r might cause this - continue # update filetags hist = [] @@ -154,7 +152,7 @@ set, caller is responsible for reading tag info from each head.''' try: - cachefile = repo.opener('tags.cache', 'r') + cachefile = repo.opener(os.path.join('cache', 'tags'), 'r') # force reading the file for static-http cachelines = iter(cachefile) except IOError: @@ -188,8 +186,8 @@ fnode = bin(line[2]) cachefnode[headnode] = fnode except (ValueError, TypeError): - # corruption of tags.cache, just recompute it - ui.warn(_('.hg/tags.cache is corrupt, rebuilding it\n')) + # corruption of the tags cache, just recompute it + ui.warn(_('.hg/cache/tags is corrupt, rebuilding it\n')) cacheheads = [] cacherevs = [] cachefnode = {} @@ -251,7 +249,8 @@ def _writetagcache(ui, repo, heads, tagfnode, cachetags): try: - cachefile = repo.opener('tags.cache', 'w', atomictemp=True) + cachefile = repo.opener(os.path.join('cache', 'tags'), 'w', + atomictemp=True) except (OSError, IOError): return
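The cache reader above is deliberately defensive: any line it cannot parse makes it throw the whole cache away and recompute, after printing the '.hg/cache/tags is corrupt' warning. A reduced sketch of that pattern, with the cache line layout simplified to 'rev hexnode [hexfnode]' and the head/rev bookkeeping left out:

    from binascii import unhexlify

    def readfnodecache(cachelines):
        cachefnode = {}
        try:
            for line in cachelines:
                fields = line.split()
                headnode = unhexlify(fields[1])
                if len(fields) == 3:
                    cachefnode[headnode] = unhexlify(fields[2])
        except (IndexError, TypeError, ValueError):
            return None                  # corrupt cache: caller rebuilds it
        return cachefnode

    print(readfnodecache(['0 deadbeef', 'not a cache line']))   # None -> rebuild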
--- a/mercurial/templatekw.py Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/templatekw.py Tue Feb 01 17:52:25 2011 -0600 @@ -145,10 +145,12 @@ def showauthor(repo, ctx, templ, **args): return ctx.user() +def showbranch(**args): + return args['ctx'].branch() + def showbranches(**args): branch = args['ctx'].branch() if branch != 'default': - branch = encoding.tolocal(branch) return showlist('branch', [branch], plural='branches', **args) def showchildren(**args): @@ -163,9 +165,8 @@ return ctx.description().strip() def showdiffstat(repo, ctx, templ, **args): - diff = patch.diff(repo, ctx.parents()[0].node(), ctx.node()) files, adds, removes = 0, 0, 0 - for i in patch.diffstatdata(util.iterlines(diff)): + for i in patch.diffstatdata(util.iterlines(ctx.diff())): files += 1 adds += i[1] removes += i[2] @@ -249,6 +250,7 @@ # revcache - a cache dictionary for the current revision keywords = { 'author': showauthor, + 'branch': showbranch, 'branches': showbranches, 'children': showchildren, 'date': showdate,
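showdiffstat above only aggregates the per-file tuples handed back by patch.diffstatdata(); here is a standalone sketch of that aggregation over a made-up result list, assuming tuples shaped like (filename, added, removed, isbinary). The switch to ctx.diff() simply lets the changectx produce the patch against its first parent instead of calling patch.diff() by hand.

    stats = [('mercurial/templater.py', 190, 105, False),
             ('mercurial/templatekw.py', 5, 3, False)]

    files = len(stats)
    adds = sum(s[1] for s in stats)
    removes = sum(s[2] for s in stats)
    print('%d: +%d/-%d' % (files, adds, removes))   # 2: +195/-108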
--- a/mercurial/templater.py Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/templater.py Tue Feb 01 17:52:25 2011 -0600 @@ -7,7 +7,192 @@ from i18n import _ import sys, os -import util, config, templatefilters +import util, config, templatefilters, parser, error + +# template parsing + +elements = { + "(": (20, ("group", 1, ")"), ("func", 1, ")")), + ",": (2, None, ("list", 2)), + "|": (5, None, ("|", 5)), + "%": (6, None, ("%", 6)), + ")": (0, None, None), + "symbol": (0, ("symbol",), None), + "string": (0, ("string",), None), + "end": (0, None, None), +} + +def tokenizer(data): + program, start, end = data + pos = start + while pos < end: + c = program[pos] + if c.isspace(): # skip inter-token whitespace + pass + elif c in "(,)%|": # handle simple operators + yield (c, None, pos) + elif (c in '"\'' or c == 'r' and + program[pos:pos + 2] in ("r'", 'r"')): # handle quoted strings + if c == 'r': + pos += 1 + c = program[pos] + decode = lambda x: x + else: + decode = lambda x: x.decode('string-escape') + pos += 1 + s = pos + while pos < end: # find closing quote + d = program[pos] + if d == '\\': # skip over escaped characters + pos += 2 + continue + if d == c: + yield ('string', decode(program[s:pos]), s) + break + pos += 1 + else: + raise error.ParseError(_("unterminated string"), s) + elif c.isalnum() or c in '_': + s = pos + pos += 1 + while pos < end: # find end of symbol + d = program[pos] + if not (d.isalnum() or d == "_"): + break + pos += 1 + sym = program[s:pos] + yield ('symbol', sym, s) + pos -= 1 + elif c == '}': + pos += 1 + break + else: + raise error.ParseError(_("syntax error"), pos) + pos += 1 + data[2] = pos + yield ('end', None, pos) + +def compiletemplate(tmpl, context): + parsed = [] + pos, stop = 0, len(tmpl) + p = parser.parser(tokenizer, elements) + + while pos < stop: + n = tmpl.find('{', pos) + if n < 0: + parsed.append(("string", tmpl[pos:])) + break + if n > 0 and tmpl[n - 1] == '\\': + # escaped + parsed.append(("string", tmpl[pos:n - 1] + "{")) + pos = n + 1 + continue + if n > pos: + parsed.append(("string", tmpl[pos:n])) + + pd = [tmpl, n + 1, stop] + parsed.append(p.parse(pd)) + pos = pd[2] + + return [compileexp(e, context) for e in parsed] + +def compileexp(exp, context): + t = exp[0] + if t in methods: + return methods[t](exp, context) + raise error.ParseError(_("unknown method '%s'") % t) + +# template evaluation + +def getsymbol(exp): + if exp[0] == 'symbol': + return exp[1] + raise error.ParseError(_("expected a symbol")) + +def getlist(x): + if not x: + return [] + if x[0] == 'list': + return getlist(x[1]) + [x[2]] + return [x] + +def getfilter(exp, context): + f = getsymbol(exp) + if f not in context._filters: + raise error.ParseError(_("unknown function '%s'") % f) + return context._filters[f] + +def gettemplate(exp, context): + if exp[0] == 'string': + return compiletemplate(exp[1], context) + if exp[0] == 'symbol': + return context._load(exp[1]) + raise error.ParseError(_("expected template specifier")) + +def runstring(context, mapping, data): + return data + +def runsymbol(context, mapping, key): + v = mapping.get(key) + if v is None: + v = context._defaults.get(key, '') + if hasattr(v, '__call__'): + return v(**mapping) + return v + +def buildfilter(exp, context): + func, data = compileexp(exp[1], context) + filt = getfilter(exp[2], context) + return (runfilter, (func, data, filt)) + +def runfilter(context, mapping, data): + func, data, filt = data + return filt(func(context, mapping, data)) + +def buildmap(exp, context): + func, data = 
compileexp(exp[1], context) + ctmpl = gettemplate(exp[2], context) + return (runmap, (func, data, ctmpl)) + +def runmap(context, mapping, data): + func, data, ctmpl = data + d = func(context, mapping, data) + lm = mapping.copy() + + for i in d: + if isinstance(i, dict): + lm.update(i) + for f, d in ctmpl: + yield f(context, lm, d) + else: + # v is not an iterable of dicts, this happen when 'key' + # has been fully expanded already and format is useless. + # If so, return the expanded value. + yield i + +def buildfunc(exp, context): + n = getsymbol(exp[1]) + args = [compileexp(x, context) for x in getlist(exp[2])] + if n in context._filters: + if len(args) != 1: + raise error.ParseError(_("filter %s expects one argument") % n) + f = context._filters[n] + return (runfilter, (args[0][0], args[0][1], f)) + elif n in context._funcs: + f = context._funcs[n] + return (f, args) + +methods = { + "string": lambda e, c: (runstring, e[1]), + "symbol": lambda e, c: (runsymbol, e[1]), + "group": lambda e, c: compileexp(e[1], c), +# ".": buildmember, + "|": buildfilter, + "%": buildmap, + "func": buildfunc, + } + +# template engine path = ['templates', '../templates'] stringify = templatefilters.stringify @@ -66,104 +251,18 @@ self._defaults = defaults self._cache = {} + def _load(self, t): + '''load, parse, and cache a template''' + if t not in self._cache: + self._cache[t] = compiletemplate(self._loader(t), self) + return self._cache[t] + def process(self, t, mapping): '''Perform expansion. t is name of map element to expand. mapping contains added elements for use during expansion. Is a generator.''' - return _flatten(self._process(self._load(t), mapping)) - - def _load(self, t): - '''load, parse, and cache a template''' - if t not in self._cache: - self._cache[t] = self._parse(self._loader(t)) - return self._cache[t] - - def _get(self, mapping, key): - v = mapping.get(key) - if v is None: - v = self._defaults.get(key, '') - if hasattr(v, '__call__'): - v = v(**mapping) - return v - - def _filter(self, mapping, parts): - filters, val = parts - x = self._get(mapping, val) - for f in filters: - x = f(x) - return x - - def _format(self, mapping, args): - key, parsed = args - v = self._get(mapping, key) - if not hasattr(v, '__iter__'): - raise SyntaxError(_("error expanding '%s%%%s'") - % (key, parsed)) - lm = mapping.copy() - for i in v: - if isinstance(i, dict): - lm.update(i) - yield self._process(parsed, lm) - else: - # v is not an iterable of dicts, this happen when 'key' - # has been fully expanded already and format is useless. - # If so, return the expanded value. 
- yield i - - def _parse(self, tmpl): - '''preparse a template''' - parsed = [] - pos, stop = 0, len(tmpl) - while pos < stop: - n = tmpl.find('{', pos) - if n < 0: - parsed.append((None, tmpl[pos:stop])) - break - if n > 0 and tmpl[n - 1] == '\\': - # escaped - parsed.append((None, tmpl[pos:n - 1] + "{")) - pos = n + 1 - continue - if n > pos: - parsed.append((None, tmpl[pos:n])) - - pos = n - n = tmpl.find('}', pos) - if n < 0: - # no closing - parsed.append((None, tmpl[pos:stop])) - break - - expr = tmpl[pos + 1:n] - pos = n + 1 - - if '%' in expr: - # the keyword should be formatted with a template - key, t = expr.split('%') - parsed.append((self._format, (key.strip(), - self._load(t.strip())))) - elif '|' in expr: - # process the keyword value with one or more filters - parts = expr.split('|') - val = parts[0].strip() - try: - filters = [self._filters[f.strip()] for f in parts[1:]] - except KeyError, i: - raise SyntaxError(_("unknown filter '%s'") % i[0]) - parsed.append((self._filter, (filters, val))) - else: - # just get the keyword - parsed.append((self._get, expr.strip())) - - return parsed - - def _process(self, parsed, mapping): - '''Render a template. Returns a generator.''' - for f, e in parsed: - if f: - yield f(mapping, e) - else: - yield e + return _flatten(func(self, mapping, data) for func, data in + self._load(t)) engines = {'default': engine} @@ -183,7 +282,7 @@ self.filters.update(filters) self.defaults = defaults self.minchunk, self.maxchunk = minchunk, maxchunk - self.engines = {} + self.ecache = {} if not mapfile: return @@ -214,6 +313,8 @@ if not t in self.cache: try: self.cache[t] = open(self.map[t][1]).read() + except KeyError, inst: + raise util.Abort(_('"%s" not in template map') % inst.args[0]) except IOError, inst: raise IOError(inst.args[0], _('template file %s: %s') % (self.map[t][1], inst.args[1])) @@ -221,10 +322,10 @@ def __call__(self, t, **mapping): ttype = t in self.map and self.map[t][0] or 'default' - proc = self.engines.get(ttype) - if proc is None: - proc = engines[ttype](self.load, self.filters, self.defaults) - self.engines[ttype] = proc + if ttype not in self.ecache: + self.ecache[ttype] = engines[ttype](self.load, + self.filters, self.defaults) + proc = self.ecache[ttype] stream = proc.process(t, mapping) if self.minchunk:
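The new engine tokenizes everything between '{' and '}' and hands the token stream to the generic Pratt-style parser, guided by the precedences in the elements table. A much-reduced standalone tokenizer (no r'' strings, no backslash escapes, no error offsets) shows the kind of stream the real one yields:

    def toytokenize(expr):
        '''yield (type, value) pairs for an expression such as "desc|firstline"'''
        pos, end = 0, len(expr)
        while pos < end:
            c = expr[pos]
            if c.isspace():
                pass                          # skip whitespace between tokens
            elif c in "(,)%|":
                yield (c, None)               # single-character operators
            elif c in "\"'":                  # quoted string, no escape handling
                pos += 1
                start = pos
                while pos < end and expr[pos] != c:
                    pos += 1
                yield ('string', expr[start:pos])
            elif c.isalnum() or c == '_':
                start = pos
                while pos < end and (expr[pos].isalnum() or expr[pos] == '_'):
                    pos += 1
                yield ('symbol', expr[start:pos])
                continue                      # pos already sits on the next token
            pos += 1
        yield ('end', None)

    print(list(toytokenize("files % 'x'")))
    # [('symbol', 'files'), ('%', None), ('string', 'x'), ('end', None)]

The real tokenizer additionally handles r'' strings, escaped characters and the closing '}', and reports positions through error.ParseError so syntax errors point at the offending offset.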
--- a/mercurial/templates/paper/branches.tmpl Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/templates/paper/branches.tmpl Tue Feb 01 17:52:25 2011 -0600 @@ -40,7 +40,18 @@ <th>branch</th> <th>node</th> </tr> -{entries%branchentry} +{entries % +' <tr class="tagEntry parity{parity}"> + <td> + <a href="{url}shortlog/{node|short}{sessionvars%urlparameter}" class="{status}"> + {branch|escape} + </a> + </td> + <td class="node"> + {node|short} + </td> + </tr>' +} </table> </div> </div>
--- a/mercurial/templates/paper/shortlogentry.tmpl Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/templates/paper/shortlogentry.tmpl Tue Feb 01 17:52:25 2011 -0600 @@ -1,5 +1,5 @@ <tr class="parity{parity}"> - <td class="age">{date|age}</td> + <td class="age">{age(date)}</td> <td class="author">{author|person}</td> - <td class="description"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{desc|strip|firstline|escape|nonempty}</a>{inbranch%changelogbranchname}{branches%changelogbranchhead}{tags%changelogtag}</td> + <td class="description"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{desc|strip|firstline|escape|nonempty}</a>{inbranch%changelogbranchname}{branches%changelogbranchhead}{tags % '<span class="tag">{name|escape}</span> '}</td> </tr>
--- a/mercurial/ui.py Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/ui.py Tue Feb 01 17:52:25 2011 -0600 @@ -153,6 +153,16 @@ "%s.%s = %s\n") % (section, name, uvalue)) return value + def configpath(self, section, name, default=None, untrusted=False): + 'get a path config item, expanded relative to config file' + v = self.config(section, name, default, untrusted) + if not os.path.isabs(v) or "://" not in v: + src = self.configsource(section, name, untrusted) + if ':' in src: + base = os.path.dirname(src.rsplit(':')) + v = os.path.join(base, os.path.expanduser(v)) + return v + def configbool(self, section, name, default=False, untrusted=False): v = self.config(section, name, None, untrusted) if v is None: @@ -589,7 +599,7 @@ termination. ''' - if pos == None or not self.debugflag: + if pos is None or not self.debugflag: return if unit:
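configpath() resolves a relative path against the directory of the hgrc that defined it, using the 'file:line' string returned by configsource(). A minimal sketch of that expansion with the source-string handling simplified; the file names below are hypothetical, and the real method naturally goes through ui.config()/ui.configsource():

    import os

    def expandpath(value, source):
        # source looks like '/path/to/hgrc:12'; absolute paths and URLs pass through
        if os.path.isabs(value) or '://' in value:
            return value
        base = os.path.dirname(source.rsplit(':', 1)[0])
        return os.path.join(base, os.path.expanduser(value))

    print(expandpath('hooks/update.sh', '/home/user/repo/.hg/hgrc:12'))
    # /home/user/repo/.hg/hooks/update.sh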
--- a/mercurial/url.py Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/url.py Tue Feb 01 17:52:25 2011 -0600 @@ -258,18 +258,36 @@ defines a __len__ attribute to feed the Content-Length header. """ - def __init__(self, *args, **kwargs): + def __init__(self, ui, *args, **kwargs): # We can't just "self._data = open(*args, **kwargs)" here because there # is an "open" function defined in this module that shadows the global # one + self.ui = ui self._data = __builtin__.open(*args, **kwargs) - self.read = self._data.read self.seek = self._data.seek self.close = self._data.close self.write = self._data.write + self._len = os.fstat(self._data.fileno()).st_size + self._pos = 0 + self._total = len(self) / 1024 * 2 + + def read(self, *args, **kwargs): + try: + ret = self._data.read(*args, **kwargs) + except EOFError: + self.ui.progress(_('sending'), None) + self._pos += len(ret) + # We pass double the max for total because we currently have + # to send the bundle twice in the case of a server that + # requires authentication. Since we can't know until we try + # once whether authentication will be required, just lie to + # the user and maybe the push succeeds suddenly at 50%. + self.ui.progress(_('sending'), self._pos / 1024, + unit=_('kb'), total=self._total) + return ret def __len__(self): - return os.fstat(self._data.fileno()).st_size + return self._len def _gen_sendfile(connection): def _sendfile(self, data):
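The progress plumbing above boils down to a file-like wrapper that reports from read(), so whatever consumes the request body drives the progress bar for free. A minimal sketch with a generic callback standing in for ui.progress(); the real class also doubles the total to allow for the authentication retry explained in its comment:

    import os

    class progressfile(object):
        def __init__(self, fp, report):
            self._fp = fp
            self._report = report        # callable(pos_in_kb, total_in_kb)
            self._len = os.fstat(fp.fileno()).st_size
            self._pos = 0

        def __len__(self):
            return self._len

        def read(self, *args):
            data = self._fp.read(*args)
            self._pos += len(data)
            self._report(self._pos // 1024, self._len // 1024)
            return data

    # e.g. progressfile(open('bundle.hg', 'rb'), lambda pos, total: None)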
--- a/mercurial/util.py Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/util.py Tue Feb 01 17:52:25 2011 -0600 @@ -431,7 +431,7 @@ return check -def unlink(f): +def unlinkpath(f): """unlink and remove the directory if it is empty""" os.unlink(f) # try removing directories that might now be empty @@ -451,7 +451,7 @@ else: try: shutil.copyfile(src, dest) - shutil.copystat(src, dest) + shutil.copymode(src, dest) except shutil.Error, inst: raise Abort(str(inst)) @@ -487,6 +487,7 @@ '''ensure that a filesystem path contains no banned components. the following properties of a path are checked: + - ends with a directory separator - under top-level .hg - starts at the root of a windows drive - contains ".." @@ -504,6 +505,9 @@ def __call__(self, path): if path in self.audited: return + # AIX ignores "/" at end of path, others raise EISDIR. + if endswithsep(path): + raise Abort(_("path ends in directory separator: %s") % path) normpath = os.path.normcase(path) parts = splitpath(normpath) if (os.path.splitdrive(path)[0] @@ -837,7 +841,7 @@ self._fp.close() rename(self.temp, localpath(self.__name)) - def __del__(self): + def close(self): if not self._fp: return if not self._fp.closed: @@ -846,6 +850,9 @@ except: pass self._fp.close() + def __del__(self): + self.close() + def makedirs(name, mode=None): """recursive directory creation with parent mode inheritance""" parent = os.path.abspath(os.path.dirname(name)) @@ -894,7 +901,6 @@ mode += "b" # for that other OS nlink = -1 - st_mode = None dirname, basename = os.path.split(f) # If basename is empty, then the path is malformed because it points # to a directory. Let the posixfile() call below raise IOError. @@ -905,8 +911,7 @@ return atomictempfile(f, mode, self.createmode) try: if 'w' in mode: - st_mode = os.lstat(f).st_mode & 0777 - os.unlink(f) + unlink(f) nlink = 0 else: # nlinks() may behave differently for files on Windows @@ -916,7 +921,9 @@ if nlink < 1: nlink = 2 # force mktempcopy (issue1922) fd.close() - except (OSError, IOError): + except (OSError, IOError), e: + if e.errno != errno.ENOENT: + raise nlink = 0 if not os.path.isdir(dirname): makedirs(dirname, self.createmode) @@ -927,10 +934,7 @@ rename(mktempcopy(f), f) fp = posixfile(f, mode) if nlink == 0: - if st_mode is None: - self._fixfilemode(f) - else: - os.chmod(f, st_mode) + self._fixfilemode(f) return fp def symlink(self, src, dst): @@ -1075,7 +1079,7 @@ # NOTE: unixtime = localunixtime + offset offset, date = timezone(string), string - if offset != None: + if offset is not None: date = " ".join(string.split()[:-1]) # add missing elements from defaults @@ -1120,7 +1124,7 @@ now = makedate() defaults = {} nowmap = {} - for part in "d mb yY HI M S".split(): + for part in ("d", "mb", "yY", "HI", "M", "S"): # this piece is for rounding the specific end of unknowns b = bias.get(part) if b is None: @@ -1190,7 +1194,7 @@ def upper(date): d = dict(mb="12", HI="23", M="59", S="59") - for days in "31 30 29".split(): + for days in ("31", "30", "29"): try: d["d"] = days return parsedate(date, extendeddateformats, d)[0] @@ -1390,15 +1394,26 @@ #### naming convention of below implementation follows 'textwrap' module class MBTextWrapper(textwrap.TextWrapper): + """ + Extend TextWrapper for double-width characters. + + Some Asian characters use two terminal columns instead of one. + A good example of this behavior can be seen with u'\u65e5\u672c', + the two Japanese characters for "Japan": + len() returns 2, but when printed to a terminal, they eat 4 columns. 
+ + (Note that this has nothing to do whatsoever with unicode + representation, or encoding of the underlying string) + """ def __init__(self, **kwargs): textwrap.TextWrapper.__init__(self, **kwargs) def _cutdown(self, str, space_left): l = 0 ucstr = unicode(str, encoding.encoding) - w = unicodedata.east_asian_width + colwidth = unicodedata.east_asian_width for i in xrange(len(ucstr)): - l += w(ucstr[i]) in 'WFA' and 2 or 1 + l += colwidth(ucstr[i]) in 'WFA' and 2 or 1 if space_left < l: return (ucstr[:i].encode(encoding.encoding), ucstr[i:].encode(encoding.encoding))
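The MBTextWrapper docstring is easy to check directly. A tiny sketch of the column-width rule it describes, using the same idiom as _cutdown() above (east-asian width classes W, F and A count as two terminal columns):

    import unicodedata

    def colwidth(ustr):
        return sum(unicodedata.east_asian_width(c) in 'WFA' and 2 or 1
                   for c in ustr)

    japan = u'\u65e5\u672c'
    print('%d characters, %d columns' % (len(japan), colwidth(japan)))
    # 2 characters, 4 columns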
--- a/mercurial/verify.py Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/verify.py Tue Feb 01 17:52:25 2011 -0600 @@ -34,7 +34,7 @@ raise util.Abort(_("cannot verify bundle or remote repos")) def err(linkrev, msg, filename=None): - if linkrev != None: + if linkrev is not None: badrevs.add(linkrev) else: linkrev = '?'
--- a/mercurial/windows.py Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/windows.py Tue Feb 01 17:52:25 2011 -0600 @@ -276,7 +276,7 @@ break head, tail = os.path.split(head) -def unlink(f): +def unlinkpath(f): """unlink and remove the directory if it is empty""" os.unlink(f) # try removing directories that might now be empty @@ -285,40 +285,54 @@ except OSError: pass +def unlink(f): + '''try to implement POSIX' unlink semantics on Windows''' + + # POSIX allows to unlink and rename open files. Windows has serious + # problems with doing that: + # - Calling os.unlink (or os.rename) on a file f fails if f or any + # hardlinked copy of f has been opened with Python's open(). There is no + # way such a file can be deleted or renamed on Windows (other than + # scheduling the delete or rename for the next reboot). + # - Calling os.unlink on a file that has been opened with Mercurial's + # posixfile (or comparable methods) will delay the actual deletion of + # the file for as long as the file is held open. The filename is blocked + # during that time and cannot be used for recreating a new file under + # that same name ("zombie file"). Directories containing such zombie files + # cannot be removed or moved. + # A file that has been opened with posixfile can be renamed, so we rename + # f to a random temporary name before calling os.unlink on it. This allows + # callers to recreate f immediately while having other readers do their + # implicit zombie filename blocking on a temporary name. + + for tries in xrange(10): + temp = '%s-%08x' % (f, random.randint(0, 0xffffffff)) + try: + os.rename(f, temp) # raises OSError EEXIST if temp exists + break + except OSError, e: + if e.errno != errno.EEXIST: + raise + else: + raise IOError, (errno.EEXIST, "No usable temporary filename found") + + try: + os.unlink(temp) + except: + # Some very rude AV-scanners on Windows may cause this unlink to fail. + # Not aborting here just leaks the temp file, whereas aborting at this + # point may leave serious inconsistencies. Ideally, we would notify + # the user in this case here. + pass + def rename(src, dst): '''atomically rename file src to dst, replacing dst if it exists''' try: os.rename(src, dst) - except OSError: # FIXME: check err (EEXIST ?) - - # On windows, rename to existing file is not allowed, so we - # must delete destination first. But if a file is open, unlink - # schedules it for delete but does not delete it. Rename - # happens immediately even for open files, so we rename - # destination to a temporary name, then delete that. Then - # rename is safe to do. - # The temporary name is chosen at random to avoid the situation - # where a file is left lying around from a previous aborted run. - - for tries in xrange(10): - temp = '%s-%08x' % (dst, random.randint(0, 0xffffffff)) - try: - os.rename(dst, temp) # raises OSError EEXIST if temp exists - break - except OSError, e: - if e.errno != errno.EEXIST: - raise - else: - raise IOError, (errno.EEXIST, "No usable temporary filename found") - - try: - os.unlink(temp) - except: - # Some rude AV-scanners on Windows may cause the unlink to - # fail. Not aborting here just leaks the temp file, whereas - # aborting at this point may leave serious inconsistencies. - # Ideally, we would notify the user here. - pass + except OSError, e: + if e.errno != errno.EEXIST: + raise + unlink(dst) os.rename(src, dst) def spawndetached(args):
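A trimmed-down sketch of the rename-then-delete trick the new unlink() describes, with error handling reduced to the essentials (the function name is ours): renaming frees the original name immediately, and only the randomly-named temporary copy stays blocked while other processes keep the file open.

    import os, errno, random

    def posixlike_unlink(f):
        for tries in xrange(10):
            temp = '%s-%08x' % (f, random.randint(0, 0xffffffff))
            try:
                os.rename(f, temp)     # frees the name f right away
                break
            except OSError, e:
                if e.errno != errno.EEXIST:
                    raise
        else:
            raise IOError(errno.EEXIST, 'no usable temporary filename found')
        try:
            os.unlink(temp)            # may be delayed while readers hold it open
        except OSError:
            pass                       # leaking the temp file beats aborting here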
--- a/mercurial/wireproto.py Tue Feb 01 17:30:13 2011 -0600 +++ b/mercurial/wireproto.py Tue Feb 01 17:52:25 2011 -0600 @@ -25,7 +25,7 @@ class wirerepository(repo.repository): def lookup(self, key): self.requirecap('lookup', _('look up remote revision')) - d = self._call("lookup", key=key) + d = self._call("lookup", key=encoding.fromlocal(key)) success, data = d[:-1].split(" ", 1) if int(success): return bin(data) @@ -44,14 +44,7 @@ branchmap = {} for branchpart in d.splitlines(): branchname, branchheads = branchpart.split(' ', 1) - branchname = urllib.unquote(branchname) - # Earlier servers (1.3.x) send branch names in (their) local - # charset. The best we can do is assume it's identical to our - # own local charset, in case it's not utf-8. - try: - branchname.decode('utf-8') - except UnicodeDecodeError: - branchname = encoding.fromlocal(branchname) + branchname = encoding.tolocal(urllib.unquote(branchname)) branchheads = decodelist(branchheads) branchmap[branchname] = branchheads return branchmap @@ -83,17 +76,20 @@ if not self.capable('pushkey'): return False d = self._call("pushkey", - namespace=namespace, key=key, old=old, new=new) + namespace=encoding.fromlocal(namespace), + key=encoding.fromlocal(key), + old=encoding.fromlocal(old), + new=encoding.fromlocal(new)) return bool(int(d)) def listkeys(self, namespace): if not self.capable('pushkey'): return {} - d = self._call("listkeys", namespace=namespace) + d = self._call("listkeys", namespace=encoding.fromlocal(namespace)) r = {} for l in d.splitlines(): k, v = l.split('\t') - r[k.decode('string-escape')] = v.decode('string-escape') + r[encoding.tolocal(k)] = encoding.tolocal(v) return r def stream_out(self): @@ -162,7 +158,7 @@ branchmap = repo.branchmap() heads = [] for branch, nodes in branchmap.iteritems(): - branchname = urllib.quote(branch) + branchname = urllib.quote(encoding.fromlocal(branch)) branchnodes = encodelist(nodes) heads.append('%s %s' % (branchname, branchnodes)) return '\n'.join(heads) @@ -213,14 +209,14 @@ return "capabilities: %s\n" % (capabilities(repo, proto)) def listkeys(repo, proto, namespace): - d = pushkeymod.list(repo, namespace).items() - t = '\n'.join(['%s\t%s' % (k.encode('string-escape'), - v.encode('string-escape')) for k, v in d]) + d = pushkeymod.list(repo, encoding.tolocal(namespace)).items() + t = '\n'.join(['%s\t%s' % (encoding.fromlocal(k), encoding.fromlocal(v)) + for k, v in d]) return t def lookup(repo, proto, key): try: - r = hex(repo.lookup(key)) + r = hex(repo.lookup(encoding.tolocal(key))) success = 1 except Exception, inst: r = str(inst) @@ -228,7 +224,21 @@ return "%s %s\n" % (success, r) def pushkey(repo, proto, namespace, key, old, new): - r = pushkeymod.push(repo, namespace, key, old, new) + # compatibility with pre-1.8 clients which were accidentally + # sending raw binary nodes rather than utf-8-encoded hex + if len(new) == 20 and new.encode('string-escape') != new: + # looks like it could be a binary node + try: + u = new.decode('utf-8') + new = encoding.tolocal(new) # but cleanly decodes as UTF-8 + except UnicodeDecodeError: + pass # binary, leave unmodified + else: + new = encoding.tolocal(new) # normal path + + r = pushkeymod.push(repo, + encoding.tolocal(namespace), encoding.tolocal(key), + encoding.tolocal(old), new) return '%s\n' % int(r) def _allowstream(ui):
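The wireproto changes all follow one convention: values travel as UTF-8 on the wire, and each side converts to and from its local charset at the boundary. A simplified sketch of that pair of conversions; the real encoding module is far more forgiving about input that is not valid UTF-8, which is exactly what the pushkey compatibility shim above deals with:

    localenc = 'latin-1'              # stands in for encoding.encoding

    def fromlocal(s):
        return s.decode(localenc).encode('utf-8')    # local -> wire

    def tolocal(s):
        return s.decode('utf-8').encode(localenc)    # wire -> local

    branchname = '\xe9t\xe9'          # a latin-1 branch name
    assert tolocal(fromlocal(branchname)) == branchname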
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/cgienv Tue Feb 01 17:52:25 2011 -0600 @@ -0,0 +1,29 @@ +DOCUMENT_ROOT="/var/www/hg"; export DOCUMENT_ROOT +GATEWAY_INTERFACE="CGI/1.1"; export GATEWAY_INTERFACE +HTTP_ACCEPT="text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5"; export HTTP_ACCEPT +HTTP_ACCEPT_CHARSET="ISO-8859-1,utf-8;q=0.7,*;q=0.7"; export HTTP_ACCEPT_CHARSET +HTTP_ACCEPT_ENCODING="gzip,deflate"; export HTTP_ACCEPT_ENCODING +HTTP_ACCEPT_LANGUAGE="en-us,en;q=0.5"; export HTTP_ACCEPT_LANGUAGE +HTTP_CACHE_CONTROL="max-age=0"; export HTTP_CACHE_CONTROL +HTTP_CONNECTION="keep-alive"; export HTTP_CONNECTION +HTTP_HOST="hg.omnifarious.org"; export HTTP_HOST +HTTP_KEEP_ALIVE="300"; export HTTP_KEEP_ALIVE +HTTP_USER_AGENT="Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.8.0.4) Gecko/20060608 Ubuntu/dapper-security Firefox/1.5.0.4"; export HTTP_USER_AGENT +PATH_INFO="/"; export PATH_INFO +PATH_TRANSLATED="/var/www/hg/index.html"; export PATH_TRANSLATED +QUERY_STRING=""; export QUERY_STRING +REMOTE_ADDR="127.0.0.2"; export REMOTE_ADDR +REMOTE_PORT="44703"; export REMOTE_PORT +REQUEST_METHOD="GET"; export REQUEST_METHOD +REQUEST_URI="/test/"; export REQUEST_URI +SCRIPT_FILENAME="/home/hopper/hg_public/test.cgi"; export SCRIPT_FILENAME +SCRIPT_NAME="/test"; export SCRIPT_NAME +SCRIPT_URI="http://hg.omnifarious.org/test/"; export SCRIPT_URI +SCRIPT_URL="/test/"; export SCRIPT_URL +SERVER_ADDR="127.0.0.1"; export SERVER_ADDR +SERVER_ADMIN="eric@localhost"; export SERVER_ADMIN +SERVER_NAME="hg.omnifarious.org"; export SERVER_NAME +SERVER_PORT="80"; export SERVER_PORT +SERVER_PROTOCOL="HTTP/1.1"; export SERVER_PROTOCOL +SERVER_SIGNATURE="<address>Apache/2.0.53 (Fedora) Server at hg.omnifarious.org Port 80</address>"; export SERVER_SIGNATURE +SERVER_SOFTWARE="Apache/2.0.53 (Fedora)"; export SERVER_SOFTWARE
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/filtercr.py Tue Feb 01 17:52:25 2011 -0600 @@ -0,0 +1,10 @@ +#!/usr/bin/env python + +# Filter output by the progress extension to make it readable in tests + +import sys, re + +for line in sys.stdin: + line = re.sub(r'\r+[^\n]', lambda m: '\n' + m.group()[-1:], line) + sys.stdout.write(line) +print
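To see what the filter does, feed it a hypothetical progress line built from carriage-return overwrites:

    import re

    line = 'bundling: 1 changesets\rbundling: 2 changesets\rdone\n'
    print(re.sub(r'\r+[^\n]', lambda m: '\n' + m.group()[-1:], line))
    # bundling: 1 changesets
    # bundling: 2 changesets
    # done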
--- a/tests/hghave Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/hghave Tue Feb 01 17:52:25 2011 -0600 @@ -101,15 +101,6 @@ def has_fifo(): return hasattr(os, "mkfifo") -def has_hotshot(): - try: - # hotshot.stats tests hotshot and many problematic dependencies - # like profile. - import hotshot.stats - return True - except ImportError: - return False - def has_lsprof(): try: import _lsprof @@ -198,7 +189,6 @@ "fifo": (has_fifo, "named pipes"), "git": (has_git, "git command line client"), "gpg": (has_gpg, "gpg client"), - "hotshot": (has_hotshot, "python hotshot module"), "icasefs": (has_icasefs, "case insensitive file system"), "inotify": (has_inotify, "inotify extension support"), "lsprof": (has_lsprof, "python lsprof module"),
--- a/tests/run-tests.py Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/run-tests.py Tue Feb 01 17:52:25 2011 -0600 @@ -504,7 +504,8 @@ vlog("# Running", cmd) exitcode, output = run(cmd, options, replacements) # do not merge output if skipped, return hghave message instead - if exitcode == SKIPPED_STATUS: + # similarly, with --debug, output is None + if exitcode == SKIPPED_STATUS or output is None: return exitcode, output finally: os.remove(name) @@ -593,7 +594,7 @@ tochild.close() output = fromchild.read() ret = fromchild.close() - if ret == None: + if ret is None: ret = 0 else: proc = Popen4(cmd) @@ -713,7 +714,7 @@ # If we're not in --debug mode and reference output file exists, # check test output against it. if options.debug: - refout = None # to match out == None + refout = None # to match "out is None" elif os.path.exists(ref): f = open(ref, "r") refout = splitnewlines(f.read())
--- a/tests/test-acl.t Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/test-acl.t Tue Feb 01 17:52:25 2011 -0600 @@ -90,38 +90,38 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling changes: 0 chunks - bundling changes: 1 chunks - bundling changes: 2 chunks - bundling changes: 3 chunks - bundling changes: 4 chunks - bundling changes: 5 chunks - bundling changes: 6 chunks - bundling changes: 7 chunks - bundling changes: 8 chunks - bundling changes: 9 chunks - bundling manifests: 0 chunks - bundling manifests: 1 chunks - bundling manifests: 2 chunks - bundling manifests: 3 chunks - bundling manifests: 4 chunks - bundling manifests: 5 chunks - bundling manifests: 6 chunks - bundling manifests: 7 chunks - bundling manifests: 8 chunks - bundling manifests: 9 chunks - bundling files: foo/Bar/file.txt 0 chunks - bundling files: foo/Bar/file.txt 1 chunks - bundling files: foo/Bar/file.txt 2 chunks - bundling files: foo/Bar/file.txt 3 chunks - bundling files: foo/file.txt 4 chunks - bundling files: foo/file.txt 5 chunks - bundling files: foo/file.txt 6 chunks - bundling files: foo/file.txt 7 chunks - bundling files: quux/file.py 8 chunks - bundling files: quux/file.py 9 chunks - bundling files: quux/file.py 10 chunks - bundling files: quux/file.py 11 chunks + bundling: 0 changesets + bundling: 0 changesets + bundling: 0 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 3 changesets + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 3/3 manifests (100.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 changesets: 2 chunks @@ -166,38 +166,38 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling changes: 0 chunks - bundling changes: 1 chunks - bundling changes: 2 chunks - bundling changes: 3 chunks - bundling changes: 4 chunks - bundling changes: 5 chunks - bundling changes: 6 chunks - bundling changes: 7 chunks - bundling changes: 8 chunks - bundling changes: 9 chunks - bundling manifests: 0 chunks - bundling manifests: 1 chunks - bundling manifests: 2 chunks - bundling manifests: 3 chunks - bundling manifests: 4 chunks - bundling manifests: 5 chunks - bundling manifests: 6 chunks - bundling manifests: 7 chunks - bundling manifests: 8 chunks - bundling manifests: 9 chunks - bundling files: foo/Bar/file.txt 0 chunks - bundling files: foo/Bar/file.txt 1 chunks - bundling files: foo/Bar/file.txt 2 chunks - bundling files: foo/Bar/file.txt 3 chunks - bundling files: foo/file.txt 4 chunks - bundling files: foo/file.txt 5 chunks - bundling files: foo/file.txt 6 chunks - 
bundling files: foo/file.txt 7 chunks - bundling files: quux/file.py 8 chunks - bundling files: quux/file.py 9 chunks - bundling files: quux/file.py 10 chunks - bundling files: quux/file.py 11 chunks + bundling: 0 changesets + bundling: 0 changesets + bundling: 0 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 3 changesets + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 3/3 manifests (100.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 changesets: 2 chunks @@ -245,38 +245,38 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling changes: 0 chunks - bundling changes: 1 chunks - bundling changes: 2 chunks - bundling changes: 3 chunks - bundling changes: 4 chunks - bundling changes: 5 chunks - bundling changes: 6 chunks - bundling changes: 7 chunks - bundling changes: 8 chunks - bundling changes: 9 chunks - bundling manifests: 0 chunks - bundling manifests: 1 chunks - bundling manifests: 2 chunks - bundling manifests: 3 chunks - bundling manifests: 4 chunks - bundling manifests: 5 chunks - bundling manifests: 6 chunks - bundling manifests: 7 chunks - bundling manifests: 8 chunks - bundling manifests: 9 chunks - bundling files: foo/Bar/file.txt 0 chunks - bundling files: foo/Bar/file.txt 1 chunks - bundling files: foo/Bar/file.txt 2 chunks - bundling files: foo/Bar/file.txt 3 chunks - bundling files: foo/file.txt 4 chunks - bundling files: foo/file.txt 5 chunks - bundling files: foo/file.txt 6 chunks - bundling files: foo/file.txt 7 chunks - bundling files: quux/file.py 8 chunks - bundling files: quux/file.py 9 chunks - bundling files: quux/file.py 10 chunks - bundling files: quux/file.py 11 chunks + bundling: 0 changesets + bundling: 0 changesets + bundling: 0 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 3 changesets + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 3/3 manifests (100.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + 
bundling: foo/file.txt 1/3 files (33.33%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 changesets: 2 chunks @@ -333,38 +333,38 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling changes: 0 chunks - bundling changes: 1 chunks - bundling changes: 2 chunks - bundling changes: 3 chunks - bundling changes: 4 chunks - bundling changes: 5 chunks - bundling changes: 6 chunks - bundling changes: 7 chunks - bundling changes: 8 chunks - bundling changes: 9 chunks - bundling manifests: 0 chunks - bundling manifests: 1 chunks - bundling manifests: 2 chunks - bundling manifests: 3 chunks - bundling manifests: 4 chunks - bundling manifests: 5 chunks - bundling manifests: 6 chunks - bundling manifests: 7 chunks - bundling manifests: 8 chunks - bundling manifests: 9 chunks - bundling files: foo/Bar/file.txt 0 chunks - bundling files: foo/Bar/file.txt 1 chunks - bundling files: foo/Bar/file.txt 2 chunks - bundling files: foo/Bar/file.txt 3 chunks - bundling files: foo/file.txt 4 chunks - bundling files: foo/file.txt 5 chunks - bundling files: foo/file.txt 6 chunks - bundling files: foo/file.txt 7 chunks - bundling files: quux/file.py 8 chunks - bundling files: quux/file.py 9 chunks - bundling files: quux/file.py 10 chunks - bundling files: quux/file.py 11 chunks + bundling: 0 changesets + bundling: 0 changesets + bundling: 0 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 3 changesets + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 3/3 manifests (100.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 changesets: 2 chunks @@ -420,38 +420,38 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling changes: 0 chunks - bundling changes: 1 chunks - bundling changes: 2 chunks - bundling changes: 3 chunks - bundling changes: 4 chunks - bundling changes: 5 chunks - bundling changes: 6 chunks - bundling changes: 7 chunks - bundling changes: 8 chunks - bundling changes: 9 chunks - bundling manifests: 0 chunks - bundling manifests: 1 chunks - bundling manifests: 2 chunks - bundling manifests: 3 chunks - bundling manifests: 4 chunks - bundling manifests: 5 chunks - bundling manifests: 6 chunks - bundling manifests: 7 chunks - bundling manifests: 8 chunks - bundling manifests: 9 chunks - bundling files: foo/Bar/file.txt 0 chunks - bundling files: foo/Bar/file.txt 1 chunks - bundling files: 
foo/Bar/file.txt 2 chunks - bundling files: foo/Bar/file.txt 3 chunks - bundling files: foo/file.txt 4 chunks - bundling files: foo/file.txt 5 chunks - bundling files: foo/file.txt 6 chunks - bundling files: foo/file.txt 7 chunks - bundling files: quux/file.py 8 chunks - bundling files: quux/file.py 9 chunks - bundling files: quux/file.py 10 chunks - bundling files: quux/file.py 11 chunks + bundling: 0 changesets + bundling: 0 changesets + bundling: 0 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 3 changesets + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 3/3 manifests (100.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 changesets: 2 chunks @@ -512,38 +512,38 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling changes: 0 chunks - bundling changes: 1 chunks - bundling changes: 2 chunks - bundling changes: 3 chunks - bundling changes: 4 chunks - bundling changes: 5 chunks - bundling changes: 6 chunks - bundling changes: 7 chunks - bundling changes: 8 chunks - bundling changes: 9 chunks - bundling manifests: 0 chunks - bundling manifests: 1 chunks - bundling manifests: 2 chunks - bundling manifests: 3 chunks - bundling manifests: 4 chunks - bundling manifests: 5 chunks - bundling manifests: 6 chunks - bundling manifests: 7 chunks - bundling manifests: 8 chunks - bundling manifests: 9 chunks - bundling files: foo/Bar/file.txt 0 chunks - bundling files: foo/Bar/file.txt 1 chunks - bundling files: foo/Bar/file.txt 2 chunks - bundling files: foo/Bar/file.txt 3 chunks - bundling files: foo/file.txt 4 chunks - bundling files: foo/file.txt 5 chunks - bundling files: foo/file.txt 6 chunks - bundling files: foo/file.txt 7 chunks - bundling files: quux/file.py 8 chunks - bundling files: quux/file.py 9 chunks - bundling files: quux/file.py 10 chunks - bundling files: quux/file.py 11 chunks + bundling: 0 changesets + bundling: 0 changesets + bundling: 0 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 3 changesets + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 3/3 manifests (100.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files 
(0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 changesets: 2 chunks @@ -601,38 +601,38 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling changes: 0 chunks - bundling changes: 1 chunks - bundling changes: 2 chunks - bundling changes: 3 chunks - bundling changes: 4 chunks - bundling changes: 5 chunks - bundling changes: 6 chunks - bundling changes: 7 chunks - bundling changes: 8 chunks - bundling changes: 9 chunks - bundling manifests: 0 chunks - bundling manifests: 1 chunks - bundling manifests: 2 chunks - bundling manifests: 3 chunks - bundling manifests: 4 chunks - bundling manifests: 5 chunks - bundling manifests: 6 chunks - bundling manifests: 7 chunks - bundling manifests: 8 chunks - bundling manifests: 9 chunks - bundling files: foo/Bar/file.txt 0 chunks - bundling files: foo/Bar/file.txt 1 chunks - bundling files: foo/Bar/file.txt 2 chunks - bundling files: foo/Bar/file.txt 3 chunks - bundling files: foo/file.txt 4 chunks - bundling files: foo/file.txt 5 chunks - bundling files: foo/file.txt 6 chunks - bundling files: foo/file.txt 7 chunks - bundling files: quux/file.py 8 chunks - bundling files: quux/file.py 9 chunks - bundling files: quux/file.py 10 chunks - bundling files: quux/file.py 11 chunks + bundling: 0 changesets + bundling: 0 changesets + bundling: 0 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 3 changesets + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 3/3 manifests (100.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 changesets: 2 chunks @@ -695,38 +695,38 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling changes: 0 chunks - bundling changes: 1 chunks - bundling changes: 2 chunks - bundling changes: 3 chunks - bundling changes: 4 chunks - bundling changes: 5 chunks - bundling changes: 6 chunks - bundling changes: 7 chunks - bundling changes: 8 chunks - bundling changes: 9 chunks - bundling manifests: 0 chunks - bundling manifests: 1 chunks - bundling manifests: 2 chunks - bundling manifests: 3 chunks - bundling manifests: 4 chunks - bundling manifests: 5 chunks - bundling manifests: 6 chunks - bundling manifests: 
7 chunks - bundling manifests: 8 chunks - bundling manifests: 9 chunks - bundling files: foo/Bar/file.txt 0 chunks - bundling files: foo/Bar/file.txt 1 chunks - bundling files: foo/Bar/file.txt 2 chunks - bundling files: foo/Bar/file.txt 3 chunks - bundling files: foo/file.txt 4 chunks - bundling files: foo/file.txt 5 chunks - bundling files: foo/file.txt 6 chunks - bundling files: foo/file.txt 7 chunks - bundling files: quux/file.py 8 chunks - bundling files: quux/file.py 9 chunks - bundling files: quux/file.py 10 chunks - bundling files: quux/file.py 11 chunks + bundling: 0 changesets + bundling: 0 changesets + bundling: 0 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 3 changesets + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 3/3 manifests (100.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 changesets: 2 chunks @@ -786,38 +786,38 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling changes: 0 chunks - bundling changes: 1 chunks - bundling changes: 2 chunks - bundling changes: 3 chunks - bundling changes: 4 chunks - bundling changes: 5 chunks - bundling changes: 6 chunks - bundling changes: 7 chunks - bundling changes: 8 chunks - bundling changes: 9 chunks - bundling manifests: 0 chunks - bundling manifests: 1 chunks - bundling manifests: 2 chunks - bundling manifests: 3 chunks - bundling manifests: 4 chunks - bundling manifests: 5 chunks - bundling manifests: 6 chunks - bundling manifests: 7 chunks - bundling manifests: 8 chunks - bundling manifests: 9 chunks - bundling files: foo/Bar/file.txt 0 chunks - bundling files: foo/Bar/file.txt 1 chunks - bundling files: foo/Bar/file.txt 2 chunks - bundling files: foo/Bar/file.txt 3 chunks - bundling files: foo/file.txt 4 chunks - bundling files: foo/file.txt 5 chunks - bundling files: foo/file.txt 6 chunks - bundling files: foo/file.txt 7 chunks - bundling files: quux/file.py 8 chunks - bundling files: quux/file.py 9 chunks - bundling files: quux/file.py 10 chunks - bundling files: quux/file.py 11 chunks + bundling: 0 changesets + bundling: 0 changesets + bundling: 0 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 3 changesets + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests 
(66.67%) + bundling: 3/3 manifests (100.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 changesets: 2 chunks @@ -879,38 +879,38 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling changes: 0 chunks - bundling changes: 1 chunks - bundling changes: 2 chunks - bundling changes: 3 chunks - bundling changes: 4 chunks - bundling changes: 5 chunks - bundling changes: 6 chunks - bundling changes: 7 chunks - bundling changes: 8 chunks - bundling changes: 9 chunks - bundling manifests: 0 chunks - bundling manifests: 1 chunks - bundling manifests: 2 chunks - bundling manifests: 3 chunks - bundling manifests: 4 chunks - bundling manifests: 5 chunks - bundling manifests: 6 chunks - bundling manifests: 7 chunks - bundling manifests: 8 chunks - bundling manifests: 9 chunks - bundling files: foo/Bar/file.txt 0 chunks - bundling files: foo/Bar/file.txt 1 chunks - bundling files: foo/Bar/file.txt 2 chunks - bundling files: foo/Bar/file.txt 3 chunks - bundling files: foo/file.txt 4 chunks - bundling files: foo/file.txt 5 chunks - bundling files: foo/file.txt 6 chunks - bundling files: foo/file.txt 7 chunks - bundling files: quux/file.py 8 chunks - bundling files: quux/file.py 9 chunks - bundling files: quux/file.py 10 chunks - bundling files: quux/file.py 11 chunks + bundling: 0 changesets + bundling: 0 changesets + bundling: 0 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 3 changesets + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 3/3 manifests (100.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 changesets: 2 chunks @@ -974,38 +974,38 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling changes: 0 chunks - bundling changes: 1 chunks - bundling changes: 2 chunks - bundling changes: 3 chunks - bundling changes: 4 chunks - bundling changes: 5 chunks - bundling changes: 6 chunks - bundling changes: 7 chunks - bundling changes: 8 chunks - bundling changes: 9 chunks - bundling manifests: 0 chunks - bundling manifests: 1 chunks 
- bundling manifests: 2 chunks - bundling manifests: 3 chunks - bundling manifests: 4 chunks - bundling manifests: 5 chunks - bundling manifests: 6 chunks - bundling manifests: 7 chunks - bundling manifests: 8 chunks - bundling manifests: 9 chunks - bundling files: foo/Bar/file.txt 0 chunks - bundling files: foo/Bar/file.txt 1 chunks - bundling files: foo/Bar/file.txt 2 chunks - bundling files: foo/Bar/file.txt 3 chunks - bundling files: foo/file.txt 4 chunks - bundling files: foo/file.txt 5 chunks - bundling files: foo/file.txt 6 chunks - bundling files: foo/file.txt 7 chunks - bundling files: quux/file.py 8 chunks - bundling files: quux/file.py 9 chunks - bundling files: quux/file.py 10 chunks - bundling files: quux/file.py 11 chunks + bundling: 0 changesets + bundling: 0 changesets + bundling: 0 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 3 changesets + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 3/3 manifests (100.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 changesets: 2 chunks @@ -1074,38 +1074,38 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling changes: 0 chunks - bundling changes: 1 chunks - bundling changes: 2 chunks - bundling changes: 3 chunks - bundling changes: 4 chunks - bundling changes: 5 chunks - bundling changes: 6 chunks - bundling changes: 7 chunks - bundling changes: 8 chunks - bundling changes: 9 chunks - bundling manifests: 0 chunks - bundling manifests: 1 chunks - bundling manifests: 2 chunks - bundling manifests: 3 chunks - bundling manifests: 4 chunks - bundling manifests: 5 chunks - bundling manifests: 6 chunks - bundling manifests: 7 chunks - bundling manifests: 8 chunks - bundling manifests: 9 chunks - bundling files: foo/Bar/file.txt 0 chunks - bundling files: foo/Bar/file.txt 1 chunks - bundling files: foo/Bar/file.txt 2 chunks - bundling files: foo/Bar/file.txt 3 chunks - bundling files: foo/file.txt 4 chunks - bundling files: foo/file.txt 5 chunks - bundling files: foo/file.txt 6 chunks - bundling files: foo/file.txt 7 chunks - bundling files: quux/file.py 8 chunks - bundling files: quux/file.py 9 chunks - bundling files: quux/file.py 10 chunks - bundling files: quux/file.py 11 chunks + bundling: 0 changesets + bundling: 0 changesets + bundling: 0 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 3 changesets + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 1/3 manifests 
(33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 3/3 manifests (100.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 changesets: 2 chunks @@ -1168,38 +1168,38 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling changes: 0 chunks - bundling changes: 1 chunks - bundling changes: 2 chunks - bundling changes: 3 chunks - bundling changes: 4 chunks - bundling changes: 5 chunks - bundling changes: 6 chunks - bundling changes: 7 chunks - bundling changes: 8 chunks - bundling changes: 9 chunks - bundling manifests: 0 chunks - bundling manifests: 1 chunks - bundling manifests: 2 chunks - bundling manifests: 3 chunks - bundling manifests: 4 chunks - bundling manifests: 5 chunks - bundling manifests: 6 chunks - bundling manifests: 7 chunks - bundling manifests: 8 chunks - bundling manifests: 9 chunks - bundling files: foo/Bar/file.txt 0 chunks - bundling files: foo/Bar/file.txt 1 chunks - bundling files: foo/Bar/file.txt 2 chunks - bundling files: foo/Bar/file.txt 3 chunks - bundling files: foo/file.txt 4 chunks - bundling files: foo/file.txt 5 chunks - bundling files: foo/file.txt 6 chunks - bundling files: foo/file.txt 7 chunks - bundling files: quux/file.py 8 chunks - bundling files: quux/file.py 9 chunks - bundling files: quux/file.py 10 chunks - bundling files: quux/file.py 11 chunks + bundling: 0 changesets + bundling: 0 changesets + bundling: 0 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 3 changesets + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 3/3 manifests (100.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 changesets: 2 chunks @@ -1274,38 +1274,38 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling changes: 0 chunks - bundling changes: 1 chunks - bundling changes: 2 chunks - bundling changes: 3 chunks - bundling changes: 4 chunks - bundling changes: 5 
chunks - bundling changes: 6 chunks - bundling changes: 7 chunks - bundling changes: 8 chunks - bundling changes: 9 chunks - bundling manifests: 0 chunks - bundling manifests: 1 chunks - bundling manifests: 2 chunks - bundling manifests: 3 chunks - bundling manifests: 4 chunks - bundling manifests: 5 chunks - bundling manifests: 6 chunks - bundling manifests: 7 chunks - bundling manifests: 8 chunks - bundling manifests: 9 chunks - bundling files: foo/Bar/file.txt 0 chunks - bundling files: foo/Bar/file.txt 1 chunks - bundling files: foo/Bar/file.txt 2 chunks - bundling files: foo/Bar/file.txt 3 chunks - bundling files: foo/file.txt 4 chunks - bundling files: foo/file.txt 5 chunks - bundling files: foo/file.txt 6 chunks - bundling files: foo/file.txt 7 chunks - bundling files: quux/file.py 8 chunks - bundling files: quux/file.py 9 chunks - bundling files: quux/file.py 10 chunks - bundling files: quux/file.py 11 chunks + bundling: 0 changesets + bundling: 0 changesets + bundling: 0 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 3 changesets + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 3/3 manifests (100.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 changesets: 2 chunks @@ -1370,38 +1370,38 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling changes: 0 chunks - bundling changes: 1 chunks - bundling changes: 2 chunks - bundling changes: 3 chunks - bundling changes: 4 chunks - bundling changes: 5 chunks - bundling changes: 6 chunks - bundling changes: 7 chunks - bundling changes: 8 chunks - bundling changes: 9 chunks - bundling manifests: 0 chunks - bundling manifests: 1 chunks - bundling manifests: 2 chunks - bundling manifests: 3 chunks - bundling manifests: 4 chunks - bundling manifests: 5 chunks - bundling manifests: 6 chunks - bundling manifests: 7 chunks - bundling manifests: 8 chunks - bundling manifests: 9 chunks - bundling files: foo/Bar/file.txt 0 chunks - bundling files: foo/Bar/file.txt 1 chunks - bundling files: foo/Bar/file.txt 2 chunks - bundling files: foo/Bar/file.txt 3 chunks - bundling files: foo/file.txt 4 chunks - bundling files: foo/file.txt 5 chunks - bundling files: foo/file.txt 6 chunks - bundling files: foo/file.txt 7 chunks - bundling files: quux/file.py 8 chunks - bundling files: quux/file.py 9 chunks - bundling files: quux/file.py 10 chunks - bundling files: quux/file.py 11 chunks + bundling: 0 changesets + bundling: 0 changesets + bundling: 0 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 2 changesets + bundling: 2 
changesets + bundling: 2 changesets + bundling: 3 changesets + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 3/3 manifests (100.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 changesets: 2 chunks @@ -1462,38 +1462,38 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling changes: 0 chunks - bundling changes: 1 chunks - bundling changes: 2 chunks - bundling changes: 3 chunks - bundling changes: 4 chunks - bundling changes: 5 chunks - bundling changes: 6 chunks - bundling changes: 7 chunks - bundling changes: 8 chunks - bundling changes: 9 chunks - bundling manifests: 0 chunks - bundling manifests: 1 chunks - bundling manifests: 2 chunks - bundling manifests: 3 chunks - bundling manifests: 4 chunks - bundling manifests: 5 chunks - bundling manifests: 6 chunks - bundling manifests: 7 chunks - bundling manifests: 8 chunks - bundling manifests: 9 chunks - bundling files: foo/Bar/file.txt 0 chunks - bundling files: foo/Bar/file.txt 1 chunks - bundling files: foo/Bar/file.txt 2 chunks - bundling files: foo/Bar/file.txt 3 chunks - bundling files: foo/file.txt 4 chunks - bundling files: foo/file.txt 5 chunks - bundling files: foo/file.txt 6 chunks - bundling files: foo/file.txt 7 chunks - bundling files: quux/file.py 8 chunks - bundling files: quux/file.py 9 chunks - bundling files: quux/file.py 10 chunks - bundling files: quux/file.py 11 chunks + bundling: 0 changesets + bundling: 0 changesets + bundling: 0 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 3 changesets + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 3/3 manifests (100.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 changesets: 2 chunks @@ -1558,38 +1558,38 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 
adding changesets - bundling changes: 0 chunks - bundling changes: 1 chunks - bundling changes: 2 chunks - bundling changes: 3 chunks - bundling changes: 4 chunks - bundling changes: 5 chunks - bundling changes: 6 chunks - bundling changes: 7 chunks - bundling changes: 8 chunks - bundling changes: 9 chunks - bundling manifests: 0 chunks - bundling manifests: 1 chunks - bundling manifests: 2 chunks - bundling manifests: 3 chunks - bundling manifests: 4 chunks - bundling manifests: 5 chunks - bundling manifests: 6 chunks - bundling manifests: 7 chunks - bundling manifests: 8 chunks - bundling manifests: 9 chunks - bundling files: foo/Bar/file.txt 0 chunks - bundling files: foo/Bar/file.txt 1 chunks - bundling files: foo/Bar/file.txt 2 chunks - bundling files: foo/Bar/file.txt 3 chunks - bundling files: foo/file.txt 4 chunks - bundling files: foo/file.txt 5 chunks - bundling files: foo/file.txt 6 chunks - bundling files: foo/file.txt 7 chunks - bundling files: quux/file.py 8 chunks - bundling files: quux/file.py 9 chunks - bundling files: quux/file.py 10 chunks - bundling files: quux/file.py 11 chunks + bundling: 0 changesets + bundling: 0 changesets + bundling: 0 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 3 changesets + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 3/3 manifests (100.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 changesets: 2 chunks @@ -1651,38 +1651,38 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling changes: 0 chunks - bundling changes: 1 chunks - bundling changes: 2 chunks - bundling changes: 3 chunks - bundling changes: 4 chunks - bundling changes: 5 chunks - bundling changes: 6 chunks - bundling changes: 7 chunks - bundling changes: 8 chunks - bundling changes: 9 chunks - bundling manifests: 0 chunks - bundling manifests: 1 chunks - bundling manifests: 2 chunks - bundling manifests: 3 chunks - bundling manifests: 4 chunks - bundling manifests: 5 chunks - bundling manifests: 6 chunks - bundling manifests: 7 chunks - bundling manifests: 8 chunks - bundling manifests: 9 chunks - bundling files: foo/Bar/file.txt 0 chunks - bundling files: foo/Bar/file.txt 1 chunks - bundling files: foo/Bar/file.txt 2 chunks - bundling files: foo/Bar/file.txt 3 chunks - bundling files: foo/file.txt 4 chunks - bundling files: foo/file.txt 5 chunks - bundling files: foo/file.txt 6 chunks - bundling files: foo/file.txt 7 chunks - bundling files: quux/file.py 8 chunks - bundling files: quux/file.py 9 chunks - bundling files: quux/file.py 10 chunks - bundling files: quux/file.py 11 chunks + 
bundling: 0 changesets + bundling: 0 changesets + bundling: 0 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 3 changesets + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 3/3 manifests (100.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 changesets: 2 chunks
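The hunks above replace the old per-chunk debug messages ("bundling changes: N chunks") with unit-based progress lines: a running changeset count while the total is still unknown, and "pos/total unit (percent)" once it is. A minimal sketch of that formatting, using made-up helper names rather than Mercurial's actual progress API:

    def bundling_progress(pos, total=None, unit='changesets', item=''):
        # while the number of changesets is unknown, only a running count is shown
        if item:
            prefix = item + ' '
        else:
            prefix = ''
        if total is None:
            return 'bundling: %s%d %s' % (prefix, pos, unit)
        pct = pos * 100.0 / total
        return 'bundling: %s%d/%d %s (%.2f%%)' % (prefix, pos, total, unit, pct)

    print(bundling_progress(2))                                  # bundling: 2 changesets
    print(bundling_progress(1, 3, 'manifests'))                  # bundling: 1/3 manifests (33.33%)
    print(bundling_progress(0, 3, 'files', 'foo/Bar/file.txt'))  # bundling: foo/Bar/file.txt 0/3 files (0.00%)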
--- a/tests/test-archive.t Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/test-archive.t Tue Feb 01 17:52:25 2011 -0600 @@ -30,7 +30,6 @@ check http return codes - $ test_archtype gz tar.gz tar.bz2 zip % gz allowed should give 200 200 Script output follows @@ -150,9 +149,8 @@ > print h1 == h2 or "md5 differ: " + repr((h1, h2)) > EOF -archive name is stored in the archive, so create similar - -archives and rename them afterwards. +archive name is stored in the archive, so create similar archives and +rename them afterwards. $ hg archive -t tgz tip.tar.gz $ mv tip.tar.gz tip1.tar.gz @@ -208,6 +206,38 @@ abort: unknown archive type 'bogus' [255] +enable progress extension: + + $ cp $HGRCPATH $HGRCPATH.no-progress + $ cat >> $HGRCPATH <<EOF + > [extensions] + > progress = + > [progress] + > assume-tty = 1 + > format = topic bar number + > delay = 0 + > refresh = 0 + > width = 60 + > EOF + + $ hg archive ../with-progress 2>&1 | $TESTDIR/filtercr.py + + archiving [ ] 0/4 + archiving [ ] 0/4 + archiving [=========> ] 1/4 + archiving [=========> ] 1/4 + archiving [====================> ] 2/4 + archiving [====================> ] 2/4 + archiving [===============================> ] 3/4 + archiving [===============================> ] 3/4 + archiving [==========================================>] 4/4 + archiving [==========================================>] 4/4 + \r (esc) + +cleanup after progress extension test: + + $ cp $HGRCPATH.no-progress $HGRCPATH + server errors $ cat errors.log @@ -219,6 +249,7 @@ $ hg archive ../test-empty abort: no working directory: please specify a revision [255] + old file -- date clamped to 1980 $ touch -t 197501010000 old
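The new block in test-archive.t enables the progress extension with "format = topic bar number" and a fixed width of 60 columns, then filters the carriage-return redraws so each one shows up as a separate line in the transcript. Roughly how such a fixed-width "topic bar number" line can be laid out (an illustration only, not the extension's drawing code; the function name is made up):

    def draw(topic, pos, total, width=60):
        counter = '%d/%d' % (pos, total)
        # room left for the bar once the topic, counter, brackets and spaces are placed
        barwidth = width - len(topic) - len(counter) - 4
        filled = barwidth * pos // total
        if filled:
            bar = '=' * (filled - 1) + '>'
        else:
            bar = ''
        return '%s [%-*s] %s' % (topic, barwidth, bar, counter)

    for done in range(5):
        print(draw('archiving', done, 4))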
--- a/tests/test-bundle.t Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/test-bundle.t Tue Feb 01 17:52:25 2011 -0600 @@ -543,26 +543,26 @@ list of changesets: d2ae7f538514cd87c17547b0de4cea71fe1af9fb 5ece8e77363e2b5269e27c66828b72da29e4341a - bundling changes: 0 chunks - bundling changes: 1 chunks - bundling changes: 2 chunks - bundling changes: 3 chunks - bundling changes: 4 chunks - bundling changes: 5 chunks - bundling changes: 6 chunks - bundling manifests: 0 chunks - bundling manifests: 1 chunks - bundling manifests: 2 chunks - bundling manifests: 3 chunks - bundling manifests: 4 chunks - bundling manifests: 5 chunks - bundling manifests: 6 chunks - bundling files: b 0 chunks - bundling files: b 1 chunks - bundling files: b 2 chunks - bundling files: b 3 chunks - bundling files: b1 4 chunks - bundling files: b1 5 chunks - bundling files: b1 6 chunks - bundling files: b1 7 chunks + bundling: 0 changesets + bundling: 0 changesets + bundling: 0 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 2 changesets + bundling: 0/2 manifests (0.00%) + bundling: 0/2 manifests (0.00%) + bundling: 0/2 manifests (0.00%) + bundling: 1/2 manifests (50.00%) + bundling: 1/2 manifests (50.00%) + bundling: 1/2 manifests (50.00%) + bundling: 2/2 manifests (100.00%) + bundling: b 0/2 files (0.00%) + bundling: b 0/2 files (0.00%) + bundling: b 0/2 files (0.00%) + bundling: b 0/2 files (0.00%) + bundling: b1 1/2 files (50.00%) + bundling: b1 1/2 files (50.00%) + bundling: b1 1/2 files (50.00%) + bundling: b1 1/2 files (50.00%)
--- a/tests/test-check-code.t Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/test-check-code.t Tue Feb 01 17:52:25 2011 -0600 @@ -34,7 +34,7 @@ gratuitous whitespace in () or [] ./wrong.py:2: > del(arg2) - del isn't a function + Python keyword is not a function ./wrong.py:3: > return ( 5+6, 9) missing whitespace in expression @@ -52,3 +52,44 @@ > y = format(x) any/all/format not available in Python 2.4 [1] + + $ cat > is-op.py <<EOF + > # is-operator comparing number or string literal + > x = None + > y = x is 'foo' + > y = x is "foo" + > y = x is 5346 + > y = x is -6 + > y = x is not 'foo' + > y = x is not "foo" + > y = x is not 5346 + > y = x is not -6 + > EOF + + $ "$check_code" ./is-op.py + ./is-op.py:3: + > y = x is 'foo' + object comparison with literal + ./is-op.py:4: + > y = x is "foo" + object comparison with literal + ./is-op.py:5: + > y = x is 5346 + object comparison with literal + ./is-op.py:6: + > y = x is -6 + object comparison with literal + ./is-op.py:7: + > y = x is not 'foo' + object comparison with literal + ./is-op.py:8: + > y = x is not "foo" + object comparison with literal + ./is-op.py:9: + > y = x is not 5346 + object comparison with literal + ./is-op.py:10: + > y = x is not -6 + object comparison with literal + [1] +
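The new is-op.py test above exercises a check that flags comparing against literals with "is" and "is not". A rough approximation of that check as a standalone script (the regular expression here is illustrative and not necessarily the exact pattern check-code.py uses):

    import re
    import sys

    # flag "x is 'foo'", "x is -6", "x is not 5346" and similar literal comparisons
    literalcmp = re.compile(r'\bis\s+(not\s+)?(["\']|-?\d)')

    for fname in sys.argv[1:]:
        fp = open(fname)
        n = 0
        for line in fp:
            n += 1
            if literalcmp.search(line):
                print('%s:%d: object comparison with literal' % (fname, n))
        fp.close()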
--- a/tests/test-clone-cgi.t Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/test-clone-cgi.t Tue Feb 01 17:52:25 2011 -0600 @@ -19,37 +19,10 @@ > wsgicgi.launch(application) > HGWEB $ chmod 755 hgweb.cgi - $ DOCUMENT_ROOT="/var/www/hg"; export DOCUMENT_ROOT - $ GATEWAY_INTERFACE="CGI/1.1"; export GATEWAY_INTERFACE - $ HTTP_ACCEPT="text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5"; export HTTP_ACCEPT - $ HTTP_ACCEPT_CHARSET="ISO-8859-1,utf-8;q=0.7,*;q=0.7"; export HTTP_ACCEPT_CHARSET - $ HTTP_ACCEPT_ENCODING="gzip,deflate"; export HTTP_ACCEPT_ENCODING - $ HTTP_ACCEPT_LANGUAGE="en-us,en;q=0.5"; export HTTP_ACCEPT_LANGUAGE - $ HTTP_CACHE_CONTROL="max-age=0"; export HTTP_CACHE_CONTROL - $ HTTP_CONNECTION="keep-alive"; export HTTP_CONNECTION - $ HTTP_HOST="hg.omnifarious.org"; export HTTP_HOST - $ HTTP_KEEP_ALIVE="300"; export HTTP_KEEP_ALIVE - $ HTTP_USER_AGENT="Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.8.0.4) Gecko/20060608 Ubuntu/dapper-security Firefox/1.5.0.4"; export HTTP_USER_AGENT - $ PATH_INFO="/"; export PATH_INFO - $ PATH_TRANSLATED="/var/www/hg/index.html"; export PATH_TRANSLATED - $ REMOTE_ADDR="127.0.0.2"; export REMOTE_ADDR - $ REMOTE_PORT="44703"; export REMOTE_PORT - $ REQUEST_METHOD="GET"; export REQUEST_METHOD - $ REQUEST_URI="/test/"; export REQUEST_URI - $ SCRIPT_FILENAME="/home/hopper/hg_public/test.cgi"; export SCRIPT_FILENAME - $ SCRIPT_NAME="/test"; export SCRIPT_NAME - $ SCRIPT_URI="http://hg.omnifarious.org/test/"; export SCRIPT_URI - $ SCRIPT_URL="/test/"; export SCRIPT_URL - $ SERVER_ADDR="127.0.0.1"; export SERVER_ADDR - $ SERVER_ADMIN="eric@localhost"; export SERVER_ADMIN - $ SERVER_NAME="hg.omnifarious.org"; export SERVER_NAME - $ SERVER_PORT="80"; export SERVER_PORT - $ SERVER_PROTOCOL="HTTP/1.1"; export SERVER_PROTOCOL - $ SERVER_SIGNATURE="<address>Apache/2.0.53 (Fedora) Server at hg.omnifarious.org Port 80</address>"; export SERVER_SIGNATURE - $ SERVER_SOFTWARE="Apache/2.0.53 (Fedora)"; export SERVER_SOFTWARE try hgweb request + $ . "$TESTDIR/cgienv" $ QUERY_STRING="cmd=changegroup&roots=0000000000000000000000000000000000000000"; export QUERY_STRING $ python hgweb.cgi >page1 2>&1 $ python "$TESTDIR/md5sum.py" page1
--- a/tests/test-clone-failure.t Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/test-clone-failure.t Tue Feb 01 17:52:25 2011 -0600 @@ -39,7 +39,6 @@ > rm a > else > echo "abort: repository a not found!" - > echo 255 > fi abort: repository a not found!
--- a/tests/test-command-template.t Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/test-command-template.t Tue Feb 01 17:52:25 2011 -0600 @@ -449,7 +449,7 @@ $ echo 'q = q' > t $ hg log --style ./t - abort: ./t: no key named 'changeset' + abort: "changeset" not in template map [255] Error if include fails: @@ -570,7 +570,7 @@ Keys work: - $ for key in author branches date desc file_adds file_dels file_mods \ + $ for key in author branch branches date desc file_adds file_dels file_mods \ > file_copies file_copies_switch files \ > manifest node parents rev tags diffstat extras; do > for mode in '' --verbose --debug; do @@ -604,6 +604,33 @@ author--debug: other@place author--debug: A. N. Other <other@place> author--debug: User Name <user@hostname> + branch: default + branch: default + branch: default + branch: default + branch: foo + branch: default + branch: default + branch: default + branch: default + branch--verbose: default + branch--verbose: default + branch--verbose: default + branch--verbose: default + branch--verbose: foo + branch--verbose: default + branch--verbose: default + branch--verbose: default + branch--verbose: default + branch--debug: default + branch--debug: default + branch--debug: default + branch--debug: default + branch--debug: foo + branch--debug: default + branch--debug: default + branch--debug: default + branch--debug: default branches: branches: branches:
--- a/tests/test-confused-revert.t Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/test-confused-revert.t Tue Feb 01 17:52:25 2011 -0600 @@ -58,8 +58,8 @@ Revert should fail: - $ hg revert --all - abort: uncommitted merge - please provide a specific revision + $ hg revert + abort: uncommitted merge - use "hg update", see "hg help revert" [255] Revert should be ok now:
--- a/tests/test-convert-svn-move.t Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/test-convert-svn-move.t Tue Feb 01 17:52:25 2011 -0600 @@ -167,83 +167,73 @@ > [progress] > assume-tty = 1 > delay = 0 + > format = topic bar number > refresh = 0 - > EOF - $ cat > filtercr.py <<EOF - > import sys, re - > for line in sys.stdin: - > line = re.sub(r'\r+[^\n]', lambda m: '\n' + m.group()[-1:], line) - > sys.stdout.write(line) + > width = 60 > EOF - $ hg convert svn-repo hg-progress 2>&1 | python filtercr.py + $ hg convert svn-repo hg-progress 2>&1 | $TESTDIR/filtercr.py - scanning [ <=> ] 1 - scanning [ <=> ] 2 - scanning [ <=> ] 3 - scanning [ <=> ] 4 - scanning [ <=> ] 5 - scanning [ <=> ] 6 - scanning [ <=> ] 7 - - converting [ ] 0/7 - getting files [========> ] 1/6 - getting files [==================> ] 2/6 - getting files [============================> ] 3/6 - getting files [======================================> ] 4/6 - getting files [================================================> ] 5/6 - getting files [==========================================================>] 6/6 - - converting [=======> ] 1/7 - scanning paths [ ] 0/1 - - getting files [==========================================================>] 1/1 - - converting [================> ] 2/7 - scanning paths [ ] 0/2 - scanning paths [============================> ] 1/2 - - getting files [=============> ] 1/4 - getting files [============================> ] 2/4 - getting files [===========================================> ] 3/4 - getting files [==========================================================>] 4/4 - - converting [=========================> ] 3/7 - scanning paths [ ] 0/1 - - getting files [==========================================================>] 1/1 - - converting [==================================> ] 4/7 - scanning paths [ ] 0/1 - - getting files [==========================================================>] 1/1 - - converting [===========================================> ] 5/7 - scanning paths [ ] 0/3 - scanning paths [==================> ] 1/3 - scanning paths [=====================================> ] 2/3 - - getting files [======> ] 1/8 - getting files [=============> ] 2/8 - getting files [=====================> ] 3/8 - getting files [============================> ] 4/8 - getting files [===================================> ] 5/8 - getting files [===========================================> ] 6/8 - getting files [==================================================> ] 7/8 - getting files [==========================================================>] 8/8 - - converting [====================================================> ] 6/7 - scanning paths [ ] 0/1 - - getting files [======> ] 1/8 - getting files [=============> ] 2/8 - getting files [=====================> ] 3/8 - getting files [============================> ] 4/8 - getting files [===================================> ] 5/8 - getting files [===========================================> ] 6/8 - getting files [==================================================> ] 7/8 - getting files [==========================================================>] 8/8 - + scanning [ <=> ] 1 + scanning [ <=> ] 2 + scanning [ <=> ] 3 + scanning [ <=> ] 4 + scanning [ <=> ] 5 + scanning [ <=> ] 6 + scanning [ <=> ] 7 + + converting [ ] 0/7 + getting files [=====> ] 1/6 + getting files [============> ] 2/6 + getting files [==================> ] 3/6 + getting files [=========================> ] 4/6 + getting files [===============================> ] 5/6 + getting files 
[======================================>] 6/6 + + converting [=====> ] 1/7 + scanning paths [ ] 0/1 + getting files [======================================>] 1/1 + + converting [===========> ] 2/7 + scanning paths [ ] 0/2 + scanning paths [==================> ] 1/2 + getting files [========> ] 1/4 + getting files [==================> ] 2/4 + getting files [============================> ] 3/4 + getting files [======================================>] 4/4 + + converting [=================> ] 3/7 + scanning paths [ ] 0/1 + getting files [======================================>] 1/1 + + converting [=======================> ] 4/7 + scanning paths [ ] 0/1 + getting files [======================================>] 1/1 + + converting [=============================> ] 5/7 + scanning paths [ ] 0/3 + scanning paths [===========> ] 1/3 + scanning paths [========================> ] 2/3 + getting files [===> ] 1/8 + getting files [========> ] 2/8 + getting files [=============> ] 3/8 + getting files [==================> ] 4/8 + getting files [=======================> ] 5/8 + getting files [============================> ] 6/8 + getting files [=================================> ] 7/8 + getting files [======================================>] 8/8 + + converting [===================================> ] 6/7 + scanning paths [ ] 0/1 + getting files [===> ] 1/8 + getting files [========> ] 2/8 + getting files [=============> ] 3/8 + getting files [==================> ] 4/8 + getting files [=======================> ] 5/8 + getting files [============================> ] 6/8 + getting files [=================================> ] 7/8 + getting files [======================================>] 8/8 + initializing destination hg-progress repository scanning source... sorting... @@ -255,3 +245,4 @@ 2 adddb 1 branch 0 clobberdir +
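test-convert-svn-move.t now pipes the progress output through a shared $TESTDIR/filtercr.py instead of writing the filter inline. The shared script's contents are not part of this diff; a minimal equivalent of the inline filter being removed above would be:

    import re
    import sys

    for line in sys.stdin:
        # turn "\r"-based progress redraws into separate lines so the test
        # transcript can match them one per line
        line = re.sub(r'\r+[^\n]', lambda m: '\n' + m.group()[-1:], line)
        sys.stdout.write(line)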
--- a/tests/test-convert.t Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/test-convert.t Tue Feb 01 17:52:25 2011 -0600 @@ -40,16 +40,16 @@ have the following effects: --branchsort convert from parent to child revision when possible, which - means branches are usually converted one after the other. It - generates more compact repositories. + means branches are usually converted one after the other. + It generates more compact repositories. --datesort sort revisions by date. Converted repositories have good- looking changelogs but are often an order of magnitude larger than the same ones generated by --branchsort. --sourcesort try to preserve source revisions order, only supported by Mercurial sources. - If <REVMAP> isn't given, it will be put in a default location - (<dest>/.hg/shamap by default). The <REVMAP> is a simple text file that + If "REVMAP" isn't given, it will be put in a default location + ("<dest>/.hg/shamap" by default). The "REVMAP" is a simple text file that maps each source commit ID to the destination ID for that revision, like so: @@ -123,16 +123,19 @@ Mercurial Source '''''''''''''''' - --config convert.hg.ignoreerrors=False (boolean) - ignore integrity errors when reading. Use it to fix Mercurial - repositories with missing revlogs, by converting from and to - Mercurial. + The Mercurial source recognizes the following configuration options, which + you can set on the command line with "--config": - --config convert.hg.saverev=False (boolean) - store original revision ID in changeset (forces target IDs to change) - - --config convert.hg.startrev=0 (hg revision identifier) - convert start revision and its descendants + convert.hg.ignoreerrors + ignore integrity errors when reading. Use it to fix Mercurial + repositories with missing revlogs, by converting from and to + Mercurial. Default is False. + convert.hg.saverev + store original. revision ID in changeset (forces target IDs to + change). It takes and boolean argument and defaults to False. + convert.hg.startrev + convert start revision and its descendants. It takes a hg + revision identifier and defaults to 0. CVS Source '''''''''' @@ -140,46 +143,45 @@ CVS source will use a sandbox (i.e. a checked-out copy) from CVS to indicate the starting point of what will be converted. Direct access to the repository files is not needed, unless of course the repository is - :local:. The conversion uses the top level directory in the sandbox to + ":local:". The conversion uses the top level directory in the sandbox to find the CVS repository, and then uses CVS rlog commands to find files to convert. This means that unless a filemap is given, all files under the starting directory will be converted, and that any directory reorganization in the CVS sandbox is ignored. - The options shown are the defaults. - - --config convert.cvsps.cache=True (boolean) - Set to False to disable remote log caching, for testing and debugging - purposes. - - --config convert.cvsps.fuzz=60 (integer) - Specify the maximum time (in seconds) that is allowed between commits - with identical user and log message in a single changeset. When very - large files were checked in as part of a changeset then the default - may not be long enough. + The following options can be used with "--config": - --config convert.cvsps.mergeto='{{mergetobranch ([-\w]+)}}' - Specify a regular expression to which commit log messages are matched. 
- If a match occurs, then the conversion process will insert a dummy - revision merging the branch on which this log message occurs to the - branch indicated in the regex. - - --config convert.cvsps.mergefrom='{{mergefrombranch ([-\w]+)}}' - Specify a regular expression to which commit log messages are matched. - If a match occurs, then the conversion process will add the most - recent revision on the branch indicated in the regex as the second - parent of the changeset. - - --config hook.cvslog - Specify a Python function to be called at the end of gathering the CVS - log. The function is passed a list with the log entries, and can - modify the entries in-place, or add or delete them. - - --config hook.cvschangesets - Specify a Python function to be called after the changesets are - calculated from the the CVS log. The function is passed a list with - the changeset entries, and can modify the changesets in-place, or add - or delete them. + convert.cvsps.cache + Set to False to disable remote log caching, for testing and + debugging purposes. Default is True. + convert.cvsps.fuzz + Specify the maximum time (in seconds) that is allowed between + commits with identical user and log message in a single + changeset. When very large files were checked in as part of a + changeset then the default may not be long enough. The default + is 60. + convert.cvsps.mergeto + Specify a regular expression to which commit log messages are + matched. If a match occurs, then the conversion process will + insert a dummy revision merging the branch on which this log + message occurs to the branch indicated in the regex. Default + is "{{mergetobranch ([-\w]+)}}" + convert.cvsps.mergefrom + Specify a regular expression to which commit log messages are + matched. If a match occurs, then the conversion process will + add the most recent revision on the branch indicated in the + regex as the second parent of the changeset. Default is + "{{mergefrombranch ([-\w]+)}}" + hook.cvslog + Specify a Python function to be called at the end of gathering + the CVS log. The function is passed a list with the log + entries, and can modify the entries in-place, or add or delete + them. + hook.cvschangesets + Specify a Python function to be called after the changesets + are calculated from the the CVS log. The function is passed a + list with the changeset entries, and can modify the changesets + in-place, or add or delete them. An additional "debugcvsps" Mercurial command allows the builtin changeset merging code to be run without doing a conversion. Its parameters and @@ -199,21 +201,22 @@ them to paths relative to the source URL, or leave them blank to disable auto detection. - --config convert.svn.branches=branches (directory name) - specify the directory containing branches + The following options can be set with "--config": - --config convert.svn.tags=tags (directory name) - specify the directory containing tags - - --config convert.svn.trunk=trunk (directory name) - specify the name of the trunk branch + convert.svn.branches + specify the directory containing branches. The defaults is + "branches". + convert.svn.tags + specify the directory containing tags. The default is "tags". + convert.svn.trunk + specify the name of the trunk branch The defauls is "trunk". Source history can be retrieved starting at a specific revision, instead of being integrally converted. Only single branch conversions are supported. - --config convert.svn.startrev=0 (svn revision number) - specify start Subversion revision. 
+ convert.svn.startrev + specify start Subversion revision number. The default is 0. Perforce Source ''''''''''''''' @@ -222,25 +225,27 @@ specification as source. It will convert all files in the source to a flat Mercurial repository, ignoring labels, branches and integrations. Note that when a depot path is given you then usually should specify a target - directory, because otherwise the target may be named ...-hg. + directory, because otherwise the target may be named "...-hg". It is possible to limit the amount of source history to be converted by - specifying an initial Perforce revision. + specifying an initial Perforce revision: - --config convert.p4.startrev=0 (perforce changelist number) - specify initial Perforce revision. + convert.p4.startrev + specify initial Perforce revision, a Perforce changelist + number). Mercurial Destination ''''''''''''''''''''' - --config convert.hg.clonebranches=False (boolean) - dispatch source branches in separate clones. + The following options are supported: - --config convert.hg.tagsbranch=default (branch name) - tag revisions branch name - - --config convert.hg.usebranchnames=True (boolean) - preserve branch names + convert.hg.clonebranches + dispatch source branches in separate clones. The default is + False. + convert.hg.tagsbranch + branch name for tag revisions, defaults to "default". + convert.hg.usebranchnames + preserve branch names. The default is True options:
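The reworked convert help text above lists the configuration options as definition lists with their defaults spelled out. As an illustration of how code consuming the Subversion source settings might read them (a sketch: only the option names and defaults come from the help text, the helper itself is made up):

    def read_svn_source_options(ui):
        # defaults follow the help text: branches/tags/trunk directories
        # and a start revision of 0
        return {
            'branches': ui.config('convert', 'svn.branches', 'branches'),
            'tags': ui.config('convert', 'svn.tags', 'tags'),
            'trunk': ui.config('convert', 'svn.trunk', 'trunk'),
            'startrev': int(ui.config('convert', 'svn.startrev', 0)),
        }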
--- a/tests/test-demandimport.py Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/test-demandimport.py Tue Feb 01 17:52:25 2011 -0600 @@ -8,6 +8,7 @@ l = repr(obj) l = rsub("0x[0-9a-fA-F]+", "0x?", l) l = rsub("from '.*'", "from '?'", l) + l = rsub("'<[a-z]*>'", "'<whatever>'", l) return l import os
--- a/tests/test-demandimport.py.out Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/test-demandimport.py.out Tue Feb 01 17:52:25 2011 -0600 @@ -11,5 +11,5 @@ fred.sub = <function sub at 0x?> fred = <proxied module 're'> re = <unloaded module 'sys'> -re.stderr = <open file '<stderr>', mode 'w' at 0x?> +re.stderr = <open file '<whatever>', mode 'w' at 0x?> re = <proxied module 'sys'>
--- a/tests/test-doctest.py Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/test-doctest.py Tue Feb 01 17:52:25 2011 -0600 @@ -19,5 +19,8 @@ import mercurial.util doctest.testmod(mercurial.util) +import mercurial.encoding +doctest.testmod(mercurial.encoding) + import hgext.convert.cvsps doctest.testmod(hgext.convert.cvsps)
--- a/tests/test-encoding.t Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/test-encoding.t Tue Feb 01 17:52:25 2011 -0600 @@ -240,6 +240,4 @@ abort: decoding near '\xe9': 'ascii' codec can't decode byte 0xe9 in position 0: ordinal not in range(128)! (esc) [255] $ cp latin-1-tag .hg/branch - $ HGENCODING=latin-1 hg ci -m 'should fail' - abort: branch name not in UTF-8! - [255] + $ HGENCODING=latin-1 hg ci -m 'auto-promote legacy name'
--- a/tests/test-extension.t Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/test-extension.t Tue Feb 01 17:52:25 2011 -0600 @@ -315,6 +315,11 @@ use "hg help extensions" for information on enabling extensions + $ cat > hgext/forest.py <<EOF + > cmdtable = None + > EOF $ hg --config extensions.path=./path.py help foo > /dev/null + warning: error finding commands in $TESTTMP/hgext/forest.py hg: unknown command 'foo' + warning: error finding commands in $TESTTMP/hgext/forest.py [255]
--- a/tests/test-hardlinks-safety.t Tue Feb 01 17:30:13 2011 -0600 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,106 +0,0 @@ -some implementations of cp can't create hardlinks - - $ cat > cp.py <<EOF - > from mercurial import util - > import sys - > util.copyfiles(sys.argv[1], sys.argv[2], hardlink=True) - > EOF - -Test hardlinking outside hg: - - $ mkdir x - $ echo foo > x/a - - $ python cp.py x y - $ echo bar >> y/a - -No diff if hardlink: - - $ diff x/a y/a - -Test mq hardlinking: - - $ echo "[extensions]" >> $HGRCPATH - $ echo "mq=" >> $HGRCPATH - - $ hg init a - $ cd a - - $ hg qimport -n foo - << EOF - > # HG changeset patch - > # Date 1 0 - > diff -r 2588a8b53d66 a - > --- /dev/null Thu Jan 01 00:00:00 1970 +0000 - > +++ b/a Wed Jul 23 15:54:29 2008 +0200 - > @@ -0,0 +1,1 @@ - > +a - > EOF - adding foo to series file - - $ hg qpush - applying foo - now at: foo - - $ cd .. - $ python cp.py a b - $ cd b - - $ hg qimport -n bar - << EOF - > # HG changeset patch - > # Date 2 0 - > diff -r 2588a8b53d66 a - > --- /dev/null Thu Jan 01 00:00:00 1970 +0000 - > +++ b/b Wed Jul 23 15:54:29 2008 +0200 - > @@ -0,0 +1,1 @@ - > +b - > EOF - adding bar to series file - - $ hg qpush - applying bar - now at: bar - - $ cat .hg/patches/status - 430ed4828a74fa4047bc816a25500f7472ab4bfe:foo - 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c:bar - - $ cat .hg/patches/series - foo - bar - - $ cat ../a/.hg/patches/status - 430ed4828a74fa4047bc816a25500f7472ab4bfe:foo - - $ cat ../a/.hg/patches/series - foo - -Test tags hardlinking: - - $ hg qdel -r qbase:qtip - patch foo finalized without changeset message - patch bar finalized without changeset message - - $ hg tag -l lfoo - $ hg tag foo - - $ cd .. - $ python cp.py b c - $ cd c - - $ hg tag -l -r 0 lbar - $ hg tag -r 0 bar - - $ cat .hgtags - 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c foo - 430ed4828a74fa4047bc816a25500f7472ab4bfe bar - - $ cat .hg/localtags - 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c lfoo - 430ed4828a74fa4047bc816a25500f7472ab4bfe lbar - - $ cat ../b/.hgtags - 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c foo - - $ cat ../b/.hg/localtags - 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c lfoo -
--- a/tests/test-hardlinks.t Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/test-hardlinks.t Tue Feb 01 17:52:25 2011 -0600 @@ -10,6 +10,19 @@ > find $1 -type f | python $TESTTMP/nlinks.py > } +Some implementations of cp can't create hardlinks (replaces 'cp -al' on Linux): + + $ cat > linkcp.py <<EOF + > from mercurial import util + > import sys + > util.copyfiles(sys.argv[1], sys.argv[2], hardlink=True) + > EOF + + $ linkcp() + > { + > python $TESTTMP/linkcp.py $1 $2 + > } + Prepare repo r1: $ mkdir r1 @@ -152,3 +165,167 @@ 1 r2/.hg/store/data/f1.i 1 r2/.hg/store/fncache + + $ cd r3 + $ hg tip --template '{rev}:{node|short}\n' + 11:a6451b6bc41f + $ echo bla > f1 + $ hg ci -m1 + $ cd .. + +Create hardlinked copy r4 of r3 (on Linux, we would call 'cp -al'): + + $ linkcp r3 r4 + +r4 has hardlinks in the working dir (not just inside .hg): + + $ nlinksdir r4 + 2 r4/.hg/00changelog.i + 2 r4/.hg/branch + 2 r4/.hg/cache/branchheads + 2 r4/.hg/cache/tags + 2 r4/.hg/dirstate + 2 r4/.hg/hgrc + 2 r4/.hg/last-message.txt + 2 r4/.hg/requires + 2 r4/.hg/store/00changelog.i + 2 r4/.hg/store/00manifest.i + 2 r4/.hg/store/data/d1/f2.d + 2 r4/.hg/store/data/d1/f2.i + 2 r4/.hg/store/data/f1.i + 2 r4/.hg/store/fncache + 2 r4/.hg/store/undo + 2 r4/.hg/undo.branch + 2 r4/.hg/undo.desc + 2 r4/.hg/undo.dirstate + 2 r4/d1/data1 + 2 r4/d1/f2 + 2 r4/f1 + +Update back to revision 11 in r4 should break hardlink of file f1: + + $ hg -R r4 up 11 + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + + $ nlinksdir r4 + 2 r4/.hg/00changelog.i + 1 r4/.hg/branch + 2 r4/.hg/cache/branchheads + 2 r4/.hg/cache/tags + 1 r4/.hg/dirstate + 2 r4/.hg/hgrc + 2 r4/.hg/last-message.txt + 2 r4/.hg/requires + 2 r4/.hg/store/00changelog.i + 2 r4/.hg/store/00manifest.i + 2 r4/.hg/store/data/d1/f2.d + 2 r4/.hg/store/data/d1/f2.i + 2 r4/.hg/store/data/f1.i + 2 r4/.hg/store/fncache + 2 r4/.hg/store/undo + 2 r4/.hg/undo.branch + 2 r4/.hg/undo.desc + 2 r4/.hg/undo.dirstate + 2 r4/d1/data1 + 2 r4/d1/f2 + 1 r4/f1 + + +Test hardlinking outside hg: + + $ mkdir x + $ echo foo > x/a + + $ linkcp x y + $ echo bar >> y/a + +No diff if hardlink: + + $ diff x/a y/a + +Test mq hardlinking: + + $ echo "[extensions]" >> $HGRCPATH + $ echo "mq=" >> $HGRCPATH + + $ hg init a + $ cd a + + $ hg qimport -n foo - << EOF + > # HG changeset patch + > # Date 1 0 + > diff -r 2588a8b53d66 a + > --- /dev/null Thu Jan 01 00:00:00 1970 +0000 + > +++ b/a Wed Jul 23 15:54:29 2008 +0200 + > @@ -0,0 +1,1 @@ + > +a + > EOF + adding foo to series file + + $ hg qpush + applying foo + now at: foo + + $ cd .. + $ linkcp a b + $ cd b + + $ hg qimport -n bar - << EOF + > # HG changeset patch + > # Date 2 0 + > diff -r 2588a8b53d66 a + > --- /dev/null Thu Jan 01 00:00:00 1970 +0000 + > +++ b/b Wed Jul 23 15:54:29 2008 +0200 + > @@ -0,0 +1,1 @@ + > +b + > EOF + adding bar to series file + + $ hg qpush + applying bar + now at: bar + + $ cat .hg/patches/status + 430ed4828a74fa4047bc816a25500f7472ab4bfe:foo + 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c:bar + + $ cat .hg/patches/series + foo + bar + + $ cat ../a/.hg/patches/status + 430ed4828a74fa4047bc816a25500f7472ab4bfe:foo + + $ cat ../a/.hg/patches/series + foo + +Test tags hardlinking: + + $ hg qdel -r qbase:qtip + patch foo finalized without changeset message + patch bar finalized without changeset message + + $ hg tag -l lfoo + $ hg tag foo + + $ cd .. 
+ $ linkcp b c + $ cd c + + $ hg tag -l -r 0 lbar + $ hg tag -r 0 bar + + $ cat .hgtags + 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c foo + 430ed4828a74fa4047bc816a25500f7472ab4bfe bar + + $ cat .hg/localtags + 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c lfoo + 430ed4828a74fa4047bc816a25500f7472ab4bfe lbar + + $ cat ../b/.hgtags + 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c foo + + $ cat ../b/.hg/localtags + 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c lfoo +
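The hardlink counts above ("2 r4/.hg/00changelog.i", "1 r4/f1", ...) come from the nlinksdir helper, which feeds a file listing to a nlinks.py script whose contents are not shown in this diff. A sketch of what such a link counter might look like (assumed, not the actual script):

    import os
    import sys

    # print "<hardlink count> <path>" for every path read from stdin
    for path in sys.stdin:
        path = path.strip()
        if path:
            print('%d %s' % (os.stat(path).st_nlink, path))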
--- a/tests/test-hook.t Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/test-hook.t Tue Feb 01 17:52:25 2011 -0600 @@ -68,13 +68,13 @@ test generic hooks $ hg id - pre-identify hook: HG_ARGS=id HG_OPTS={'tags': None, 'rev': '', 'num': None, 'branch': None, 'id': None} HG_PATS=[] + pre-identify hook: HG_ARGS=id HG_OPTS={'branch': None, 'id': None, 'num': None, 'rev': '', 'tags': None} HG_PATS=[] warning: pre-identify hook exited with status 1 [1] $ hg cat b - pre-cat hook: HG_ARGS=cat b HG_OPTS={'rev': '', 'decode': None, 'exclude': [], 'output': '', 'include': []} HG_PATS=['b'] - post-cat hook: HG_ARGS=cat b HG_OPTS={'rev': '', 'decode': None, 'exclude': [], 'output': '', 'include': []} HG_PATS=['b'] HG_RESULT=0 + pre-cat hook: HG_ARGS=cat b HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': ''} HG_PATS=['b'] b + post-cat hook: HG_ARGS=cat b HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': ''} HG_PATS=['b'] HG_RESULT=0 $ cd ../b $ hg pull ../a
--- a/tests/test-import.t Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/test-import.t Tue Feb 01 17:52:25 2011 -0600 @@ -437,6 +437,13 @@ $ hg revert -a reverting a + +import with --no-commit should have written .hg/last-message.txt + + $ cat .hg/last-message.txt + change (no-eol) + + test fuzziness with eol=auto $ hg --config patch.eol=auto import --no-commit -v tip.patch
--- a/tests/test-inherit-mode.t Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/test-inherit-mode.t Tue Feb 01 17:52:25 2011 -0600 @@ -105,7 +105,8 @@ $ python ../printmodes.py ../push 00770 ../push/.hg/ 00660 ../push/.hg/00changelog.i - 00660 ../push/.hg/branchheads.cache + 00770 ../push/.hg/cache/ + 00660 ../push/.hg/cache/branchheads 00660 ../push/.hg/requires 00770 ../push/.hg/store/ 00660 ../push/.hg/store/00changelog.i
--- a/tests/test-issue619.t Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/test-issue619.t Tue Feb 01 17:52:25 2011 -0600 @@ -19,7 +19,12 @@ $ hg merge b 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) + $ hg branch + default + $ hg parent --template '{rev}:{node|short} {branches}: {desc}\n' + 1:06c2121185be b: b $ hg ci -Ammerge + created new head Bogus fast-forward should fail:
--- a/tests/test-keyword.t Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/test-keyword.t Tue Feb 01 17:52:25 2011 -0600 @@ -17,6 +17,8 @@ keyword = [keyword] demo.txt = + [keywordset] + svn = False [keywordmaps] Author = {author|user} Date = {date|utcdate} @@ -40,6 +42,8 @@ keyword = [keyword] demo.txt = + [keywordset] + svn = False [keywordmaps] Branch = {branches} $Branch: demobranch $ @@ -633,6 +637,8 @@ b = ignore demo.txt = i = ignore + [keywordset] + svn = False [keywordmaps] Xinfo = {author}: {desc} $Xinfo: test: hg keyword configuration and expansion example $
--- a/tests/test-minirst.py Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/test-minirst.py Tue Feb 01 17:52:25 2011 -0600 @@ -120,16 +120,19 @@ There is support for simple option lists, but only with long options: ---all Output all. ---both Output both (this description is - quite long). ---long Output all day long. +-X, --exclude filter an option with a short and long option with an argument +-I, --include an option with both a short option and a long option +--all Output all. +--both Output both (this description is + quite long). +--long Output all day long. ---par This option has two paragraphs in its description. - This is the first. +--par This option has two paragraphs in its description. + This is the first. - This is the second. Blank lines may be omitted between - options (as above) or left in (as here). + This is the second. Blank lines may be omitted between + options (as above) or left in (as here). + The next paragraph looks like an option list, but lacks the two-space marker after the option. It is treated as a normal paragraph: @@ -221,6 +224,10 @@ .. An indented comment Some indented text. + +.. + +Empty comment above """ debugformat('comments', comments, 30)
--- a/tests/test-minirst.py.out Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/test-minirst.py.out Tue Feb 01 17:52:25 2011 -0600 @@ -180,14 +180,20 @@ There is support for simple option lists, but only with long options: ---all Output all. ---both Output both (this description is quite long). ---long Output all day long. ---par This option has two paragraphs in its - description. This is the first. + -X --exclude filter an option with a short and long option + with an argument + -I --include an option with both a short option and + a long option + --all Output all. + --both Output both (this description is quite + long). + --long Output all day long. + --par This option has two paragraphs in its + description. This is the first. - This is the second. Blank lines may be omitted - between options (as above) or left in (as here). + This is the second. Blank lines may + be omitted between options (as above) + or left in (as here). The next paragraph looks like an option list, but lacks the two-space marker after the option. It is treated as a normal @@ -202,23 +208,62 @@ option lists, but only with long options: ---all Output all. ---both Output both (this - description is - quite long). ---long Output all day - long. ---par This option has two - paragraphs in its - description. This - is the first. + -X --exclude filter an + option + with a + short + and + long + option + with an + argumen + t + -I --include an + option + with + both a + short + option + and a + long + option + --all Output + all. + --both Output + both + (this d + escript + ion is + quite + long). + --long Output + all day + long. + --par This + option + has two + paragra + phs in + its des + criptio + n. This + is the + first. - This is the second. - Blank lines may be - omitted between - options (as above) - or left in (as - here). + This is + the + second. + Blank + lines + may be + omitted + between + options + (as + above) + or left + in (as + here). The next paragraph looks like an option list, but lacks the @@ -339,5 +384,7 @@ Some text. Some indented text. + +Empty comment above ----------------------------------------------------------------------
--- a/tests/test-mq-caches.t Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/test-mq-caches.t Tue Feb 01 17:52:25 2011 -0600 @@ -1,4 +1,4 @@ - $ branches=.hg/branchheads.cache + $ branches=.hg/cache/branchheads $ echo '[extensions]' >> $HGRCPATH $ echo 'mq =' >> $HGRCPATH
--- a/tests/test-mq-qnew.t Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/test-mq-qnew.t Tue Feb 01 17:52:25 2011 -0600 @@ -107,7 +107,7 @@ abort: "foo#bar" cannot be used as the name of a patch abort: "foo:bar" cannot be used as the name of a patch % qnew with name containing slash - abort: cannot write patch "foo/": (Is a|No such file or) directory (re) + abort: path ends in directory separator: foo/ abort: "foo" already exists as a directory foo/bar.patch popping foo/bar.patch @@ -172,7 +172,7 @@ abort: "foo#bar" cannot be used as the name of a patch abort: "foo:bar" cannot be used as the name of a patch % qnew with name containing slash - abort: cannot write patch "foo/": (Is a|No such file or) directory (re) + abort: path ends in directory separator: foo/ abort: "foo" already exists as a directory foo/bar.patch popping foo/bar.patch
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-mq-qpush-exact.t Tue Feb 01 17:52:25 2011 -0600 @@ -0,0 +1,290 @@ + $ echo "[extensions]" >> $HGRCPATH + $ echo "mq=" >> $HGRCPATH + $ echo "graphlog=" >> $HGRCPATH + +make a test repository that looks like this: + +o 2:28bc7b1afd6a +| +| @ 1:d7fe2034f71b +|/ +o 0/62ecad8b70e5 + + $ hg init r0 + $ cd r0 + $ touch f0 + $ hg ci -m0 -Aq + $ touch f1 + $ hg ci -m1 -Aq + + $ hg update 0 -q + $ touch f2 + $ hg ci -m2 -Aq + $ hg update 1 -q + +make some patches with a parent: 1:d7fe2034f71b -> p0 -> p1 + + $ echo cp0 >> fp0 + $ hg add fp0 + $ hg qnew p0 -d "0 0" + + $ echo cp1 >> fp1 + $ hg add fp1 + $ hg qnew p1 -d "0 0" + + $ hg qpop -aq + patch queue now empty + +qpush --exact when at the parent + + $ hg update 1 -q + $ hg qpush -e + applying p0 + now at: p0 + $ hg parents -qr qbase + 1:d7fe2034f71b + $ hg qpop -aq + patch queue now empty + + $ hg qpush -e p0 + applying p0 + now at: p0 + $ hg parents -qr qbase + 1:d7fe2034f71b + $ hg qpop -aq + patch queue now empty + + $ hg qpush -e p1 + applying p0 + applying p1 + now at: p1 + $ hg parents -qr qbase + 1:d7fe2034f71b + $ hg qpop -aq + patch queue now empty + + $ hg qpush -ea + applying p0 + applying p1 + now at: p1 + $ hg parents -qr qbase + 1:d7fe2034f71b + $ hg qpop -aq + patch queue now empty + +qpush --exact when at another rev + + $ hg update 0 -q + $ hg qpush -e + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + applying p0 + now at: p0 + $ hg parents -qr qbase + 1:d7fe2034f71b + $ hg qpop -aq + patch queue now empty + + $ hg update 0 -q + $ hg qpush -e p0 + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + applying p0 + now at: p0 + $ hg parents -qr qbase + 1:d7fe2034f71b + $ hg qpop -aq + patch queue now empty + + $ hg update 0 -q + $ hg qpush -e p1 + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + applying p0 + applying p1 + now at: p1 + $ hg parents -qr qbase + 1:d7fe2034f71b + $ hg qpop -aq + patch queue now empty + + $ hg update 0 -q + $ hg qpush -ea + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + applying p0 + applying p1 + now at: p1 + $ hg parents -qr qbase + 1:d7fe2034f71b + $ hg qpop -aq + patch queue now empty + +qpush --exact while crossing branches + + $ hg update 2 -q + $ hg qpush -e + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + applying p0 + now at: p0 + $ hg parents -qr qbase + 1:d7fe2034f71b + $ hg qpop -aq + patch queue now empty + + $ hg update 2 -q + $ hg qpush -e p0 + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + applying p0 + now at: p0 + $ hg parents -qr qbase + 1:d7fe2034f71b + $ hg qpop -aq + patch queue now empty + + $ hg update 2 -q + $ hg qpush -e p1 + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + applying p0 + applying p1 + now at: p1 + $ hg parents -qr qbase + 1:d7fe2034f71b + $ hg qpop -aq + patch queue now empty + + $ hg update 2 -q + $ hg qpush -ea + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + applying p0 + applying p1 + now at: p1 + $ hg parents -qr qbase + 1:d7fe2034f71b + $ hg qpop -aq + patch queue now empty + +qpush --exact --force with changes to an unpatched file + + $ hg update 1 -q + $ echo c0 >> f0 + $ hg qpush -e + abort: local changes found, refresh first + [255] + $ hg qpush -ef + applying p0 + now at: p0 + $ cat f0 + c0 + $ rm f0 + $ touch f0 + $ hg qpop -aq + patch queue now empty + + $ hg update 1 -q + $ echo c0 >> f0 + $ hg qpush -e 
p1 + abort: local changes found, refresh first + [255] + $ hg qpush -e p1 -f + applying p0 + applying p1 + now at: p1 + $ cat f0 + c0 + $ rm f0 + $ touch f0 + $ hg qpop -aq + patch queue now empty + +qpush --exact --force with changes to a patched file + + $ hg update 1 -q + $ echo cp0-bad >> fp0 + $ hg add fp0 + $ hg qpush -e + abort: local changes found, refresh first + [255] + $ hg qpush -ef + applying p0 + file fp0 already exists + 1 out of 1 hunks FAILED -- saving rejects to file fp0.rej + patch failed, unable to continue (try -v) + patch failed, rejects left in working dir + errors during apply, please fix and refresh p0 + [2] + $ cat fp0 + cp0-bad + $ cat fp0.rej + --- fp0 + +++ fp0 + @@ -0,0 +1,1 @@ + +cp0 + $ hg qpop -aqf + patch queue now empty + $ rm fp0 + $ rm fp0.rej + + $ hg update 1 -q + $ echo cp1-bad >> fp1 + $ hg add fp1 + $ hg qpush -e p1 + abort: local changes found, refresh first + [255] + $ hg qpush -e p1 -f + applying p0 + applying p1 + file fp1 already exists + 1 out of 1 hunks FAILED -- saving rejects to file fp1.rej + patch failed, unable to continue (try -v) + patch failed, rejects left in working dir + errors during apply, please fix and refresh p1 + [2] + $ cat fp1 + cp1-bad + $ cat fp1.rej + --- fp1 + +++ fp1 + @@ -0,0 +1,1 @@ + +cp1 + $ hg qpop -aqf + patch queue now empty + $ rm fp1 + $ rm fp1.rej + +qpush --exact when already at a patch + + $ hg update 1 + 0 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg qpush -e p0 + applying p0 + now at: p0 + $ hg qpush -e p1 + abort: cannot push --exact with applied patches + [255] + $ hg qpop -aq + patch queue now empty + +qpush --exact --move should fail + + $ hg qpush -e --move p1 + abort: cannot use --exact and --move together + [255] + +qpush --exact a patch without a parent recorded + + $ hg qpush -q + now at: p0 + $ grep -v '# Parent' .hg/patches/p0 > p0.new + $ mv p0.new .hg/patches/p0 + $ hg qpop -aq + patch queue now empty + $ hg qpush -e + abort: p0 does not have a parent recorded + [255] + $ hg qpush -e p0 + abort: p0 does not have a parent recorded + [255] + $ hg qpush -e p1 + abort: p0 does not have a parent recorded + [255] + $ hg qpush -ea + abort: p0 does not have a parent recorded + [255] +
--- a/tests/test-mq-qrefresh.t Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/test-mq-qrefresh.t Tue Feb 01 17:52:25 2011 -0600 @@ -487,74 +487,3 @@ $ cd .. - -Issue2499: refuse to add .hgsub{,state} to a patch - - $ hg init repo-2499 - $ cd repo-2499 - $ hg qinit - $ hg qnew -m 0 0.diff - $ echo a > a - $ hg init sub - $ cd sub - $ echo b > b - $ hg ci -Am 0sub - adding b - $ cd .. - -test when adding - $ echo sub = sub > .hgsub - $ echo `hg id -i --debug sub` sub > .hgsubstate - $ hg add - adding .hgsub - adding .hgsubstate - adding a - $ hg qrefresh - warning: not adding .hgsub - warning: not adding .hgsubstate - $ hg qfinish -a - $ hg status - A .hgsub - A .hgsubstate - $ hg forget .hgsubstate - $ rm .hgsubstate - -add subrepo with a real commit - $ hg ci -m 1 - committing subrepository sub - $ hg qnew -m 2 2.diff - -test when modifying - $ echo sub2 = sub2 >> .hgsub - $ hg qrefresh - warning: not refreshing .hgsub - $ echo 0000000000000000000000000000000000000000 sub2 >> .hgsubstate - $ hg qrefresh - warning: not refreshing .hgsub - warning: not refreshing .hgsubstate - $ hg revert --no-backup .hgsub .hgsubstate - -test when removing - $ hg rm .hgsub - $ hg rm .hgsubstate - $ hg qrefresh - warning: not removing .hgsub - warning: not removing .hgsubstate - $ hg status - R .hgsub - R .hgsubstate - $ hg revert --no-backup .hgsub .hgsubstate - -test when deleting - $ rm .hgsub .hgsubstate - $ hg qrefresh - warning: not removing .hgsub - warning: not removing .hgsubstate - warning: subrepo spec file .hgsub not found - $ hg status - ! .hgsub - ! .hgsubstate - $ hg cat -r1 .hgsub > .hgsub - $ hg revert --no-backup .hgsubstate - - $ cd ..
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-mq-subrepo-svn.t Tue Feb 01 17:52:25 2011 -0600 @@ -0,0 +1,52 @@ + $ "$TESTDIR/hghave" svn || exit 80 + + $ echo "[extensions]" >> $HGRCPATH + $ echo "mq=" >> $HGRCPATH + $ echo "[diff]" >> $HGRCPATH + $ echo "nodates=1" >> $HGRCPATH + +fn to create new repository, and cd into it + $ mkrepo() { + > hg init $1 + > cd $1 + > hg qinit + > } + + +handle svn subrepos safely + + $ svnadmin create svn-repo-2499 + $ curpath=`pwd | tr '\\\\' /` + $ expr "$svnpath" : "\/" > /dev/null + > if [ $? -ne 0 ]; then + > curpath="/$curpath" + > fi + $ svnurl="file://$curpath/svn-repo-2499/project" + $ mkdir -p svn-project-2499/trunk + $ svn import -m 'init project' svn-project-2499 "$svnurl" + Adding svn-project-2499/trunk + + Committed revision 1. + +qnew on repo w/svn subrepo + $ mkrepo repo-2499-svn-subrepo + $ svn co "$svnurl"/trunk sub + Checked out revision 1. + $ echo 'sub = [svn]sub' >> .hgsub + $ hg add .hgsub + $ hg status -S + A .hgsub + ? sub/.svn/entries + $ hg qnew -m0 0.diff + committing subrepository sub + $ cd sub + $ echo a > a + $ svn add a + A a + $ svn st + A a + $ cd .. + $ hg status -S # doesn't show status for svn subrepos (yet) + $ hg qnew -m1 1.diff + abort: uncommitted changes in subrepository sub + [255]
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-mq-subrepo.t Tue Feb 01 17:52:25 2011 -0600 @@ -0,0 +1,347 @@ + $ echo "[extensions]" >> $HGRCPATH + $ echo "mq=" >> $HGRCPATH + $ echo "record=" >> $HGRCPATH + $ echo "[diff]" >> $HGRCPATH + $ echo "nodates=1" >> $HGRCPATH + + $ stdin=`pwd`/stdin.tmp + +fn to create new repository w/dirty subrepo, and cd into it + $ mkrepo() { + > hg init $1 + > cd $1 + > hg qinit + > } + +fn to create dirty subrepo + $ mksubrepo() { + > hg init $1 + > cd $1 + > echo a > a + > hg add + > cd .. + > } + + $ testadd() { + > cat - > "$stdin" + > mksubrepo sub + > echo sub = sub >> .hgsub + > hg add .hgsub + > echo % abort when adding .hgsub w/dirty subrepo + > hg status -S + > echo '%' $* + > cat "$stdin" | hg $* + > echo [$?] + > hg -R sub ci -m0sub + > echo % update substate when adding .hgsub w/clean updated subrepo + > hg status -S + > echo '%' $* + > cat "$stdin" | hg $* + > hg debugsub + > } + + $ testmod() { + > cat - > "$stdin" + > mksubrepo sub2 + > echo sub2 = sub2 >> .hgsub + > echo % abort when modifying .hgsub w/dirty subrepo + > hg status -S + > echo '%' $* + > cat "$stdin" | hg $* + > echo [$?] + > hg -R sub2 ci -m0sub2 + > echo % update substate when modifying .hgsub w/clean updated subrepo + > hg status -S + > echo '%' $* + > cat "$stdin" | hg $* + > hg debugsub + > } + + $ testrm1() { + > cat - > "$stdin" + > mksubrepo sub3 + > echo sub3 = sub3 >> .hgsub + > hg ci -Aqmsub3 + > $EXTRA + > echo b >> sub3/a + > hg rm .hgsub + > echo % update substate when removing .hgsub w/dirty subrepo + > hg status -S + > echo '%' $* + > cat "$stdin" | hg $* + > echo % debugsub should be empty + > hg debugsub + > } + + $ testrm2() { + > cat - > "$stdin" + > mksubrepo sub4 + > echo sub4 = sub4 >> .hgsub + > hg ci -Aqmsub4 + > $EXTRA + > hg rm .hgsub + > echo % update substate when removing .hgsub w/clean updated subrepo + > hg status -S + > echo '%' $* + > cat "$stdin" | hg $* + > echo % debugsub should be empty + > hg debugsub + > } + + +handle subrepos safely on qnew + + $ mkrepo repo-2499-qnew + $ testadd qnew -m0 0.diff + adding a + % abort when adding .hgsub w/dirty subrepo + A .hgsub + A sub/a + % qnew -m0 0.diff + abort: uncommitted changes in subrepository sub + [255] + % update substate when adding .hgsub w/clean updated subrepo + A .hgsub + % qnew -m0 0.diff + committing subrepository sub + path sub + source sub + revision b2fdb12cd82b021c3b7053d67802e77b6eeaee31 + + $ testmod qnew -m1 1.diff + adding a + % abort when modifying .hgsub w/dirty subrepo + M .hgsub + A sub2/a + % qnew -m1 1.diff + abort: uncommitted changes in subrepository sub2 + [255] + % update substate when modifying .hgsub w/clean updated subrepo + M .hgsub + % qnew -m1 1.diff + committing subrepository sub2 + path sub + source sub + revision b2fdb12cd82b021c3b7053d67802e77b6eeaee31 + path sub2 + source sub2 + revision 1f94c7611cc6b74f5a17b16121a1170d44776845 + + $ hg qpop -qa + patch queue now empty + $ testrm1 qnew -m2 2.diff + adding a + % update substate when removing .hgsub w/dirty subrepo + M sub3/a + R .hgsub + % qnew -m2 2.diff + % debugsub should be empty + + $ hg qpop -qa + patch queue now empty + $ testrm2 qnew -m3 3.diff + adding a + % update substate when removing .hgsub w/clean updated subrepo + R .hgsub + % qnew -m3 3.diff + % debugsub should be empty + + $ cd .. 
+ + +handle subrepos safely on qrefresh + + $ mkrepo repo-2499-qrefresh + $ hg qnew -m0 0.diff + $ testadd qrefresh + adding a + % abort when adding .hgsub w/dirty subrepo + A .hgsub + A sub/a + % qrefresh + abort: uncommitted changes in subrepository sub + [255] + % update substate when adding .hgsub w/clean updated subrepo + A .hgsub + % qrefresh + committing subrepository sub + path sub + source sub + revision b2fdb12cd82b021c3b7053d67802e77b6eeaee31 + + $ hg qnew -m1 1.diff + $ testmod qrefresh + adding a + % abort when modifying .hgsub w/dirty subrepo + M .hgsub + A sub2/a + % qrefresh + abort: uncommitted changes in subrepository sub2 + [255] + % update substate when modifying .hgsub w/clean updated subrepo + M .hgsub + % qrefresh + committing subrepository sub2 + path sub + source sub + revision b2fdb12cd82b021c3b7053d67802e77b6eeaee31 + path sub2 + source sub2 + revision 1f94c7611cc6b74f5a17b16121a1170d44776845 + + $ hg qpop -qa + patch queue now empty + $ EXTRA='hg qnew -m2 2.diff' testrm1 qrefresh + adding a + % update substate when removing .hgsub w/dirty subrepo + M sub3/a + R .hgsub + % qrefresh + % debugsub should be empty + + $ hg qpop -qa + patch queue now empty + $ EXTRA='hg qnew -m3 3.diff' testrm2 qrefresh + adding a + % update substate when removing .hgsub w/clean updated subrepo + R .hgsub + % qrefresh + % debugsub should be empty + + $ cd .. + + +handle subrepos safely on qpush/qpop + + $ mkrepo repo-2499-qpush + $ mksubrepo sub + adding a + $ hg -R sub ci -m0sub + $ echo sub = sub > .hgsub + $ hg add .hgsub + $ hg qnew -m0 0.diff + committing subrepository sub + $ hg debugsub + path sub + source sub + revision b2fdb12cd82b021c3b7053d67802e77b6eeaee31 + +qpop + $ hg qpop + popping 0.diff + patch queue now empty + $ hg status -AS + $ hg debugsub + +qpush + $ hg qpush + applying 0.diff + now at: 0.diff + $ hg status -AS + C .hgsub + C .hgsubstate + C sub/a + $ hg debugsub + path sub + source sub + revision b2fdb12cd82b021c3b7053d67802e77b6eeaee31 + + $ cd .. + + +handle subrepos safely on qrecord + + $ mkrepo repo-2499-qrecord + $ testadd qrecord --config ui.interactive=1 -m0 0.diff <<EOF + > y + > y + > EOF + adding a + % abort when adding .hgsub w/dirty subrepo + A .hgsub + A sub/a + % qrecord --config ui.interactive=1 -m0 0.diff + diff --git a/.hgsub b/.hgsub + new file mode 100644 + examine changes to '.hgsub'? [Ynsfdaq?] + abort: uncommitted changes in subrepository sub + [255] + % update substate when adding .hgsub w/clean updated subrepo + A .hgsub + % qrecord --config ui.interactive=1 -m0 0.diff + diff --git a/.hgsub b/.hgsub + new file mode 100644 + examine changes to '.hgsub'? [Ynsfdaq?] + committing subrepository sub + path sub + source sub + revision b2fdb12cd82b021c3b7053d67802e77b6eeaee31 + + $ testmod qrecord --config ui.interactive=1 -m1 1.diff <<EOF + > y + > y + > EOF + adding a + % abort when modifying .hgsub w/dirty subrepo + M .hgsub + A sub2/a + % qrecord --config ui.interactive=1 -m1 1.diff + diff --git a/.hgsub b/.hgsub + 1 hunks, 1 lines changed + examine changes to '.hgsub'? [Ynsfdaq?] + @@ -1,1 +1,2 @@ + sub = sub + +sub2 = sub2 + record this change to '.hgsub'? [Ynsfdaq?] + abort: uncommitted changes in subrepository sub2 + [255] + % update substate when modifying .hgsub w/clean updated subrepo + M .hgsub + % qrecord --config ui.interactive=1 -m1 1.diff + diff --git a/.hgsub b/.hgsub + 1 hunks, 1 lines changed + examine changes to '.hgsub'? [Ynsfdaq?] + @@ -1,1 +1,2 @@ + sub = sub + +sub2 = sub2 + record this change to '.hgsub'? [Ynsfdaq?] 
+ committing subrepository sub2 + path sub + source sub + revision b2fdb12cd82b021c3b7053d67802e77b6eeaee31 + path sub2 + source sub2 + revision 1f94c7611cc6b74f5a17b16121a1170d44776845 + + $ hg qpop -qa + patch queue now empty + $ EXTRA= testrm1 qrecord --config ui.interactive=1 -m2 2.diff <<EOF + > y + > y + > EOF + adding a + % update substate when removing .hgsub w/dirty subrepo + M sub3/a + R .hgsub + % qrecord --config ui.interactive=1 -m2 2.diff + diff --git a/.hgsub b/.hgsub + deleted file mode 100644 + examine changes to '.hgsub'? [Ynsfdaq?] + % debugsub should be empty + + $ hg qpop -qa + patch queue now empty + $ EXTRA= testrm2 qrecord --config ui.interactive=1 -m3 3.diff <<EOF + > y + > y + > EOF + adding a + % update substate when removing .hgsub w/clean updated subrepo + R .hgsub + % qrecord --config ui.interactive=1 -m3 3.diff + diff --git a/.hgsub b/.hgsub + deleted file mode 100644 + examine changes to '.hgsub'? [Ynsfdaq?] + % debugsub should be empty + + $ cd ..
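These tests all revolve around two tracked files: .hgsub, which maps a subrepo path to its source (for example 'sub = sub'), and .hgsubstate, which pins each path to a revision and is what 'hg debugsub' reflects after each operation. A minimal sketch of reading the pinned revisions, assuming only the simple one-pair-per-line format the tests build with echo (this is not Mercurial's own parser):

  def read_hgsubstate(path='.hgsubstate'):
      # Sketch: each line is '<node> <subrepo path>'.
      state = {}
      for line in open(path):
          line = line.rstrip('\n')
          if not line:
              continue
          node, subpath = line.split(' ', 1)
          state[subpath] = node
      return state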
--- a/tests/test-mq.t Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/test-mq.t Tue Feb 01 17:52:25 2011 -0600 @@ -284,12 +284,12 @@ qpush with dump of tag cache Dump the tag cache to ensure that it has exactly one head after qpush. - $ rm -f .hg/tags.cache + $ rm -f .hg/cache/tags $ hg tags > /dev/null -.hg/tags.cache (pre qpush): +.hg/cache/tags (pre qpush): - $ cat .hg/tags.cache + $ cat .hg/cache/tags 1 [\da-f]{40} (re) $ hg qpush @@ -297,9 +297,9 @@ now at: test.patch $ hg tags > /dev/null -.hg/tags.cache (post qpush): +.hg/cache/tags (post qpush): - $ cat .hg/tags.cache + $ cat .hg/cache/tags 2 [\da-f]{40} (re) $ checkundo qpush
--- a/tests/test-newbranch.t Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/test-newbranch.t Tue Feb 01 17:52:25 2011 -0600 @@ -1,4 +1,4 @@ - $ branchcache=.hg/branchheads.cache + $ branchcache=.hg/cache/branchheads $ hg init t $ cd t @@ -208,12 +208,11 @@ $ hg branch foo $ hg commit -m'Merge ff into foo' + created new head $ hg parents - changeset: 6:917eb54e1b4b + changeset: 6:6af8030670c9 branch: foo tag: tip - parent: 4:98d14f698afe - parent: 5:6683a60370cb user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: Merge ff into foo
--- a/tests/test-newcgi.t Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/test-newcgi.t Tue Feb 01 17:52:25 2011 -0600 @@ -49,36 +49,7 @@ $ chmod 755 hgwebdir.cgi - $ DOCUMENT_ROOT="/var/www/hg"; export DOCUMENT_ROOT - $ GATEWAY_INTERFACE="CGI/1.1"; export GATEWAY_INTERFACE - $ HTTP_ACCEPT="text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5"; export HTTP_ACCEPT - $ HTTP_ACCEPT_CHARSET="ISO-8859-1,utf-8;q=0.7,*;q=0.7"; export HTTP_ACCEPT_CHARSET - $ HTTP_ACCEPT_ENCODING="gzip,deflate"; export HTTP_ACCEPT_ENCODING - $ HTTP_ACCEPT_LANGUAGE="en-us,en;q=0.5"; export HTTP_ACCEPT_LANGUAGE - $ HTTP_CACHE_CONTROL="max-age=0"; export HTTP_CACHE_CONTROL - $ HTTP_CONNECTION="keep-alive"; export HTTP_CONNECTION - $ HTTP_HOST="hg.omnifarious.org"; export HTTP_HOST - $ HTTP_KEEP_ALIVE="300"; export HTTP_KEEP_ALIVE - $ HTTP_USER_AGENT="Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.8.0.4) Gecko/20060608 Ubuntu/dapper-security Firefox/1.5.0.4"; export HTTP_USER_AGENT - $ PATH_INFO="/"; export PATH_INFO - $ PATH_TRANSLATED="/var/www/hg/index.html"; export PATH_TRANSLATED - $ QUERY_STRING=""; export QUERY_STRING - $ REMOTE_ADDR="127.0.0.2"; export REMOTE_ADDR - $ REMOTE_PORT="44703"; export REMOTE_PORT - $ REQUEST_METHOD="GET"; export REQUEST_METHOD - $ REQUEST_URI="/test/"; export REQUEST_URI - $ SCRIPT_FILENAME="/home/hopper/hg_public/test.cgi"; export SCRIPT_FILENAME - $ SCRIPT_NAME="/test"; export SCRIPT_NAME - $ SCRIPT_URI="http://hg.omnifarious.org/test/"; export SCRIPT_URI - $ SCRIPT_URL="/test/"; export SCRIPT_URL - $ SERVER_ADDR="127.0.0.1"; export SERVER_ADDR - $ SERVER_ADMIN="eric@localhost"; export SERVER_ADMIN - $ SERVER_NAME="hg.omnifarious.org"; export SERVER_NAME - $ SERVER_PORT="80"; export SERVER_PORT - $ SERVER_PROTOCOL="HTTP/1.1"; export SERVER_PROTOCOL - $ SERVER_SIGNATURE="<address>Apache/2.0.53 (Fedora) Server at hg.omnifarious.org Port 80</address>"; export SERVER_SIGNATURE - $ SERVER_SOFTWARE="Apache/2.0.53 (Fedora)"; export SERVER_SOFTWARE - + $ . "$TESTDIR/cgienv" $ python hgweb.cgi > page1 $ python hgwebdir.cgi > page2
--- a/tests/test-newercgi.t Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/test-newercgi.t Tue Feb 01 17:52:25 2011 -0600 @@ -43,36 +43,7 @@ $ chmod 755 hgwebdir.cgi - $ DOCUMENT_ROOT="/var/www/hg"; export DOCUMENT_ROOT - $ GATEWAY_INTERFACE="CGI/1.1"; export GATEWAY_INTERFACE - $ HTTP_ACCEPT="text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5"; export HTTP_ACCEPT - $ HTTP_ACCEPT_CHARSET="ISO-8859-1,utf-8;q=0.7,*;q=0.7"; export HTTP_ACCEPT_CHARSET - $ HTTP_ACCEPT_ENCODING="gzip,deflate"; export HTTP_ACCEPT_ENCODING - $ HTTP_ACCEPT_LANGUAGE="en-us,en;q=0.5"; export HTTP_ACCEPT_LANGUAGE - $ HTTP_CACHE_CONTROL="max-age=0"; export HTTP_CACHE_CONTROL - $ HTTP_CONNECTION="keep-alive"; export HTTP_CONNECTION - $ HTTP_HOST="hg.omnifarious.org"; export HTTP_HOST - $ HTTP_KEEP_ALIVE="300"; export HTTP_KEEP_ALIVE - $ HTTP_USER_AGENT="Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.8.0.4) Gecko/20060608 Ubuntu/dapper-security Firefox/1.5.0.4"; export HTTP_USER_AGENT - $ PATH_INFO="/"; export PATH_INFO - $ PATH_TRANSLATED="/var/www/hg/index.html"; export PATH_TRANSLATED - $ QUERY_STRING=""; export QUERY_STRING - $ REMOTE_ADDR="127.0.0.2"; export REMOTE_ADDR - $ REMOTE_PORT="44703"; export REMOTE_PORT - $ REQUEST_METHOD="GET"; export REQUEST_METHOD - $ REQUEST_URI="/test/"; export REQUEST_URI - $ SCRIPT_FILENAME="/home/hopper/hg_public/test.cgi"; export SCRIPT_FILENAME - $ SCRIPT_NAME="/test"; export SCRIPT_NAME - $ SCRIPT_URI="http://hg.omnifarious.org/test/"; export SCRIPT_URI - $ SCRIPT_URL="/test/"; export SCRIPT_URL - $ SERVER_ADDR="127.0.0.1"; export SERVER_ADDR - $ SERVER_ADMIN="eric@localhost"; export SERVER_ADMIN - $ SERVER_NAME="hg.omnifarious.org"; export SERVER_NAME - $ SERVER_PORT="80"; export SERVER_PORT - $ SERVER_PROTOCOL="HTTP/1.1"; export SERVER_PROTOCOL - $ SERVER_SIGNATURE="<address>Apache/2.0.53 (Fedora) Server at hg.omnifarious.org Port 80</address>"; export SERVER_SIGNATURE - $ SERVER_SOFTWARE="Apache/2.0.53 (Fedora)"; export SERVER_SOFTWARE - + $ . "$TESTDIR/cgienv" $ python hgweb.cgi > page1 $ python hgwebdir.cgi > page2 @@ -81,7 +52,6 @@ $ REQUEST_URI="/test/test/" $ SCRIPT_URI="http://hg.omnifarious.org/test/test/" $ SCRIPT_URL="/test/test/" - $ python hgwebdir.cgi > page3 $ grep -i error page1 page2 page3
--- a/tests/test-no-symlinks Tue Feb 01 17:30:13 2011 -0600 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,47 +0,0 @@ -#!/bin/sh - -"$TESTDIR/hghave" no-symlink || exit 80 - -# The following script was used to create the bundle: -# -# hg init symlinks -# cd symlinks -# echo a > a -# mkdir d -# echo b > d/b -# ln -s a a.lnk -# ln -s d/b d/b.lnk -# hg ci -Am t -# hg bundle --base null ../test-no-symlinks.hg - -# Extract a symlink on a platform not supporting them -echo % unbundle -hg init t -cd t -hg pull -q "$TESTDIR/test-no-symlinks.hg" -hg update - -cat a.lnk && echo -cat d/b.lnk && echo - -# Copy a symlink and move another -echo % move and copy -hg copy a.lnk d/a2.lnk -hg mv d/b.lnk b2.lnk -hg ci -Am copy -cat d/a2.lnk && echo -cat b2.lnk && echo - -# Bundle and extract again -echo % bundle -hg bundle --base null ../symlinks.hg -cd .. - -hg init t2 -cd t2 -hg pull ../symlinks.hg -hg update - -cat a.lnk && echo -cat d/a2.lnk && echo -cat b2.lnk && echo
--- a/tests/test-no-symlinks.out Tue Feb 01 17:30:13 2011 -0600 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,20 +0,0 @@ -% unbundle -4 files updated, 0 files merged, 0 files removed, 0 files unresolved -a -d/b -% move and copy -a -d/b -% bundle -2 changesets found -pulling from ../symlinks.hg -requesting all changes -adding changesets -adding manifests -adding file changes -added 2 changesets with 6 changes to 6 files -(run 'hg update' to get a working copy) -5 files updated, 0 files merged, 0 files removed, 0 files unresolved -a -a -d/b
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-no-symlinks.t Tue Feb 01 17:52:25 2011 -0600 @@ -0,0 +1,59 @@ + $ "$TESTDIR/hghave" no-symlink || exit 80 + +# The following script was used to create the bundle: +# +# hg init symlinks +# cd symlinks +# echo a > a +# mkdir d +# echo b > d/b +# ln -s a a.lnk +# ln -s d/b d/b.lnk +# hg ci -Am t +# hg bundle --base null ../test-no-symlinks.hg + +Extract a symlink on a platform not supporting them + + $ hg init t + $ cd t + $ hg pull -q "$TESTDIR/test-no-symlinks.hg" + $ hg update + 4 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ cat a.lnk && echo + a + $ cat d/b.lnk && echo + d/b + +Copy a symlink and move another + + $ hg copy a.lnk d/a2.lnk + $ hg mv d/b.lnk b2.lnk + $ hg ci -Am copy + $ cat d/a2.lnk && echo + a + $ cat b2.lnk && echo + d/b + +Bundle and extract again + + $ hg bundle --base null ../symlinks.hg + 2 changesets found + $ cd .. + $ hg init t2 + $ cd t2 + $ hg pull ../symlinks.hg + pulling from ../symlinks.hg + requesting all changes + adding changesets + adding manifests + adding file changes + added 2 changesets with 6 changes to 6 files + (run 'hg update' to get a working copy) + $ hg update + 5 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ cat a.lnk && echo + a + $ cat d/a2.lnk && echo + a + $ cat b2.lnk && echo + d/b
--- a/tests/test-oldcgi.t Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/test-oldcgi.t Tue Feb 01 17:52:25 2011 -0600 @@ -59,36 +59,7 @@ $ chmod 755 hgwebdir.cgi - $ DOCUMENT_ROOT="/var/www/hg"; export DOCUMENT_ROOT - $ GATEWAY_INTERFACE="CGI/1.1"; export GATEWAY_INTERFACE - $ HTTP_ACCEPT="text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5"; export HTTP_ACCEPT - $ HTTP_ACCEPT_CHARSET="ISO-8859-1,utf-8;q=0.7,*;q=0.7"; export HTTP_ACCEPT_CHARSET - $ HTTP_ACCEPT_ENCODING="gzip,deflate"; export HTTP_ACCEPT_ENCODING - $ HTTP_ACCEPT_LANGUAGE="en-us,en;q=0.5"; export HTTP_ACCEPT_LANGUAGE - $ HTTP_CACHE_CONTROL="max-age=0"; export HTTP_CACHE_CONTROL - $ HTTP_CONNECTION="keep-alive"; export HTTP_CONNECTION - $ HTTP_HOST="hg.omnifarious.org"; export HTTP_HOST - $ HTTP_KEEP_ALIVE="300"; export HTTP_KEEP_ALIVE - $ HTTP_USER_AGENT="Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.8.0.4) Gecko/20060608 Ubuntu/dapper-security Firefox/1.5.0.4"; export HTTP_USER_AGENT - $ PATH_INFO="/"; export PATH_INFO - $ PATH_TRANSLATED="/var/www/hg/index.html"; export PATH_TRANSLATED - $ QUERY_STRING=""; export QUERY_STRING - $ REMOTE_ADDR="127.0.0.2"; export REMOTE_ADDR - $ REMOTE_PORT="44703"; export REMOTE_PORT - $ REQUEST_METHOD="GET"; export REQUEST_METHOD - $ REQUEST_URI="/test/"; export REQUEST_URI - $ SCRIPT_FILENAME="/home/hopper/hg_public/test.cgi"; export SCRIPT_FILENAME - $ SCRIPT_NAME="/test"; export SCRIPT_NAME - $ SCRIPT_URI="http://hg.omnifarious.org/test/"; export SCRIPT_URI - $ SCRIPT_URL="/test/"; export SCRIPT_URL - $ SERVER_ADDR="127.0.0.1"; export SERVER_ADDR - $ SERVER_ADMIN="eric@localhost"; export SERVER_ADMIN - $ SERVER_NAME="hg.omnifarious.org"; export SERVER_NAME - $ SERVER_PORT="80"; export SERVER_PORT - $ SERVER_PROTOCOL="HTTP/1.1"; export SERVER_PROTOCOL - $ SERVER_SIGNATURE="<address>Apache/2.0.53 (Fedora) Server at hg.omnifarious.org Port 80</address>"; export SERVER_SIGNATURE - $ SERVER_SOFTWARE="Apache/2.0.53 (Fedora)"; export SERVER_SOFTWARE - + $ . "$TESTDIR/cgienv" $ python hgweb.cgi > page1 $ python hgwebdir.cgi > page2
--- a/tests/test-parentrevspec.t Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/test-parentrevspec.t Tue Feb 01 17:52:25 2011 -0600 @@ -69,12 +69,12 @@ 6^^^^^: 0 6^^^^^^: -1 6^1: 5 - 6^2: abort: unknown revision '6^2'! + 6^2: hg: parse error at 1: syntax error 6^^2: 4 6^1^2: 4 - 6^^3: abort: unknown revision '6^^3'! + 6^^3: hg: parse error at 1: syntax error $ lookup "6~" "6~1" "6~2" "6~3" "6~4" "6~5" "6~42" "6~1^2" "6~1^2~2" - 6~: abort: unknown revision '6~'! + 6~: hg: parse error at 1: syntax error 6~1: 5 6~2: 3 6~3: 2 @@ -102,4 +102,4 @@ $ hg tag -l -r 2 "foo^bar" $ lookup "foo^bar" "foo^bar^" foo^bar: 2 - foo^bar^: abort: unknown revision 'foo^bar^'! + foo^bar^: hg: parse error at 3: syntax error
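The '^' and '~' suffixes tested here select parents and first-parent ancestors: rev^ is the first parent, rev^2 the second parent, and rev~n follows first parents n times (hence 6~2 resolving to 3 in this history). The behavioural change is that malformed forms such as 6^2 or 6^^3 now fail in the revset parser with a parse error instead of being looked up as literal revision names. A small sketch of the ~n walk, assuming the standard changectx API and shown only as an illustration of the semantics:

  def nth_first_parent_ancestor(repo, rev, n):
      # Sketch of the '~n' operator: follow first parents n times, so
      # '6~2' is the first parent of the first parent of revision 6.
      ctx = repo[rev]
      for i in range(n):
          ctx = ctx.parents()[0]
      return ctx.rev()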
--- a/tests/test-parseindex2.py Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/test-parseindex2.py Tue Feb 01 17:52:25 2011 -0600 @@ -21,7 +21,7 @@ index = [] nodemap = {nullid: nullrev} n = off = 0 - # if we're not using lazymap, always read the whole index + l = len(data) - s append = index.append if inline: @@ -50,7 +50,7 @@ # add the magic null revision at -1 index.append((0, 0, 0, -1, -1, -1, -1, nullid)) - return index, nodemap, cache + return index, cache data_inlined = '\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x01\x8c' \ @@ -97,10 +97,10 @@ def runtest() : py_res_1 = py_parseindex(data_inlined, True) - c_res_1 = parsers.parse_index(data_inlined, True) + c_res_1 = parsers.parse_index2(data_inlined, True) py_res_2 = py_parseindex(data_non_inlined, False) - c_res_2 = parsers.parse_index(data_non_inlined, False) + c_res_2 = parsers.parse_index2(data_non_inlined, False) if py_res_1 != c_res_1: print "Parse index result (with inlined data) differs!"
--- a/tests/test-patchbomb.t Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/test-patchbomb.t Tue Feb 01 17:52:25 2011 -0600 @@ -145,6 +145,17 @@ +b +.hg/last-email.txt + + $ cat > editor << '__EOF__' + > #!/bin/sh + > echo "a precious introductory message" > "$1" + > __EOF__ + $ chmod +x editor + $ HGEDITOR="'`pwd`'"/editor hg email -n -t foo -s test -r 0:tip > /dev/null + $ cat .hg/last-email.txt + a precious introductory message + $ hg email -m test.mbox -f quux -t foo -c bar -s test 0:tip \ > --config extensions.progress= --config progress.assume-tty=1 \ > --config progress.delay=0 --config progress.refresh=0
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-pending.t Tue Feb 01 17:52:25 2011 -0600 @@ -0,0 +1,117 @@ +Verify that pending changesets are seen by pretxn* hooks but not by other +processes that access the destination repo while the hooks are running. + +The hooks (python and external) both reject changesets after some think time, +during which another process runs pull. Each hook creates a file ('notify') to +indicate to the controlling process that it is running; the process removes the +file to indicate the hook can terminate. + +init env vars + + $ d=`pwd` + $ maxwait=20 + +utility to run the test - start a push in the background and run pull + + $ dotest() { + > rm -f notify + > printf 'push '; hg -R child-push tip --template '{node}\n' + > hg -R child-push -q push > push.out 2>&1 & + > + > # wait for hook to create the notify file + > i=$maxwait + > while [ ! -f notify -a $i != 0 ]; do + > sleep 1 + > i=`expr $i - 1` + > done + > + > # run pull + > hg -R child-pull -q pull + > rc=$? + > + > # tell hook to finish; notify should exist. + > rm notify + > wait + > + > cat push.out + > printf 'pull '; hg -R child-pull tip --template '{node}\n' + > return $rc + > } + +python hook + + $ cat <<EOF > reject.py + > import os, time + > from mercurial import ui, localrepo + > def rejecthook(ui, repo, hooktype, node, **opts): + > ui.write('hook %s\\n' % repo['tip'].hex()) + > # create the notify file so caller knows we're running + > fpath = os.path.join('$d', 'notify') + > f = open(fpath, 'w') + > f.close() + > # wait for ack - caller should delete the notify file + > i = $maxwait + > while os.path.exists(fpath) and i > 0: + > time.sleep(1) + > i -= 1 + > return True # reject the changesets + > EOF + +external hook + + $ cat <<EOF > reject.sh + > #! /bin/sh + > printf 'hook '; hg tip --template '{node}\\n' + > # create the notify file so caller knows we're running + > fpath=$d/notify + > touch \$fpath + > # wait for ack - caller should delete the notify file + > i=$maxwait + > while [ -f \$fpath -a \$i != 0 ]; do + > sleep 1 + > i=\`expr \$i - 1\` + > done + > exit 1 # reject the changesets + > EOF + $ chmod +x reject.sh + +create repos + + $ hg init parent + $ hg clone -q parent child-push + $ hg clone -q parent child-pull + $ echo a > child-push/a + $ hg -R child-push add child-push/a + $ hg -R child-push commit -m a -d '1000000 0' + +test python hook + + $ cat <<EOF > parent/.hg/hgrc + > [extensions] + > reject = $d/reject.py + > [hooks] + > pretxnchangegroup = python:reject.rejecthook + > EOF + + $ dotest + push 29b62aeb769fdf78d8d9c5f28b017f76d7ef824b + hook 29b62aeb769fdf78d8d9c5f28b017f76d7ef824b + transaction abort! + rollback completed + abort: pretxnchangegroup hook failed + pull 0000000000000000000000000000000000000000 + +test external hook + + $ cat <<EOF > parent/.hg/hgrc + > [hooks] + > pretxnchangegroup = $d/reject.sh + > EOF + + $ dotest + push 29b62aeb769fdf78d8d9c5f28b017f76d7ef824b + hook 29b62aeb769fdf78d8d9c5f28b017f76d7ef824b + transaction abort! + rollback completed + abort: pretxnchangegroup hook exited with status 1 + pull 0000000000000000000000000000000000000000
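Both hooks above reject the incoming changesets after a delay, the Python one by returning a true value and the shell one by exiting non-zero, while the concurrent pull must not see the pending data. The essential shape of such a hook, sketched with the same signature convention as rejecthook above, is simply:

  def denyall(ui, repo, hooktype, node, **kwargs):
      # Sketch of a pretxnchangegroup hook: a truthy return value makes
      # the transaction roll back, matching the 'transaction abort!'
      # output above; returning a false value would accept the push.
      ui.warn('rejecting changesets starting at %s\n' % node)
      return True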
--- a/tests/test-progress.t Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/test-progress.t Tue Feb 01 17:52:25 2011 -0600 @@ -23,78 +23,144 @@ > } > EOF - $ cat > filtercr.py <<EOF - > import sys, re - > for line in sys.stdin: - > line = re.sub(r'\r+[^\n]', lambda m: '\n' + m.group()[-1:], line) - > sys.stdout.write(line) - > print - > EOF - $ echo "[extensions]" >> $HGRCPATH $ echo "progress=" >> $HGRCPATH $ echo "loop=`pwd`/loop.py" >> $HGRCPATH $ echo "[progress]" >> $HGRCPATH + $ echo "format = topic bar number" >> $HGRCPATH $ echo "assume-tty=1" >> $HGRCPATH + $ echo "width=60" >> $HGRCPATH test default params, display nothing because of delay - $ hg -y loop 3 2>&1 | python filtercr.py + $ hg -y loop 3 2>&1 | $TESTDIR/filtercr.py $ echo "delay=0" >> $HGRCPATH $ echo "refresh=0" >> $HGRCPATH test with delay=0, refresh=0 - $ hg -y loop 3 2>&1 | python filtercr.py + $ hg -y loop 3 2>&1 | $TESTDIR/filtercr.py - loop [ ] 0/3 - loop [=====================> ] 1/3 - loop [============================================> ] 2/3 - \r (esc) + loop [ ] 0/3 + loop [===============> ] 1/3 + loop [===============================> ] 2/3 + \r (esc) test refresh is taken in account - $ hg -y --config progress.refresh=100 loop 3 2>&1 | python filtercr.py + $ hg -y --config progress.refresh=100 loop 3 2>&1 | $TESTDIR/filtercr.py test format options 1 - $ hg -y --config 'progress.format=number topic item+2' loop 2 2>&1 | python filtercr.py + $ hg -y --config 'progress.format=number topic item+2' loop 2 2>&1 \ + > | $TESTDIR/filtercr.py 0/2 loop lo 1/2 loop lo - \r (esc) + \r (esc) test format options 2 - $ hg -y --config 'progress.format=number item-3 bar' loop 2 2>&1 | python filtercr.py + $ hg -y --config 'progress.format=number item-3 bar' loop 2 2>&1 \ + > | $TESTDIR/filtercr.py - 0/2 p.0 [ ] - 1/2 p.1 [=================================> ] - \r (esc) + 0/2 p.0 [ ] + 1/2 p.1 [=======================> ] + \r (esc) test format options and indeterminate progress - $ hg -y --config 'progress.format=number item bar' loop -- -2 2>&1 | python filtercr.py + $ hg -y --config 'progress.format=number item bar' loop -- -2 2>&1 \ + > | $TESTDIR/filtercr.py - 0 loop.0 [ <=> ] - 1 loop.1 [ <=> ] - \r (esc) + 0 loop.0 [ <=> ] + 1 loop.1 [ <=> ] + \r (esc) make sure things don't fall over if count > total - $ hg -y loop --total 4 6 2>&1 | python filtercr.py + $ hg -y loop --total 4 6 2>&1 | $TESTDIR/filtercr.py - loop [ ] 0/4 - loop [================> ] 1/4 - loop [=================================> ] 2/4 - loop [==================================================> ] 3/4 - loop [===================================================================>] 4/4 - loop [ <=> ] 5/4 - \r (esc) + loop [ ] 0/4 + loop [===========> ] 1/4 + loop [=======================> ] 2/4 + loop [===================================> ] 3/4 + loop [===============================================>] 4/4 + loop [ <=> ] 5/4 + \r (esc) test immediate progress completion - $ hg -y loop 0 2>&1 | python filtercr.py + $ hg -y loop 0 2>&1 | $TESTDIR/filtercr.py + + +test delay time estimates + + $ cat > mocktime.py <<EOF + > import os + > import time + > + > class mocktime(object): + > def __init__(self, increment): + > self.time = 0 + > self.increment = increment + > def __call__(self): + > self.time += self.increment + > return self.time + > + > def uisetup(ui): + > time.time = mocktime(int(os.environ.get('MOCKTIME', '11'))) + > EOF + + $ echo "[extensions]" > $HGRCPATH + $ echo "mocktime=`pwd`/mocktime.py" >> $HGRCPATH + $ echo "progress=" >> $HGRCPATH + $ echo 
"loop=`pwd`/loop.py" >> $HGRCPATH + $ echo "[progress]" >> $HGRCPATH + $ echo "assume-tty=1" >> $HGRCPATH + $ echo "delay=25" >> $HGRCPATH + $ echo "width=60" >> $HGRCPATH + + $ hg -y loop 8 2>&1 | python $TESTDIR/filtercr.py + loop [=========> ] 2/8 1m07s + loop [===============> ] 3/8 56s + loop [=====================> ] 4/8 45s + loop [==========================> ] 5/8 34s + loop [================================> ] 6/8 23s + loop [=====================================> ] 7/8 12s + \r (esc) + + $ MOCKTIME=10000 hg -y loop 4 2>&1 | python $TESTDIR/filtercr.py + + loop [ ] 0/4 + loop [=========> ] 1/4 8h21m + loop [====================> ] 2/4 5h34m + loop [==============================> ] 3/4 2h47m + \r (esc) + + $ MOCKTIME=1000000 hg -y loop 4 2>&1 | python $TESTDIR/filtercr.py + + loop [ ] 0/4 + loop [=========> ] 1/4 5w00d + loop [====================> ] 2/4 3w03d + loop [=============================> ] 3/4 11d14h + \r (esc) + + + $ MOCKTIME=14000000 hg -y loop 4 2>&1 | python $TESTDIR/filtercr.py + + loop [ ] 0/4 + loop [=========> ] 1/4 1y18w + loop [===================> ] 2/4 46w03d + loop [=============================> ] 3/4 23w02d + \r (esc) + +Time estimates should not fail when there's no end point: + $ hg -y loop -- -4 2>&1 | python $TESTDIR/filtercr.py + + loop [ <=> ] 2 + loop [ <=> ] 3 + \r (esc)
--- a/tests/test-push-warn.t Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/test-push-warn.t Tue Feb 01 17:52:25 2011 -0600 @@ -30,6 +30,23 @@ (you should pull and merge or use push -f to force) [255] + $ hg push --debug ../a + pushing to ../a + searching for changes + examining 1c9246a22a0a:d8d565842d04 + found incomplete branch 1c9246a22a0a:d8d565842d04 + searching: 1 queries + narrowing 1:1 d8d565842d04 + found new branch changeset 1c9246a22a0a + found new changesets starting at 1c9246a22a0a + 1 total queries + common changesets up to d8d565842d04 + new remote heads on branch 'default' + new remote head 1e108cc5548c + abort: push creates new remote heads on branch 'default'! + (you should pull and merge or use push -f to force) + [255] + $ hg pull ../a pulling from ../a searching for changes @@ -396,6 +413,7 @@ (branch merge, don't forget to commit) $ hg -R k ci -m merge + created new head $ hg -R k push -r a j pushing to j
--- a/tests/test-qrecord.t Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/test-qrecord.t Tue Feb 01 17:52:25 2011 -0600 @@ -158,13 +158,13 @@ -2 +2 2 3 - record change 1/6 to '1.txt'? [Ynsfdaq?] + record change 1/4 to '1.txt'? [Ynsfdaq?] @@ -3,3 +3,3 @@ 3 -4 +4 4 5 - record change 2/6 to '1.txt'? [Ynsfdaq?] + record change 2/4 to '1.txt'? [Ynsfdaq?] diff --git a/2.txt b/2.txt 1 hunks, 1 lines changed examine changes to '2.txt'? [Ynsfdaq?] @@ -175,7 +175,7 @@ c d e - record change 4/6 to '2.txt'? [Ynsfdaq?] + record change 3/4 to '2.txt'? [Ynsfdaq?] diff --git a/dir/a.txt b/dir/a.txt 1 hunks, 1 lines changed examine changes to 'dir/a.txt'? [Ynsfdaq?] @@ -255,7 +255,7 @@ -4 +4 4 5 - record change 1/3 to '1.txt'? [Ynsfdaq?] + record change 1/2 to '1.txt'? [Ynsfdaq?] diff --git a/dir/a.txt b/dir/a.txt 1 hunks, 1 lines changed examine changes to 'dir/a.txt'? [Ynsfdaq?] @@ -265,7 +265,7 @@ someone up - record change 3/3 to 'dir/a.txt'? [Ynsfdaq?] + record change 2/2 to 'dir/a.txt'? [Ynsfdaq?] After qrecord b.patch 'tip'
--- a/tests/test-record.t Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/test-record.t Tue Feb 01 17:52:25 2011 -0600 @@ -285,7 +285,9 @@ Modify end of plain file, add EOL $ echo >> plain - $ hg record -d '10 0' -m eol plain <<EOF + $ echo 1 > plain2 + $ hg add plain2 + $ hg record -d '10 0' -m eol plain plain2 <<EOF > y > y > y @@ -300,16 +302,23 @@ -7264f99c5f5ff3261504828afa4fb4d406c3af54 \ No newline at end of file +7264f99c5f5ff3261504828afa4fb4d406c3af54 - record this change to 'plain'? [Ynsfdaq?] + record change 1/2 to 'plain'? [Ynsfdaq?] + diff --git a/plain2 b/plain2 + new file mode 100644 + examine changes to 'plain2'? [Ynsfdaq?] -Modify beginning, trim end, record both +Modify beginning, trim end, record both, add another file to test +changes numbering $ rm plain $ for i in 2 2 3 4 5 6 7 8 9 10; do > echo $i >> plain > done + $ echo 2 >> plain2 - $ hg record -d '10 0' -m begin-and-end plain <<EOF + $ hg record -d '10 0' -m begin-and-end plain plain2 <<EOF + > y + > y > y > y > y @@ -323,23 +332,30 @@ 2 3 4 - record change 1/2 to 'plain'? [Ynsfdaq?] + record change 1/3 to 'plain'? [Ynsfdaq?] @@ -8,5 +8,3 @@ 8 9 10 -11 -7264f99c5f5ff3261504828afa4fb4d406c3af54 - record change 2/2 to 'plain'? [Ynsfdaq?] + record change 2/3 to 'plain'? [Ynsfdaq?] + diff --git a/plain2 b/plain2 + 1 hunks, 1 lines changed + examine changes to 'plain2'? [Ynsfdaq?] + @@ -1,1 +1,2 @@ + 1 + +2 + record change 3/3 to 'plain2'? [Ynsfdaq?] $ hg tip -p - changeset: 11:efca65c9b09e + changeset: 11:21df83db12b8 tag: tip user: test date: Thu Jan 01 00:00:10 1970 +0000 summary: begin-and-end - diff -r cd07d48e8cbe -r efca65c9b09e plain + diff -r ddb8b281c3ff -r 21df83db12b8 plain --- a/plain Thu Jan 01 00:00:10 1970 +0000 +++ b/plain Thu Jan 01 00:00:10 1970 +0000 @@ -1,4 +1,4 @@ @@ -354,6 +370,12 @@ 10 -11 -7264f99c5f5ff3261504828afa4fb4d406c3af54 + diff -r ddb8b281c3ff -r 21df83db12b8 plain2 + --- a/plain2 Thu Jan 01 00:00:10 1970 +0000 + +++ b/plain2 Thu Jan 01 00:00:10 1970 +0000 + @@ -1,1 +1,2 @@ + 1 + +2 Trim beginning, modify end @@ -396,13 +418,13 @@ record change 2/2 to 'plain'? [Ynsfdaq?] $ hg tip -p - changeset: 12:7d1e66983c15 + changeset: 12:99337501826f tag: tip user: test date: Thu Jan 01 00:00:11 1970 +0000 summary: end-only - diff -r efca65c9b09e -r 7d1e66983c15 plain + diff -r 21df83db12b8 -r 99337501826f plain --- a/plain Thu Jan 01 00:00:10 1970 +0000 +++ b/plain Thu Jan 01 00:00:11 1970 +0000 @@ -7,4 +7,4 @@ @@ -432,13 +454,13 @@ record this change to 'plain'? [Ynsfdaq?] $ hg tip -p - changeset: 13:a09fc62a0e61 + changeset: 13:bbd45465d540 tag: tip user: test date: Thu Jan 01 00:00:12 1970 +0000 summary: begin-only - diff -r 7d1e66983c15 -r a09fc62a0e61 plain + diff -r 99337501826f -r bbd45465d540 plain --- a/plain Thu Jan 01 00:00:11 1970 +0000 +++ b/plain Thu Jan 01 00:00:12 1970 +0000 @@ -1,6 +1,3 @@ @@ -533,13 +555,13 @@ record change 3/3 to 'plain'? [Ynsfdaq?] $ hg tip -p - changeset: 15:7d137997f3a6 + changeset: 15:f34a7937ec33 tag: tip user: test date: Thu Jan 01 00:00:14 1970 +0000 summary: middle-only - diff -r c0b8e5fb0be6 -r 7d137997f3a6 plain + diff -r 82c065d0b850 -r f34a7937ec33 plain --- a/plain Thu Jan 01 00:00:13 1970 +0000 +++ b/plain Thu Jan 01 00:00:14 1970 +0000 @@ -1,5 +1,10 @@ @@ -573,13 +595,13 @@ record this change to 'plain'? [Ynsfdaq?] 
$ hg tip -p - changeset: 16:4959e3ff13eb + changeset: 16:f9900b71a04c tag: tip user: test date: Thu Jan 01 00:00:15 1970 +0000 summary: end-only - diff -r 7d137997f3a6 -r 4959e3ff13eb plain + diff -r f34a7937ec33 -r f9900b71a04c plain --- a/plain Thu Jan 01 00:00:14 1970 +0000 +++ b/plain Thu Jan 01 00:00:15 1970 +0000 @@ -9,3 +9,5 @@ @@ -610,13 +632,13 @@ record this change to 'subdir/a'? [Ynsfdaq?] $ hg tip -p - changeset: 18:40698cd490b2 + changeset: 18:61be427a9deb tag: tip user: test date: Thu Jan 01 00:00:16 1970 +0000 summary: subdir-change - diff -r 661eacdc08b9 -r 40698cd490b2 subdir/a + diff -r a7ffae4d61cb -r 61be427a9deb subdir/a --- a/subdir/a Thu Jan 01 00:00:16 1970 +0000 +++ b/subdir/a Thu Jan 01 00:00:16 1970 +0000 @@ -1,1 +1,2 @@ @@ -709,13 +731,13 @@ examine changes to 'subdir/f2'? [Ynsfdaq?] $ hg tip -p - changeset: 20:d2d8c25276a8 + changeset: 20:b3df3dda369a tag: tip user: test date: Thu Jan 01 00:00:18 1970 +0000 summary: x - diff -r 25eb2a7694fb -r d2d8c25276a8 subdir/f2 + diff -r 6e02d6c9906d -r b3df3dda369a subdir/f2 --- a/subdir/f2 Thu Jan 01 00:00:17 1970 +0000 +++ b/subdir/f2 Thu Jan 01 00:00:18 1970 +0000 @@ -1,1 +1,2 @@ @@ -733,13 +755,13 @@ examine changes to 'subdir/f1'? [Ynsfdaq?] $ hg tip -p - changeset: 21:1013f51ce32f + changeset: 21:38ec577f126b tag: tip user: test date: Thu Jan 01 00:00:19 1970 +0000 summary: y - diff -r d2d8c25276a8 -r 1013f51ce32f subdir/f1 + diff -r b3df3dda369a -r 38ec577f126b subdir/f1 --- a/subdir/f1 Thu Jan 01 00:00:18 1970 +0000 +++ b/subdir/f1 Thu Jan 01 00:00:19 1970 +0000 @@ -1,1 +1,2 @@ @@ -768,7 +790,7 @@ record this change to 'subdir/f1'? [Ynsfdaq?] $ hg tip --config diff.git=True -p - changeset: 22:5df857735621 + changeset: 22:3261adceb075 tag: tip user: test date: Thu Jan 01 00:00:20 1970 +0000 @@ -804,7 +826,7 @@ record this change to 'subdir/f1'? [Ynsfdaq?] $ hg tip --config diff.git=True -p - changeset: 23:a4ae36a78715 + changeset: 23:b429867550db tag: tip user: test date: Thu Jan 01 00:00:21 1970 +0000 @@ -842,7 +864,7 @@ record this change to 'subdir/f1'? [Ynsfdaq?] $ hg tip --config diff.git=True -p - changeset: 24:1460f6e47966 + changeset: 24:0b082130c20a tag: tip user: test date: Thu Jan 01 00:00:22 1970 +0000 @@ -865,7 +887,7 @@ Abort early when a merge is in progress $ hg up 4 - 1 files updated, 0 files merged, 5 files removed, 0 files unresolved + 1 files updated, 0 files merged, 6 files removed, 0 files unresolved $ touch iwillmergethat $ hg add iwillmergethat @@ -876,14 +898,14 @@ $ hg ci -m'new head' $ hg up default - 5 files updated, 0 files merged, 2 files removed, 0 files unresolved + 6 files updated, 0 files merged, 2 files removed, 0 files unresolved $ hg merge thatbranch 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg record -m'will abort' - abort: cannot partially commit a merge (use hg commit instead) + abort: cannot partially commit a merge (use "hg commit" instead) [255] $ hg up -C @@ -921,14 +943,14 @@ record this change to 'subdir/f1'? [Ynsfdaq?] $ hg tip -p - changeset: 26:5bacc1f6e9cf + changeset: 26:b8306e70edc4 tag: tip - parent: 24:1460f6e47966 + parent: 24:0b082130c20a user: test date: Thu Jan 01 00:00:23 1970 +0000 summary: w1 - diff -r 1460f6e47966 -r 5bacc1f6e9cf subdir/f1 + diff -r 0b082130c20a -r b8306e70edc4 subdir/f1 --- a/subdir/f1 Thu Jan 01 00:00:22 1970 +0000 +++ b/subdir/f1 Thu Jan 01 00:00:23 1970 +0000 @@ -3,3 +3,4 @@
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-revset-dirstate-parents.t Tue Feb 01 17:52:25 2011 -0600 @@ -0,0 +1,52 @@ + $ HGENCODING=utf-8 + $ export HGENCODING + + $ try() { + > hg debugrevspec --debug $@ + > } + + $ log() { + > hg log --template '{rev}\n' -r "$1" + > } + + $ hg init repo + $ cd repo + + $ try 'p1()' + ('func', ('symbol', 'p1'), None) + $ try 'p2()' + ('func', ('symbol', 'p2'), None) + $ try 'parents()' + ('func', ('symbol', 'parents'), None) + +null revision + $ log 'p1()' + $ log 'p2()' + $ log 'parents()' + +working dir with a single parent + $ echo a > a + $ hg ci -Aqm0 + $ log 'p1()' + 0 + $ log 'tag() and p1()' + $ log 'p2()' + $ log 'parents()' + 0 + $ log 'tag() and parents()' + +merge in progress + $ echo b > b + $ hg ci -Aqm1 + $ hg up -q 0 + $ echo c > c + $ hg ci -Aqm2 + $ hg merge -q + $ log 'p1()' + 2 + $ log 'p2()' + 1 + $ log 'tag() and p2()' + $ log 'parents()' + 1 + 2
--- a/tests/test-rollback.t Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/test-rollback.t Tue Feb 01 17:52:25 2011 -0600 @@ -72,8 +72,9 @@ $ cat .hg/last-message.txt ; echo precious commit message - $ echo '% same thing, but run $EDITOR' - % same thing, but run $EDITOR + +same thing, but run $EDITOR + $ cat > editor << '__EOF__' > #!/bin/sh > echo "another precious commit message" > "$1" @@ -88,5 +89,3 @@ $ cat .hg/last-message.txt another precious commit message -.hg/last-message.txt: -
--- a/tests/test-ssh.t Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/test-ssh.t Tue Feb 01 17:52:25 2011 -0600 @@ -214,7 +214,7 @@ $ hg debugpushkey --config ui.ssh="python ../dummyssh" ssh://user@dummy/remote bookmarks foo 1160648e36cec0054048a7edc4110c6f84fde594 $ hg book -f foo - $ hg push + $ hg push --traceback pushing to ssh://user@dummy/remote searching for changes no changes found
--- a/tests/test-static-http.t Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/test-static-http.t Tue Feb 01 17:52:25 2011 -0600 @@ -10,7 +10,7 @@ one pull $ cat > dumb.py <<EOF - > import BaseHTTPServer, SimpleHTTPServer, os, signal + > import BaseHTTPServer, SimpleHTTPServer, os, signal, sys > > def run(server_class=BaseHTTPServer.HTTPServer, > handler_class=SimpleHTTPServer.SimpleHTTPRequestHandler): @@ -18,7 +18,7 @@ > httpd = server_class(server_address, handler_class) > httpd.serve_forever() > - > signal.signal(signal.SIGTERM, lambda x: sys.exit(0)) + > signal.signal(signal.SIGTERM, lambda x, y: sys.exit(0)) > run() > EOF $ python dumb.py 2>/dev/null & @@ -27,10 +27,13 @@ $ cd remote $ hg init $ echo foo > bar - $ hg add bar + $ echo c2 > '.dotfile with spaces' + $ hg add + adding .dotfile with spaces + adding bar $ hg commit -m"test" $ hg tip - changeset: 0:61c9426e69fe + changeset: 0:02770d679fb8 tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 @@ -42,16 +45,16 @@ adding changesets adding manifests adding file changes - added 1 changesets with 1 changes to 1 files + added 1 changesets with 2 changes to 2 files updating to branch default - 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd local $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files - 1 files, 1 changesets, 1 total revisions + 2 files, 1 changesets, 2 total revisions $ cat bar foo $ cd ../remote @@ -61,12 +64,12 @@ check for HTTP opener failures when cachefile does not exist - $ rm .hg/*.cache + $ rm .hg/cache/* $ cd ../local $ echo '[hooks]' >> .hg/hgrc $ echo 'changegroup = python ../printenv.py changegroup' >> .hg/hgrc $ hg pull - changegroup hook: HG_NODE=822d6e31f08b9d6e3b898ce5e52efc0a4bf4905a HG_SOURCE=pull HG_URL=http://localhost:$HGPORT/remote + changegroup hook: HG_NODE=4ac2e3648604439c580c69b09ec9d93a88d93432 HG_SOURCE=pull HG_URL=http://localhost:$HGPORT/remote pulling from static-http://localhost:$HGPORT/remote searching for changes adding changesets @@ -96,9 +99,9 @@ adding changesets adding manifests adding file changes - added 1 changesets with 1 changes to 1 files + added 1 changesets with 2 changes to 2 files updating to branch default - 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved test with "/" URI (issue 747)
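The first hunk fixes the dumb test server's SIGTERM handler: Python calls a signal handler with two arguments, the signal number and the current stack frame, so the one-argument lambda would raise a TypeError when the server was terminated, and sys was not even imported. The same fix written out as a named handler:

  import signal, sys

  def on_sigterm(signum, frame):
      # Signal handlers receive (signum, frame); exiting here lets the
      # test shut the background server down cleanly.
      sys.exit(0)

  signal.signal(signal.SIGTERM, on_sigterm)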
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-subrepo-git.t Tue Feb 01 17:52:25 2011 -0600 @@ -0,0 +1,331 @@ + $ "$TESTDIR/hghave" git || exit 80 + +make git commits repeatable + + $ GIT_AUTHOR_NAME='test'; export GIT_AUTHOR_NAME + $ GIT_AUTHOR_EMAIL='test@example.org'; export GIT_AUTHOR_EMAIL + $ GIT_AUTHOR_DATE='1234567891 +0000'; export GIT_AUTHOR_DATE + $ GIT_COMMITTER_NAME="$GIT_AUTHOR_NAME"; export GIT_COMMITTER_NAME + $ GIT_COMMITTER_EMAIL="$GIT_AUTHOR_EMAIL"; export GIT_COMMITTER_EMAIL + $ GIT_COMMITTER_DATE="$GIT_AUTHOR_DATE"; export GIT_COMMITTER_DATE + +root hg repo + + $ hg init t + $ cd t + $ echo a > a + $ hg add a + $ hg commit -m a + $ cd .. + +new external git repo + + $ mkdir gitroot + $ cd gitroot + $ git init -q + $ echo g > g + $ git add g + $ git commit -q -m g + +add subrepo clone + + $ cd ../t + $ echo 's = [git]../gitroot' > .hgsub + $ git clone -q ../gitroot s + $ hg add .hgsub + $ hg commit -m 'new git subrepo' + committing subrepository s + $ hg debugsub + path s + source ../gitroot + revision da5f5b1d8ffcf62fb8327bcd3c89a4367a6018e7 + +record a new commit from upstream from a different branch + + $ cd ../gitroot + $ git checkout -q -b testing + $ echo gg >> g + $ git commit -q -a -m gg + + $ cd ../t/s + $ git pull -q >/dev/null 2>/dev/null + $ git checkout -q -b testing origin/testing >/dev/null + + $ cd .. + $ hg status --subrepos + M s/g + $ hg commit -m 'update git subrepo' + committing subrepository s + $ hg debugsub + path s + source ../gitroot + revision 126f2a14290cd5ce061fdedc430170e8d39e1c5a + +make $GITROOT pushable, by replacing it with a clone with nothing checked out + + $ cd .. + $ git clone gitroot gitrootbare --bare -q + $ rm -rf gitroot + $ mv gitrootbare gitroot + +clone root + + $ cd t + $ hg clone . ../tc + updating to branch default + cloning subrepo s + 3 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ cd ../tc + $ hg debugsub + path s + source ../gitroot + revision 126f2a14290cd5ce061fdedc430170e8d39e1c5a + +update to previous substate + + $ hg update 1 -q + $ cat s/g + g + $ hg debugsub + path s + source ../gitroot + revision da5f5b1d8ffcf62fb8327bcd3c89a4367a6018e7 + +clone root, make local change + + $ cd ../t + $ hg clone . ../ta + updating to branch default + cloning subrepo s + 3 files updated, 0 files merged, 0 files removed, 0 files unresolved + + $ cd ../ta + $ echo ggg >> s/g + $ hg status --subrepos + M s/g + $ hg commit -m ggg + committing subrepository s + $ hg debugsub + path s + source ../gitroot + revision 79695940086840c99328513acbe35f90fcd55e57 + +clone root separately, make different local change + + $ cd ../t + $ hg clone . ../tb + updating to branch default + cloning subrepo s + 3 files updated, 0 files merged, 0 files removed, 0 files unresolved + + $ cd ../tb/s + $ echo f > f + $ git add f + $ cd .. 
+ + $ hg status --subrepos + A s/f + $ hg commit -m f + committing subrepository s + $ hg debugsub + path s + source ../gitroot + revision aa84837ccfbdfedcdcdeeedc309d73e6eb069edc + +user b push changes + + $ hg push 2>/dev/null + pushing to $TESTTMP/t + pushing branch testing of subrepo s + searching for changes + adding changesets + adding manifests + adding file changes + added 1 changesets with 1 changes to 1 files + +user a pulls, merges, commits + + $ cd ../ta + $ hg pull + pulling from $TESTTMP/t + searching for changes + adding changesets + adding manifests + adding file changes + added 1 changesets with 1 changes to 1 files (+1 heads) + (run 'hg heads' to see heads, 'hg merge' to merge) + $ hg merge 2>/dev/null + pulling subrepo s + 0 files updated, 0 files merged, 0 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ cat s/f + f + $ cat s/g + g + gg + ggg + $ hg commit -m 'merge' + committing subrepository s + $ hg status --subrepos --rev 1:5 + M .hgsubstate + M s/g + A s/f + $ hg debugsub + path s + source ../gitroot + revision f47b465e1bce645dbf37232a00574aa1546ca8d3 + $ hg push 2>/dev/null + pushing to $TESTTMP/t + pushing branch testing of subrepo s + searching for changes + adding changesets + adding manifests + adding file changes + added 2 changesets with 2 changes to 1 files + +make upstream git changes + + $ cd .. + $ git clone -q gitroot gitclone + $ cd gitclone + $ echo ff >> f + $ git commit -q -a -m ff + $ echo fff >> f + $ git commit -q -a -m fff + $ git push origin testing 2>/dev/null + +make and push changes to hg without updating the subrepo + + $ cd ../t + $ hg clone . ../td + updating to branch default + cloning subrepo s + checking out detached HEAD in subrepo s + check out a git branch if you intend to make changes + 3 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ cd ../td + $ echo aa >> a + $ hg commit -m aa + $ hg push + pushing to $TESTTMP/t + searching for changes + adding changesets + adding manifests + adding file changes + added 1 changesets with 1 changes to 1 files + +sync to upstream git, distribute changes + + $ cd ../ta + $ hg pull -u -q + $ cd s + $ git pull -q >/dev/null 2>/dev/null + $ cd .. + $ hg commit -m 'git upstream sync' + committing subrepository s + $ hg debugsub + path s + source ../gitroot + revision 32a343883b74769118bb1d3b4b1fbf9156f4dddc + $ hg push -q + + $ cd ../tb + $ hg pull -q + $ hg update 2>/dev/null + pulling subrepo s + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg debugsub + path s + source ../gitroot + revision 32a343883b74769118bb1d3b4b1fbf9156f4dddc + +update to a revision without the subrepo, keeping the local git repository + + $ cd ../t + $ hg up 0 + 0 files updated, 0 files merged, 2 files removed, 0 files unresolved + $ ls -a s + . + .. + .git + + $ hg up 2 + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ ls -a s + . + .. + .git + g + +archive subrepos + + $ cd ../tc + $ hg pull -q + $ hg archive --subrepos -r 5 ../archive 2>/dev/null + pulling subrepo s + $ cd ../archive + $ cat s/f + f + $ cat s/g + g + gg + ggg + +create nested repo + + $ cd .. 
+ $ hg init outer + $ cd outer + $ echo b>b + $ hg add b + $ hg commit -m b + + $ hg clone ../t inner + updating to branch default + cloning subrepo s + 3 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ echo inner = inner > .hgsub + $ hg add .hgsub + $ hg commit -m 'nested sub' + committing subrepository inner + +nested commit + + $ echo ffff >> inner/s/f + $ hg status --subrepos + M inner/s/f + $ hg commit -m nested + committing subrepository inner + committing subrepository inner/s + +nested archive + + $ hg archive --subrepos ../narchive + $ ls ../narchive/inner/s | grep -v pax_global_header + f + g + +Check hg update --clean + $ cd $TESTTMP/ta + $ echo > s/g + $ cd s + $ echo c1 > f1 + $ echo c1 > f2 + $ git add f1 + $ cd .. + $ hg status -S + M s/g + A s/f1 + $ ls s + f + f1 + f2 + g + $ hg update --clean + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg status -S + $ ls s + f + f1 + f2 + g
--- a/tests/test-subrepo-recursion.t Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/test-subrepo-recursion.t Tue Feb 01 17:52:25 2011 -0600 @@ -221,9 +221,48 @@ z1 +z2 -Test archiving to a directory tree: +Enable progress extension for archive tests: + + $ cp $HGRCPATH $HGRCPATH.no-progress + $ cat >> $HGRCPATH <<EOF + > [extensions] + > progress = + > [progress] + > assume-tty = 1 + > delay = 0 + > format = topic bar number + > refresh = 0 + > width = 60 + > EOF + +Test archiving to a directory tree (the doubled lines in the output +only show up in the test output, not in real usage): - $ hg archive --subrepos ../archive + $ hg archive --subrepos ../archive 2>&1 | $TESTDIR/filtercr.py + + archiving [ ] 0/3 + archiving [ ] 0/3 + archiving [=============> ] 1/3 + archiving [=============> ] 1/3 + archiving [===========================> ] 2/3 + archiving [===========================> ] 2/3 + archiving [==========================================>] 3/3 + archiving [==========================================>] 3/3 + + archiving (foo) [ ] 0/3 + archiving (foo) [ ] 0/3 + archiving (foo) [===========> ] 1/3 + archiving (foo) [===========> ] 1/3 + archiving (foo) [=======================> ] 2/3 + archiving (foo) [=======================> ] 2/3 + archiving (foo) [====================================>] 3/3 + archiving (foo) [====================================>] 3/3 + + archiving (foo/bar) [ ] 0/1 + archiving (foo/bar) [ ] 0/1 + archiving (foo/bar) [================================>] 1/1 + archiving (foo/bar) [================================>] 1/1 + \r (esc) $ find ../archive | sort ../archive ../archive/.hg_archival.txt @@ -239,7 +278,35 @@ Test archiving to zip file (unzip output is unstable): - $ hg archive --subrepos ../archive.zip + $ hg archive --subrepos ../archive.zip 2>&1 | $TESTDIR/filtercr.py + + archiving [ ] 0/3 + archiving [ ] 0/3 + archiving [=============> ] 1/3 + archiving [=============> ] 1/3 + archiving [===========================> ] 2/3 + archiving [===========================> ] 2/3 + archiving [==========================================>] 3/3 + archiving [==========================================>] 3/3 + + archiving (foo) [ ] 0/3 + archiving (foo) [ ] 0/3 + archiving (foo) [===========> ] 1/3 + archiving (foo) [===========> ] 1/3 + archiving (foo) [=======================> ] 2/3 + archiving (foo) [=======================> ] 2/3 + archiving (foo) [====================================>] 3/3 + archiving (foo) [====================================>] 3/3 + + archiving (foo/bar) [ ] 0/1 + archiving (foo/bar) [ ] 0/1 + archiving (foo/bar) [================================>] 1/1 + archiving (foo/bar) [================================>] 1/1 + \r (esc) + +Disable progress extension and cleanup: + + $ mv $HGRCPATH.no-progress $HGRCPATH Clone and test outgoing:
--- a/tests/test-tag.t Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/test-tag.t Tue Feb 01 17:52:25 2011 -0600 @@ -252,27 +252,29 @@ $ echo c1 > f1 $ hg ci -Am0 adding f1 + $ echo c2 > f2 + $ hg ci -Am1 + adding f2 + $ hg co -q 0 $ hg branch b1 marked working directory as branch b1 - $ echo c2 >> f1 - $ hg ci -m1 + $ hg ci -m2 $ hg up default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge b1 - 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + 0 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg tag t1 abort: uncommitted merge [255] $ hg status - M f1 $ hg tag --rev 1 t2 abort: uncommitted merge [255] $ hg tag --rev 1 --local t3 $ hg tags -v - tip 1:9466ada9ee90 - t3 1:9466ada9ee90 local + tip 2:8a8f787d0d5c + t3 1:c3adabd1a5f4 local $ cd ..
--- a/tests/test-tags.t Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/test-tags.t Tue Feb 01 17:52:25 2011 -0600 @@ -1,7 +1,7 @@ Helper functions: $ cacheexists() { - > [ -f .hg/tags.cache ] && echo "tag cache exists" || echo "no tag cache" + > [ -f .hg/cache/tags ] && echo "tag cache exists" || echo "no tag cache" > } $ dumptags() { @@ -36,9 +36,9 @@ Try corrupting the cache - $ printf 'a b' > .hg/tags.cache + $ printf 'a b' > .hg/cache/tags $ hg identify - .hg/tags.cache is corrupt, rebuilding it + .hg/cache/tags is corrupt, rebuilding it acb14030fe0a tip $ cacheexists tag cache exists @@ -69,13 +69,13 @@ Repeat with cold tag cache: - $ rm -f .hg/tags.cache + $ rm -f .hg/cache/tags $ hg identify b9154636be93 tip And again, but now unable to write tag cache: - $ rm -f .hg/tags.cache + $ rm -f .hg/cache/tags $ chmod 555 .hg $ hg identify b9154636be93 tip @@ -216,7 +216,7 @@ Dump cache: - $ cat .hg/tags.cache + $ cat .hg/cache/tags 4 0c192d7d5e6b78a714de54a2e9627952a877e25a 0c04f2a8af31de17fab7422878ee5a2dadbc943d 3 6fa450212aeb2a21ed616a54aea39a4a27894cd7 7d3b718c964ef37b89e550ebdafd5789e76ce1b0 2 7a94127795a33c10a370c93f731fd9fea0b79af6 0c04f2a8af31de17fab7422878ee5a2dadbc943d @@ -325,7 +325,7 @@ $ hg tags # partly stale tip 4:735c3ca72986 bar 0:bbd179dfa0a7 - $ rm -f .hg/tags.cache + $ rm -f .hg/cache/tags $ hg tags # cold cache tip 4:735c3ca72986 bar 0:bbd179dfa0a7
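The tag cache now lives at .hg/cache/tags; the dump in this test shows its head section, one line per repository head of the form '<rev> <head node> [<.hgtags filenode>]'. A sketch of reading just that section, based only on the lines dumped above and not on Mercurial's own cache code:

  def tagcache_heads(path='.hg/cache/tags'):
      # Sketch: collect leading '<rev> <node> [<fnode>]' lines and stop
      # at the first line that does not match that shape.
      heads = []
      for line in open(path):
          fields = line.split()
          if len(fields) < 2 or not fields[0].isdigit():
              break
          fnode = None
          if len(fields) > 2:
              fnode = fields[2]
          heads.append((int(fields[0]), fields[1], fnode))
      return heads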
--- a/tests/test-win32text.t Tue Feb 01 17:30:13 2011 -0600 +++ b/tests/test-win32text.t Tue Feb 01 17:52:25 2011 -0600 @@ -9,10 +9,6 @@ > data = data.replace('\n', '\r\n') > file(path, 'wb').write(data) > EOF - $ cat > print.py <<EOF - > import sys - > print(sys.stdin.read().replace('\n', '<LF>').replace('\r', '<CR>').replace('\0', '<NUL>')) - > EOF $ echo '[hooks]' >> .hg/hgrc $ echo 'pretxncommit.crlf = python:hgext.win32text.forbidcrlf' >> .hg/hgrc $ echo 'pretxnchangegroup.crlf = python:hgext.win32text.forbidcrlf' >> .hg/hgrc @@ -369,12 +365,13 @@ $ python -c 'file("f4.bat", "wb").write("rem empty\x0D\x0A")' $ hg add f3 f4.bat $ hg ci -m 6 - $ python print.py < bin - hello<NUL><CR><LF> - $ python print.py < f3 - some<LF>text<LF> - $ python print.py < f4.bat - rem empty<CR><LF> + $ cat bin + hello\x00\r (esc) + $ cat f3 + some + text + $ cat f4.bat + rem empty\r (esc) $ echo $ echo '[extensions]' >> .hg/hgrc @@ -405,38 +402,39 @@ tip $ rm f3 f4.bat bin - $ hg co -C 2>&1 | python -c 'import sys, os; sys.stdout.write(sys.stdin.read().replace(os.getcwd(), "...."))' + $ hg co -C WARNING: f4.bat already has CRLF line endings and does not need EOL conversion by the win32text plugin. Before your next commit, please reconsider your encode/decode settings in - Mercurial.ini or ..../.hg/hgrc. + Mercurial.ini or $TESTTMP/t/.hg/hgrc. 3 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ python print.py < bin - hello<NUL><CR><LF> - $ python print.py < f3 - some<CR><LF>text<CR><LF> - $ python print.py < f4.bat - rem empty<CR><LF> + $ cat bin + hello\x00\r (esc) + $ cat f3 + some\r (esc) + text\r (esc) + $ cat f4.bat + rem empty\r (esc) $ echo $ python -c 'file("f5.sh", "wb").write("# empty\x0D\x0A")' $ hg add f5.sh $ hg ci -m 7 - $ python print.py < f5.sh - # empty<CR><LF> - $ hg cat f5.sh | python print.py - # empty<LF> + $ cat f5.sh + # empty\r (esc) + $ hg cat f5.sh + # empty $ echo '% just linefeed' > linefeed $ hg ci -qAm 8 linefeed - $ python print.py < linefeed - % just linefeed<LF> - $ hg cat linefeed | python print.py - % just linefeed<LF> + $ cat linefeed + % just linefeed + $ hg cat linefeed + % just linefeed $ hg st -q $ hg revert -a linefeed no changes needed to linefeed - $ python print.py < linefeed - % just linefeed<LF> + $ cat linefeed + % just linefeed $ hg st -q $ echo modified >> linefeed $ hg st -q @@ -444,5 +442,5 @@ $ hg revert -a reverting linefeed $ hg st -q - $ python print.py < linefeed - % just linefeed<CR><LF> + $ cat linefeed + % just linefeed\r (esc)
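With print.py removed, the expected output leans on the test runner's convention for non-printable bytes: a line such as a CRLF-terminated 'rem empty' is written with backslash escapes and an '(esc)' marker, which is what 'rem empty\r (esc)' above denotes. A rough illustration of that rendering, an approximation rather than the runner's exact code:

  def show(line):
      # Approximation of the '(esc)' lines above: render control bytes
      # as backslash escapes and flag the line when anything changed.
      escaped = line.encode('unicode_escape').decode('ascii')
      if escaped != line:
          return escaped + ' (esc)'
      return line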