changeset 13427:2432b3227303 stable
merge default into stable for 1.8 code freeze
author     Matt Mackall <mpm@selenic.com>
date       Wed, 16 Feb 2011 14:13:22 -0600
parents    69238d0ca60f (current diff) 643b8212813e (diff)
children   5ef29e0dd418
files      hgext/bookmarks.py tests/test-hardlinks-safety.t tests/test-no-symlinks tests/test-no-symlinks.out
diffstat   173 files changed, 6958 insertions(+), 3867 deletions(-)
--- a/.hgignore	Sat Feb 12 16:08:41 2011 +0800
+++ b/.hgignore	Wed Feb 16 14:13:22 2011 -0600
@@ -9,6 +9,8 @@
 *.so
 *.pyd
 *.pyc
+*.pyo
+*$py.class
 *.swp
 *.prof
 \#*\#
--- a/Makefile	Sat Feb 12 16:08:41 2011 +0800
+++ b/Makefile	Wed Feb 16 14:13:22 2011 -0600
@@ -45,7 +45,7 @@
 clean:
 	-$(PYTHON) setup.py clean --all # ignore errors from this command
 	find . \( -name '*.py[cdo]' -o -name '*.so' \) -exec rm -f '{}' ';'
-	rm -f MANIFEST mercurial/__version__.py tests/*.err
+	rm -f MANIFEST tests/*.err
 	rm -rf build mercurial/locale
 	$(MAKE) -C doc clean
 
--- a/contrib/bash_completion	Sat Feb 12 16:08:41 2011 +0800
+++ b/contrib/bash_completion	Wed Feb 16 14:13:22 2011 -0600
@@ -241,7 +241,7 @@
             _hg_tags
             _hg_branches
             ;;
-        commit)
+        commit|record)
             _hg_status "mar"
             ;;
         remove)
--- a/contrib/check-code.py Sat Feb 12 16:08:41 2011 +0800 +++ b/contrib/check-code.py Wed Feb 16 14:13:22 2011 -0600 @@ -8,6 +8,7 @@ # GNU General Public License version 2 or any later version. import re, glob, os, sys +import keyword import optparse def repquote(m): @@ -64,6 +65,7 @@ ('^([^"\']|("[^"]*")|(\'[^\']*\'))*\\^', "^ must be quoted"), (r'^source\b', "don't use 'source', use '.'"), (r'touch -d', "don't use 'touch -d', use 'touch -t' instead"), + (r'ls\s+[^|-]+\s+-', "options to 'ls' must come before filenames"), ] testfilters = [ @@ -117,8 +119,8 @@ (r'^\s*(if|while|def|class|except|try)\s[^[]*:\s*[^\]#\s]+', "linebreak after :"), (r'class\s[^(]:', "old-style class, use class foo(object)"), - (r'^\s+del\(', "del isn't a function"), - (r'^\s+except\(', "except isn't a function"), + (r'\b(%s)\(' % '|'.join(keyword.kwlist), + "Python keyword is not a function"), (r',]', "unneeded trailing ',' in list"), # (r'class\s[A-Z][^\(]*\((?!Exception)', # "don't capitalize non-exception classes"), @@ -127,11 +129,15 @@ (r'[\x80-\xff]', "non-ASCII character literal"), (r'("\')\.format\(', "str.format() not available in Python 2.4"), (r'^\s*with\s+', "with not available in Python 2.4"), + (r'^\s*except.* as .*:', "except as not available in Python 2.4"), + (r'^\s*os\.path\.relpath', "relpath not available in Python 2.4"), (r'(?<!def)\s+(any|all|format)\(', "any/all/format not available in Python 2.4"), (r'(?<!def)\s+(callable)\(', "callable not available in Python 3, use hasattr(f, '__call__')"), (r'if\s.*\selse', "if ... else form not available in Python 2.4"), + (r'^\s*(%s)\s\s' % '|'.join(keyword.kwlist), + "gratuitous whitespace after Python keyword"), (r'([\(\[]\s\S)|(\S\s[\)\]])', "gratuitous whitespace in () or []"), # (r'\s\s=', "gratuitous whitespace before ="), (r'[^>< ](\+=|-=|!=|<>|<=|>=|<<=|>>=)\S', @@ -145,6 +151,9 @@ (r'raise Exception', "don't raise generic exceptions"), (r'ui\.(status|progress|write|note|warn)\([\'\"]x', "warning: unwrapped ui message"), + (r' is\s+(not\s+)?["\'0-9-]', "object comparison with literal"), + (r' [=!]=\s+(True|False|None)', + "comparison with singleton, use 'is' or 'is not' instead"), ] pyfilters = [ @@ -239,7 +248,9 @@ fc = 0 if not re.match(match, f): continue - pre = post = open(f).read() + fp = open(f) + pre = post = fp.read() + fp.close() if "no-" + "check-code" in pre: break for p, r in filters:
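The new checks above (calling a Python keyword as if it were a function, gratuitous whitespace after a keyword, ``is`` comparison against literals, ``==`` against True/False/None, and ``ls`` option ordering in tests) are exercised simply by pointing the checker at source and test files; a typical invocation, with the file patterns being illustrative, looks something like::

  $ python contrib/check-code.py mercurial/*.py hgext/*.py tests/*.t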
--- a/contrib/hgk	Sat Feb 12 16:08:41 2011 +0800
+++ b/contrib/hgk	Wed Feb 16 14:13:22 2011 -0600
@@ -482,7 +482,7 @@
     .bar.file add command -label "Quit" -command doquit
     menu .bar.help
     .bar add cascade -label "Help" -menu .bar.help
-    .bar.help add command -label "About gitk" -command about
+    .bar.help add command -label "About hgk" -command about
     . configure -menu .bar
 
     if {![info exists geometry(canv1)]} {
@@ -867,9 +867,9 @@
 	return
     }
     toplevel $w
-    wm title $w "About gitk"
+    wm title $w "About hgk"
     message $w.m -text {
-Gitk version 1.2
+Hgk version 1.2
 
 Copyright © 2005 Paul Mackerras
--- a/contrib/perf.py	Sat Feb 12 16:08:41 2011 +0800
+++ b/contrib/perf.py	Wed Feb 16 14:13:22 2011 -0600
@@ -80,11 +80,12 @@
     timer(d)
 
 def perfindex(ui, repo):
-    import mercurial.changelog
+    import mercurial.revlog
+    mercurial.revlog._prereadsize = 2**24 # disable lazy parser in old hg
+    n = repo["tip"].node()
     def d():
-        t = repo.changelog.tip()
-        repo.changelog = mercurial.changelog.changelog(repo.sopener)
-        repo.changelog._loadindexmap()
+        repo.invalidate()
+        repo[n]
     timer(d)
 
 def perfstartup(ui, repo):
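The rewritten perfindex benchmark now measures a full index load by invalidating the repository and looking up the tip node again. contrib/perf.py is loaded as an extension for one-off runs; a sketch of such a run (repository path illustrative) is::

  $ cd ~/src/hg-stable
  $ hg --config extensions.perf=contrib/perf.py perfindex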
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/win32/buildlocal.bat	Wed Feb 16 14:13:22 2011 -0600
@@ -0,0 +1,9 @@
+@echo off
+rem Double-click this file to (re)build Mercurial for Windows in place.
+rem Useful for testing and development.
+cd ..\..
+del /Q mercurial\*.pyd
+del /Q mercurial\*.pyc
+rmdir /Q /S mercurial\locale
+python setup.py build_py -c -d . build_ext -i build_mo
+pause
--- a/contrib/wix/dist.wxs	Sat Feb 12 16:08:41 2011 +0800
+++ b/contrib/wix/dist.wxs	Wed Feb 16 14:13:22 2011 -0600
@@ -16,23 +16,14 @@
       <File Name="mercurial.parsers.pyd" />
       <File Name="pyexpat.pyd" />
       <File Name="python26.dll" />
-      <File Name="pythoncom26.dll" />
-      <File Name="pywintypes26.dll" />
       <File Name="bz2.pyd" />
       <File Name="select.pyd" />
       <File Name="unicodedata.pyd" />
-      <File Name="win32api.pyd" />
-      <File Name="win32com.shell.shell.pyd" />
-      <File Name="win32console.pyd" />
-      <File Name="win32file.pyd" />
-      <File Name="win32gui.pyd" />
-      <File Name="win32pipe.pyd" />
-      <File Name="win32process.pyd" />
+      <File Name="_ctypes.pyd" />
       <File Name="_elementtree.pyd" />
       <File Name="_hashlib.pyd" />
       <File Name="_socket.pyd" />
       <File Name="_ssl.pyd" />
-      <File Name="_win32sysloader.pyd" />
     </Component>
   </DirectoryRef>
 </Fragment>
--- a/contrib/wix/guids.wxi	Sat Feb 12 16:08:41 2011 +0800
+++ b/contrib/wix/guids.wxi	Wed Feb 16 14:13:22 2011 -0600
@@ -9,7 +9,7 @@
   <?define contrib.vim.guid = {BB04903A-652D-4C4F-9590-2BD07A2304F2} ?>
 
   <!-- dist.wxs -->
-  <?define dist.guid = {0F63D160-0740-4BAF-BF25-0C6930310F51} ?>
+  <?define dist.guid = {C3B634A4-1B05-4A40-94A9-38EE853CF693} ?>
 
   <!-- doc.wxs -->
   <?define doc.hg.1.html.guid = {AAAA3FDA-EDC5-4220-B59D-D342722358A2} ?>
--- a/doc/Makefile	Sat Feb 12 16:08:41 2011 +0800
+++ b/doc/Makefile	Wed Feb 16 14:13:22 2011 -0600
@@ -1,11 +1,13 @@
 SOURCES=$(wildcard *.[0-9].txt)
 MAN=$(SOURCES:%.txt=%)
 HTML=$(SOURCES:%.txt=%.html)
-GENDOC=gendoc.py ../mercurial/commands.py ../mercurial/help.py ../mercurial/help/*.txt
+GENDOC=gendoc.py ../mercurial/commands.py ../mercurial/help.py \
+	../mercurial/help/*.txt ../hgext/*.py ../hgext/*/__init__.py
 PREFIX=/usr/local
 MANDIR=$(PREFIX)/share/man
 INSTALL=install -c -m 644
 PYTHON=python
+RSTARGS=
 
 export LANGUAGE=C
 export LC_ALL=C
@@ -24,11 +26,11 @@
 	mv $@.tmp $@
 
 %: %.txt common.txt
-	$(PYTHON) runrst hgmanpage --halt warning \
+	$(PYTHON) runrst hgmanpage $(RSTARGS) --halt warning \
 	  --strip-elements-with-class htmlonly $*.txt $*
 
 %.html: %.txt common.txt
-	$(PYTHON) runrst html --halt warning \
+	$(PYTHON) runrst html $(RSTARGS) --halt warning \
 	  --link-stylesheet --stylesheet-path style.css $*.txt $*.html
 
 MANIFEST: man html
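The new RSTARGS variable lets extra options be passed through to runrst (and thus to docutils) without editing the Makefile; for instance (the chosen docutils option is only an illustration)::

  $ make html RSTARGS=--traceback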
--- a/doc/gendoc.py	Sat Feb 12 16:08:41 2011 +0800
+++ b/doc/gendoc.py	Wed Feb 16 14:13:22 2011 -0600
@@ -40,7 +40,7 @@
         if longopt:
             allopts.append("--%s" % longopt)
         desc += default and _(" (default: %s)") % default or ""
-        yield(", ".join(allopts), desc)
+        yield (", ".join(allopts), desc)
 
 def get_cmd(cmd, cmdtable):
     d = {}
@@ -143,7 +143,7 @@
     opt_output = list(d['opts'])
     if opt_output:
        opts_len = max([len(line[0]) for line in opt_output])
-        ui.write(_("options:\n\n"))
+        ui.write(_("Options:\n\n"))
        for optstr, desc in opt_output:
            if desc:
                s = "%-*s %s" % (opts_len, optstr, desc)
--- a/doc/hgrc.5.txt	Sat Feb 12 16:08:41 2011 +0800
+++ b/doc/hgrc.5.txt	Wed Feb 16 14:13:22 2011 -0600
@@ -330,8 +330,8 @@
 ``diff``
 """"""""
 
-Settings used when displaying diffs. They are all Boolean and
-defaults to False.
+Settings used when displaying diffs. Everything except for ``unified`` is a
+Boolean and defaults to False.
 
 ``git``
     Use git extended diff format.
@@ -345,6 +345,8 @@
     Ignore changes in the amount of white space.
 ``ignoreblanklines``
     Ignore changes whose lines are all blank.
+``unified``
+    Number of lines of context to show.
 
 ``email``
 """""""""
@@ -727,8 +729,8 @@
 ``port``
     Optional. Port to connect to on mail server. Default: 25.
 ``tls``
-    Optional. Whether to connect to mail server using TLS. True or
-    False. Default: False.
+    Optional. Method to enable TLS when connecting to mail server: starttls,
+    smtps or none. Default: none.
 ``username``
     Optional. User name for authenticating with the SMTP server.
     Default: none.
@@ -876,6 +878,11 @@
     be prompted to enter a username. If no username is entered, the
     default ``USER@HOST`` is used instead. Default is False.
 
+``commitsubrepos``
+    Whether to commit modified subrepositories when committing the
+    parent repository. If False and one subrepository has uncommitted
+    changes, abort the commit.
+    Default is True.
 ``debug``
     Print debugging information. True or False. Default is False.
 ``editor``
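A minimal hgrc fragment exercising the newly documented settings might look like this (values are illustrative)::

  [diff]
  git = True
  unified = 5

  [smtp]
  tls = starttls

  [ui]
  commitsubrepos = False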
--- a/hgext/bookmarks.py Sat Feb 12 16:08:41 2011 +0800 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,582 +0,0 @@ -# Mercurial extension to provide the 'hg bookmark' command -# -# Copyright 2008 David Soria Parra <dsp@php.net> -# -# This software may be used and distributed according to the terms of the -# GNU General Public License version 2 or any later version. - -'''track a line of development with movable markers - -Bookmarks are local movable markers to changesets. Every bookmark -points to a changeset identified by its hash. If you commit a -changeset that is based on a changeset that has a bookmark on it, the -bookmark shifts to the new changeset. - -It is possible to use bookmark names in every revision lookup (e.g. -:hg:`merge`, :hg:`update`). - -By default, when several bookmarks point to the same changeset, they -will all move forward together. It is possible to obtain a more -git-like experience by adding the following configuration option to -your configuration file:: - - [bookmarks] - track.current = True - -This will cause Mercurial to track the bookmark that you are currently -using, and only update it. This is similar to git's approach to -branching. -''' - -from mercurial.i18n import _ -from mercurial.node import nullid, nullrev, bin, hex, short -from mercurial import util, commands, repair, extensions, pushkey, hg, url -from mercurial import revset -import os - -def write(repo): - '''Write bookmarks - - Write the given bookmark => hash dictionary to the .hg/bookmarks file - in a format equal to those of localtags. - - We also store a backup of the previous state in undo.bookmarks that - can be copied back on rollback. - ''' - refs = repo._bookmarks - - try: - bms = repo.opener('bookmarks').read() - except IOError: - bms = '' - repo.opener('undo.bookmarks', 'w').write(bms) - - if repo._bookmarkcurrent not in refs: - setcurrent(repo, None) - wlock = repo.wlock() - try: - file = repo.opener('bookmarks', 'w', atomictemp=True) - for refspec, node in refs.iteritems(): - file.write("%s %s\n" % (hex(node), refspec)) - file.rename() - - # touch 00changelog.i so hgweb reloads bookmarks (no lock needed) - try: - os.utime(repo.sjoin('00changelog.i'), None) - except OSError: - pass - - finally: - wlock.release() - -def setcurrent(repo, mark): - '''Set the name of the bookmark that we are currently on - - Set the name of the bookmark that we are on (hg update <bookmark>). - The name is recorded in .hg/bookmarks.current - ''' - current = repo._bookmarkcurrent - if current == mark: - return - - refs = repo._bookmarks - - # do not update if we do update to a rev equal to the current bookmark - if (mark and mark not in refs and - current and refs[current] == repo.changectx('.').node()): - return - if mark not in refs: - mark = '' - wlock = repo.wlock() - try: - file = repo.opener('bookmarks.current', 'w', atomictemp=True) - file.write(mark) - file.rename() - finally: - wlock.release() - repo._bookmarkcurrent = mark - -def bookmark(ui, repo, mark=None, rev=None, force=False, delete=False, rename=None): - '''track a line of development with movable markers - - Bookmarks are pointers to certain commits that move when - committing. Bookmarks are local. They can be renamed, copied and - deleted. It is possible to use bookmark names in :hg:`merge` and - :hg:`update` to merge and update respectively to a given bookmark. - - You can use :hg:`bookmark NAME` to set a bookmark on the working - directory's parent revision with the given name. 
If you specify - a revision using -r REV (where REV may be an existing bookmark), - the bookmark is assigned to that revision. - - Bookmarks can be pushed and pulled between repositories (see :hg:`help - push` and :hg:`help pull`). This requires the bookmark extension to be - enabled for both the local and remote repositories. - ''' - hexfn = ui.debugflag and hex or short - marks = repo._bookmarks - cur = repo.changectx('.').node() - - if rename: - if rename not in marks: - raise util.Abort(_("a bookmark of this name does not exist")) - if mark in marks and not force: - raise util.Abort(_("a bookmark of the same name already exists")) - if mark is None: - raise util.Abort(_("new bookmark name required")) - marks[mark] = marks[rename] - del marks[rename] - if repo._bookmarkcurrent == rename: - setcurrent(repo, mark) - write(repo) - return - - if delete: - if mark is None: - raise util.Abort(_("bookmark name required")) - if mark not in marks: - raise util.Abort(_("a bookmark of this name does not exist")) - if mark == repo._bookmarkcurrent: - setcurrent(repo, None) - del marks[mark] - write(repo) - return - - if mark != None: - if "\n" in mark: - raise util.Abort(_("bookmark name cannot contain newlines")) - mark = mark.strip() - if not mark: - raise util.Abort(_("bookmark names cannot consist entirely of " - "whitespace")) - if mark in marks and not force: - raise util.Abort(_("a bookmark of the same name already exists")) - if ((mark in repo.branchtags() or mark == repo.dirstate.branch()) - and not force): - raise util.Abort( - _("a bookmark cannot have the name of an existing branch")) - if rev: - marks[mark] = repo.lookup(rev) - else: - marks[mark] = repo.changectx('.').node() - setcurrent(repo, mark) - write(repo) - return - - if mark is None: - if rev: - raise util.Abort(_("bookmark name required")) - if len(marks) == 0: - ui.status(_("no bookmarks set\n")) - else: - for bmark, n in marks.iteritems(): - if ui.configbool('bookmarks', 'track.current'): - current = repo._bookmarkcurrent - if bmark == current and n == cur: - prefix, label = '*', 'bookmarks.current' - else: - prefix, label = ' ', '' - else: - if n == cur: - prefix, label = '*', 'bookmarks.current' - else: - prefix, label = ' ', '' - - if ui.quiet: - ui.write("%s\n" % bmark, label=label) - else: - ui.write(" %s %-25s %d:%s\n" % ( - prefix, bmark, repo.changelog.rev(n), hexfn(n)), - label=label) - return - -def _revstostrip(changelog, node): - srev = changelog.rev(node) - tostrip = [srev] - saveheads = [] - for r in xrange(srev, len(changelog)): - parents = changelog.parentrevs(r) - if parents[0] in tostrip or parents[1] in tostrip: - tostrip.append(r) - if parents[1] != nullrev: - for p in parents: - if p not in tostrip and p > srev: - saveheads.append(p) - return [r for r in tostrip if r not in saveheads] - -def strip(oldstrip, ui, repo, node, backup="all"): - """Strip bookmarks if revisions are stripped using - the mercurial.strip method. 
This usually happens during - qpush and qpop""" - revisions = _revstostrip(repo.changelog, node) - marks = repo._bookmarks - update = [] - for mark, n in marks.iteritems(): - if repo.changelog.rev(n) in revisions: - update.append(mark) - oldstrip(ui, repo, node, backup) - if len(update) > 0: - for m in update: - marks[m] = repo.changectx('.').node() - write(repo) - -def reposetup(ui, repo): - if not repo.local(): - return - - class bookmark_repo(repo.__class__): - - @util.propertycache - def _bookmarks(self): - '''Parse .hg/bookmarks file and return a dictionary - - Bookmarks are stored as {HASH}\\s{NAME}\\n (localtags format) values - in the .hg/bookmarks file. - Read the file and return a (name=>nodeid) dictionary - ''' - try: - bookmarks = {} - for line in self.opener('bookmarks'): - sha, refspec = line.strip().split(' ', 1) - bookmarks[refspec] = self.changelog.lookup(sha) - except: - pass - return bookmarks - - @util.propertycache - def _bookmarkcurrent(self): - '''Get the current bookmark - - If we use gittishsh branches we have a current bookmark that - we are on. This function returns the name of the bookmark. It - is stored in .hg/bookmarks.current - ''' - mark = None - if os.path.exists(self.join('bookmarks.current')): - file = self.opener('bookmarks.current') - # No readline() in posixfile_nt, reading everything is cheap - mark = (file.readlines() or [''])[0] - if mark == '': - mark = None - file.close() - return mark - - def rollback(self, dryrun=False): - if os.path.exists(self.join('undo.bookmarks')): - if not dryrun: - util.rename(self.join('undo.bookmarks'), self.join('bookmarks')) - elif not os.path.exists(self.sjoin("undo")): - # avoid "no rollback information available" message - return 0 - return super(bookmark_repo, self).rollback(dryrun) - - def lookup(self, key): - if key in self._bookmarks: - key = self._bookmarks[key] - return super(bookmark_repo, self).lookup(key) - - def _bookmarksupdate(self, parents, node): - marks = self._bookmarks - update = False - if ui.configbool('bookmarks', 'track.current'): - mark = self._bookmarkcurrent - if mark and marks[mark] in parents: - marks[mark] = node - update = True - else: - for mark, n in marks.items(): - if n in parents: - marks[mark] = node - update = True - if update: - write(self) - - def commitctx(self, ctx, error=False): - """Add a revision to the repository and - move the bookmark""" - wlock = self.wlock() # do both commit and bookmark with lock held - try: - node = super(bookmark_repo, self).commitctx(ctx, error) - if node is None: - return None - parents = self.changelog.parents(node) - if parents[1] == nullid: - parents = (parents[0],) - - self._bookmarksupdate(parents, node) - return node - finally: - wlock.release() - - def pull(self, remote, heads=None, force=False): - result = super(bookmark_repo, self).pull(remote, heads, force) - - self.ui.debug("checking for updated bookmarks\n") - rb = remote.listkeys('bookmarks') - changed = False - for k in rb.keys(): - if k in self._bookmarks: - nr, nl = rb[k], self._bookmarks[k] - if nr in self: - cr = self[nr] - cl = self[nl] - if cl.rev() >= cr.rev(): - continue - if cr in cl.descendants(): - self._bookmarks[k] = cr.node() - changed = True - self.ui.status(_("updating bookmark %s\n") % k) - else: - self.ui.warn(_("not updating divergent" - " bookmark %s\n") % k) - if changed: - write(repo) - - return result - - def push(self, remote, force=False, revs=None, newbranch=False): - result = super(bookmark_repo, self).push(remote, force, revs, - newbranch) - - 
self.ui.debug("checking for updated bookmarks\n") - rb = remote.listkeys('bookmarks') - for k in rb.keys(): - if k in self._bookmarks: - nr, nl = rb[k], self._bookmarks[k] - if nr in self: - cr = self[nr] - cl = self[nl] - if cl in cr.descendants(): - r = remote.pushkey('bookmarks', k, nr, nl) - if r: - self.ui.status(_("updating bookmark %s\n") % k) - else: - self.ui.warn(_('updating bookmark %s' - ' failed!\n') % k) - - return result - - def addchangegroup(self, *args, **kwargs): - parents = self.dirstate.parents() - - result = super(bookmark_repo, self).addchangegroup(*args, **kwargs) - if result > 1: - # We have more heads than before - return result - node = self.changelog.tip() - - self._bookmarksupdate(parents, node) - return result - - def _findtags(self): - """Merge bookmarks with normal tags""" - (tags, tagtypes) = super(bookmark_repo, self)._findtags() - tags.update(self._bookmarks) - return (tags, tagtypes) - - if hasattr(repo, 'invalidate'): - def invalidate(self): - super(bookmark_repo, self).invalidate() - for attr in ('_bookmarks', '_bookmarkcurrent'): - if attr in self.__dict__: - delattr(self, attr) - - repo.__class__ = bookmark_repo - -def listbookmarks(repo): - # We may try to list bookmarks on a repo type that does not - # support it (e.g., statichttprepository). - if not hasattr(repo, '_bookmarks'): - return {} - - d = {} - for k, v in repo._bookmarks.iteritems(): - d[k] = hex(v) - return d - -def pushbookmark(repo, key, old, new): - w = repo.wlock() - try: - marks = repo._bookmarks - if hex(marks.get(key, '')) != old: - return False - if new == '': - del marks[key] - else: - if new not in repo: - return False - marks[key] = repo[new].node() - write(repo) - return True - finally: - w.release() - -def pull(oldpull, ui, repo, source="default", **opts): - # translate bookmark args to rev args for actual pull - if opts.get('bookmark'): - # this is an unpleasant hack as pull will do this internally - source, branches = hg.parseurl(ui.expandpath(source), - opts.get('branch')) - other = hg.repository(hg.remoteui(repo, opts), source) - rb = other.listkeys('bookmarks') - - for b in opts['bookmark']: - if b not in rb: - raise util.Abort(_('remote bookmark %s not found!') % b) - opts.setdefault('rev', []).append(b) - - result = oldpull(ui, repo, source, **opts) - - # update specified bookmarks - if opts.get('bookmark'): - for b in opts['bookmark']: - # explicit pull overrides local bookmark if any - ui.status(_("importing bookmark %s\n") % b) - repo._bookmarks[b] = repo[rb[b]].node() - write(repo) - - return result - -def push(oldpush, ui, repo, dest=None, **opts): - dopush = True - if opts.get('bookmark'): - dopush = False - for b in opts['bookmark']: - if b in repo._bookmarks: - dopush = True - opts.setdefault('rev', []).append(b) - - result = 0 - if dopush: - result = oldpush(ui, repo, dest, **opts) - - if opts.get('bookmark'): - # this is an unpleasant hack as push will do this internally - dest = ui.expandpath(dest or 'default-push', dest or 'default') - dest, branches = hg.parseurl(dest, opts.get('branch')) - other = hg.repository(hg.remoteui(repo, opts), dest) - rb = other.listkeys('bookmarks') - for b in opts['bookmark']: - # explicit push overrides remote bookmark if any - if b in repo._bookmarks: - ui.status(_("exporting bookmark %s\n") % b) - new = repo[b].hex() - elif b in rb: - ui.status(_("deleting remote bookmark %s\n") % b) - new = '' # delete - else: - ui.warn(_('bookmark %s does not exist on the local ' - 'or remote repository!\n') % b) - return 2 - old = 
rb.get(b, '') - r = other.pushkey('bookmarks', b, old, new) - if not r: - ui.warn(_('updating bookmark %s failed!\n') % b) - if not result: - result = 2 - - return result - -def diffbookmarks(ui, repo, remote): - ui.status(_("searching for changed bookmarks\n")) - - lmarks = repo.listkeys('bookmarks') - rmarks = remote.listkeys('bookmarks') - - diff = sorted(set(rmarks) - set(lmarks)) - for k in diff: - ui.write(" %-25s %s\n" % (k, rmarks[k][:12])) - - if len(diff) <= 0: - ui.status(_("no changed bookmarks found\n")) - return 1 - return 0 - -def incoming(oldincoming, ui, repo, source="default", **opts): - if opts.get('bookmarks'): - source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch')) - other = hg.repository(hg.remoteui(repo, opts), source) - ui.status(_('comparing with %s\n') % url.hidepassword(source)) - return diffbookmarks(ui, repo, other) - else: - return oldincoming(ui, repo, source, **opts) - -def outgoing(oldoutgoing, ui, repo, dest=None, **opts): - if opts.get('bookmarks'): - dest = ui.expandpath(dest or 'default-push', dest or 'default') - dest, branches = hg.parseurl(dest, opts.get('branch')) - other = hg.repository(hg.remoteui(repo, opts), dest) - ui.status(_('comparing with %s\n') % url.hidepassword(dest)) - return diffbookmarks(ui, other, repo) - else: - return oldoutgoing(ui, repo, dest, **opts) - -def uisetup(ui): - extensions.wrapfunction(repair, "strip", strip) - if ui.configbool('bookmarks', 'track.current'): - extensions.wrapcommand(commands.table, 'update', updatecurbookmark) - - entry = extensions.wrapcommand(commands.table, 'pull', pull) - entry[1].append(('B', 'bookmark', [], - _("bookmark to import"), - _('BOOKMARK'))) - entry = extensions.wrapcommand(commands.table, 'push', push) - entry[1].append(('B', 'bookmark', [], - _("bookmark to export"), - _('BOOKMARK'))) - entry = extensions.wrapcommand(commands.table, 'incoming', incoming) - entry[1].append(('B', 'bookmarks', False, - _("compare bookmark"))) - entry = extensions.wrapcommand(commands.table, 'outgoing', outgoing) - entry[1].append(('B', 'bookmarks', False, - _("compare bookmark"))) - - pushkey.register('bookmarks', pushbookmark, listbookmarks) - -def updatecurbookmark(orig, ui, repo, *args, **opts): - '''Set the current bookmark - - If the user updates to a bookmark we update the .hg/bookmarks.current - file. - ''' - res = orig(ui, repo, *args, **opts) - rev = opts['rev'] - if not rev and len(args) > 0: - rev = args[0] - setcurrent(repo, rev) - return res - -def bmrevset(repo, subset, x): - """``bookmark([name])`` - The named bookmark or all bookmarks. 
- """ - # i18n: "bookmark" is a keyword - args = revset.getargs(x, 0, 1, _('bookmark takes one or no arguments')) - if args: - bm = revset.getstring(args[0], - # i18n: "bookmark" is a keyword - _('the argument to bookmark must be a string')) - bmrev = listbookmarks(repo).get(bm, None) - if bmrev: - bmrev = repo.changelog.rev(bin(bmrev)) - return [r for r in subset if r == bmrev] - bms = set([repo.changelog.rev(bin(r)) for r in listbookmarks(repo).values()]) - return [r for r in subset if r in bms] - -def extsetup(ui): - revset.symbols['bookmark'] = bmrevset - -cmdtable = { - "bookmarks": - (bookmark, - [('f', 'force', False, _('force')), - ('r', 'rev', '', _('revision'), _('REV')), - ('d', 'delete', False, _('delete a given bookmark')), - ('m', 'rename', '', _('rename a given bookmark'), _('NAME'))], - _('hg bookmarks [-f] [-d] [-m NAME] [-r REV] [NAME]')), -} - -colortable = {'bookmarks.current': 'green'} - -# tell hggettext to extract docstrings from these functions: -i18nfunctions = [bmrevset]
--- a/hgext/color.py	Sat Feb 12 16:08:41 2011 +0800
+++ b/hgext/color.py	Wed Feb 16 14:13:22 2011 -0600
@@ -92,6 +92,7 @@
             'cyan_background': 46,
             'white_background': 47}
 _styles = {'grep.match': 'red bold',
+           'bookmarks.current': 'green',
            'branches.active': 'none',
            'branches.closed': 'black bold',
            'branches.current': 'green',
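The new default maps the ``bookmarks.current`` label to green in ``hg bookmarks`` output; like any other label it can be overridden in the ``[color]`` section, for example (the style chosen here is illustrative)::

  [color]
  bookmarks.current = green bold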
--- a/hgext/convert/__init__.py Sat Feb 12 16:08:41 2011 +0800 +++ b/hgext/convert/__init__.py Wed Feb 16 14:13:22 2011 -0600 @@ -59,10 +59,10 @@ --sourcesort try to preserve source revisions order, only supported by Mercurial sources. - If <REVMAP> isn't given, it will be put in a default location - (<dest>/.hg/shamap by default). The <REVMAP> is a simple text file - that maps each source commit ID to the destination ID for that - revision, like so:: + If ``REVMAP`` isn't given, it will be put in a default location + (``<dest>/.hg/shamap`` by default). The ``REVMAP`` is a simple + text file that maps each source commit ID to the destination ID + for that revision, like so:: <source ID> <destination ID> @@ -138,15 +138,19 @@ Mercurial Source '''''''''''''''' - --config convert.hg.ignoreerrors=False (boolean) - ignore integrity errors when reading. Use it to fix Mercurial - repositories with missing revlogs, by converting from and to - Mercurial. - --config convert.hg.saverev=False (boolean) - store original revision ID in changeset (forces target IDs to - change) - --config convert.hg.startrev=0 (hg revision identifier) - convert start revision and its descendants + The Mercurial source recognizes the following configuration + options, which you can set on the command line with ``--config``: + + :convert.hg.ignoreerrors: ignore integrity errors when reading. + Use it to fix Mercurial repositories with missing revlogs, by + converting from and to Mercurial. Default is False. + + :convert.hg.saverev: store original. revision ID in changeset + (forces target IDs to change). It takes and boolean argument + and defaults to False. + + :convert.hg.startrev: convert start revision and its descendants. + It takes a hg revision identifier and defaults to 0. CVS Source '''''''''' @@ -154,42 +158,46 @@ CVS source will use a sandbox (i.e. a checked-out copy) from CVS to indicate the starting point of what will be converted. Direct access to the repository files is not needed, unless of course the - repository is :local:. The conversion uses the top level directory - in the sandbox to find the CVS repository, and then uses CVS rlog - commands to find files to convert. This means that unless a - filemap is given, all files under the starting directory will be + repository is ``:local:``. The conversion uses the top level + directory in the sandbox to find the CVS repository, and then uses + CVS rlog commands to find files to convert. This means that unless + a filemap is given, all files under the starting directory will be converted, and that any directory reorganization in the CVS sandbox is ignored. - The options shown are the defaults. + The following options can be used with ``--config``: + + :convert.cvsps.cache: Set to False to disable remote log caching, + for testing and debugging purposes. Default is True. + + :convert.cvsps.fuzz: Specify the maximum time (in seconds) that is + allowed between commits with identical user and log message in + a single changeset. When very large files were checked in as + part of a changeset then the default may not be long enough. + The default is 60. - --config convert.cvsps.cache=True (boolean) - Set to False to disable remote log caching, for testing and - debugging purposes. - --config convert.cvsps.fuzz=60 (integer) - Specify the maximum time (in seconds) that is allowed between - commits with identical user and log message in a single - changeset. When very large files were checked in as part of a - changeset then the default may not be long enough. 
- --config convert.cvsps.mergeto='{{mergetobranch ([-\\w]+)}}' - Specify a regular expression to which commit log messages are - matched. If a match occurs, then the conversion process will - insert a dummy revision merging the branch on which this log - message occurs to the branch indicated in the regex. - --config convert.cvsps.mergefrom='{{mergefrombranch ([-\\w]+)}}' - Specify a regular expression to which commit log messages are - matched. If a match occurs, then the conversion process will - add the most recent revision on the branch indicated in the - regex as the second parent of the changeset. - --config hook.cvslog - Specify a Python function to be called at the end of gathering - the CVS log. The function is passed a list with the log entries, - and can modify the entries in-place, or add or delete them. - --config hook.cvschangesets - Specify a Python function to be called after the changesets - are calculated from the the CVS log. The function is passed - a list with the changeset entries, and can modify the changesets - in-place, or add or delete them. + :convert.cvsps.mergeto: Specify a regular expression to which + commit log messages are matched. If a match occurs, then the + conversion process will insert a dummy revision merging the + branch on which this log message occurs to the branch + indicated in the regex. Default is ``{{mergetobranch + ([-\\w]+)}}`` + + :convert.cvsps.mergefrom: Specify a regular expression to which + commit log messages are matched. If a match occurs, then the + conversion process will add the most recent revision on the + branch indicated in the regex as the second parent of the + changeset. Default is ``{{mergefrombranch ([-\\w]+)}}`` + + :hook.cvslog: Specify a Python function to be called at the end of + gathering the CVS log. The function is passed a list with the + log entries, and can modify the entries in-place, or add or + delete them. + + :hook.cvschangesets: Specify a Python function to be called after + the changesets are calculated from the the CVS log. The + function is passed a list with the changeset entries, and can + modify the changesets in-place, or add or delete them. An additional "debugcvsps" Mercurial command allows the builtin changeset merging code to be run without doing a conversion. Its @@ -200,29 +208,33 @@ ''''''''''''''''' Subversion source detects classical trunk/branches/tags layouts. - By default, the supplied "svn://repo/path/" source URL is - converted as a single branch. If "svn://repo/path/trunk" exists it - replaces the default branch. If "svn://repo/path/branches" exists, - its subdirectories are listed as possible branches. If - "svn://repo/path/tags" exists, it is looked for tags referencing - converted branches. Default "trunk", "branches" and "tags" values - can be overridden with following options. Set them to paths + By default, the supplied ``svn://repo/path/`` source URL is + converted as a single branch. If ``svn://repo/path/trunk`` exists + it replaces the default branch. If ``svn://repo/path/branches`` + exists, its subdirectories are listed as possible branches. If + ``svn://repo/path/tags`` exists, it is looked for tags referencing + converted branches. Default ``trunk``, ``branches`` and ``tags`` + values can be overridden with following options. Set them to paths relative to the source URL, or leave them blank to disable auto detection. 
- --config convert.svn.branches=branches (directory name) - specify the directory containing branches - --config convert.svn.tags=tags (directory name) - specify the directory containing tags - --config convert.svn.trunk=trunk (directory name) - specify the name of the trunk branch + The following options can be set with ``--config``: + + :convert.svn.branches: specify the directory containing branches. + The defaults is ``branches``. + + :convert.svn.tags: specify the directory containing tags. The + default is ``tags``. + + :convert.svn.trunk: specify the name of the trunk branch The + defauls is ``trunk``. Source history can be retrieved starting at a specific revision, instead of being integrally converted. Only single branch conversions are supported. - --config convert.svn.startrev=0 (svn revision number) - specify start Subversion revision. + :convert.svn.startrev: specify start Subversion revision number. + The default is 0. Perforce Source ''''''''''''''' @@ -232,24 +244,27 @@ source to a flat Mercurial repository, ignoring labels, branches and integrations. Note that when a depot path is given you then usually should specify a target directory, because otherwise the - target may be named ...-hg. + target may be named ``...-hg``. It is possible to limit the amount of source history to be - converted by specifying an initial Perforce revision. + converted by specifying an initial Perforce revision: - --config convert.p4.startrev=0 (perforce changelist number) - specify initial Perforce revision. + :convert.p4.startrev: specify initial Perforce revision, a + Perforce changelist number). Mercurial Destination ''''''''''''''''''''' - --config convert.hg.clonebranches=False (boolean) - dispatch source branches in separate clones. - --config convert.hg.tagsbranch=default (branch name) - tag revisions branch name - --config convert.hg.usebranchnames=True (boolean) - preserve branch names + The following options are supported: + + :convert.hg.clonebranches: dispatch source branches in separate + clones. The default is False. + :convert.hg.tagsbranch: branch name for tag revisions, defaults to + ``default``. + + :convert.hg.usebranchnames: preserve branch names. The default is + True """ return convcmd.convert(ui, src, dest, revmapfile, **opts)
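With the convert help text now organised around ``--config`` keys, an invocation using one of them might look like the following (URL, directory name and target are illustrative)::

  $ hg convert --config convert.svn.trunk=stable svn://svn.example.com/project project-hg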
--- a/hgext/convert/subversion.py	Sat Feb 12 16:08:41 2011 +0800
+++ b/hgext/convert/subversion.py	Wed Feb 16 14:13:22 2011 -0600
@@ -914,7 +914,7 @@
     arg = encodeargs(args)
     hgexe = util.hgexecutable()
     cmd = '%s debugsvnlog' % util.shellquote(hgexe)
-    stdin, stdout = util.popen2(cmd)
+    stdin, stdout = util.popen2(util.quotecommand(cmd))
     stdin.write(arg)
     try:
         stdin.close()
--- a/hgext/eol.py	Sat Feb 12 16:08:41 2011 +0800
+++ b/hgext/eol.py	Wed Feb 16 14:13:22 2011 -0600
@@ -67,6 +67,11 @@
 Such files are normally not touched under the assumption that they
 have mixed EOLs on purpose.
 
+The extension provides ``cleverencode:`` and ``cleverdecode:`` filters
+like the deprecated win32text extension does. This means that you can
+disable win32text and enable eol and your filters will still work. You
+only need to these filters until you have prepared a ``.hgeol`` file.
+
 The ``win32text.forbid*`` hooks provided by the win32text extension
 have been unified into a single hook named ``eol.hook``. The hook will
 lookup the expected line endings from the ``.hgeol`` file, which means
@@ -115,6 +120,9 @@
     'to-lf': tolf,
     'to-crlf': tocrlf,
     'is-binary': isbinary,
+    # The following provide backwards compatibility with win32text
+    'cleverencode:': tolf,
+    'cleverdecode:': tocrlf
     }
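Concretely, a repository that still carries a win32text-style filter setup keeps working once eol is enabled; a sketch of such an hgrc (the file patterns are illustrative) is::

  [extensions]
  eol =
  win32text = !

  [encode]
  ** = cleverencode:

  [decode]
  ** = cleverdecode: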
--- a/hgext/gpg.py	Sat Feb 12 16:08:41 2011 +0800
+++ b/hgext/gpg.py	Wed Feb 16 14:13:22 2011 -0600
@@ -244,7 +244,9 @@
                            "(please commit .hgsigs manually "
                            "or use --force)"))
 
-    repo.wfile(".hgsigs", "ab").write(sigmessage)
+    sigsfile = repo.wfile(".hgsigs", "ab")
+    sigsfile.write(sigmessage)
+    sigsfile.close()
 
     if '.hgsigs' not in repo.dirstate:
         repo[None].add([".hgsigs"])
--- a/hgext/hgk.py	Sat Feb 12 16:08:41 2011 +0800
+++ b/hgext/hgk.py	Wed Feb 16 14:13:22 2011 -0600
@@ -181,14 +181,14 @@
             if i + x >= count:
                 l[chunk - x:] = [0] * (chunk - x)
                 break
-            if full != None:
+            if full is not None:
                 l[x] = repo[i + x]
                 l[x].changeset() # force reading
             else:
                 l[x] = 1
 
         for x in xrange(chunk - 1, -1, -1):
             if l[x] != 0:
-                yield (i + x, full != None and l[x] or None)
+                yield (i + x, full is not None and l[x] or None)
         if i == 0:
             break
--- a/hgext/inotify/linux/__init__.py	Sat Feb 12 16:08:41 2011 +0800
+++ b/hgext/inotify/linux/__init__.py	Wed Feb 16 14:13:22 2011 -0600
@@ -26,7 +26,10 @@
 def _read_procfs_value(name):
     def read_value():
         try:
-            return int(open(procfs_path + '/' + name).read())
+            fp = open(procfs_path + '/' + name)
+            r = int(fp.read())
+            fp.close()
+            return r
         except OSError:
             return None
--- a/hgext/keyword.py Sat Feb 12 16:08:41 2011 +0800 +++ b/hgext/keyword.py Wed Feb 16 14:13:22 2011 -0600 @@ -70,9 +70,8 @@ replaced with customized keywords and templates. Again, run :hg:`kwdemo` to control the results of your configuration changes. -Before changing/disabling active keywords, run :hg:`kwshrink` to avoid -the risk of inadvertently storing expanded keywords in the change -history. +Before changing/disabling active keywords, you must run :hg:`kwshrink` +to avoid storing expanded keywords in the change history. To force expansion after enabling it, or a configuration change, run :hg:`kwexpand`. @@ -101,6 +100,14 @@ # names of extensions using dorecord recordextensions = 'record' +colortable = { + 'kwfiles.enabled': 'green bold', + 'kwfiles.deleted': 'cyan bold underline', + 'kwfiles.enabledunknown': 'green', + 'kwfiles.ignored': 'bold', + 'kwfiles.ignoredunknown': 'none' +} + # date like in cvs' $Date utcdate = lambda x: util.datestr((x[0], 0), '%Y/%m/%d %H:%M:%S') # date like in svn's $Date @@ -111,7 +118,6 @@ # make keyword tools accessible kwtools = {'templater': None, 'hgcmd': ''} - def _defaultkwmaps(ui): '''Returns default keywordmaps according to keywordset configuration.''' templates = { @@ -170,14 +176,25 @@ for k, v in kwmaps) else: self.templates = _defaultkwmaps(self.ui) - escaped = '|'.join(map(re.escape, self.templates.keys())) - self.re_kw = re.compile(r'\$(%s)\$' % escaped) - self.re_kwexp = re.compile(r'\$(%s): [^$\n\r]*? \$' % escaped) - templatefilters.filters.update({'utcdate': utcdate, 'svnisodate': svnisodate, 'svnutcdate': svnutcdate}) + @util.propertycache + def escape(self): + '''Returns bar-separated and escaped keywords.''' + return '|'.join(map(re.escape, self.templates.keys())) + + @util.propertycache + def rekw(self): + '''Returns regex for unexpanded keywords.''' + return re.compile(r'\$(%s)\$' % self.escape) + + @util.propertycache + def rekwexp(self): + '''Returns regex for expanded keywords.''' + return re.compile(r'\$(%s): [^$\n\r]*? 
\$' % self.escape) + def substitute(self, data, path, ctx, subfunc): '''Replaces keywords in data with expanded template.''' def kwsub(mobj): @@ -191,11 +208,15 @@ return '$%s: %s $' % (kw, ekw) return subfunc(kwsub, data) + def linkctx(self, path, fileid): + '''Similar to filelog.linkrev, but returns a changectx.''' + return self.repo.filectx(path, fileid=fileid).changectx() + def expand(self, path, node, data): '''Returns data with keywords expanded.''' if not self.restrict and self.match(path) and not util.binary(data): - ctx = self.repo.filectx(path, fileid=node).changectx() - return self.substitute(data, path, ctx, self.re_kw.sub) + ctx = self.linkctx(path, node) + return self.substitute(data, path, ctx, self.rekw.sub) return data def iskwfile(self, cand, ctx): @@ -212,8 +233,8 @@ kwcmd = self.restrict and lookup # kwexpand/kwshrink if self.restrict or expand and lookup: mf = ctx.manifest() - fctx = ctx - subn = (self.restrict or rekw) and self.re_kw.subn or self.re_kwexp.subn + lctx = ctx + re_kw = (self.restrict or rekw) and self.rekw or self.rekwexp msg = (expand and _('overwriting %s expanding keywords\n') or _('overwriting %s shrinking keywords\n')) for f in candidates: @@ -225,12 +246,12 @@ continue if expand: if lookup: - fctx = self.repo.filectx(f, fileid=mf[f]).changectx() - data, found = self.substitute(data, f, fctx, subn) + lctx = self.linkctx(f, mf[f]) + data, found = self.substitute(data, f, lctx, re_kw.subn) elif self.restrict: - found = self.re_kw.search(data) + found = re_kw.search(data) else: - data, found = _shrinktext(data, subn) + data, found = _shrinktext(data, re_kw.subn) if found: self.ui.note(msg % f) self.repo.wwrite(f, data, ctx.flags(f)) @@ -242,7 +263,7 @@ def shrink(self, fname, text): '''Returns text with all keyword substitutions removed.''' if self.match(fname) and not util.binary(text): - return _shrinktext(text, self.re_kwexp.sub) + return _shrinktext(text, self.rekwexp.sub) return text def shrinklines(self, fname, lines): @@ -250,7 +271,7 @@ if self.match(fname): text = ''.join(lines) if not util.binary(text): - return _shrinktext(text, self.re_kwexp.sub).splitlines(True) + return _shrinktext(text, self.rekwexp.sub).splitlines(True) return lines def wread(self, fname, data): @@ -334,6 +355,9 @@ ui.note(_('creating temporary repository at %s\n') % tmpdir) repo = localrepo.localrepository(ui, tmpdir, True) ui.setconfig('keyword', fn, '') + svn = ui.configbool('keywordset', 'svn') + # explicitly set keywordset for demo output + ui.setconfig('keywordset', 'svn', svn) uikwmaps = ui.configitems('keywordmaps') if args or opts.get('rcfile'): @@ -341,7 +365,10 @@ if uikwmaps: ui.status(_('\textending current template maps\n')) if opts.get('default') or not uikwmaps: - ui.status(_('\toverriding default template maps\n')) + if svn: + ui.status(_('\toverriding default svn keywordset\n')) + else: + ui.status(_('\toverriding default cvs keywordset\n')) if opts.get('rcfile'): ui.readconfig(opts.get('rcfile')) if args: @@ -353,7 +380,10 @@ ui.readconfig(repo.join('hgrc')) kwmaps = dict(ui.configitems('keywordmaps')) elif opts.get('default'): - ui.status(_('\n\tconfiguration using default keyword template maps\n')) + if svn: + ui.status(_('\n\tconfiguration using default svn keywordset\n')) + else: + ui.status(_('\n\tconfiguration using default cvs keywordset\n')) kwmaps = _defaultkwmaps(ui) if uikwmaps: ui.status(_('\tdisabling current template maps\n')) @@ -367,6 +397,7 @@ reposetup(ui, repo) ui.write('[extensions]\nkeyword =\n') demoitems('keyword', 
ui.configitems('keyword')) + demoitems('keywordset', ui.configitems('keywordset')) demoitems('keywordmaps', kwmaps.iteritems()) keywords = '$' + '$\n$'.join(sorted(kwmaps.keys())) + '$\n' repo.wopener(fn, 'w').write(keywords) @@ -424,24 +455,26 @@ files = sorted(modified + added + clean) wctx = repo[None] kwfiles = kwt.iskwfile(files, wctx) + kwdeleted = kwt.iskwfile(deleted, wctx) kwunknown = kwt.iskwfile(unknown, wctx) if not opts.get('ignore') or opts.get('all'): - showfiles = kwfiles, kwunknown + showfiles = kwfiles, kwdeleted, kwunknown else: - showfiles = [], [] + showfiles = [], [], [] if opts.get('all') or opts.get('ignore'): showfiles += ([f for f in files if f not in kwfiles], [f for f in unknown if f not in kwunknown]) - for char, filenames in zip('KkIi', showfiles): + kwlabels = 'enabled deleted enabledunknown ignored ignoredunknown'.split() + kwstates = zip('K!kIi', showfiles, kwlabels) + for char, filenames, kwstate in kwstates: fmt = (opts.get('all') or ui.verbose) and '%s %%s\n' % char or '%s\n' for f in filenames: - ui.write(fmt % repo.pathto(f, cwd)) + ui.write(fmt % repo.pathto(f, cwd), label='kwfiles.' + kwstate) def shrink(ui, repo, *pats, **opts): '''revert expanded keywords in the working directory - Run before changing/disabling active keywords or if you experience - problems with :hg:`import` or :hg:`merge`. + Must be run before changing/disabling active keywords. kwshrink refuses to run if given files contain local changes. ''' @@ -603,8 +636,6 @@ finally: wlock.release() - repo.__class__ = kwrepo - def kwfilectx_cmp(orig, self, fctx): # keyword affects data size, comparing wdir and filelog size does # not make sense @@ -628,6 +659,8 @@ except KeyError: pass + repo.__class__ = kwrepo + cmdtable = { 'kwdemo': (demo,
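The kwfiles changes above add per-state coloring (enabled, deleted, unknown, ignored) to the file listing. A minimal configuration and query, with the file pattern being illustrative, would look something like::

  [extensions]
  keyword =

  [keyword]
  **.py =

followed by::

  $ hg kwfiles --all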
--- a/hgext/mq.py Sat Feb 12 16:08:41 2011 +0800 +++ b/hgext/mq.py Wed Feb 16 14:13:22 2011 -0600 @@ -86,6 +86,8 @@ parent = None format = None subject = None + branch = None + nodeid = None diffstart = 0 for line in file(pf): @@ -106,6 +108,10 @@ date = line[7:] elif line.startswith("# Parent "): parent = line[9:] + elif line.startswith("# Branch "): + branch = line[9:] + elif line.startswith("# Node ID "): + nodeid = line[10:] elif not line.startswith("# ") and line: message.append(line) format = None @@ -134,6 +140,9 @@ eatdiff(message) eatdiff(comments) + # Remember the exact starting line of the patch diffs before consuming + # empty lines, for external use by TortoiseHg and others + self.diffstartline = len(comments) eatempty(message) eatempty(comments) @@ -147,6 +156,9 @@ self.user = user self.date = date self.parent = parent + # nodeid and branch are for external use by TortoiseHg and others + self.nodeid = nodeid + self.branch = branch self.haspatch = diffstart > 1 self.plainmode = plainmode @@ -239,6 +251,7 @@ try: fh = open(os.path.join(path, 'patches.queue')) cur = fh.read().rstrip() + fh.close() if not cur: curpath = os.path.join(path, 'patches') else: @@ -793,6 +806,19 @@ return top, patch return None, None + def check_substate(self, repo): + '''return list of subrepos at a different revision than substate. + Abort if any subrepos have uncommitted changes.''' + inclsubs = [] + wctx = repo[None] + for s in wctx.substate: + if wctx.sub(s).dirty(True): + raise util.Abort( + _("uncommitted changes in subrepository %s") % s) + elif wctx.sub(s).dirty(): + inclsubs.append(s) + return inclsubs + def check_localchanges(self, repo, force=False, refresh=True): m, a, r, d = repo.status()[:4] if (m or a or r or d) and not force: @@ -826,16 +852,23 @@ % patchfn) else: raise util.Abort(_('patch "%s" already exists') % patchfn) + + inclsubs = self.check_substate(repo) + if inclsubs: + inclsubs.append('.hgsubstate') if opts.get('include') or opts.get('exclude') or pats: + if inclsubs: + pats = list(pats or []) + inclsubs match = cmdutil.match(repo, pats, opts) # detect missing files in pats def badfn(f, msg): - raise util.Abort('%s: %s' % (f, msg)) + if f != '.hgsubstate': # .hgsubstate is auto-created + raise util.Abort('%s: %s' % (f, msg)) match.bad = badfn m, a, r, d = repo.status(match=match)[:4] else: m, a, r, d = self.check_localchanges(repo, force=True) - match = cmdutil.matchfiles(repo, m + a + r) + match = cmdutil.matchfiles(repo, m + a + r + inclsubs) if len(repo[None].parents()) > 1: raise util.Abort(_('cannot manage merge changesets')) commitfiles = m + a + r @@ -1006,7 +1039,7 @@ raise util.Abort(_("patch %s not in series") % patch) def push(self, repo, patch=None, force=False, list=False, - mergeq=None, all=False, move=False): + mergeq=None, all=False, move=False, exact=False): diffopts = self.diffopts() wlock = repo.wlock() try: @@ -1015,7 +1048,7 @@ heads += ls if not heads: heads = [nullid] - if repo.dirstate.parents()[0] not in heads: + if repo.dirstate.parents()[0] not in heads and not exact: self.ui.status(_("(working directory not at a head)\n")) if not self.series: @@ -1064,9 +1097,21 @@ if not force: self.check_localchanges(repo) + if exact: + if move: + raise util.Abort(_("cannot use --exact and --move together")) + if self.applied: + raise util.Abort(_("cannot push --exact with applied patches")) + root = self.series[start] + target = patchheader(self.join(root), self.plainmode).parent + if not target: + raise util.Abort(_("%s does not have a parent recorded" % root)) 
+ if not repo[target] == repo['.']: + hg.update(repo, target) + if move: if not patch: - raise util.Abort(_("please specify the patch to move")) + raise util.Abort(_("please specify the patch to move")) for i, rpn in enumerate(self.full_series[start:]): # strip markers for patch guards if self.guard_re.split(rpn, 1)[0] == patch: @@ -1104,7 +1149,7 @@ for f in all_files: if f not in repo.dirstate: try: - util.unlink(repo.wjoin(f)) + util.unlinkpath(repo.wjoin(f)) except OSError, inst: if inst.errno != errno.ENOENT: raise @@ -1198,7 +1243,7 @@ raise util.Abort(_("deletions found between repo revs")) for f in a: try: - util.unlink(repo.wjoin(f)) + util.unlinkpath(repo.wjoin(f)) except OSError, e: if e.errno != errno.ENOENT: raise @@ -1249,6 +1294,8 @@ if repo.changelog.heads(top) != [top]: raise util.Abort(_("cannot refresh a revision with children")) + inclsubs = self.check_substate(repo) + cparents = repo.changelog.parents(top) patchparent = self.qparents(repo, top) ph = patchheader(self.join(patchfn), self.plainmode) @@ -1272,10 +1319,10 @@ # and then commit. # # this should really read: - # mm, dd, aa, aa2 = repo.status(tip, patchparent)[:4] + # mm, dd, aa = repo.status(top, patchparent)[:3] # but we do it backwards to take advantage of manifest/chlog # caching against the next repo.status call - mm, aa, dd, aa2 = repo.status(patchparent, top)[:4] + mm, aa, dd = repo.status(patchparent, top)[:3] changes = repo.changelog.read(top) man = repo.manifest.read(changes[0]) aaa = aa[:] @@ -1291,49 +1338,43 @@ else: match = cmdutil.matchall(repo) m, a, r, d = repo.status(match=match)[:4] + mm = set(mm) + aa = set(aa) + dd = set(dd) # we might end up with files that were added between # qtip and the dirstate parent, but then changed in the # local dirstate. in this case, we want them to only # show up in the added section for x in m: - if x == '.hgsub' or x == '.hgsubstate': - self.ui.warn(_('warning: not refreshing %s\n') % x) - continue if x not in aa: - mm.append(x) + mm.add(x) # we might end up with files added by the local dirstate that # were deleted by the patch. In this case, they should only # show up in the changed section. 
for x in a: - if x == '.hgsub' or x == '.hgsubstate': - self.ui.warn(_('warning: not adding %s\n') % x) - continue if x in dd: - del dd[dd.index(x)] - mm.append(x) + dd.remove(x) + mm.add(x) else: - aa.append(x) + aa.add(x) # make sure any files deleted in the local dirstate # are not in the add or change column of the patch forget = [] for x in d + r: - if x == '.hgsub' or x == '.hgsubstate': - self.ui.warn(_('warning: not removing %s\n') % x) - continue if x in aa: - del aa[aa.index(x)] + aa.remove(x) forget.append(x) continue - elif x in mm: - del mm[mm.index(x)] - dd.append(x) - - m = list(set(mm)) - r = list(set(dd)) - a = list(set(aa)) + else: + mm.discard(x) + dd.add(x) + + m = list(mm) + r = list(dd) + a = list(aa) c = [filter(matchfn, l) for l in (m, a, r)] - match = cmdutil.matchfiles(repo, set(c[0] + c[1] + c[2])) + match = cmdutil.matchfiles(repo, set(c[0] + c[1] + c[2] + inclsubs)) chunks = patch.diff(repo, patchparent, match=match, changes=c, opts=diffopts) for chunk in chunks: @@ -1531,7 +1572,7 @@ l = line.rstrip() l = l[10:].split(' ') qpp = [bin(x) for x in l] - elif datastart != None: + elif datastart is not None: l = line.rstrip() n, name = l.split(':', 1) if n: @@ -1741,7 +1782,9 @@ _('need --name to import a patch from -')) text = sys.stdin.read() else: - text = url.open(self.ui, filename).read() + fp = url.open(self.ui, filename) + text = fp.read() + fp.close() except (OSError, IOError): raise util.Abort(_("unable to read file %s") % filename) if not patchname: @@ -1750,6 +1793,7 @@ checkfile(patchname) patchf = self.opener(patchname, "w") patchf.write(text) + patchf.close() if not force: checkseries(patchname) if patchname not in self.series: @@ -1761,6 +1805,8 @@ self.added.append(patchname) patchname = None + self.removeundo(repo) + def delete(ui, repo, *patches, **opts): """remove patches from queue @@ -2346,7 +2392,8 @@ mergeq = queue(ui, repo.join(""), newpath) ui.warn(_("merging with queue at: %s\n") % mergeq.path) ret = q.push(repo, patch, force=opts.get('force'), list=opts.get('list'), - mergeq=mergeq, all=opts.get('all'), move=opts.get('move')) + mergeq=mergeq, all=opts.get('all'), move=opts.get('move'), + exact=opts.get('exact')) return ret def pop(ui, repo, patch=None, **opts): @@ -2746,6 +2793,7 @@ try: fh = repo.opener(_allqueues, 'r') queues = [queue.strip() for queue in fh if queue.strip()] + fh.close() if current not in queues: queues.append(current) except IOError: @@ -2880,7 +2928,7 @@ return super(mqrepo, self).commit(text, user, date, match, force, editor, extra) - def push(self, remote, force=False, revs=None, newbranch=False): + def checkpush(self, force, revs): if self.mq.applied and not force: haspatches = True if revs: @@ -2891,7 +2939,7 @@ haspatches = bool([n for n in revs if n in applied]) if haspatches: raise util.Abort(_('source has mq patches applied')) - return super(mqrepo, self).push(remote, force, revs, newbranch) + super(mqrepo, self).checkpush(force, revs) def _findtags(self): '''augment tags from base class with patch tags''' @@ -2903,7 +2951,7 @@ mqtags = [(patch.node, patch.name) for patch in q.applied] - if mqtags[-1][0] not in self.changelog.nodemap: + if mqtags[-1][0] not in self: self.ui.warn(_('mq status file refers to unknown node %s\n') % short(mqtags[-1][0])) return result @@ -2928,7 +2976,7 @@ cl = self.changelog qbasenode = q.applied[0].node - if qbasenode not in cl.nodemap: + if qbasenode not in self: self.ui.warn(_('mq status file refers to unknown node %s\n') % short(qbasenode)) return super(mqrepo, 
self)._branchtags(partial, lrev) @@ -3122,6 +3170,7 @@ "^qpush": (push, [('f', 'force', None, _('apply on top of local changes')), + ('e', 'exact', None, _('apply the target patch to its recorded parent')), ('l', 'list', None, _('list patch name in commit text')), ('a', 'all', None, _('apply all patches')), ('m', 'merge', None, _('merge from another queue (DEPRECATED)')),
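With the new flag, ``hg qpush --exact`` first updates the working directory to the parent recorded in the patch header (see the ``# Parent``/``# Node ID`` parsing added above) and only then applies the patch; it refuses to run together with ``--move`` or while other patches are applied. A minimal usage sketch::

  $ hg qpop -a
  $ hg qpush --exact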
--- a/hgext/patchbomb.py	Sat Feb 12 16:08:41 2011 +0800
+++ b/hgext/patchbomb.py	Wed Feb 16 14:13:22 2011 -0600
@@ -193,6 +193,9 @@
     PAGER environment variable is set, your pager will be fired up once
     for each patchbomb message, so you can verify everything is alright.
 
+    In case email sending fails, you will find a backup of your series
+    introductory message in ``.hg/last-email.txt``.
+
     Examples::
 
       hg email -r 3000          # send patch 3000 only
@@ -258,7 +261,10 @@
         tmpfn = os.path.join(tmpdir, 'bundle')
         try:
             commands.bundle(ui, repo, tmpfn, dest, **opts)
-            return open(tmpfn, 'rb').read()
+            fp = open(tmpfn, 'rb')
+            data = fp.read()
+            fp.close()
+            return data
         finally:
             try:
                 os.unlink(tmpfn)
@@ -309,6 +315,10 @@
             ui.write(_('\nWrite the introductory message for the '
                        'patch series.\n\n'))
         body = ui.edit(body, sender)
+        # Save serie description in case sendmail fails
+        msgfile = repo.opener('last-email.txt', 'wb')
+        msgfile.write(body)
+        msgfile.close()
         return body
 
 def getpatchmsgs(patches, patchnames=None):
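Since the introductory message is now saved before sending, a failed run can be retried without retyping it; patchbomb's ``--desc`` option reads the series description from a file, so a follow-up attempt might look like this (revisions and address are illustrative)::

  $ hg email --desc .hg/last-email.txt -r 3000:3005 -t patches@example.com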
--- a/hgext/progress.py Sat Feb 12 16:08:41 2011 +0800 +++ b/hgext/progress.py Wed Feb 16 14:13:22 2011 -0600 @@ -28,7 +28,7 @@ [progress] delay = 3 # number of seconds (float) before showing the progress bar refresh = 0.1 # time in seconds between refreshes of the progress bar - format = topic bar number # format of the progress bar + format = topic bar number estimate # format of the progress bar width = <none> # if set, the maximum width of the progress information # (that is, min(width, term width) will be used) clear-complete = True # clear the progress bar after it's done @@ -36,15 +36,17 @@ assume-tty = False # if true, ALWAYS show a progress bar, unless # disable is given -Valid entries for the format field are topic, bar, number, unit, and -item. item defaults to the last 20 characters of the item, but this -can be changed by adding either ``-<num>`` which would take the last -num characters, or ``+<num>`` for the first num characters. +Valid entries for the format field are topic, bar, number, unit, +estimate, and item. item defaults to the last 20 characters of the +item, but this can be changed by adding either ``-<num>`` which would +take the last num characters, or ``+<num>`` for the first num +characters. """ import sys import time +from mercurial.i18n import _ from mercurial import util def spacejoin(*args): @@ -54,6 +56,43 @@ return (getattr(sys.stderr, 'isatty', None) and (sys.stderr.isatty() or ui.configbool('progress', 'assume-tty'))) +def fmtremaining(seconds): + if seconds < 60: + # i18n: format XX seconds as "XXs" + return _("%02ds") % (seconds) + minutes = seconds // 60 + if minutes < 60: + seconds -= minutes * 60 + # i18n: format X minutes and YY seconds as "XmYYs" + return _("%dm%02ds") % (minutes, seconds) + # we're going to ignore seconds in this case + minutes += 1 + hours = minutes // 60 + minutes -= hours * 60 + if hours < 30: + # i18n: format X hours and YY minutes as "XhYYm" + return _("%dh%02dm") % (hours, minutes) + # we're going to ignore minutes in this case + hours += 1 + days = hours // 24 + hours -= days * 24 + if days < 15: + # i18n: format X days and YY hours as "XdYYh" + return _("%dd%02dh") % (days, hours) + # we're going to ignore hours in this case + days += 1 + weeks = days // 7 + days -= weeks * 7 + if weeks < 55: + # i18n: format X weeks and YY days as "XwYYd" + return _("%dw%02dd") % (weeks, days) + # we're going to ignore days and treat a year as 52 weeks + weeks += 1 + years = weeks // 52 + weeks -= years * 52 + # i18n: format X years and YY weeks as "XyYYw" + return _("%dy%02dw") % (years, weeks) + class progbar(object): def __init__(self, ui): self.ui = ui @@ -61,6 +100,9 @@ def resetstate(self): self.topics = [] + self.topicstates = {} + self.starttimes = {} + self.startvals = {} self.printed = False self.lastprint = time.time() + float(self.ui.config( 'progress', 'delay', default=3)) @@ -69,9 +111,9 @@ 'progress', 'refresh', default=0.1)) self.order = self.ui.configlist( 'progress', 'format', - default=['topic', 'bar', 'number']) + default=['topic', 'bar', 'number', 'estimate']) - def show(self, topic, pos, item, unit, total): + def show(self, now, topic, pos, item, unit, total): if not shouldprint(self.ui): return termwidth = self.width() @@ -108,10 +150,12 @@ needprogress = True elif indicator == 'unit' and unit: add = unit + elif indicator == 'estimate': + add = self.estimate(topic, pos, total, now) if not needprogress: head = spacejoin(head, add) else: - tail = spacejoin(add, tail) + tail = spacejoin(tail, add) if needprogress: 
used = 0 if head: @@ -159,19 +203,44 @@ tw = self.ui.termwidth() return min(int(self.ui.config('progress', 'width', default=tw)), tw) + def estimate(self, topic, pos, total, now): + if total is None: + return '' + initialpos = self.startvals[topic] + target = total - initialpos + delta = pos - initialpos + if delta > 0: + elapsed = now - self.starttimes[topic] + if elapsed > float( + self.ui.config('progress', 'estimate', default=2)): + seconds = (elapsed * (target - delta)) // delta + 1 + return fmtremaining(seconds) + return '' + def progress(self, topic, pos, item='', unit='', total=None): + now = time.time() if pos is None: - if self.topics and self.topics[-1] == topic and self.printed: + self.starttimes.pop(topic, None) + self.startvals.pop(topic, None) + self.topicstates.pop(topic, None) + # reset the progress bar if this is the outermost topic + if self.topics and self.topics[0] == topic and self.printed: self.complete() self.resetstate() + # truncate the list of topics assuming all topics within + # this one are also closed + if topic in self.topics: + self.topics = self.topics[:self.topics.index(topic)] else: if topic not in self.topics: + self.starttimes[topic] = now + self.startvals[topic] = pos self.topics.append(topic) - now = time.time() - if (now - self.lastprint >= self.refresh - and topic == self.topics[-1]): + self.topicstates[topic] = pos, item, unit, total + if now - self.lastprint >= self.refresh and self.topics: self.lastprint = now - self.show(topic, pos, item, unit, total) + current = self.topics[-1] + self.show(now, topic, *self.topicstates[topic]) def uisetup(ui): class progressui(ui.__class__):
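The new ``estimate`` field rests on a simple proportional model: scale the time elapsed so far by the amount of work still outstanding, then render the result with fmtremaining(). A rough, runnable restatement of that arithmetic (illustrative names, not the extension's API):

    def remaining_seconds(elapsed, done, total):
        # mirrors estimate() above: seconds = elapsed * (target - delta) // delta + 1
        if done <= 0:
            return None
        return elapsed * (total - done) // done + 1

    print(remaining_seconds(10.0, 25, 100))   # 25% done after 10s -> about 31s left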
--- a/hgext/rebase.py Sat Feb 12 16:08:41 2011 +0800 +++ b/hgext/rebase.py Wed Feb 16 14:13:22 2011 -0600 @@ -215,7 +215,7 @@ clearstatus(repo) ui.note(_("rebase completed\n")) if os.path.exists(repo.sjoin('undo')): - util.unlink(repo.sjoin('undo')) + util.unlinkpath(repo.sjoin('undo')) if skipped: ui.note(_("%d revisions have been skipped\n") % len(skipped)) finally: @@ -393,7 +393,7 @@ def clearstatus(repo): 'Remove the status files' if os.path.exists(repo.join("rebasestate")): - util.unlink(repo.join("rebasestate")) + util.unlinkpath(repo.join("rebasestate")) def restorestatus(repo): 'Restore a previously stored status'
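Both rebase hunks switch from util.unlink to util.unlinkpath when clearing state files. As a hedged sketch of what an unlinkpath-style helper usually does (the real util.unlinkpath may differ in detail), it removes the file and then prunes any parent directories the removal left empty:

    import os

    def unlinkpath(f):
        os.unlink(f)
        try:
            # drop parents that are now empty; stops at the first non-empty one
            os.removedirs(os.path.dirname(f))
        except OSError:
            pass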
--- a/hgext/record.py Sat Feb 12 16:08:41 2011 +0800 +++ b/hgext/record.py Wed Feb 16 14:13:22 2011 -0600 @@ -10,7 +10,7 @@ from mercurial.i18n import gettext, _ from mercurial import cmdutil, commands, extensions, hg, mdiff, patch from mercurial import util -import copy, cStringIO, errno, os, re, tempfile +import copy, cStringIO, errno, os, re, shutil, tempfile lines_re = re.compile(r'@@ -(\d+),(\d+) \+(\d+),(\d+) @@\s*(.*)') @@ -42,7 +42,7 @@ line = lr.readline() if not line: break - if line.startswith('diff --git a/'): + if line.startswith('diff --git a/') or line.startswith('diff -r '): def notheader(line): s = line.split(None, 1) return not s or s[0] not in ('---', 'diff') @@ -70,7 +70,8 @@ XXX shoudn't we move this to mercurial/patch.py ? """ - diff_re = re.compile('diff --git a/(.*) b/(.*)$') + diffgit_re = re.compile('diff --git a/(.*) b/(.*)$') + diff_re = re.compile('diff -r .* (.*)$') allhunks_re = re.compile('(?:index|new file|deleted file) ') pretty_re = re.compile('(?:new file|deleted file) ') special_re = re.compile('(?:index|new|deleted|copy|rename) ') @@ -80,9 +81,7 @@ self.hunks = [] def binary(self): - for h in self.header: - if h.startswith('index '): - return True + return util.any(h.startswith('index ') for h in self.header) def pretty(self, fp): for h in self.header: @@ -105,15 +104,17 @@ fp.write(''.join(self.header)) def allhunks(self): - for h in self.header: - if self.allhunks_re.match(h): - return True + return util.any(self.allhunks_re.match(h) for h in self.header) def files(self): - fromfile, tofile = self.diff_re.match(self.header[0]).groups() - if fromfile == tofile: - return [fromfile] - return [fromfile, tofile] + match = self.diffgit_re.match(self.header[0]) + if match: + fromfile, tofile = match.groups() + if fromfile == tofile: + return [fromfile] + return [fromfile, tofile] + else: + return self.diff_re.match(self.header[0]).groups() def filename(self): return self.files()[-1] @@ -122,9 +123,7 @@ return '<header %s>' % (' '.join(map(repr, self.files()))) def special(self): - for h in self.header: - if self.special_re.match(h): - return True + return util.any(self.special_re.match(h) for h in self.header) def countchanges(hunk): """hunk -> (n+,n-)""" @@ -173,7 +172,7 @@ return '<hunk %r@%d>' % (self.filename(), self.fromline) def parsepatch(fp): - """patch -> [] of hunks """ + """patch -> [] of headers -> [] of hunks """ class parser(object): """patch parsing state machine""" def __init__(self): @@ -184,7 +183,7 @@ self.context = [] self.before = [] self.hunk = [] - self.stream = [] + self.headers = [] def addrange(self, limits): fromstart, fromend, tostart, toend, proc = limits @@ -197,7 +196,6 @@ h = hunk(self.header, self.fromline, self.toline, self.proc, self.before, self.hunk, context) self.header.hunks.append(h) - self.stream.append(h) self.fromline += len(self.before) + h.removed self.toline += len(self.before) + h.added self.before = [] @@ -214,12 +212,12 @@ def newfile(self, hdr): self.addcontext([]) h = header(hdr) - self.stream.append(h) + self.headers.append(h) self.header = h def finished(self): self.addcontext([]) - return self.stream + return self.headers transitions = { 'file': {'context': addcontext, @@ -248,27 +246,10 @@ state = newstate return p.finished() -def filterpatch(ui, chunks): +def filterpatch(ui, headers): """Interactively filter patch chunks into applied-only chunks""" - chunks = list(chunks) - chunks.reverse() - seen = set() - def consumefile(): - """fetch next portion from chunks until a 'header' is seen - NB: header == 
new-file mark - """ - consumed = [] - while chunks: - if isinstance(chunks[-1], header): - break - else: - consumed.append(chunks.pop()) - return consumed - resp_all = [None] # this two are changed from inside prompt, - resp_file = [None] # so can't be usual variables - applied = {} # 'filename' -> [] of chunks - def prompt(query): + def prompt(skipfile, skipall, query): """prompt query, and process base inputs - y/n for the rest of file @@ -276,13 +257,12 @@ - ? (help) - q (quit) - Returns True/False and sets reps_all and resp_file as - appropriate. + Return True/False and possibly updated skipfile and skipall. """ - if resp_all[0] is not None: - return resp_all[0] - if resp_file[0] is not None: - return resp_file[0] + if skipall is not None: + return skipall, skipfile, skipall + if skipfile is not None: + return skipfile, skipfile, skipall while True: resps = _('[Ynsfdaq?]') choices = (_('&Yes, record this change'), @@ -307,47 +287,48 @@ elif r == 1: # no ret = False elif r == 2: # Skip - ret = resp_file[0] = False + ret = skipfile = False elif r == 3: # file (Record remaining) - ret = resp_file[0] = True + ret = skipfile = True elif r == 4: # done, skip remaining - ret = resp_all[0] = False + ret = skipall = False elif r == 5: # all - ret = resp_all[0] = True + ret = skipall = True elif r == 6: # quit raise util.Abort(_('user quit')) - return ret - pos, total = 0, len(chunks) - 1 - while chunks: - pos = total - len(chunks) + 1 - chunk = chunks.pop() - if isinstance(chunk, header): - # new-file mark - resp_file = [None] - fixoffset = 0 - hdr = ''.join(chunk.header) - if hdr in seen: - consumefile() - continue - seen.add(hdr) - if resp_all[0] is None: + return ret, skipfile, skipall + + seen = set() + applied = {} # 'filename' -> [] of chunks + skipfile, skipall = None, None + pos, total = 1, sum(len(h.hunks) for h in headers) + for h in headers: + pos += len(h.hunks) + skipfile = None + fixoffset = 0 + hdr = ''.join(h.header) + if hdr in seen: + continue + seen.add(hdr) + if skipall is None: + h.pretty(ui) + msg = (_('examine changes to %s?') % + _(' and ').join(map(repr, h.files()))) + r, skipfile, skipall = prompt(skipfile, skipall, msg) + if not r: + continue + applied[h.filename()] = [h] + if h.allhunks(): + applied[h.filename()] += h.hunks + continue + for i, chunk in enumerate(h.hunks): + if skipfile is None and skipall is None: chunk.pretty(ui) - r = prompt(_('examine changes to %s?') % - _(' and ').join(map(repr, chunk.files()))) - if r: - applied[chunk.filename()] = [chunk] - if chunk.allhunks(): - applied[chunk.filename()] += consumefile() - else: - consumefile() - else: - # new hunk - if resp_file[0] is None and resp_all[0] is None: - chunk.pretty(ui) - r = total == 1 and prompt(_('record this change to %r?') % - chunk.filename()) \ - or prompt(_('record change %d/%d to %r?') % - (pos, total, chunk.filename())) + msg = (total == 1 + and (_('record this change to %r?') % chunk.filename()) + or (_('record change %d/%d to %r?') % + (pos - len(h.hunks) + i, total, chunk.filename()))) + r, skipfile, skipall = prompt(skipfile, skipall, msg) if r: if fixoffset: chunk = copy.copy(chunk) @@ -403,8 +384,6 @@ def committomq(ui, repo, *pats, **opts): mq.new(ui, repo, patch, *pats, **opts) - opts = opts.copy() - opts['force'] = True # always 'qnew -f' dorecord(ui, repo, committomq, *pats, **opts) @@ -415,21 +394,22 @@ def recordfunc(ui, repo, message, match, opts): """This is generic record driver. 
- Its job is to interactively filter local changes, and accordingly - prepare working dir into a state, where the job can be delegated to - non-interactive commit command such as 'commit' or 'qrefresh'. + Its job is to interactively filter local changes, and + accordingly prepare working directory into a state in which the + job can be delegated to a non-interactive commit command such as + 'commit' or 'qrefresh'. - After the actual job is done by non-interactive command, working dir - state is restored to original. + After the actual job is done by non-interactive command, the + working directory is restored to its original state. - In the end we'll record interesting changes, and everything else will be - left in place, so the user can continue his work. + In the end we'll record interesting changes, and everything else + will be left in place, so the user can continue working. """ merge = len(repo[None].parents()) > 1 if merge: raise util.Abort(_('cannot partially commit a merge ' - '(use hg commit instead)')) + '(use "hg commit" instead)')) changes = repo.status(match=match)[:3] diffopts = mdiff.diffopts(git=True, nodates=True) @@ -475,6 +455,7 @@ os.close(fd) ui.debug('backup %r as %r\n' % (f, tmpname)) util.copyfile(repo.wjoin(f), tmpname) + shutil.copystat(repo.wjoin(f), tmpname) backups[f] = tmpname fp = cStringIO.StringIO() @@ -502,11 +483,13 @@ raise util.Abort(str(err)) del fp - # 4. We prepared working directory according to filtered patch. - # Now is the time to delegate the job to commit/qrefresh or the like! + # 4. We prepared working directory according to filtered + # patch. Now is the time to delegate the job to + # commit/qrefresh or the like! - # it is important to first chdir to repo root -- we'll call a - # highlevel command with list of pathnames relative to repo root + # it is important to first chdir to repo root -- we'll call + # a highlevel command with list of pathnames relative to + # repo root cwd = os.getcwd() os.chdir(repo.root) try: @@ -521,6 +504,14 @@ for realname, tmpname in backups.iteritems(): ui.debug('restoring %r to %r\n' % (tmpname, realname)) util.copyfile(tmpname, repo.wjoin(realname)) + # Our calls to copystat() here and above are a + # hack to trick any editors that have f open that + # we haven't modified them. + # + # Also note that this racy as an editor could + # notice the file's mtime before we've finished + # writing it. + shutil.copystat(tmpname, repo.wjoin(realname)) os.unlink(tmpname) os.rmdir(backupdir) except OSError: @@ -540,11 +531,7 @@ cmdtable = { "record": - (record, - - # add commit options - commands.table['^commit|ci'][1], - + (record, commands.table['^commit|ci'][1], # same options as commit _('hg record [OPTION]... [FILE]...')), } @@ -557,11 +544,7 @@ qcmdtable = { "qrecord": - (qrecord, - - # add qnew options, except '--force' - [opt for opt in mq.cmdtable['^qnew'][1] if opt[1] != 'force'], - + (qrecord, mq.cmdtable['^qnew'][1], # same options as qnew _('hg qrecord [OPTION]... PATCH [FILE]...')), }
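Among the record changes above, backups now copy stat data alongside file contents, so an editor holding the file open is less likely to notice a spurious mtime change when the original is restored (still racy, as the new comment admits). A self-contained sketch of that backup-and-restore dance with shutil.copystat (paths and function names here are illustrative):

    import os, shutil, tempfile

    def backup(path):
        fd, tmp = tempfile.mkstemp(prefix='record-backup-')
        os.close(fd)
        shutil.copyfile(path, tmp)
        shutil.copystat(path, tmp)      # carry the mtime and mode over
        return tmp

    def restore(tmp, path):
        shutil.copyfile(tmp, path)
        shutil.copystat(tmp, path)      # put the original mtime back
        os.unlink(tmp)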
--- a/hgext/transplant.py Sat Feb 12 16:08:41 2011 +0800 +++ b/hgext/transplant.py Wed Feb 16 14:13:22 2011 -0600 @@ -401,7 +401,7 @@ def hasnode(repo, node): try: - return repo.changelog.rev(node) != None + return repo.changelog.rev(node) is not None except error.RevlogError: return False
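A one-line change, but the distinction matters: changelog.rev() answers None for unknown nodes, and identity (``is not None``) is the robust way to test for that sentinel, since ``!=`` can be intercepted by operator overloading. Tiny illustration with a dict standing in for the changelog:

    def hasnode(changelog, node):
        # get() returns None when the node is unknown
        return changelog.get(node) is not None

    print(hasnode({'abc': 0}, 'abc'), hasnode({'abc': 0}, 'xyz'))   # True False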
--- a/i18n/da.po Sat Feb 12 16:08:41 2011 +0800 +++ b/i18n/da.po Wed Feb 16 14:13:22 2011 -0600 @@ -17,8 +17,8 @@ msgstr "" "Project-Id-Version: Mercurial\n" "Report-Msgid-Bugs-To: <mercurial-devel@selenic.com>\n" -"POT-Creation-Date: 2010-12-10 12:44+0100\n" -"PO-Revision-Date: 2010-12-10 12:46+0100\n" +"POT-Creation-Date: 2011-01-04 12:03+0100\n" +"PO-Revision-Date: 2011-01-04 12:15+0100\n" "Last-Translator: <mg@lazybytes.net>\n" "Language-Team: Danish\n" "Language: Danish\n" @@ -1142,10 +1142,10 @@ msgstr "" msgid "" -" If <REVMAP> isn't given, it will be put in a default location\n" -" (<dest>/.hg/shamap by default). The <REVMAP> is a simple text file\n" -" that maps each source commit ID to the destination ID for that\n" -" revision, like so::" +" If ``REVMAP`` isn't given, it will be put in a default location\n" +" (``<dest>/.hg/shamap`` by default). The ``REVMAP`` is a simple\n" +" text file that maps each source commit ID to the destination ID\n" +" for that revision, like so::" msgstr "" msgid " <source ID> <destination ID>" @@ -1251,15 +1251,25 @@ msgstr "" msgid "" -" --config convert.hg.ignoreerrors=False (boolean)\n" -" ignore integrity errors when reading. Use it to fix Mercurial\n" -" repositories with missing revlogs, by converting from and to\n" -" Mercurial.\n" -" --config convert.hg.saverev=False (boolean)\n" -" store original revision ID in changeset (forces target IDs to\n" -" change)\n" -" --config convert.hg.startrev=0 (hg revision identifier)\n" -" convert start revision and its descendants" +" The Mercurial source recognizes the following configuration\n" +" options, which you can set on the command line with ``--config``:" +msgstr "" + +msgid "" +" :convert.hg.ignoreerrors: ignore integrity errors when reading.\n" +" Use it to fix Mercurial repositories with missing revlogs, by\n" +" converting from and to Mercurial. Default is False." +msgstr "" + +msgid "" +" :convert.hg.saverev: store original. revision ID in changeset\n" +" (forces target IDs to change). It takes and boolean argument\n" +" and defaults to False." +msgstr "" + +msgid "" +" :convert.hg.startrev: convert start revision and its descendants.\n" +" It takes a hg revision identifier and defaults to 0." msgstr "" msgid "" @@ -1271,45 +1281,59 @@ " CVS source will use a sandbox (i.e. a checked-out copy) from CVS\n" " to indicate the starting point of what will be converted. Direct\n" " access to the repository files is not needed, unless of course the\n" -" repository is :local:. The conversion uses the top level directory\n" -" in the sandbox to find the CVS repository, and then uses CVS rlog\n" -" commands to find files to convert. This means that unless a\n" -" filemap is given, all files under the starting directory will be\n" +" repository is ``:local:``. The conversion uses the top level\n" +" directory in the sandbox to find the CVS repository, and then uses\n" +" CVS rlog commands to find files to convert. This means that unless\n" +" a filemap is given, all files under the starting directory will be\n" " converted, and that any directory reorganization in the CVS\n" " sandbox is ignored." msgstr "" -msgid " The options shown are the defaults." -msgstr "" - -msgid "" -" --config convert.cvsps.cache=True (boolean)\n" -" Set to False to disable remote log caching, for testing and\n" -" debugging purposes.\n" -" --config convert.cvsps.fuzz=60 (integer)\n" -" Specify the maximum time (in seconds) that is allowed between\n" -" commits with identical user and log message in a single\n" -" changeset. 
When very large files were checked in as part of a\n" -" changeset then the default may not be long enough.\n" -" --config convert.cvsps.mergeto='{{mergetobranch ([-\\w]+)}}'\n" -" Specify a regular expression to which commit log messages are\n" -" matched. If a match occurs, then the conversion process will\n" -" insert a dummy revision merging the branch on which this log\n" -" message occurs to the branch indicated in the regex.\n" -" --config convert.cvsps.mergefrom='{{mergefrombranch ([-\\w]+)}}'\n" -" Specify a regular expression to which commit log messages are\n" -" matched. If a match occurs, then the conversion process will\n" -" add the most recent revision on the branch indicated in the\n" -" regex as the second parent of the changeset.\n" -" --config hook.cvslog\n" -" Specify a Python function to be called at the end of gathering\n" -" the CVS log. The function is passed a list with the log entries,\n" -" and can modify the entries in-place, or add or delete them.\n" -" --config hook.cvschangesets\n" -" Specify a Python function to be called after the changesets\n" -" are calculated from the the CVS log. The function is passed\n" -" a list with the changeset entries, and can modify the changesets\n" -" in-place, or add or delete them." +msgid " The following options can be used with ``--config``:" +msgstr "" + +msgid "" +" :convert.cvsps.cache: Set to False to disable remote log caching,\n" +" for testing and debugging purposes. Default is True." +msgstr "" + +msgid "" +" :convert.cvsps.fuzz: Specify the maximum time (in seconds) that is\n" +" allowed between commits with identical user and log message in\n" +" a single changeset. When very large files were checked in as\n" +" part of a changeset then the default may not be long enough.\n" +" The default is 60." +msgstr "" + +msgid "" +" :convert.cvsps.mergeto: Specify a regular expression to which\n" +" commit log messages are matched. If a match occurs, then the\n" +" conversion process will insert a dummy revision merging the\n" +" branch on which this log message occurs to the branch\n" +" indicated in the regex. Default is ``{{mergetobranch\n" +" ([-\\w]+)}}``" +msgstr "" + +msgid "" +" :convert.cvsps.mergefrom: Specify a regular expression to which\n" +" commit log messages are matched. If a match occurs, then the\n" +" conversion process will add the most recent revision on the\n" +" branch indicated in the regex as the second parent of the\n" +" changeset. Default is ``{{mergefrombranch ([-\\w]+)}}``" +msgstr "" + +msgid "" +" :hook.cvslog: Specify a Python function to be called at the end of\n" +" gathering the CVS log. The function is passed a list with the\n" +" log entries, and can modify the entries in-place, or add or\n" +" delete them." +msgstr "" + +msgid "" +" :hook.cvschangesets: Specify a Python function to be called after\n" +" the changesets are calculated from the the CVS log. The\n" +" function is passed a list with the changeset entries, and can\n" +" modify the changesets in-place, or add or delete them." msgstr "" msgid "" @@ -1326,24 +1350,33 @@ msgid "" " Subversion source detects classical trunk/branches/tags layouts.\n" -" By default, the supplied \"svn://repo/path/\" source URL is\n" -" converted as a single branch. If \"svn://repo/path/trunk\" exists it\n" -" replaces the default branch. If \"svn://repo/path/branches\" exists,\n" -" its subdirectories are listed as possible branches. If\n" -" \"svn://repo/path/tags\" exists, it is looked for tags referencing\n" -" converted branches. 
Default \"trunk\", \"branches\" and \"tags\" values\n" -" can be overridden with following options. Set them to paths\n" +" By default, the supplied ``svn://repo/path/`` source URL is\n" +" converted as a single branch. If ``svn://repo/path/trunk`` exists\n" +" it replaces the default branch. If ``svn://repo/path/branches``\n" +" exists, its subdirectories are listed as possible branches. If\n" +" ``svn://repo/path/tags`` exists, it is looked for tags referencing\n" +" converted branches. Default ``trunk``, ``branches`` and ``tags``\n" +" values can be overridden with following options. Set them to paths\n" " relative to the source URL, or leave them blank to disable auto\n" " detection." msgstr "" -msgid "" -" --config convert.svn.branches=branches (directory name)\n" -" specify the directory containing branches\n" -" --config convert.svn.tags=tags (directory name)\n" -" specify the directory containing tags\n" -" --config convert.svn.trunk=trunk (directory name)\n" -" specify the name of the trunk branch" +msgid " The following options can be set with ``--config``:" +msgstr "" + +msgid "" +" :convert.svn.branches: specify the directory containing branches.\n" +" The defaults is ``branches``." +msgstr "" + +msgid "" +" :convert.svn.tags: specify the directory containing tags. The\n" +" default is ``tags``." +msgstr "" + +msgid "" +" :convert.svn.trunk: specify the name of the trunk branch The\n" +" defauls is ``trunk``." msgstr "" msgid "" @@ -1353,8 +1386,8 @@ msgstr "" msgid "" -" --config convert.svn.startrev=0 (svn revision number)\n" -" specify start Subversion revision." +" :convert.svn.startrev: specify start Subversion revision number.\n" +" The default is 0." msgstr "" msgid "" @@ -1368,17 +1401,17 @@ " source to a flat Mercurial repository, ignoring labels, branches\n" " and integrations. Note that when a depot path is given you then\n" " usually should specify a target directory, because otherwise the\n" -" target may be named ...-hg." +" target may be named ``...-hg``." msgstr "" msgid "" " It is possible to limit the amount of source history to be\n" -" converted by specifying an initial Perforce revision." -msgstr "" - -msgid "" -" --config convert.p4.startrev=0 (perforce changelist number)\n" -" specify initial Perforce revision." +" converted by specifying an initial Perforce revision:" +msgstr "" + +msgid "" +" :convert.p4.startrev: specify initial Perforce revision, a\n" +" Perforce changelist number)." msgstr "" msgid "" @@ -1386,17 +1419,24 @@ " '''''''''''''''''''''" msgstr "" -msgid "" -" --config convert.hg.clonebranches=False (boolean)\n" -" dispatch source branches in separate clones.\n" -" --config convert.hg.tagsbranch=default (branch name)\n" -" tag revisions branch name\n" -" --config convert.hg.usebranchnames=True (boolean)\n" -" preserve branch names" -msgstr "" - -msgid " " -msgstr " " +msgid " The following options are supported:" +msgstr "" + +msgid "" +" :convert.hg.clonebranches: dispatch source branches in separate\n" +" clones. The default is False." +msgstr "" + +msgid "" +" :convert.hg.tagsbranch: branch name for tag revisions, defaults to\n" +" ``default``." +msgstr "" + +msgid "" +" :convert.hg.usebranchnames: preserve branch names. The default is\n" +" True\n" +" " +msgstr "" msgid "create changeset information from CVS" msgstr "" @@ -1962,32 +2002,35 @@ "``[repository]``." msgid "" -"The ``[patterns]`` section specifies the line endings used in the\n" -"working directory. The format is specified by a file pattern. 
The\n" -"first match is used, so put more specific patterns first. The\n" -"available line endings are ``LF``, ``CRLF``, and ``BIN``." -msgstr "" -"Sektionen ``[patterns]`` angiver hvilken type linieskift der skal\n" -"bruges i arbejdskataloget. Typen er angivet ved et filmønster. Den\n" -"første træffer bliver brugt, så skriv mere specifikke mønstre først.\n" -"De mulige linieskifttyper er ``LF``, ``CRLF`` og ``BIN``." +"The ``[patterns]`` section specifies how line endings should be\n" +"converted between the working copy and the repository. The format is\n" +"specified by a file pattern. The first match is used, so put more\n" +"specific patterns first. The available line endings are ``LF``,\n" +"``CRLF``, and ``BIN``." +msgstr "" +"Sektionen ``[patterns]`` angiver hvordan linieskift skal konverteres\n" +"mellem arbejdskataloget og depotet. Formatet angives med et\n" +"filmønster. Den første træffer bliver brugt, så skriv mere specifikke\n" +"mønstre først. De mulige linieskifttyper er ``LF``, ``CRLF`` og\n" +"``BIN``." msgid "" "Files with the declared format of ``CRLF`` or ``LF`` are always\n" -"checked out in that format and files declared to be binary (``BIN``)\n" -"are left unchanged. Additionally, ``native`` is an alias for the\n" -"platform's default line ending: ``LF`` on Unix (including Mac OS X)\n" -"and ``CRLF`` on Windows. Note that ``BIN`` (do nothing to line\n" -"endings) is Mercurial's default behaviour; it is only needed if you\n" -"need to override a later, more general pattern." -msgstr "" -"Filer deklareret som ``CRLF`` eller ``LF`` bliver altid hentet ud i\n" -"dette format og filer deklareret som binære (``BIN``) bliver ikke\n" -"ændret. Desuden er ``native`` et alias for platforms normale\n" -"linieskift: ``LF`` på Unix (samt Mac OS X) og ``CRLF`` på Windows.\n" -"Bemærk at ``BIN`` (gør ingenting ved linieskift) er Mercurials\n" -"standardopførsel; det er kun nødvendigt at bruge den hvis du skal\n" -"overskrive et senere og mere generelt mønster." +"checked out and stored in the repository in that format and files\n" +"declared to be binary (``BIN``) are left unchanged. Additionally,\n" +"``native`` is an alias for checking out in the platform's default line\n" +"ending: ``LF`` on Unix (including Mac OS X) and ``CRLF`` on\n" +"Windows. Note that ``BIN`` (do nothing to line endings) is Mercurial's\n" +"default behaviour; it is only needed if you need to override a later,\n" +"more general pattern." +msgstr "" +"Filer deklareret som ``CRLF`` eller ``LF`` bliver altid hentet ud og\n" +"gemt i depotet i dette format og filer deklareret som binære (``BIN``)\n" +"bliver ikke ændret. Desuden er ``native`` et alias for platformens\n" +"normale linieskift: ``LF`` på Unix (samt Mac OS X) og ``CRLF`` på\n" +"Windows. Bemærk at ``BIN`` (gør ingenting ved linieskift) er\n" +"Mercurials standardopførsel; det er kun nødvendigt at bruge den hvis\n" +"du skal overskrive et senere og mere generelt mønster." msgid "" "The optional ``[repository]`` section specifies the line endings to\n" @@ -2036,6 +2079,12 @@ " native = LF" msgid "" +".. note::\n" +" The rules will first apply when files are touched in the working\n" +" copy, e.g. by updating to null and back to tip to touch all files." +msgstr "" + +msgid "" "The extension uses an optional ``[eol]`` section in your hgrc file\n" "(not the ``.hgeol`` file) for settings that control the overall\n" "behavior. There are two settings:" @@ -2070,6 +2119,13 @@ " antagelse af at de har miksede linieskift med vilje." 
msgid "" +"The extension provides ``cleverencode:`` and ``cleverdecode:`` filters\n" +"like the deprecated win32text extension does. This means that you can\n" +"disable win32text and enable eol and your filters will still work. You\n" +"only need to these filters until you have prepared a ``.hgeol`` file." +msgstr "" + +msgid "" "The ``win32text.forbid*`` hooks provided by the win32text extension\n" "have been unified into a single hook named ``eol.hook``. The hook will\n" "lookup the expected line endings from the ``.hgeol`` file, which means\n" @@ -3392,6 +3448,10 @@ msgid "working directory revision is not qtip" msgstr "arbejdskatalogets revision er ikke qtip" +#, python-format +msgid "uncommitted changes in subrepository %s" +msgstr "udeponerede ændringer i underdepot %s" + msgid "local changes found, refresh first" msgstr "lokale ændringer fundet, genopfrisk først" @@ -3460,6 +3520,16 @@ msgid "patch series already fully applied\n" msgstr "serien af rettelser er allerede anvendt fuldt ud\n" +msgid "cannot use --exact and --move together" +msgstr "kan ikke bruge --exact og --move sammen" + +msgid "cannot push --exact with applied patches" +msgstr "kan push --exact med anvendte rettelser" + +#, python-format +msgid "%s does not have a parent recorded" +msgstr "%s har ikke gemt nogen forælder" + msgid "please specify the patch to move" msgstr "angiv venligst lappen der skal flyttes" @@ -3508,18 +3578,6 @@ msgid "cannot refresh a revision with children" msgstr "kan ikke genopfriske en revision som har børn" -#, python-format -msgid "warning: not refreshing %s\n" -msgstr "advarsel: genopfrisker ikke %s\n" - -#, python-format -msgid "warning: not adding %s\n" -msgstr "advarsel: tilføjer ikke %s\n" - -#, python-format -msgid "warning: not removing %s\n" -msgstr "advarsel: fjerner ikke %s\n" - msgid "" "refresh interrupted while patch was popped! (revert --all, qpush to " "recover)\n" @@ -4486,6 +4544,9 @@ msgid "apply on top of local changes" msgstr "anvend ovenpå lokale ændringer" +msgid "apply the target patch to its recorded parent" +msgstr "" + msgid "list patch name in commit text" msgstr "" @@ -4987,6 +5048,11 @@ msgstr "" msgid "" +" In case email sending fails, you will find a backup of your series\n" +" introductory message in ``.hg/last-email.txt``." +msgstr "" + +msgid "" " hg email -r 3000 # send patch 3000 only\n" " hg email -r 3000 -r 3001 # send patches 3000 and 3001\n" " hg email -r 3000:3005 # send patches 3000 through 3005\n" @@ -5177,7 +5243,7 @@ " [progress]\n" " delay = 3 # number of seconds (float) before showing the progress bar\n" " refresh = 0.1 # time in seconds between refreshes of the progress bar\n" -" format = topic bar number # format of the progress bar\n" +" format = topic bar number estimate # format of the progress bar\n" " width = <none> # if set, the maximum width of the progress information\n" " # (that is, min(width, term width) will be used)\n" " clear-complete = True # clear the progress bar after it's done\n" @@ -5187,10 +5253,26 @@ msgstr "" msgid "" -"Valid entries for the format field are topic, bar, number, unit, and\n" -"item. item defaults to the last 20 characters of the item, but this\n" -"can be changed by adding either ``-<num>`` which would take the last\n" -"num characters, or ``+<num>`` for the first num characters.\n" +"Valid entries for the format field are topic, bar, number, unit,\n" +"estimate, and item. 
item defaults to the last 20 characters of the\n" +"item, but this can be changed by adding either ``-<num>`` which would\n" +"take the last num characters, or ``+<num>`` for the first num\n" +"characters.\n" +msgstr "" + +#. i18n: format XX seconds as "XXs" +#, python-format +msgid "%02ds" +msgstr "" + +#. i18n: format X minutes and YY seconds as "XmYYs" +#, python-format +msgid "%dm%02ds" +msgstr "" + +#. i18n: format X hours and YY minutes as "XhYYm" +#, python-format +msgid "%dh%02dm" msgstr "" msgid "command to delete untracked files from the working directory" @@ -5382,7 +5464,8 @@ msgstr "ændringer" msgid "unresolved conflicts (see hg resolve, then hg rebase --continue)" -msgstr "uløste konflikter (se først hg resolve og dernæst hg rebase --continue)" +msgstr "" +"uløste konflikter (se først hg resolve og dernæst hg rebase --continue)" #, python-format msgid "no changes, revision %d skipped\n" @@ -5578,8 +5661,8 @@ msgid "running non-interactively, use commit instead" msgstr "kører ikke interaktivt, brug commit i stedet" -msgid "cannot partially commit a merge (use hg commit instead)" -msgstr "kan ikke deponere en sammenføjning partielt (brug i stedet hg commit)" +msgid "cannot partially commit a merge (use \"hg commit\" instead)" +msgstr "kan ikke deponere en sammenføjning partielt (brug i stedet \"hg commit\")" msgid "no changes to record\n" msgstr "ingen ændringer at optage\n" @@ -6176,6 +6259,9 @@ msgid "unknown archive type '%s'" msgstr "ukendt depottype '%s'" +msgid "archiving" +msgstr "arkiverer" + msgid "invalid changegroup" msgstr "ugyldig changegroup" @@ -8502,6 +8588,9 @@ msgid "you can't specify a revision and a date" msgstr "du kan ikke specificeret en revision og en dato" +msgid "uncommitted merge - use \"hg update\", see \"hg help revert\"" +msgstr "" + msgid "no files or directories specified; use --all to revert the whole repo" msgstr "" "ingen filer eller mapper specificeret; brug --all for at føre hele repo'et " @@ -8813,7 +8902,8 @@ msgid "" " Tags are used to name particular revisions of the repository and are\n" " very useful to compare different revisions, to go back to significant\n" -" earlier versions or to mark branch points as releases, etc." +" earlier versions or to mark branch points as releases, etc. Changing\n" +" an existing tag is normally disallowed; use -f/--force to override." msgstr "" msgid "" @@ -8823,10 +8913,18 @@ msgid "" " To facilitate version control, distribution, and merging of tags,\n" -" they are stored as a file named \".hgtags\" which is managed\n" -" similarly to other project files and can be hand-edited if\n" -" necessary. The file '.hg/localtags' is used for local tags (not\n" -" shared among repositories)." +" they are stored as a file named \".hgtags\" which is managed similarly\n" +" to other project files and can be hand-edited if necessary. This\n" +" also means that tagging creates a new commit. The file\n" +" \".hg/localtags\" is used for local tags (not shared among\n" +" repositories)." +msgstr "" + +msgid "" +" Tag commits are usually made at the head of a branch. If the parent\n" +" of the working directory is not a branch head, :hg:`tag` aborts; use\n" +" -f/--force to force the tag commit to be based on a non-head\n" +" changeset." 
msgstr "" msgid "" @@ -8859,6 +8957,12 @@ msgid "tag '%s' already exists (use -f to force)" msgstr "mærkaten '%s' eksisterer allerede (brug -f for at gennemtvinge)" +msgid "uncommitted merge" +msgstr "udeponeret sammenføjning" + +msgid "not at a branch head (use -f to force)" +msgstr "ej ved et grenhoved (brug -f for at gennemtvinge)" + msgid "list repository tags" msgstr "vis depotmærkater" @@ -9752,8 +9856,8 @@ msgid "list the changed files of a revision" msgstr "vis de ændrede filer i en revision" -msgid "replace existing tag" -msgstr "erstat eksisterende mærkat" +msgid "force tag" +msgstr "gennemtving markering" msgid "make the tag local" msgstr "gør mærkaten lokal" @@ -10133,6 +10237,10 @@ msgstr "*** import af udvidelse %s fejlede: %s\n" #, python-format +msgid "warning: error finding commands in %s\n" +msgstr "advarsel: fejl ved søgning efter kommandoer i %s\n" + +#, python-format msgid "couldn't find merge tool %s\n" msgstr "kunne ikke finde sammenføjningsværktøj %s\n" @@ -11514,6 +11622,13 @@ "Regexp pattern matching is anchored at the root of the repository." msgstr "" +msgid "" +"To read name patterns from a file, use ``listfile:`` or ``listfile0:``.\n" +"The latter expects null delimited patterns while the former expects line\n" +"feeds. Each string read from the file is itself treated as a file\n" +"pattern." +msgstr "" + msgid "Plain examples::" msgstr "" @@ -11539,8 +11654,15 @@ msgid "Regexp examples::" msgstr "" -msgid "" -" re:.*\\.c$ any name ending in \".c\", anywhere in the repository\n" +msgid " re:.*\\.c$ any name ending in \".c\", anywhere in the repository" +msgstr "" + +msgid "File examples::" +msgstr "Fillisteeksempler::" + +msgid "" +" listfile:list.txt read list from list.txt with one file pattern per line\n" +" listfile0:list.txt read list from list.txt with null byte delimiters\n" msgstr "" msgid "Mercurial supports several ways to specify individual revisions." @@ -11959,8 +12081,9 @@ msgstr "" msgid "" -":branches: String. The name of the branch on which the changeset was\n" -" committed. Will be empty if the branch name was default." +":branches: List of strings. The name of the branch on which the\n" +" changeset was committed. Will be empty if the branch name was\n" +" default." msgstr "" msgid ":children: List of strings. The children of the changeset." @@ -12344,6 +12467,10 @@ msgstr "(grensammenføjning, glem ikke at deponere)\n" #, python-format +msgid "config file %s not found!" +msgstr "konfigurationsfilen %s blev ikke fundet!" 
+ +#, python-format msgid "error reading %s/.hg/hgrc: %s\n" msgstr "fejl ved læsning af %s/.hg/hgrc: %s\n" @@ -12490,6 +12617,10 @@ msgstr ".hg/sharedpath peger på et ikke-eksisterende katalog %s" #, python-format +msgid "warning: ignoring unknown working parent %s!\n" +msgstr "advarsel: ignorerer ukendt forælder %s til arbejdsbiblioteket!\n" + +#, python-format msgid "%r cannot be used in a tag name" msgstr "%r kan ikke bruges i et mærkatnavnet" @@ -12596,34 +12727,28 @@ msgid "%d changesets found\n" msgstr "fandt %d ændringer\n" -msgid "bundling changes" -msgstr "bundter ændringer" - -msgid "chunks" -msgstr "" - -msgid "bundling manifests" -msgstr "bundter manifester" +msgid "bundling" +msgstr "bundter" + +msgid "manifests" +msgstr "manifester" #, python-format msgid "empty or missing revlog for %s" msgstr "tom eller manglende revlog for %s" -msgid "bundling files" -msgstr "bundter filer" - msgid "adding changesets\n" msgstr "tilføjer ændringer\n" +msgid "chunks" +msgstr "" + msgid "received changelog group is empty" msgstr "modtagen changelog-gruppe er tom" msgid "adding manifests\n" msgstr "tilføjer manifester\n" -msgid "manifests" -msgstr "manifester" - msgid "adding file changes\n" msgstr "tilføjer filændringer\n" @@ -12665,6 +12790,12 @@ msgid "transferred %s in %.1f seconds (%s/sec)\n" msgstr "overførte %s i %.1f sekunder (%s/sek)\n" +msgid "can't use TLS: Python SSL support not installed" +msgstr "kan ikke bruge TLS: Python SSL support er ikke installeret" + +msgid "(using smtps)\n" +msgstr "(bruger smtps)\n" + msgid "smtp.host not configured - cannot send mail" msgstr "" @@ -12672,11 +12803,8 @@ msgid "sending mail: smtp host %s, port %s\n" msgstr "sender mail: smtp host %s, port %s\n" -msgid "can't use TLS: Python SSL support not installed" -msgstr "kan ikke bruge TLS: Python SSL support er ikke installeret" - -msgid "(using tls)\n" -msgstr "(bruger tsl)\n" +msgid "(using starttls)\n" +msgstr "(bruger starttls)\n" #, python-format msgid "(authenticating to mail server as %s)\n" @@ -12717,6 +12845,10 @@ msgstr "ugyldig mønster" #, python-format +msgid "unable to read file list (%s)" +msgstr "kan ikke læse filliste (%s)" + +#, python-format msgid "diff context lines count must be an integer, not %r" msgstr "" @@ -13008,10 +13140,10 @@ #, python-format msgid "can't use %s here" -msgstr "" +msgstr "kan ikke bruge %s her" msgid "can't use a list in this context" -msgstr "" +msgstr "en liste kan ikke bruges i denne konteks" #, python-format msgid "not a function: %s" @@ -13028,7 +13160,7 @@ #. i18n: "id" is a keyword msgid "id requires a string" -msgstr "" +msgstr "id kræver en streng" msgid "" "``rev(number)``\n" @@ -13037,29 +13169,30 @@ #. i18n: "rev" is a keyword msgid "rev requires one argument" -msgstr "" +msgstr "rev kræver et argument" #. i18n: "rev" is a keyword msgid "rev requires a number" -msgstr "" +msgstr "rev kræver et tal" #. i18n: "rev" is a keyword msgid "rev expects a number" msgstr "rev forventer et revisionsnummer" msgid "" -"``p1(set)``\n" -" First parent of changesets in set." -msgstr "" - -msgid "" -"``p2(set)``\n" -" Second parent of changesets in set." -msgstr "" - -msgid "" -"``parents(set)``\n" -" The set of all parents for all changesets in set." +"``p1([set])``\n" +" First parent of changesets in set, or the working directory." +msgstr "" + +msgid "" +"``p2([set])``\n" +" Second parent of changesets in set, or the working directory." 
+msgstr "" + +msgid "" +"``parents([set])``\n" +" The set of all parents for all changesets in set, or the working " +"directory." msgstr "" msgid "" @@ -13322,20 +13455,23 @@ "``tag(name)``\n" " The specified tag by name, or all tagged revisions if no name is given." msgstr "" +"``tag(navn)``\n" +" Den navngivne mærkat eller alle revisioner med en mærkat hvis der\n" +" ikke angives noget navn." #. i18n: "tag" is a keyword msgid "tag takes one or no arguments" -msgstr "" +msgstr "tag tager et eller to argumenter" #. i18n: "tag" is a keyword msgid "the argument to tag must be a string" -msgstr "" +msgstr "argumentet til tag skal være en streng" msgid "can't negate that" msgstr "" msgid "not a symbol" -msgstr "" +msgstr "ikke et symbol" msgid "empty query" msgstr "tomt forespørgsel" @@ -13435,6 +13571,10 @@ msgstr "ukendt underdepottype %s" #, python-format +msgid "archiving (%s)" +msgstr "arkiverer (%s)" + +#, python-format msgid "warning: error \"%s\" in subrepository \"%s\"\n" msgstr "advarsel: fejl \"%s\" i underdepot \"%s\"\n" @@ -13458,6 +13598,39 @@ msgstr "fjerner ikke depotet %s fordi det er ændret.\n" #, python-format +msgid "cloning subrepo %s\n" +msgstr "kloner underdepot %s\n" + +#, python-format +msgid "pulling subrepo %s\n" +msgstr "hiver underdepot %s\n" + +#, python-format +msgid "revision %s does not exist in subrepo %s\n" +msgstr "revision %s findes ikke i underdepot %s\n" + +#, python-format +msgid "checking out detached HEAD in subrepo %s\n" +msgstr "" + +msgid "check out a git branch if you intend to make changes\n" +msgstr "" + +#, python-format +msgid "unrelated git branch checked out in subrepo %s\n" +msgstr "" + +#, python-format +msgid "pushing branch %s of subrepo %s\n" +msgstr "skubber gren %s af underdepot %s\n" + +#, python-format +msgid "" +"no branch checked out in subrepo %s\n" +"cannot push revision %s" +msgstr "" + +#, python-format msgid "%s, line %s: %s\n" msgstr "%s, linie %s: %s\n" @@ -13471,22 +13644,36 @@ msgid ".hg/tags.cache is corrupt, rebuilding it\n" msgstr "" +#, python-format +msgid "unknown method '%s'" +msgstr "ukendt metode '%s'" + +msgid "expected a symbol" +msgstr "forventede et symbol" + +#, python-format +msgid "unknown function '%s'" +msgstr "ukendt funktion '%s'" + +msgid "expected template specifier" +msgstr "" + +#, python-format +msgid "filter %s expects one argument" +msgstr "filter %s kræver et argument" + msgid "unmatched quotes" msgstr "" #, python-format -msgid "error expanding '%s%%%s'" -msgstr "fejl ved ekspansion af '%s%%%s'" - -#, python-format -msgid "unknown filter '%s'" -msgstr "ukendt filter '%s'" - -#, python-format msgid "style not found: %s" msgstr "" #, python-format +msgid "\"%s\" not in template map" +msgstr "\"%s\" er ikke i skabelon-fil" + +#, python-format msgid "template file %s: %s" msgstr "skabelon-fil %s: %s" @@ -13577,6 +13764,9 @@ msgid "ignoring invalid [auth] key '%s'\n" msgstr "ignorerer ugyldig [auth] nøgle '%s'\n" +msgid "kb" +msgstr "" + msgid "certificate checking requires Python 2.6" msgstr "" @@ -13588,10 +13778,15 @@ msgstr "certifikatet er for %s" msgid "no commonName found in certificate" -msgstr "" +msgstr "fandt ikke noget commonName i certifikatet" #, python-format msgid "%s certificate error: %s" +msgstr "%s certifikatfejl: %s" + +#, python-format +msgid "" +"warning: %s certificate not verified (check web.cacerts config setting)\n" msgstr "" #, python-format @@ -13599,6 +13794,10 @@ msgstr "kommandoen '%s' fejlede: %s" #, python-format +msgid "path ends in directory separator: %s" 
+msgstr "" + +#, python-format msgid "path contains illegal component: %s" msgstr "stien indeholder ugyldig komponent: %s" @@ -13686,7 +13885,7 @@ #, python-format msgid "no port number associated with service '%s'" -msgstr "" +msgstr "der er ikke knyttet noget portnummer til servicen '%s'" msgid "cannot verify bundle or remote repos" msgstr "kan ikke verificere bundt eller fjerndepoter" @@ -13743,7 +13942,7 @@ msgstr "duplikeret revision %d (%d)" msgid "abandoned transaction found - run hg recover\n" -msgstr "" +msgstr "fandt efterladt transaktion - kør hg recover\n" #, python-format msgid "repository uses revlog format %d\n" @@ -13777,7 +13976,7 @@ msgstr "krydstjekker filer i ændringer og manifester\n" msgid "crosschecking" -msgstr "" +msgstr "krydstjekker" #, python-format msgid "changeset refers to unknown manifest %s" @@ -13805,7 +14004,7 @@ #, python-format msgid "%s not in manifests" -msgstr "" +msgstr "%s findes ikke i manifestet" #, python-format msgid "unpacked size is %s, %s expected"
--- a/i18n/polib.py Sat Feb 12 16:08:41 2011 +0800 +++ b/i18n/polib.py Wed Feb 16 14:13:22 2011 -0600 @@ -105,7 +105,7 @@ ... finally: ... os.unlink(tmpf) """ - if kwargs.get('autodetect_encoding', True) == True: + if kwargs.get('autodetect_encoding', True): enc = detect_encoding(fpath) else: enc = kwargs.get('encoding', default_encoding) @@ -159,7 +159,7 @@ ... finally: ... os.unlink(tmpf) """ - if kwargs.get('autodetect_encoding', True) == True: + if kwargs.get('autodetect_encoding', True): enc = detect_encoding(fpath, True) else: enc = kwargs.get('encoding', default_encoding)
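The polib tweak above replaces ``== True`` with a plain truthiness test; behaviour is unchanged because autodetect_encoding defaults to True. A usage sketch for the helper itself (assuming polib's usual pofile() entry point and catalog statistics methods), e.g. to see how much of the Danish catalog patched above is translated:

    import polib

    po = polib.pofile('i18n/da.po')     # encoding autodetected by default
    print('%d%% translated, %d entries still empty'
          % (po.percent_translated(), len(po.untranslated_entries())))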
--- a/mercurial/archival.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/archival.py Wed Feb 16 14:13:22 2011 -0600 @@ -8,7 +8,7 @@ from i18n import _ from node import hex import cmdutil -import util +import util, encoding import cStringIO, os, stat, tarfile, time, zipfile import zlib, gzip @@ -84,6 +84,7 @@ def __init__(self, dest, mtime, kind=''): self.mtime = mtime + self.fileobj = None def taropen(name, mode, fileobj=None): if kind == 'gz': @@ -93,8 +94,10 @@ gzfileobj = self.GzipFileWithTime(name, mode + 'b', zlib.Z_BEST_COMPRESSION, fileobj, timestamp=mtime) + self.fileobj = gzfileobj return tarfile.TarFile.taropen(name, mode, gzfileobj) else: + self.fileobj = fileobj return tarfile.open(name, mode + kind, fileobj) if isinstance(dest, str): @@ -120,6 +123,8 @@ def done(self): self.z.close() + if self.fileobj: + self.fileobj.close() class tellable(object): '''provide tell method for zipfile.ZipFile when writing to http @@ -245,7 +250,7 @@ if repo.ui.configbool("ui", "archivemeta", True): def metadata(): base = 'repo: %s\nnode: %s\nbranch: %s\n' % ( - repo[0].hex(), hex(node), ctx.branch()) + repo[0].hex(), hex(node), encoding.fromlocal(ctx.branch())) tags = ''.join('tag: %s\n' % t for t in ctx.tags() if repo.tagtype(t) == 'global') @@ -262,13 +267,18 @@ write('.hg_archival.txt', 0644, False, metadata) - for f in ctx: + total = len(ctx.manifest()) + repo.ui.progress(_('archiving'), 0, unit=_('files'), total=total) + for i, f in enumerate(ctx): ff = ctx.flags(f) write(f, 'x' in ff and 0755 or 0644, 'l' in ff, ctx[f].data) + repo.ui.progress(_('archiving'), i + 1, item=f, + unit=_('files'), total=total) + repo.ui.progress(_('archiving'), None) if subrepos: for subpath in ctx.substate: sub = ctx.sub(subpath) - sub.archive(archiver, prefix) + sub.archive(repo.ui, archiver, prefix) archiver.done()
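The archival hunk threads a progress topic through the file loop: report position 0 before the loop, i + 1 after each file, and None at the end to clear the topic. A minimal, runnable imitation of that reporting pattern, with a plain function standing in for repo.ui.progress:

    def report(topic, pos, item='', unit='', total=None):
        if pos is None:
            print('%s: done' % topic)
        else:
            print('%s: %d/%d %s %s' % (topic, pos, total, unit, item))

    files = ['a.txt', 'b.txt', 'c.txt']
    report('archiving', 0, unit='files', total=len(files))
    for i, name in enumerate(files):
        # ... write name into the archive here ...
        report('archiving', i + 1, item=name, unit='files', total=len(files))
    report('archiving', None)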
--- a/mercurial/bdiff.c Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/bdiff.c Wed Feb 16 14:13:22 2011 -0600 @@ -57,12 +57,10 @@ int pos, len; }; +struct hunk; struct hunk { int a1, a2, b1, b2; -}; - -struct hunklist { - struct hunk *base, *head; + struct hunk *next; }; int splitlines(const char *a, int len, struct line **lr) @@ -223,8 +221,8 @@ return mk + mb; } -static void recurse(struct line *a, struct line *b, struct pos *pos, - int a1, int a2, int b1, int b2, struct hunklist *l) +static struct hunk *recurse(struct line *a, struct line *b, struct pos *pos, + int a1, int a2, int b1, int b2, struct hunk *l) { int i, j, k; @@ -232,51 +230,66 @@ /* find the longest match in this chunk */ k = longest_match(a, b, pos, a1, a2, b1, b2, &i, &j); if (!k) - return; + return l; /* and recurse on the remaining chunks on either side */ - recurse(a, b, pos, a1, i, b1, j, l); - l->head->a1 = i; - l->head->a2 = i + k; - l->head->b1 = j; - l->head->b2 = j + k; - l->head++; - /* tail-recursion didn't happen, so doing equivalent iteration */ + l = recurse(a, b, pos, a1, i, b1, j, l); + if (!l) + return NULL; + + l->next = (struct hunk *)malloc(sizeof(struct hunk)); + if (!l->next) + return NULL; + + l = l->next; + l->a1 = i; + l->a2 = i + k; + l->b1 = j; + l->b2 = j + k; + l->next = NULL; + + /* tail-recursion didn't happen, so do equivalent iteration */ a1 = i + k; b1 = j + k; } } -static struct hunklist diff(struct line *a, int an, struct line *b, int bn) +static int diff(struct line *a, int an, struct line *b, int bn, + struct hunk *base) { - struct hunklist l; struct hunk *curr; struct pos *pos; - int t; + int t, count = 0; /* allocate and fill arrays */ t = equatelines(a, an, b, bn); pos = (struct pos *)calloc(bn ? bn : 1, sizeof(struct pos)); - /* we can't have more matches than lines in the shorter file */ - l.head = l.base = (struct hunk *)malloc(sizeof(struct hunk) * - ((an<bn ? 
an:bn) + 1)); + + if (pos && t) { + /* generate the matching block list */ + + curr = recurse(a, b, pos, 0, an, 0, bn, base); + if (!curr) + return -1; - if (pos && l.base && t) { - /* generate the matching block list */ - recurse(a, b, pos, 0, an, 0, bn, &l); - l.head->a1 = l.head->a2 = an; - l.head->b1 = l.head->b2 = bn; - l.head++; + /* sentinel end hunk */ + curr->next = (struct hunk *)malloc(sizeof(struct hunk)); + if (!curr->next) + return -1; + curr = curr->next; + curr->a1 = curr->a2 = an; + curr->b1 = curr->b2 = bn; + curr->next = NULL; } free(pos); /* normalize the hunk list, try to push each hunk towards the end */ - for (curr = l.base; curr != l.head; curr++) { - struct hunk *next = curr + 1; + for (curr = base->next; curr; curr = curr->next) { + struct hunk *next = curr->next; int shift = 0; - if (next == l.head) + if (!next) break; if (curr->a2 == next->a1) @@ -297,16 +310,26 @@ next->a1 += shift; } - return l; + for (curr = base->next; curr; curr = curr->next) + count++; + return count; +} + +static void freehunks(struct hunk *l) +{ + struct hunk *n; + for (; l; l = n) { + n = l->next; + free(l); + } } static PyObject *blocks(PyObject *self, PyObject *args) { PyObject *sa, *sb, *rl = NULL, *m; struct line *a, *b; - struct hunklist l = {NULL, NULL}; - struct hunk *h; - int an, bn, pos = 0; + struct hunk l, *h; + int an, bn, count, pos = 0; if (!PyArg_ParseTuple(args, "SS:bdiff", &sa, &sb)) return NULL; @@ -317,12 +340,16 @@ if (!a || !b) goto nomem; - l = diff(a, an, b, bn); - rl = PyList_New(l.head - l.base); - if (!l.head || !rl) + l.next = NULL; + count = diff(a, an, b, bn, &l); + if (count < 0) goto nomem; - for (h = l.base; h != l.head; h++) { + rl = PyList_New(count); + if (!rl) + goto nomem; + + for (h = l.next; h; h = h->next) { m = Py_BuildValue("iiii", h->a1, h->a2, h->b1, h->b2); PyList_SetItem(rl, pos, m); pos++; @@ -331,7 +358,7 @@ nomem: free(a); free(b); - free(l.base); + freehunks(l.next); return rl ? rl : PyErr_NoMemory(); } @@ -340,10 +367,9 @@ char *sa, *sb; PyObject *result = NULL; struct line *al, *bl; - struct hunklist l = {NULL, NULL}; - struct hunk *h; + struct hunk l, *h; char encode[12], *rb; - int an, bn, len = 0, la, lb; + int an, bn, len = 0, la, lb, count; if (!PyArg_ParseTuple(args, "s#s#:bdiff", &sa, &la, &sb, &lb)) return NULL; @@ -353,13 +379,14 @@ if (!al || !bl) goto nomem; - l = diff(al, an, bl, bn); - if (!l.head) + l.next = NULL; + count = diff(al, an, bl, bn, &l); + if (count < 0) goto nomem; /* calculate length of output */ la = lb = 0; - for (h = l.base; h != l.head; h++) { + for (h = l.next; h; h = h->next) { if (h->a1 != la || h->b1 != lb) len += 12 + bl[h->b1].l - bl[lb].l; la = h->a2; @@ -375,7 +402,7 @@ rb = PyBytes_AsString(result); la = lb = 0; - for (h = l.base; h != l.head; h++) { + for (h = l.next; h; h = h->next) { if (h->a1 != la || h->b1 != lb) { len = bl[h->b1].l - bl[lb].l; *(uint32_t *)(encode) = htonl(al[la].l - al->l); @@ -392,7 +419,7 @@ nomem: free(al); free(bl); - free(l.base); + freehunks(l.next); return result ? result : PyErr_NoMemory(); }
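The bdiff.c rewrite swaps the preallocated hunk array for a singly linked list ending in a zero-length sentinel hunk, so allocation failures can be reported as -1 and released with freehunks(). For orientation, a rough Python analogue of what blocks() yields, matching (a1, a2, b1, b2) line ranges with the sentinel included, built on difflib rather than the C matcher:

    import difflib

    def blocks(a, b):
        sm = difflib.SequenceMatcher(None, a.splitlines(True), b.splitlines(True))
        return [(m.a, m.a + m.size, m.b, m.b + m.size)
                for m in sm.get_matching_blocks()]

    print(blocks("a\nb\nc\n", "a\nc\nd\n"))
    # [(0, 1, 0, 1), (2, 3, 1, 2), (3, 3, 3, 3)] -- the last tuple is the sentinel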
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/mercurial/bookmarks.py Wed Feb 16 14:13:22 2011 -0600 @@ -0,0 +1,176 @@ +# Mercurial bookmark support code +# +# Copyright 2008 David Soria Parra <dsp@php.net> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +from mercurial.i18n import _ +from mercurial.node import nullid, nullrev, bin, hex, short +from mercurial import encoding, util +import os + +def valid(mark): + for c in (':', '\0', '\n', '\r'): + if c in mark: + return False + return True + +def read(repo): + '''Parse .hg/bookmarks file and return a dictionary + + Bookmarks are stored as {HASH}\\s{NAME}\\n (localtags format) values + in the .hg/bookmarks file. + Read the file and return a (name=>nodeid) dictionary + ''' + try: + bookmarks = {} + for line in repo.opener('bookmarks'): + sha, refspec = line.strip().split(' ', 1) + refspec = encoding.tolocal(refspec) + bookmarks[refspec] = repo.changelog.lookup(sha) + except: + pass + return bookmarks + +def readcurrent(repo): + '''Get the current bookmark + + If we use gittishsh branches we have a current bookmark that + we are on. This function returns the name of the bookmark. It + is stored in .hg/bookmarks.current + ''' + mark = None + if os.path.exists(repo.join('bookmarks.current')): + file = repo.opener('bookmarks.current') + # No readline() in posixfile_nt, reading everything is cheap + mark = encoding.tolocal((file.readlines() or [''])[0]) + if mark == '': + mark = None + file.close() + return mark + +def write(repo): + '''Write bookmarks + + Write the given bookmark => hash dictionary to the .hg/bookmarks file + in a format equal to those of localtags. + + We also store a backup of the previous state in undo.bookmarks that + can be copied back on rollback. + ''' + refs = repo._bookmarks + + try: + bms = repo.opener('bookmarks').read() + except IOError: + bms = '' + repo.opener('undo.bookmarks', 'w').write(bms) + + if repo._bookmarkcurrent not in refs: + setcurrent(repo, None) + for mark in refs.keys(): + if not valid(mark): + raise util.Abort(_("bookmark '%s' contains illegal " + "character" % mark)) + + wlock = repo.wlock() + try: + + file = repo.opener('bookmarks', 'w', atomictemp=True) + for refspec, node in refs.iteritems(): + file.write("%s %s\n" % (hex(node), encoding.fromlocal(refspec))) + file.rename() + + # touch 00changelog.i so hgweb reloads bookmarks (no lock needed) + try: + os.utime(repo.sjoin('00changelog.i'), None) + except OSError: + pass + + finally: + wlock.release() + +def setcurrent(repo, mark): + '''Set the name of the bookmark that we are currently on + + Set the name of the bookmark that we are on (hg update <bookmark>). 
+ The name is recorded in .hg/bookmarks.current + ''' + current = repo._bookmarkcurrent + if current == mark: + return + + refs = repo._bookmarks + + # do not update if we do update to a rev equal to the current bookmark + if (mark and mark not in refs and + current and refs[current] == repo.changectx('.').node()): + return + if mark not in refs: + mark = '' + if not valid(mark): + raise util.Abort(_("bookmark '%s' contains illegal " + "character" % mark)) + + wlock = repo.wlock() + try: + file = repo.opener('bookmarks.current', 'w', atomictemp=True) + file.write(mark) + file.rename() + finally: + wlock.release() + repo._bookmarkcurrent = mark + +def update(repo, parents, node): + marks = repo._bookmarks + update = False + mark = repo._bookmarkcurrent + if mark and marks[mark] in parents: + marks[mark] = node + update = True + if update: + write(repo) + +def listbookmarks(repo): + # We may try to list bookmarks on a repo type that does not + # support it (e.g., statichttprepository). + if not hasattr(repo, '_bookmarks'): + return {} + + d = {} + for k, v in repo._bookmarks.iteritems(): + d[k] = hex(v) + return d + +def pushbookmark(repo, key, old, new): + w = repo.wlock() + try: + marks = repo._bookmarks + if hex(marks.get(key, '')) != old: + return False + if new == '': + del marks[key] + else: + if new not in repo: + return False + marks[key] = repo[new].node() + write(repo) + return True + finally: + w.release() + +def diff(ui, repo, remote): + ui.status(_("searching for changed bookmarks\n")) + + lmarks = repo.listkeys('bookmarks') + rmarks = remote.listkeys('bookmarks') + + diff = sorted(set(rmarks) - set(lmarks)) + for k in diff: + ui.write(" %-25s %s\n" % (k, rmarks[k][:12])) + + if len(diff) <= 0: + ui.status(_("no changed bookmarks found\n")) + return 1 + return 0
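The new module stores bookmarks as one ``<hex node> <name>`` pair per line in .hg/bookmarks (the same shape as localtags) and keeps the active bookmark's name in .hg/bookmarks.current. A self-contained sketch of that on-disk format; the real code resolves the hash through the changelog, this keeps the hex string:

    def parsebookmarks(data):
        marks = {}
        for line in data.splitlines():
            if line:
                sha, name = line.strip().split(' ', 1)
                marks[name] = sha
        return marks

    sample = '0123456789abcdef0123456789abcdef01234567 feature-x\n'
    print(parsebookmarks(sample))   # {'feature-x': '0123456789abcdef...'}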
--- a/mercurial/bundlerepo.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/bundlerepo.py Wed Feb 16 14:13:22 2011 -0600 @@ -174,7 +174,7 @@ self._url = 'bundle:' + bundlename self.tempfile = None - f = open(bundlename, "rb") + f = util.posixfile(bundlename, "rb") self.bundle = changegroup.readbundle(f, bundlename) if self.bundle.compressed(): fdtemp, temp = tempfile.mkstemp(prefix="hg-bundle-", @@ -192,7 +192,7 @@ finally: fptemp.close() - f = open(self.tempfile, "rb") + f = util.posixfile(self.tempfile, "rb") self.bundle = changegroup.readbundle(f, bundlename) # dict with the mapping 'filename' -> position in the bundle @@ -251,11 +251,6 @@ self.bundle.close() if self.tempfile is not None: os.unlink(self.tempfile) - - def __del__(self): - del self.bundle - if self.tempfile is not None: - os.unlink(self.tempfile) if self._tempparent: shutil.rmtree(self._tempparent, True)
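Two things happen in the bundlerepo hunks: bundle files are opened through util.posixfile (presumably for POSIX-style delete/rename semantics on Windows), and temp-file cleanup moves from __del__ to the explicit close() path, since finalizer timing is never guaranteed. A small stdlib-only sketch of that second point, with a hypothetical class name:

    import os, tempfile

    class tempbundle(object):
        def __init__(self, data):
            fd, self.tempfile = tempfile.mkstemp(prefix='hg-bundle-')
            os.write(fd, data)
            os.close(fd)

        def close(self):
            # explicit cleanup instead of __del__: the caller decides when
            # the temporary file goes away
            if self.tempfile is not None:
                os.unlink(self.tempfile)
                self.tempfile = None

    b = tempbundle(b'dummy data')
    b.close()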
--- a/mercurial/cmdutil.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/cmdutil.py Wed Feb 16 14:13:22 2011 -0600 @@ -147,6 +147,11 @@ # attempt to parse old-style ranges first to deal with # things like old-tag which contain query metacharacters try: + if isinstance(spec, int): + seen.add(spec) + l.append(spec) + continue + if revrangesep in spec: start, end = spec.split(revrangesep, 1) start = revfix(repo, start, 0) @@ -228,7 +233,8 @@ writable = 'w' in mode or 'a' in mode if not pat or pat == '-': - return writable and sys.stdout or sys.stdin + fp = writable and sys.stdout or sys.stdin + return os.fdopen(os.dup(fp.fileno()), mode) if hasattr(pat, 'write') and writable: return pat if hasattr(pat, 'read') and 'r' in mode: @@ -673,7 +679,9 @@ parents.reverse() prev = (parents and parents[0]) or nullid + shouldclose = False if not fp: + shouldclose = True fp = make_file(repo, template, node, total=total, seqno=seqno, revwidth=revwidth, mode='ab') if fp != sys.stdout and hasattr(fp, 'name'): @@ -694,6 +702,9 @@ for chunk in patch.diff(repo, prev, node, opts=opts): fp.write(chunk) + if shouldclose: + fp.close() + for seqno, rev in enumerate(revs): single(rev, seqno + 1, fp) @@ -796,9 +807,11 @@ branch = ctx.branch() # don't show the default branch name if branch != 'default': - branch = encoding.tolocal(branch) self.ui.write(_("branch: %s\n") % branch, label='log.branch') + for bookmark in self.repo.nodebookmarks(changenode): + self.ui.write(_("bookmark: %s\n") % bookmark, + label='log.bookmark') for tag in self.repo.nodetags(changenode): self.ui.write(_("tag: %s\n") % tag, label='log.tag') @@ -1352,8 +1365,7 @@ if ctx.p2(): edittext.append(_("HG: branch merge")) if ctx.branch(): - edittext.append(_("HG: branch '%s'") - % encoding.tolocal(ctx.branch())) + edittext.append(_("HG: branch '%s'") % ctx.branch()) edittext.extend([_("HG: subrepo %s") % s for s in subs]) edittext.extend([_("HG: added %s") % f for f in added]) edittext.extend([_("HG: changed %s") % f for f in modified])
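make_file() now hands back a duplicate of stdout's (or stdin's) descriptor rather than the stream itself, which lets export() close whatever file object it receives without closing the process's real stdout. The pattern in isolation (a sketch, not cmdutil's full signature):

    import os, sys

    def output_file(pat, mode='wb'):
        if not pat or pat == '-':
            # duplicate the descriptor so close() below leaves sys.stdout alone
            return os.fdopen(os.dup(sys.stdout.fileno()), mode)
        return open(pat, mode)

    fp = output_file('-')
    fp.close()                      # only the duplicate descriptor is closed
    print('stdout is still usable')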
--- a/mercurial/commands.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/commands.py Wed Feb 16 14:13:22 2011 -0600 @@ -9,7 +9,7 @@ from lock import release from i18n import _, gettext import os, re, sys, difflib, time, tempfile -import hg, util, revlog, extensions, copies, error +import hg, util, revlog, extensions, copies, error, bookmarks import patch, help, mdiff, url, encoding, templatekw, discovery import archival, changegroup, cmdutil, sshserver, hbisect, hgweb, hgweb.server import merge as mergemod @@ -58,7 +58,7 @@ repository. New files are ignored if they match any of the patterns in - .hgignore. As with add, these changes take effect at the next + ``.hgignore``. As with add, these changes take effect at the next commit. Use the -s/--similarity option to detect renamed files. With a @@ -126,7 +126,7 @@ lastfunc = funcmap[-1] funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1]) - ctx = repo[opts.get('rev')] + ctx = cmdutil.revsingle(repo, opts.get('rev')) m = cmdutil.match(repo, pats, opts) follow = not opts.get('no_follow') for abs in ctx.walk(m): @@ -178,7 +178,7 @@ Returns 0 on success. ''' - ctx = repo[opts.get('rev')] + ctx = cmdutil.revsingle(repo, opts.get('rev')) if not ctx: raise util.Abort(_('no working directory: please specify a revision')) node = ctx.node() @@ -239,7 +239,7 @@ opts['date'] = util.parsedate(date) cmdutil.bail_if_changed(repo) - node = repo.lookup(rev) + node = cmdutil.revsingle(repo, rev).node() op1, op2 = repo.dirstate.parents() a = repo.changelog.ancestor(op1, node) @@ -404,7 +404,8 @@ raise util.Abort(_("%s killed") % command) else: transition = "bad" - ctx = repo[rev or '.'] + ctx = cmdutil.revsingle(repo, rev) + rev = None # clear for future iterations state[transition].append(ctx.node()) ui.status(_('Changeset %d:%s: %s\n') % (ctx, ctx, transition)) check_state(state, interactive=False) @@ -456,6 +457,95 @@ cmdutil.bail_if_changed(repo) return hg.clean(repo, node) +def bookmark(ui, repo, mark=None, rev=None, force=False, delete=False, rename=None): + '''track a line of development with movable markers + + Bookmarks are pointers to certain commits that move when + committing. Bookmarks are local. They can be renamed, copied and + deleted. It is possible to use bookmark names in :hg:`merge` and + :hg:`update` to merge and update respectively to a given bookmark. + + You can use :hg:`bookmark NAME` to set a bookmark on the working + directory's parent revision with the given name. If you specify + a revision using -r REV (where REV may be an existing bookmark), + the bookmark is assigned to that revision. + + Bookmarks can be pushed and pulled between repositories (see :hg:`help + push` and :hg:`help pull`). This requires both the local and remote + repositories to support bookmarks. For versions prior to 1.8, this means + the bookmarks extension must be enabled. 
+ ''' + hexfn = ui.debugflag and hex or short + marks = repo._bookmarks + cur = repo.changectx('.').node() + + if rename: + if rename not in marks: + raise util.Abort(_("a bookmark of this name does not exist")) + if mark in marks and not force: + raise util.Abort(_("a bookmark of the same name already exists")) + if mark is None: + raise util.Abort(_("new bookmark name required")) + marks[mark] = marks[rename] + del marks[rename] + if repo._bookmarkcurrent == rename: + bookmarks.setcurrent(repo, mark) + bookmarks.write(repo) + return + + if delete: + if mark is None: + raise util.Abort(_("bookmark name required")) + if mark not in marks: + raise util.Abort(_("a bookmark of this name does not exist")) + if mark == repo._bookmarkcurrent: + bookmarks.setcurrent(repo, None) + del marks[mark] + bookmarks.write(repo) + return + + if mark is not None: + if "\n" in mark: + raise util.Abort(_("bookmark name cannot contain newlines")) + mark = mark.strip() + if not mark: + raise util.Abort(_("bookmark names cannot consist entirely of " + "whitespace")) + if mark in marks and not force: + raise util.Abort(_("a bookmark of the same name already exists")) + if ((mark in repo.branchtags() or mark == repo.dirstate.branch()) + and not force): + raise util.Abort( + _("a bookmark cannot have the name of an existing branch")) + if rev: + marks[mark] = repo.lookup(rev) + else: + marks[mark] = repo.changectx('.').node() + bookmarks.setcurrent(repo, mark) + bookmarks.write(repo) + return + + if mark is None: + if rev: + raise util.Abort(_("bookmark name required")) + if len(marks) == 0: + ui.status(_("no bookmarks set\n")) + else: + for bmark, n in sorted(marks.iteritems()): + current = repo._bookmarkcurrent + if bmark == current and n == cur: + prefix, label = '*', 'bookmarks.current' + else: + prefix, label = ' ', '' + + if ui.quiet: + ui.write("%s\n" % bmark, label=label) + else: + ui.write(" %s %-25s %d:%s\n" % ( + prefix, bmark, repo.changelog.rev(n), hexfn(n)), + label=label) + return + def branch(ui, repo, label=None, **opts): """set or show the current branch name @@ -483,15 +573,14 @@ repo.dirstate.setbranch(label) ui.status(_('reset working directory to branch %s\n') % label) elif label: - utflabel = encoding.fromlocal(label) - if not opts.get('force') and utflabel in repo.branchtags(): + if not opts.get('force') and label in repo.branchtags(): if label not in [p.branch() for p in repo.parents()]: raise util.Abort(_('a branch of the same name already exists' " (use 'hg update' to switch to it)")) - repo.dirstate.setbranch(utflabel) + repo.dirstate.setbranch(label) ui.status(_('marked working directory as branch %s\n') % label) else: - ui.write("%s\n" % encoding.tolocal(repo.dirstate.branch())) + ui.write("%s\n" % repo.dirstate.branch()) def branches(ui, repo, active=False, closed=False): """list repository named branches @@ -520,9 +609,8 @@ for isactive, node, tag in branches: if (not active) or isactive: - encodedtag = encoding.tolocal(tag) if ui.quiet: - ui.write("%s\n" % encodedtag) + ui.write("%s\n" % tag) else: hn = repo.lookup(node) if isactive: @@ -538,10 +626,10 @@ notice = _(' (inactive)') if tag == repo.dirstate.branch(): label = 'branches.current' - rev = str(node).rjust(31 - encoding.colwidth(encodedtag)) + rev = str(node).rjust(31 - encoding.colwidth(tag)) rev = ui.label('%s:%s' % (rev, hexfunc(hn)), 'log.changeset') - encodedtag = ui.label(encodedtag, label) - ui.write("%s %s%s\n" % (encodedtag, rev, notice)) + tag = ui.label(tag, label) + ui.write("%s %s%s\n" % (tag, rev, notice)) def 
bundle(ui, repo, fname, dest=None, **opts): """create a changegroup file @@ -568,11 +656,14 @@ Returns 0 on success, 1 if no changes found. """ - revs = opts.get('rev') or None + revs = None + if 'rev' in opts: + revs = cmdutil.revrange(repo, opts['rev']) + if opts.get('all'): base = ['null'] else: - base = opts.get('base') + base = cmdutil.revrange(repo, opts.get('base')) if base: if dest: raise util.Abort(_("--base is incompatible with specifying " @@ -654,6 +745,7 @@ if opts.get('decode'): data = repo.wwritedata(abs, data) fp.write(data) + fp.close() err = 0 return err @@ -666,12 +758,12 @@ basename of the source. The location of the source is added to the new repository's - .hg/hgrc file, as the default to be used for future pulls. + ``.hg/hgrc`` file, as the default to be used for future pulls. See :hg:`help urls` for valid source format details. It is possible to specify an ``ssh://`` URL as the destination, but no - .hg/hgrc and working directory will be created on the remote side. + ``.hg/hgrc`` and working directory will be created on the remote side. Please see :hg:`help urls` for important details about ``ssh://`` URLs. A set of changesets (tags, or branch names) to pull may be specified @@ -737,7 +829,7 @@ """commit the specified files or all outstanding changes Commit changes to the given files into the repository. Unlike a - centralized RCS, this operation is a local operation. See + centralized SCM, this operation is a local operation. See :hg:`push` for a way to actively distribute your changes. If a list of files is omitted, all changes reported by :hg:`status` @@ -1022,7 +1114,7 @@ def debugrebuildstate(ui, repo, rev="tip"): """rebuild the dirstate as it would look like for the given revision""" - ctx = repo[rev] + ctx = cmdutil.revsingle(repo, rev) wlock = repo.wlock() try: repo.dirstate.rebuild(ctx.node(), ctx.manifest()) @@ -1112,7 +1204,7 @@ key, old, new = keyinfo r = target.pushkey(namespace, key, old, new) ui.status(str(r) + '\n') - return not(r) + return not r else: for k, v in target.listkeys(namespace).iteritems(): ui.write("%s\t%s\n" % (k.encode('string-escape'), @@ -1136,12 +1228,12 @@ Returns 0 on success. 
""" - if not rev2: - rev2 = hex(nullid) + r1 = cmdutil.revsingle(repo, rev1).node() + r2 = cmdutil.revsingle(repo, rev2, 'null').node() wlock = repo.wlock() try: - repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2)) + repo.dirstate.setparents(r1, r2) finally: wlock.release() @@ -1170,9 +1262,8 @@ ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f)) def debugsub(ui, repo, rev=None): - if rev == '': - rev = None - for k, v in sorted(repo[rev].substate.items()): + ctx = cmdutil.revsingle(repo, rev, None) + for k, v in sorted(ctx.substate.items()): ui.write('path %s\n' % k) ui.write(' source %s\n' % v[0]) ui.write(' revision %s\n' % v[1]) @@ -1256,6 +1347,14 @@ m = util.matchdate(range) ui.write("match: %s\n" % m(d[0])) +def debugignore(ui, repo, *values, **opts): + """display the combined ignore pattern""" + ignore = repo.dirstate._ignore + if hasattr(ignore, 'includepat'): + ui.write("%s\n" % ignore.includepat) + else: + raise util.Abort(_("no ignore patterns found")) + def debugindex(ui, repo, file_, **opts): """dump the contents of an index file""" r = None @@ -1431,7 +1530,7 @@ def debugrename(ui, repo, file1, *pats, **opts): """dump rename information""" - ctx = repo[opts.get('rev')] + ctx = cmdutil.revsingle(repo, opts.get('rev')) m = cmdutil.match(repo, (file1,) + pats, opts) for abs in ctx.walk(m): fctx = ctx[abs] @@ -1804,10 +1903,9 @@ Returns 0 if matching heads are found, 1 if not. """ - if opts.get('rev'): - start = repo.lookup(opts['rev']) - else: - start = None + start = None + if 'rev' in opts: + start = cmdutil.revsingle(repo, opts['rev'], None).node() if opts.get('topo'): heads = [repo[h] for h in repo.heads(start)] @@ -1824,8 +1922,7 @@ heads += [repo[h] for h in ls if rev(h) in descendants] if branchrevs: - decode, encode = encoding.fromlocal, encoding.tolocal - branches = set(repo[decode(br)].branch() for br in branchrevs) + branches = set(repo[br].branch() for br in branchrevs) heads = [h for h in heads if h.branch() in branches] if not opts.get('closed'): @@ -1838,7 +1935,7 @@ if branchrevs: haveheads = set(h.branch() for h in heads) if branches - haveheads: - headless = ', '.join(encode(b) for b in branches - haveheads) + headless = ', '.join(b for b in branches - haveheads) msg = _('no open branch heads found on branches %s') if opts.get('rev'): msg += _(' (started at %s)' % opts['rev']) @@ -2031,7 +2128,7 @@ 'extensions\n')) def helpextcmd(name): - cmd, ext, mod = extensions.disabledcmd(name, ui.config('ui', 'strict')) + cmd, ext, mod = extensions.disabledcmd(ui, name, ui.config('ui', 'strict')) doc = gettext(mod.__doc__).splitlines()[0] msg = help.listexts(_("'%s' is provided by the following " @@ -2196,14 +2293,14 @@ output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]), (changed) and "+" or "")) else: - ctx = repo[rev] + ctx = cmdutil.revsingle(repo, rev) if default or id: output = [hexfunc(ctx.node())] if num: output.append(str(ctx.rev())) if repo.local() and default and not ui.quiet: - b = encoding.tolocal(ctx.branch()) + b = ctx.branch() if b != 'default': output.append("(%s)" % b) @@ -2213,7 +2310,7 @@ output.append(t) if branch: - output.append(encoding.tolocal(ctx.branch())) + output.append(ctx.branch()) if tags: output.extend(ctx.tags()) @@ -2275,6 +2372,7 @@ d = opts["base"] strip = opts["strip"] wlock = lock = None + msgs = [] def tryone(ui, hunk): tmpname, message, user, date, branch, nodeid, p1, p2 = \ @@ -2325,7 +2423,10 @@ finally: files = cmdutil.updatedir(ui, repo, files, similarity=sim / 100.0) - if not 
opts.get('no_commit'): + if opts.get('no_commit'): + if message: + msgs.append(message) + else: if opts.get('exact'): m = None else: @@ -2374,6 +2475,8 @@ if not haspatch: raise util.Abort(_('no diffs found')) + if msgs: + repo.opener('last-message.txt', 'wb').write('\n* * *\n'.join(msgs)) finally: release(lock, wlock) @@ -2394,6 +2497,13 @@ if opts.get('bundle') and opts.get('subrepos'): raise util.Abort(_('cannot combine --bundle and --subrepos')) + if opts.get('bookmarks'): + source, branches = hg.parseurl(ui.expandpath(source), + opts.get('branch')) + other = hg.repository(hg.remoteui(repo, opts), source) + ui.status(_('comparing with %s\n') % url.hidepassword(source)) + return bookmarks.diff(ui, repo, other) + ret = hg.incoming(ui, repo, source, opts) return ret @@ -2433,7 +2543,7 @@ Returns 0 if a match is found, 1 otherwise. """ end = opts.get('print0') and '\0' or '\n' - rev = opts.get('rev') or None + rev = cmdutil.revsingle(repo, opts.get('rev'), None).node() ret = 1 m = cmdutil.match(repo, pats, opts, default='relglob') @@ -2568,7 +2678,7 @@ node = rev decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '} - ctx = repo[node] + ctx = cmdutil.revsingle(repo, node) for f in ctx: if ui.debugflag: ui.write("%40s " % hex(ctx.manifest()[f])) @@ -2611,7 +2721,7 @@ node = opts.get('rev') if not node: - branch = repo.changectx(None).branch() + branch = repo[None].branch() bheads = repo.branchheads(branch) if len(bheads) > 2: raise util.Abort(_( @@ -2637,6 +2747,8 @@ raise util.Abort(_('working dir not at a head rev - ' 'use "hg update" or merge with an explicit rev')) node = parent == bheads[0] and bheads[-1] or bheads[0] + else: + node = cmdutil.revsingle(repo, node).node() if opts.get('preview'): # find nodes that are ancestors of p2 but not of p1 @@ -2668,6 +2780,14 @@ Returns 0 if there are outgoing changes, 1 otherwise. """ + + if opts.get('bookmarks'): + dest = ui.expandpath(dest or 'default-push', dest or 'default') + dest, branches = hg.parseurl(dest, opts.get('branch')) + other = hg.repository(hg.remoteui(repo, opts), dest) + ui.status(_('comparing with %s\n') % url.hidepassword(dest)) + return bookmarks.diff(ui, other, repo) + ret = hg.outgoing(ui, repo, dest, opts) return ret @@ -2682,11 +2802,8 @@ Returns 0 on success. 
""" - rev = opts.get('rev') - if rev: - ctx = repo[rev] - else: - ctx = repo[None] + + ctx = cmdutil.revsingle(repo, opts.get('rev'), None) if file_: m = cmdutil.match(repo, (file_,), opts) @@ -2787,6 +2904,16 @@ other = hg.repository(hg.remoteui(repo, opts), source) ui.status(_('pulling from %s\n') % url.hidepassword(source)) revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev')) + + if opts.get('bookmark'): + if not revs: + revs = [] + rb = other.listkeys('bookmarks') + for b in opts['bookmark']: + if b not in rb: + raise util.Abort(_('remote bookmark %s not found!') % b) + revs.append(rb[b]) + if revs: try: revs = [other.lookup(rev) for rev in revs] @@ -2800,10 +2927,21 @@ checkout = str(repo.changelog.rev(other.lookup(checkout))) repo._subtoppath = source try: - return postincoming(ui, repo, modheads, opts.get('update'), checkout) + ret = postincoming(ui, repo, modheads, opts.get('update'), checkout) + finally: del repo._subtoppath + # update specified bookmarks + if opts.get('bookmark'): + for b in opts['bookmark']: + # explicit pull overrides local bookmark if any + ui.status(_("importing bookmark %s\n") % b) + repo._bookmarks[b] = repo[rb[b]].node() + bookmarks.write(repo) + + return ret + def push(ui, repo, dest=None, **opts): """push changes to the specified destination @@ -2833,6 +2971,17 @@ Returns 0 if push was successful, 1 if nothing to push. """ + + if opts.get('bookmark'): + for b in opts['bookmark']: + # translate -B options to -r so changesets get pushed + if b in repo._bookmarks: + opts.setdefault('rev', []).append(b) + else: + # if we try to push a deleted bookmark, translate it to null + # this lets simultaneous -r, -b options continue working + opts.setdefault('rev', []).append("null") + dest = ui.expandpath(dest or 'default-push', dest or 'default') dest, branches = hg.parseurl(dest, opts.get('branch')) revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev')) @@ -2851,9 +3000,33 @@ return False finally: del repo._subtoppath - r = repo.push(other, opts.get('force'), revs=revs, - newbranch=opts.get('new_branch')) - return r == 0 + result = repo.push(other, opts.get('force'), revs=revs, + newbranch=opts.get('new_branch')) + + result = (result == 0) + + if opts.get('bookmark'): + rb = other.listkeys('bookmarks') + for b in opts['bookmark']: + # explicit push overrides remote bookmark if any + if b in repo._bookmarks: + ui.status(_("exporting bookmark %s\n") % b) + new = repo[b].hex() + elif b in rb: + ui.status(_("deleting remote bookmark %s\n") % b) + new = '' # delete + else: + ui.warn(_('bookmark %s does not exist on the local ' + 'or remote repository!\n') % b) + return 2 + old = rb.get(b, '') + r = other.pushkey('bookmarks', b, old, new) + if not r: + ui.warn(_('updating bookmark %s failed!\n') % b) + if not result: + result = 2 + + return result def recover(ui, repo): """roll back an interrupted transaction @@ -3094,15 +3267,16 @@ raise util.Abort(_("you can't specify a revision and a date")) opts["rev"] = cmdutil.finddate(ui, repo, opts["date"]) + parent, p2 = repo.dirstate.parents() + if not opts.get('rev') and p2 != nullid: + raise util.Abort(_('uncommitted merge - ' + 'use "hg update", see "hg help revert"')) + if not pats and not opts.get('all'): raise util.Abort(_('no files or directories specified; ' 'use --all to revert the whole repo')) - parent, p2 = repo.dirstate.parents() - if not opts.get('rev') and p2 != nullid: - raise util.Abort(_('uncommitted merge - please provide a ' - 'specific revision')) - ctx = 
repo[opts.get('rev')] + ctx = cmdutil.revsingle(repo, opts.get('rev')) node = ctx.node() mf = ctx.manifest() if node == parent: @@ -3241,7 +3415,7 @@ continue audit_path(f) try: - util.unlink(repo.wjoin(f)) + util.unlinkpath(repo.wjoin(f)) except OSError: pass repo.dirstate.remove(f) @@ -3722,7 +3896,7 @@ bheads = repo.branchheads() if not opts.get('force') and bheads and p1 not in bheads: raise util.Abort(_('not at a branch head (use -f to force)')) - r = repo[rev_].node() + r = cmdutil.revsingle(repo, rev_).node() if not message: # we don't translate commit messages @@ -3856,6 +4030,8 @@ if not rev: rev = node + # if we defined a bookmark, we have to remember the original bookmark name + brev = rev rev = cmdutil.revsingle(repo, rev, rev).rev() if check and clean: @@ -3873,9 +4049,14 @@ rev = cmdutil.finddate(ui, repo, date) if clean or check: - return hg.clean(repo, rev) + ret = hg.clean(repo, rev) else: - return hg.update(repo, rev) + ret = hg.update(repo, rev) + + if brev in repo._bookmarks: + bookmarks.setcurrent(repo, brev) + + return ret def verify(ui, repo): """verify the integrity of the repository @@ -4066,6 +4247,13 @@ _('use command to check changeset state'), _('CMD')), ('U', 'noupdate', False, _('do not update to target'))], _("[-gbsr] [-U] [-c CMD] [REV]")), + "bookmarks": + (bookmark, + [('f', 'force', False, _('force')), + ('r', 'rev', '', _('revision'), _('REV')), + ('d', 'delete', False, _('delete a given bookmark')), + ('m', 'rename', '', _('rename a given bookmark'), _('NAME'))], + _('hg bookmarks [-f] [-d] [-m NAME] [-r REV] [NAME]')), "branch": (branch, [('f', 'force', None, @@ -4165,6 +4353,7 @@ _('[-e] DATE [RANGE]')), "debugdata": (debugdata, [], _('FILE REV')), "debugfsinfo": (debugfsinfo, [], _('[PATH]')), + "debugignore": (debugignore, [], ''), "debugindex": (debugindex, [('f', 'format', 0, _('revlog format'), _('FORMAT'))], _('FILE')), @@ -4282,6 +4471,7 @@ _('file to store the bundles into'), _('FILE')), ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')), + ('B', 'bookmarks', False, _("compare bookmarks")), ('b', 'branch', [], _('a specific branch you would like to pull'), _('BRANCH')), ] + logopts + remoteopts + subrepoopts, @@ -4350,6 +4540,7 @@ _('a changeset intended to be included in the destination'), _('REV')), ('n', 'newest-first', None, _('show newest record first')), + ('B', 'bookmarks', False, _("compare bookmarks")), ('b', 'branch', [], _('a specific branch you would like to push'), _('BRANCH')), ] + logopts + remoteopts + subrepoopts, @@ -4369,6 +4560,7 @@ _('run even when remote repository is unrelated')), ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')), + ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')), ('b', 'branch', [], _('a specific branch you would like to pull'), _('BRANCH')), ] + remoteopts, @@ -4379,6 +4571,7 @@ ('r', 'rev', [], _('a changeset intended to be included in the destination'), _('REV')), + ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')), ('b', 'branch', [], _('a specific branch you would like to push'), _('BRANCH')), ('', 'new-branch', False, _('allow pushing a new branch')),
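Note: throughout this commands.py hunk, direct repo[rev] / repo.lookup(rev) calls are replaced by cmdutil.revsingle(repo, rev[, default]), which turns one user-supplied revision spec (now including bookmark names) into a single changectx, with a fallback when nothing was given. A rough, hedged approximation of that shape over a toy mapping, not the real revset-backed helper::

  def revsingle(repo, spec, default='.'):
      # resolve one revision spec to a single context, falling back to
      # `default` ('.' or None in the calls above) when no spec was given
      if not spec:
          spec = default
      return repo[spec]

  # toy stand-in: revision specs map straight to "contexts"
  toyrepo = {'.': 'working-parent-ctx', 'tip': 'tip-ctx', None: 'working-ctx'}
  print(revsingle(toyrepo, 'tip'))        # explicit revision
  print(revsingle(toyrepo, ''))           # falls back to '.'
  print(revsingle(toyrepo, '', None))     # default=None behaves like repo[None]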
--- a/mercurial/config.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/config.py Wed Feb 16 14:13:22 2011 -0600 @@ -130,7 +130,7 @@ name = m.group(1) if sections and section not in sections: continue - if self.get(section, name) != None: + if self.get(section, name) is not None: del self._data[section][name] continue
--- a/mercurial/context.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/context.py Wed Feb 16 14:13:22 2011 -0600 @@ -7,7 +7,7 @@ from node import nullid, nullrev, short, hex from i18n import _ -import ancestor, bdiff, error, util, subrepo, patch +import ancestor, bdiff, error, util, subrepo, patch, encoding import os, errno, stat propertycache = util.propertycache @@ -109,11 +109,13 @@ def description(self): return self._changeset[4] def branch(self): - return self._changeset[5].get("branch") + return encoding.tolocal(self._changeset[5].get("branch")) def extra(self): return self._changeset[5] def tags(self): return self._repo.nodetags(self._node) + def bookmarks(self): + return self._repo.nodebookmarks(self._node) def parents(self): """return contexts for each parent changeset""" @@ -179,7 +181,7 @@ """ # deal with workingctxs n2 = c2._node - if n2 == None: + if n2 is None: n2 = c2._parents[0]._node n = self._repo.changelog.ancestor(self._node, n2) return changectx(self._repo, n) @@ -591,9 +593,8 @@ if extra: self._extra = extra.copy() if 'branch' not in self._extra: - branch = self._repo.dirstate.branch() try: - branch = branch.decode('UTF-8').encode('UTF-8') + branch = encoding.fromlocal(self._repo.dirstate.branch()) except UnicodeDecodeError: raise util.Abort(_('branch name not in UTF-8!')) self._extra['branch'] = branch @@ -603,6 +604,9 @@ def __str__(self): return str(self._parents[0]) + "+" + def __repr__(self): + return "<workingctx %s>" % str(self) + def __nonzero__(self): return True @@ -712,13 +716,14 @@ assert self._clean is not None # must call status first return self._clean def branch(self): - return self._extra['branch'] + return encoding.tolocal(self._extra['branch']) def extra(self): return self._extra def tags(self): t = [] - [t.extend(p.tags()) for p in self.parents()] + for p in self.parents(): + t.extend(p.tags()) return t def children(self): @@ -827,7 +832,7 @@ if unlink: for f in list: try: - util.unlink(self._repo.wjoin(f)) + util.unlinkpath(self._repo.wjoin(f)) except OSError, inst: if inst.errno != errno.ENOENT: raise @@ -902,6 +907,9 @@ def __str__(self): return "%s@%s" % (self.path(), self._changectx) + def __repr__(self): + return "<workingfilectx %s>" % str(self) + def data(self): return self._repo.wread(self._path) def renamed(self): @@ -1042,7 +1050,7 @@ def clean(self): return self._status[6] def branch(self): - return self._extra['branch'] + return encoding.tolocal(self._extra['branch']) def extra(self): return self._extra def flags(self, f):
--- a/mercurial/demandimport.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/demandimport.py Wed Feb 16 14:13:22 2011 -0600 @@ -78,10 +78,10 @@ self._load() setattr(self._module, attr, val) -def _demandimport(name, globals=None, locals=None, fromlist=None, level=None): +def _demandimport(name, globals=None, locals=None, fromlist=None, level=-1): if not locals or name in ignore or fromlist == ('*',): # these cases we can't really delay - if level is None: + if level == -1: return _origimport(name, globals, locals, fromlist) else: return _origimport(name, globals, locals, fromlist, level) @@ -91,7 +91,10 @@ base, rest = name.split('.', 1) # email.__init__ loading email.mime if globals and globals.get('__name__', None) == base: - return _origimport(name, globals, locals, fromlist) + if level != -1: + return _origimport(name, globals, locals, fromlist, level) + else: + return _origimport(name, globals, locals, fromlist) # if a is already demand-loaded, add b to its submodule list if base in locals: if isinstance(locals[base], _demandmod): @@ -99,7 +102,7 @@ return locals[base] return _demandmod(name, globals, locals) else: - if level is not None: + if level != -1: # from . import b,c,d or from .a import b,c,d return _origimport(name, globals, locals, fromlist, level) # from a import b,c,d @@ -111,7 +114,7 @@ mod = getattr(mod, comp) for x in fromlist: # set requested submodules for demand load - if not(hasattr(mod, x)): + if not hasattr(mod, x): setattr(mod, x, _demandmod(x, mod.__dict__, locals)) return mod
--- a/mercurial/dirstate.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/dirstate.py Wed Feb 16 14:13:22 2011 -0600 @@ -7,7 +7,7 @@ from node import nullid from i18n import _ -import util, ignore, osutil, parsers +import util, ignore, osutil, parsers, encoding import struct, os, stat, errno import cStringIO @@ -36,7 +36,7 @@ class dirstate(object): - def __init__(self, opener, ui, root): + def __init__(self, opener, ui, root, validate): '''Create a new dirstate object. opener is an open()-like callable that can be used to open the @@ -44,6 +44,7 @@ the dirstate. ''' self._opener = opener + self._validate = validate self._root = root self._rootdir = os.path.join(root, '') self._dirty = False @@ -79,7 +80,9 @@ @propertycache def _pl(self): try: - st = self._opener("dirstate").read(40) + fp = self._opener("dirstate") + st = fp.read(40) + fp.close() l = len(st) if l == 40: return st[:20], st[20:40] @@ -197,10 +200,10 @@ yield x def parents(self): - return self._pl + return [self._validate(p) for p in self._pl] def branch(self): - return self._branch + return encoding.tolocal(self._branch) def setparents(self, p1, p2=nullid): self._dirty = self._dirtypl = True @@ -209,8 +212,8 @@ def setbranch(self, branch): if branch in ['tip', '.', 'null']: raise util.Abort(_('the name \'%s\' is reserved') % branch) - self._branch = branch - self._opener("branch", "w").write(branch + '\n') + self._branch = encoding.fromlocal(branch) + self._opener("branch", "w").write(self._branch + '\n') def _read(self): self._map = {} @@ -229,7 +232,8 @@ self._pl = p def invalidate(self): - for a in "_map _copymap _foldmap _branch _pl _dirs _ignore".split(): + for a in ("_map", "_copymap", "_foldmap", "_branch", "_pl", "_dirs", + "_ignore"): if a in self.__dict__: delattr(self, a) self._dirty = False
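Note: dirstate now takes a validate callable and runs both working-directory parents through it, so a dirstate that points at changesets the changelog no longer knows about degrades gracefully; the actual callback (defined in the localrepo.py hunk further down) warns once and substitutes the null id. A small sketch of that callback pattern, with hypothetical names::

  import sys

  NULLID = '\0' * 20

  def makevalidator(known, warn=sys.stderr.write):
      # build a validate(node) callable in the spirit of the localrepo
      # hunk below: unknown working parents are reported once, then
      # mapped to the null id instead of failing later
      warned = [False]
      def validate(node):
          if node == NULLID or node in known:
              return node
          if not warned[0]:
              warned[0] = True
              warn("warning: ignoring unknown working parent!\n")
          return NULLID
      return validate

  validate = makevalidator(set(['\x11' * 20]))
  print(validate('\x11' * 20) == '\x11' * 20)   # known parent passes through
  print(validate('\x22' * 20) == NULLID)        # unknown parent becomes null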
--- a/mercurial/discovery.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/discovery.py Wed Feb 16 14:13:22 2011 -0600 @@ -220,8 +220,6 @@ # - a local outgoing head descended from update # - a remote head that's known locally and not # ancestral to an outgoing head - # - # New named branches cannot be created without --force. # 1. Create set of branches involved in the push. branches = set(repo[n].branch() for n in outg) @@ -280,20 +278,30 @@ # 5. Check for new heads. # If there are more heads after the push than before, a suitable - # warning, depending on unsynced status, is displayed. + # error message, depending on unsynced status, is displayed. + error = None for branch in branches: - if len(newmap[branch]) > len(oldmap[branch]): + newhs = set(newmap[branch]) + oldhs = set(oldmap[branch]) + if len(newhs) > len(oldhs): + if error is None: + if branch: + error = _("push creates new remote heads " + "on branch '%s'!") % branch + else: + error = _("push creates new remote heads!") + if branch in unsynced: + hint = _("you should pull and merge or " + "use push -f to force") + else: + hint = _("did you forget to merge? " + "use push -f to force") if branch: - msg = _("push creates new remote heads " - "on branch '%s'!") % branch - else: - msg = _("push creates new remote heads!") - - if branch in unsynced: - hint = _("you should pull and merge or use push -f to force") - else: - hint = _("did you forget to merge? use push -f to force") - raise util.Abort(msg, hint=hint) + repo.ui.debug("new remote heads on branch '%s'\n" % branch) + for h in (newhs - oldhs): + repo.ui.debug("new remote head %s\n" % short(h)) + if error: + raise util.Abort(error, hint=hint) # 6. Check for unsynced changes on involved branches. if unsynced:
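Note: the rewritten step 5 above compares each branch's remote head set before and after the proposed push and picks the hint ("pull and merge" vs. "did you forget to merge") based on whether the branch is unsynced, instead of aborting at the first offender. A condensed sketch of just the comparison, over plain sets::

  def check_new_heads(oldmap, newmap, unsynced):
      # oldmap/newmap: branch -> heads on the remote before/after the push
      error = hint = None
      for branch in sorted(newmap):
          oldhs = set(oldmap.get(branch, ()))
          newhs = set(newmap[branch])
          if len(newhs) > len(oldhs) and error is None:
              error = "push creates new remote heads on branch '%s'!" % branch
              if branch in unsynced:
                  hint = "you should pull and merge or use push -f to force"
              else:
                  hint = "did you forget to merge? use push -f to force"
      return error, hint

  print(check_new_heads({'default': set(['a'])},
                        {'default': set(['a', 'b'])}, unsynced=set()))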
--- a/mercurial/dispatch.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/dispatch.py Wed Feb 16 14:13:22 2011 -0600 @@ -221,15 +221,20 @@ def fn(ui, *args): env = {'HG_ARGS': ' '.join((self.name,) + args)} def _checkvar(m): - if int(m.groups()[0]) <= len(args): + if m.groups()[0] == '$': + return m.group() + elif int(m.groups()[0]) <= len(args): return m.group() else: + ui.debug(_("No argument found for substitution" + "of %i variable in alias '%s' definition.") + % (int(m.groups()[0]), self.name)) return '' - cmd = re.sub(r'\$(\d+)', _checkvar, self.definition[1:]) + cmd = re.sub(r'\$(\d+|\$)', _checkvar, self.definition[1:]) replace = dict((str(i + 1), arg) for i, arg in enumerate(args)) replace['0'] = self.name replace['@'] = ' '.join(args) - cmd = util.interpolate(r'\$', replace, cmd) + cmd = util.interpolate(r'\$', replace, cmd, escape_prefix=True) return util.system(cmd, environ=env) self.fn = fn return @@ -290,7 +295,7 @@ ui.debug("alias '%s' shadows command '%s'\n" % (self.name, self.cmdname)) - if self.definition.startswith('!'): + if hasattr(self, 'shell'): return self.fn(ui, *args, **opts) else: try: @@ -589,8 +594,12 @@ msg = ' '.join(' ' in a and repr(a) or a for a in fullargs) ui.log("command", msg + "\n") d = lambda: util.checksignature(func)(ui, *args, **cmdoptions) - return runcommand(lui, repo, cmd, fullargs, ui, options, d, - cmdpats, cmdoptions) + try: + return runcommand(lui, repo, cmd, fullargs, ui, options, d, + cmdpats, cmdoptions) + finally: + if repo: + repo.close() def _runcommand(ui, options, cmd, cmdfunc): def checkargs():
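Note: the shell-alias change above extends argument substitution: $1..$n are replaced by positional arguments, $$ survives as an escape for util.interpolate to handle, and references to missing arguments are dropped with a debug message. The real code does this in two passes (_checkvar, then util.interpolate); the sketch below collapses them into one for illustration::

  import re

  def expandalias(definition, args):
      # replace $1..$n with positional args, keep a literal "$$" untouched,
      # and drop references to arguments that were never supplied
      def sub(m):
          tok = m.group(1)
          if tok == '$':
              return m.group(0)
          idx = int(tok)
          if idx <= len(args):
              return args[idx - 1]
          return ''
      return re.sub(r'\$(\d+|\$)', sub, definition)

  print(expandalias('echo $1 costs $$5', ['apples']))  # echo apples costs $$5
  print(expandalias('echo $1 $2', ['one']))            # echo one  ($2 dropped)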
--- a/mercurial/encoding.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/encoding.py Wed Feb 16 14:13:22 2011 -0600 @@ -48,6 +48,16 @@ encodingmode = os.environ.get("HGENCODINGMODE", "strict") fallbackencoding = 'ISO-8859-1' +class localstr(str): + '''This class allows strings that are unmodified to be + round-tripped to the local encoding and back''' + def __new__(cls, u, l): + s = str.__new__(cls, l) + s._utf8 = u + return s + def __hash__(self): + return hash(self._utf8) # avoid collisions in local string space + def tolocal(s): """ Convert a string from internal UTF-8 to local encoding @@ -57,17 +67,45 @@ other character sets. We attempt to decode everything strictly using UTF-8, then Latin-1, and failing that, we use UTF-8 and replace unknown characters. + + The localstr class is used to cache the known UTF-8 encoding of + strings next to their local representation to allow lossless + round-trip conversion back to UTF-8. + + >>> u = 'foo: \\xc3\\xa4' # utf-8 + >>> l = tolocal(u) + >>> l + 'foo: ?' + >>> fromlocal(l) + 'foo: \\xc3\\xa4' + >>> u2 = 'foo: \\xc3\\xa1' + >>> d = { l: 1, tolocal(u2): 2 } + >>> d # no collision + {'foo: ?': 1, 'foo: ?': 2} + >>> 'foo: ?' in d + False + >>> l1 = 'foo: \\xe4' # historical latin1 fallback + >>> l = tolocal(l1) + >>> l + 'foo: ?' + >>> fromlocal(l) # magically in utf-8 + 'foo: \\xc3\\xa4' """ + for e in ('UTF-8', fallbackencoding): try: u = s.decode(e) # attempt strict decoding - return u.encode(encoding, "replace") + if e == 'UTF-8': + return localstr(s, u.encode(encoding, "replace")) + else: + return localstr(u.encode('UTF-8'), + u.encode(encoding, "replace")) except LookupError, k: raise error.Abort("%s, please check your locale settings" % k) except UnicodeDecodeError: pass u = s.decode("utf-8", "replace") # last ditch - return u.encode(encoding, "replace") + return u.encode(encoding, "replace") # can't round-trip def fromlocal(s): """ @@ -79,6 +117,11 @@ 'replace', which replaces unknown characters with a special Unicode character, and 'ignore', which drops the character. """ + + # can we do a lossless round-trip? + if isinstance(s, localstr): + return s._utf8 + try: return s.decode(encoding, encodingmode).encode("utf-8") except UnicodeDecodeError, inst:
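Note: stripped of Mercurial's encoding tables, the localstr trick documented above is a str subclass that keeps the exact UTF-8 bytes next to the (possibly lossy) locale rendering, so converting back is exact. The same mechanism in isolation::

  class localstr(str):
      # a locale-encoded string that remembers the UTF-8 bytes it came
      # from, so the reverse conversion is lossless even after replacement
      def __new__(cls, u, l):
          s = str.__new__(cls, l)
          s._utf8 = u
          return s
      def __hash__(self):
          return hash(self._utf8)   # distinct originals stay distinct keys

  def fromlocal(s):
      if isinstance(s, localstr):
          return s._utf8            # lossless round-trip
      return s                      # (the real code decodes/encodes here)

  lossy = localstr('caf\xc3\xa9', 'caf?')  # pretend '?' was the locale's best effort
  print(fromlocal(lossy))                  # the original UTF-8 bytes come back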
--- a/mercurial/extensions.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/extensions.py Wed Feb 16 14:13:22 2011 -0600 @@ -11,6 +11,7 @@ _extensions = {} _order = [] +_ignore = ['hbisect', 'bookmarks'] def extensions(): for name in _order: @@ -45,6 +46,8 @@ shortname = name[6:] else: shortname = name + if shortname in _ignore: + return None if shortname in _extensions: return _extensions[shortname] _extensions[shortname] = None @@ -248,7 +251,7 @@ if name in paths: return _disabledhelp(paths[name]) -def disabledcmd(cmd, strict=False): +def disabledcmd(ui, cmd, strict=False): '''import disabled extensions until cmd is found. returns (cmdname, extname, doc)''' @@ -266,6 +269,10 @@ getattr(mod, 'cmdtable', {}), strict) except (error.AmbiguousCommand, error.UnknownCommand): return + except Exception: + ui.warn(_('warning: error finding commands in %s\n') % path) + ui.traceback() + return for c in aliases: if c.startswith(cmd): cmd = c
--- a/mercurial/filelog.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/filelog.py Wed Feb 16 14:13:22 2011 -0600 @@ -7,6 +7,17 @@ import revlog +def _parsemeta(text): + if not text.startswith('\1\n'): + return {} + s = text.index('\1\n', 2) + mt = text[2:s] + m = {} + for l in mt.splitlines(): + k, v = l.split(": ", 1) + m[k] = v + return m + class filelog(revlog.revlog): def __init__(self, opener, path): revlog.revlog.__init__(self, opener, @@ -19,18 +30,6 @@ s = t.index('\1\n', 2) return t[s + 2:] - def _readmeta(self, node): - t = self.revision(node) - if not t.startswith('\1\n'): - return {} - s = t.index('\1\n', 2) - mt = t[2:s] - m = {} - for l in mt.splitlines(): - k, v = l.split(": ", 1) - m[k] = v - return m - def add(self, text, meta, transaction, link, p1=None, p2=None): if meta or text.startswith('\1\n'): mt = ["%s: %s\n" % (k, v) for k, v in sorted(meta.iteritems())] @@ -40,7 +39,8 @@ def renamed(self, node): if self.parents(node)[0] != revlog.nullid: return False - m = self._readmeta(node) + t = self.revision(node) + m = _parsemeta(t) if m and "copy" in m: return (m["copy"], revlog.bin(m["copyrev"])) return False
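Note: filelog keeps copy/rename information in a small header: when the stored text starts with "\1\n", everything up to the next "\1\n" is "key: value" metadata (copy, copyrev) and the rest is the file payload; the new module-level _parsemeta reads that header. A standalone sketch that also peels off the payload::

  def parsemeta(text):
      # return (metadata dict, payload) for a filelog-style text where an
      # optional "\1\n...\1\n" header carries "key: value" lines
      if not text.startswith('\1\n'):
          return {}, text
      end = text.index('\1\n', 2)
      meta = {}
      for line in text[2:end].splitlines():
          k, v = line.split(": ", 1)
          meta[k] = v
      return meta, text[end + 2:]

  raw = '\1\ncopy: old/name.c\ncopyrev: 0123456789abcdef\1\nint main() {}\n'
  print(parsemeta(raw))
  # ({'copy': 'old/name.c', 'copyrev': '0123456789abcdef'}, 'int main() {}\n')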
--- a/mercurial/help/patterns.txt Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/help/patterns.txt Wed Feb 16 14:13:22 2011 -0600 @@ -20,6 +20,11 @@ To use a Perl/Python regular expression, start a name with ``re:``. Regexp pattern matching is anchored at the root of the repository. +To read name patterns from a file, use ``listfile:`` or ``listfile0:``. +The latter expects null delimited patterns while the former expects line +feeds. Each string read from the file is itself treated as a file +pattern. + Plain examples:: path:foo/bar a name bar in a directory named foo in the root @@ -39,3 +44,8 @@ Regexp examples:: re:.*\.c$ any name ending in ".c", anywhere in the repository + +File examples:: + + listfile:list.txt read list from list.txt with one file pattern per line + listfile0:list.txt read list from list.txt with null byte delimiters
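Note: listfile: and listfile0: read one pattern per line or per NUL byte from a file and treat every entry as a pattern of its own. The splitting rule in a few lines (hypothetical helper, not the matcher's real entry point)::

  def readpatterns(data, kind):
      # NUL-delimited for listfile0:, one pattern per line otherwise
      if kind == 'listfile0':
          entries = data.split('\0')
      else:
          entries = data.splitlines()
      return [e for e in entries if e]

  print(readpatterns('glob:*.py\nre:.*\\.c$\n', 'listfile'))
  print(readpatterns('path:foo\0glob:docs/**\0', 'listfile0'))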
--- a/mercurial/help/subrepos.txt Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/help/subrepos.txt Wed Feb 16 14:13:22 2011 -0600 @@ -78,7 +78,10 @@ :commit: commit creates a consistent snapshot of the state of the entire project and its subrepositories. It does this by first attempting to commit all modified subrepositories, then recording - their state and finally committing it in the parent repository. + their state and finally committing it in the parent + repository. Mercurial can be made to abort if any subrepository + content is modified by setting "ui.commitsubrepos=no" in a + configuration file (see :hg:`help config`). :diff: diff does not recurse in subrepos unless -S/--subrepos is specified. Changes are displayed as usual, on the subrepositories
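Note: as the updated help says, commit can be made to abort on modified subrepositories by setting ui.commitsubrepos=no; the enforcing check lives in the localrepo.py hunk later in this changeset. In an hgrc that would look like::

  [ui]
  commitsubrepos = no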
--- a/mercurial/help/urls.txt Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/help/urls.txt Wed Feb 16 14:13:22 2011 -0600 @@ -4,7 +4,7 @@ file://local/filesystem/path[#revision] http://[user[:pass]@]host[:port]/[path][#revision] https://[user[:pass]@]host[:port]/[path][#revision] - ssh://[user[:pass]@]host[:port]/[path][#revision] + ssh://[user@]host[:port]/[path][#revision] Paths in the local filesystem can either point to Mercurial repositories or to bundle files (as created by :hg:`bundle` or :hg:`
--- a/mercurial/hg.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/hg.py Wed Feb 16 14:13:22 2011 -0600 @@ -32,24 +32,22 @@ return revs, revs[0] branchmap = repo.branchmap() - def primary(butf8): - if butf8 == '.': + def primary(branch): + if branch == '.': if not lrepo or not lrepo.local(): raise util.Abort(_("dirstate branch not accessible")) - butf8 = lrepo.dirstate.branch() - if butf8 in branchmap: - revs.extend(node.hex(r) for r in reversed(branchmap[butf8])) + branch = lrepo.dirstate.branch() + if branch in branchmap: + revs.extend(node.hex(r) for r in reversed(branchmap[branch])) return True else: return False for branch in branches: - butf8 = encoding.fromlocal(branch) - if not primary(butf8): + if not primary(branch): raise error.RepoLookupError(_("unknown branch '%s'") % branch) if hashbranch: - butf8 = encoding.fromlocal(hashbranch) - if not primary(butf8): + if not primary(hashbranch): revs.append(hashbranch) return revs, revs[0] @@ -365,8 +363,7 @@ except error.RepoLookupError: continue bn = dest_repo[uprev].branch() - dest_repo.ui.status(_("updating to branch %s\n") - % encoding.tolocal(bn)) + dest_repo.ui.status(_("updating to branch %s\n") % bn) _update(dest_repo, uprev) return src_repo, dest_repo @@ -398,7 +395,8 @@ return stats[3] > 0 def merge(repo, node, force=None, remind=True): - """branch merge with node, resolving changes""" + """Branch merge with node, resolving changes. Return true if any + unresolved conflicts.""" stats = mergemod.update(repo, node, True, force, False) _showstats(repo, stats) if stats[3]:
--- a/mercurial/hgweb/common.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/hgweb/common.py Wed Feb 16 14:13:22 2011 -0600 @@ -119,7 +119,10 @@ os.stat(path) ct = mimetypes.guess_type(path)[0] or "text/plain" req.respond(HTTP_OK, ct, length = os.path.getsize(path)) - return open(path, 'rb').read() + fp = open(path, 'rb') + data = fp.read() + fp.close() + return data except TypeError: raise ErrorResponse(HTTP_SERVER_ERROR, 'illegal filename') except OSError, err:
--- a/mercurial/hgweb/hgwebdir_mod.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/hgweb/hgwebdir_mod.py Wed Feb 16 14:13:22 2011 -0600 @@ -33,14 +33,23 @@ repos.append((prefix, root)) continue roothead = os.path.normpath(os.path.abspath(roothead)) - for path in util.walkrepos(roothead, followsym=True, recurse=recurse): - path = os.path.normpath(path) - name = util.pconvert(path[len(roothead):]).strip('/') - if prefix: - name = prefix + '/' + name - repos.append((name, path)) + paths = util.walkrepos(roothead, followsym=True, recurse=recurse) + repos.extend(urlrepos(prefix, roothead, paths)) return repos +def urlrepos(prefix, roothead, paths): + """yield url paths and filesystem paths from a list of repo paths + + >>> list(urlrepos('hg', '/opt', ['/opt/r', '/opt/r/r', '/opt'])) + [('hg/r', '/opt/r'), ('hg/r/r', '/opt/r/r'), ('hg', '/opt')] + >>> list(urlrepos('', '/opt', ['/opt/r', '/opt/r/r', '/opt'])) + [('r', '/opt/r'), ('r/r', '/opt/r/r'), ('', '/opt')] + """ + for path in paths: + path = os.path.normpath(path) + yield (prefix + '/' + + util.pconvert(path[len(roothead):]).lstrip('/')).strip('/'), path + class hgwebdir(object): refreshinterval = 20
--- a/mercurial/hgweb/webcommands.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/hgweb/webcommands.py Wed Feb 16 14:13:22 2011 -0600 @@ -550,7 +550,8 @@ "targetline": targetline, "line": l, "lineid": "l%d" % (lineno + 1), - "linenumber": "% 6d" % (lineno + 1)} + "linenumber": "% 6d" % (lineno + 1), + "revdate": f.date()} return tmpl("fileannotate", file=f,
--- a/mercurial/hook.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/hook.py Wed Feb 16 14:13:22 2011 -0600 @@ -92,6 +92,12 @@ for k, v in args.iteritems(): if hasattr(v, '__call__'): v = v() + if isinstance(v, dict): + # make the dictionary element order stable across Python + # implementations + v = ('{' + + ', '.join('%r: %r' % i for i in sorted(v.iteritems())) + + '}') env['HG_' + k.upper()] = v if repo:
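Note: dictionary-valued hook arguments are now rendered with their items sorted before landing in HG_* environment variables, so the text is identical across Python implementations with different dict ordering. What the rendered value looks like, using the same formatting as the hook.py change::

  def stabledict(v):
      # '{k: v, ...}' with keys sorted so the output never depends on
      # hash order
      return ('{' +
              ', '.join('%r: %r' % i for i in sorted(v.items())) +
              '}')

  print(stabledict({'new': '00changeset', 'old': 'ffchangeset'}))
  # {'new': '00changeset', 'old': 'ffchangeset'}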
--- a/mercurial/httprepo.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/httprepo.py Wed Feb 16 14:13:22 2011 -0600 @@ -160,7 +160,7 @@ break tempname = changegroup.writebundle(cg, None, type) - fp = url.httpsendfile(tempname, "rb") + fp = url.httpsendfile(self.ui, tempname, "rb") headers = {'Content-Type': 'application/mercurial-0.1'} try:
--- a/mercurial/ignore.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/ignore.py Wed Feb 16 14:13:22 2011 -0600 @@ -86,7 +86,8 @@ (f, inst.strerror)) allpats = [] - [allpats.extend(patlist) for patlist in pats.values()] + for patlist in pats.values(): + allpats.extend(patlist) if not allpats: return util.never
--- a/mercurial/localrepo.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/localrepo.py Wed Feb 16 14:13:22 2011 -0600 @@ -8,7 +8,7 @@ from node import bin, hex, nullid, nullrev, short from i18n import _ import repo, changegroup, subrepo, discovery, pushkey -import changelog, dirstate, filelog, manifest, context +import changelog, dirstate, filelog, manifest, context, bookmarks import lock, transaction, store, encoding import util, extensions, hook, error import match as matchmod @@ -105,7 +105,7 @@ self._tags = None self._tagtypes = None - self._branchcache = None # in UTF-8 + self._branchcache = None self._branchcachetip = None self.nodetagscache = None self.filterpats = {} @@ -161,6 +161,13 @@ parts.pop() return False + @util.propertycache + def _bookmarks(self): + return bookmarks.read(self) + + @util.propertycache + def _bookmarkcurrent(self): + return bookmarks.readcurrent(self) @propertycache def changelog(self): @@ -178,7 +185,19 @@ @propertycache def dirstate(self): - return dirstate.dirstate(self.opener, self.ui, self.root) + warned = [0] + def validate(node): + try: + r = self.changelog.rev(node) + return node + except error.LookupError: + if not warned[0]: + warned[0] = True + self.ui.warn(_("warning: ignoring unknown" + " working parent %s!\n") % short(node)) + return nullid + + return dirstate.dirstate(self.opener, self.ui, self.root, validate) def __getitem__(self, changeid): if changeid is None: @@ -264,6 +283,8 @@ # committed tags are stored in UTF-8 writetags(fp, names, encoding.fromlocal, prevtags) + fp.close() + if '.hgtags' not in self.dirstate: self[None].add(['.hgtags']) @@ -379,6 +400,13 @@ tags.sort() return self.nodetagscache.get(node, []) + def nodebookmarks(self, node): + marks = [] + for bookmark, n in self._bookmarks.iteritems(): + if n == node: + marks.append(bookmark) + return sorted(marks) + def _branchtags(self, partial, lrev): # TODO: rename this function? 
tiprev = len(self) - 1 @@ -424,11 +452,10 @@ bt[bn] = tip return bt - def _readbranchcache(self): partial = {} try: - f = self.opener("branchheads.cache") + f = self.opener("cache/branchheads") lines = f.read().split('\n') f.close() except (IOError, OSError): @@ -444,7 +471,8 @@ if not l: continue node, label = l.split(" ", 1) - partial.setdefault(label.strip(), []).append(bin(node)) + label = encoding.tolocal(label.strip()) + partial.setdefault(label, []).append(bin(node)) except KeyboardInterrupt: raise except Exception, inst: @@ -455,11 +483,11 @@ def _writebranchcache(self, branches, tip, tiprev): try: - f = self.opener("branchheads.cache", "w", atomictemp=True) + f = self.opener("cache/branchheads", "w", atomictemp=True) f.write("%s %s\n" % (hex(tip), tiprev)) for label, nodes in branches.iteritems(): for node in nodes: - f.write("%s %s\n" % (hex(node), label)) + f.write("%s %s\n" % (hex(node), encoding.fromlocal(label))) f.rename() except (IOError, OSError): pass @@ -500,6 +528,8 @@ n = self.changelog._match(key) if n: return n + if key in self._bookmarks: + return self._bookmarks[key] if key in self.tags(): return self.tags()[key] if key in self.branchtags(): @@ -618,10 +648,6 @@ def wwrite(self, filename, data, flags): data = self._filter(self._decodefilterpats, filename, data) - try: - os.unlink(self.wjoin(filename)) - except OSError: - pass if 'l' in flags: self.wopener.symlink(data, filename) else: @@ -648,7 +674,8 @@ except IOError: ds = "" self.opener("journal.dirstate", "w").write(ds) - self.opener("journal.branch", "w").write(self.dirstate.branch()) + self.opener("journal.branch", "w").write( + encoding.fromlocal(self.dirstate.branch())) self.opener("journal.desc", "w").write("%d\n%s\n" % (len(self), desc)) renames = [(self.sjoin("journal"), self.sjoin("undo")), @@ -700,13 +727,16 @@ transaction.rollback(self.sopener, self.sjoin("undo"), self.ui.warn) util.rename(self.join("undo.dirstate"), self.join("dirstate")) + if os.path.exists(self.join('undo.bookmarks')): + util.rename(self.join('undo.bookmarks'), + self.join('bookmarks')) try: branch = self.opener("undo.branch").read() self.dirstate.setbranch(branch) except IOError: self.ui.warn(_("Named branch could not be reset, " "current branch still is: %s\n") - % encoding.tolocal(self.dirstate.branch())) + % self.dirstate.branch()) self.invalidate() self.dirstate.invalidate() self.destroyed() @@ -724,7 +754,7 @@ self._branchcachetip = None def invalidate(self): - for a in "changelog manifest".split(): + for a in ("changelog", "manifest", "_bookmarks", "_bookmarkscurrent"): if a in self.__dict__: delattr(self, a) self.invalidatecaches() @@ -753,8 +783,8 @@ l.lock() return l - l = self._lock(self.sjoin("lock"), wait, None, self.invalidate, - _('repository %s') % self.origroot) + l = self._lock(self.sjoin("lock"), wait, self.store.write, + self.invalidate, _('repository %s') % self.origroot) self._lockref = weakref.ref(l) return l @@ -903,6 +933,12 @@ if '.hgsubstate' not in changes[0]: changes[0].insert(0, '.hgsubstate') + if subs and not self.ui.configbool('ui', 'commitsubrepos', True): + changedsubs = [s for s in subs if wctx.sub(s).dirty(True)] + if changedsubs: + raise util.Abort(_("uncommitted changes in subrepo %s") + % changedsubs[0]) + # make sure all explicit patterns are matched if not force and match.files(): matched = set(changes[0] + changes[1] + changes[2]) @@ -968,7 +1004,11 @@ _('note: commit message saved in %s\n') % msgfn) raise - # update dirstate and mergestate + # update bookmarks, dirstate and mergestate + 
parents = (p1, p2) + if p2 == nullid: + parents = (p1,) + bookmarks.update(self, parents, ret) for f in changes[0] + changes[1]: self.dirstate.normal(f) for f in changes[2]: @@ -1202,14 +1242,14 @@ self.ui.status(_("skipping missing subrepository: %s\n") % subpath) - [l.sort() for l in r] + for l in r: + l.sort() return r def heads(self, start=None): heads = self.changelog.heads(start) # sort the output in rev descending order - heads = [(-self.changelog.rev(h), h) for h in heads] - return [n for (r, n) in sorted(heads)] + return sorted(heads, key=self.changelog.rev, reverse=True) def branchheads(self, branch=None, start=None, closed=False): '''return a (possibly filtered) list of heads for the given branch @@ -1276,26 +1316,57 @@ common, fetch, rheads = tmp if not fetch: self.ui.status(_("no changes found\n")) - return 0 - - if heads is None and fetch == [nullid]: - self.ui.status(_("requesting all changes\n")) - elif heads is None and remote.capable('changegroupsubset'): - # issue1320, avoid a race if remote changed after discovery - heads = rheads + result = 0 + else: + if heads is None and fetch == [nullid]: + self.ui.status(_("requesting all changes\n")) + elif heads is None and remote.capable('changegroupsubset'): + # issue1320, avoid a race if remote changed after discovery + heads = rheads - if heads is None: - cg = remote.changegroup(fetch, 'pull') - else: - if not remote.capable('changegroupsubset'): + if heads is None: + cg = remote.changegroup(fetch, 'pull') + elif not remote.capable('changegroupsubset'): raise util.Abort(_("partial pull cannot be done because " - "other repository doesn't support " - "changegroupsubset.")) - cg = remote.changegroupsubset(fetch, heads, 'pull') - return self.addchangegroup(cg, 'pull', remote.url(), lock=lock) + "other repository doesn't support " + "changegroupsubset.")) + else: + cg = remote.changegroupsubset(fetch, heads, 'pull') + result = self.addchangegroup(cg, 'pull', remote.url(), + lock=lock) finally: lock.release() + self.ui.debug("checking for updated bookmarks\n") + rb = remote.listkeys('bookmarks') + changed = False + for k in rb.keys(): + if k in self._bookmarks: + nr, nl = rb[k], self._bookmarks[k] + if nr in self: + cr = self[nr] + cl = self[nl] + if cl.rev() >= cr.rev(): + continue + if cr in cl.descendants(): + self._bookmarks[k] = cr.node() + changed = True + self.ui.status(_("updating bookmark %s\n") % k) + else: + self.ui.warn(_("not updating divergent" + " bookmark %s\n") % k) + if changed: + bookmarks.write(self) + + return result + + def checkpush(self, force, revs): + """Extensions can override this function if additional checks have + to be performed before pushing, or call it if they override push + command. + """ + pass + def push(self, remote, force=False, revs=None, newbranch=False): '''Push outgoing changesets (limited by revs) from the current repository to remote. Return an integer: @@ -1312,35 +1383,52 @@ # unbundle assumes local user cannot lock remote repo (new ssh # servers, http servers). + self.checkpush(force, revs) lock = None unbundle = remote.capable('unbundle') if not unbundle: lock = remote.lock() try: - ret = discovery.prepush(self, remote, force, revs, newbranch) - if ret[0] is None: - # and here we return 0 for "nothing to push" or 1 for - # "something to push but I refuse" - return ret[1] - - cg, remote_heads = ret - if unbundle: - # local repo finds heads on server, finds out what revs it must - # push. 
once revs transferred, if server finds it has - # different heads (someone else won commit/push race), server - # aborts. - if force: - remote_heads = ['force'] - # ssh: return remote's addchangegroup() - # http: return remote's addchangegroup() or 0 for error - return remote.unbundle(cg, remote_heads, 'push') - else: - # we return an integer indicating remote head count change - return remote.addchangegroup(cg, 'push', self.url(), lock=lock) + cg, remote_heads = discovery.prepush(self, remote, force, revs, + newbranch) + ret = remote_heads + if cg is not None: + if unbundle: + # local repo finds heads on server, finds out what + # revs it must push. once revs transferred, if server + # finds it has different heads (someone else won + # commit/push race), server aborts. + if force: + remote_heads = ['force'] + # ssh: return remote's addchangegroup() + # http: return remote's addchangegroup() or 0 for error + ret = remote.unbundle(cg, remote_heads, 'push') + else: + # we return an integer indicating remote head count change + ret = remote.addchangegroup(cg, 'push', self.url(), + lock=lock) finally: if lock is not None: lock.release() + self.ui.debug("checking for updated bookmarks\n") + rb = remote.listkeys('bookmarks') + for k in rb.keys(): + if k in self._bookmarks: + nr, nl = rb[k], hex(self._bookmarks[k]) + if nr in self: + cr = self[nr] + cl = self[nl] + if cl in cr.descendants(): + r = remote.pushkey('bookmarks', k, nr, nl) + if r: + self.ui.status(_("updating bookmark %s\n") % k) + else: + self.ui.warn(_('updating bookmark %s' + ' failed!\n') % k) + + return ret + def changegroupinfo(self, nodes, source): if self.ui.verbose or source == 'bundle': self.ui.status(_("%d changesets found\n") % len(nodes)) @@ -1404,9 +1492,6 @@ # Nor do we know which filenodes are missing. msng_filenode_set = {} - junk = mnfst.index[len(mnfst) - 1] # Get around a bug in lazyindex - junk = None - # A changeset always belongs to itself, so the changenode lookup # function for a changenode is identity. def identity(x): @@ -1494,8 +1579,13 @@ group = cl.group(msng_cl_lst, identity, collect) for cnt, chnk in enumerate(group): yield chnk - self.ui.progress(_('bundling changes'), cnt, unit=_('chunks')) - self.ui.progress(_('bundling changes'), None) + # revlog.group yields three entries per node, so + # dividing by 3 gives an approximation of how many + # nodes have been processed. + self.ui.progress(_('bundling'), cnt / 3, + unit=_('changesets')) + changecount = cnt / 3 + self.ui.progress(_('bundling'), None) prune(mnfst, msng_mnfst_set) add_extra_nodes(1, msng_mnfst_set) @@ -1507,10 +1597,17 @@ group = mnfst.group(msng_mnfst_lst, lambda mnode: msng_mnfst_set[mnode], filenode_collector(changedfiles)) + efiles = {} for cnt, chnk in enumerate(group): + if cnt % 3 == 1: + mnode = chnk[:20] + efiles.update(mnfst.readdelta(mnode)) yield chnk - self.ui.progress(_('bundling manifests'), cnt, unit=_('chunks')) - self.ui.progress(_('bundling manifests'), None) + # see above comment for why we divide by 3 + self.ui.progress(_('bundling'), cnt / 3, + unit=_('manifests'), total=changecount) + self.ui.progress(_('bundling'), None) + efiles = len(efiles) # These are no longer needed, dereference and toss the memory for # them. @@ -1524,8 +1621,7 @@ msng_filenode_set.setdefault(fname, {}) changedfiles.add(fname) # Go through all our files in order sorted by name. 
- cnt = 0 - for fname in sorted(changedfiles): + for idx, fname in enumerate(sorted(changedfiles)): filerevlog = self.file(fname) if not len(filerevlog): raise util.Abort(_("empty or missing revlog for %s") % fname) @@ -1548,13 +1644,16 @@ group = filerevlog.group(nodeiter, lambda fnode: missingfnodes[fnode]) for chnk in group: + # even though we print the same progress on + # most loop iterations, put the progress call + # here so that time estimates (if any) can be updated self.ui.progress( - _('bundling files'), cnt, item=fname, unit=_('chunks')) - cnt += 1 + _('bundling'), idx, item=fname, + unit=_('files'), total=efiles) yield chnk # Signal that no more groups are left. yield changegroup.closechunk() - self.ui.progress(_('bundling files'), None) + self.ui.progress(_('bundling'), None) if msng_cl_lst: self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source) @@ -1602,20 +1701,30 @@ collect = changegroup.collector(cl, mmfs, changedfiles) for cnt, chnk in enumerate(cl.group(nodes, identity, collect)): - self.ui.progress(_('bundling changes'), cnt, unit=_('chunks')) + # revlog.group yields three entries per node, so + # dividing by 3 gives an approximation of how many + # nodes have been processed. + self.ui.progress(_('bundling'), cnt / 3, unit=_('changesets')) yield chnk - self.ui.progress(_('bundling changes'), None) + changecount = cnt / 3 + self.ui.progress(_('bundling'), None) mnfst = self.manifest nodeiter = gennodelst(mnfst) + efiles = {} for cnt, chnk in enumerate(mnfst.group(nodeiter, lookuplinkrev_func(mnfst))): - self.ui.progress(_('bundling manifests'), cnt, unit=_('chunks')) + if cnt % 3 == 1: + mnode = chnk[:20] + efiles.update(mnfst.readdelta(mnode)) + # see above comment for why we divide by 3 + self.ui.progress(_('bundling'), cnt / 3, + unit=_('manifests'), total=changecount) yield chnk - self.ui.progress(_('bundling manifests'), None) + efiles = len(efiles) + self.ui.progress(_('bundling'), None) - cnt = 0 - for fname in sorted(changedfiles): + for idx, fname in enumerate(sorted(changedfiles)): filerevlog = self.file(fname) if not len(filerevlog): raise util.Abort(_("empty or missing revlog for %s") % fname) @@ -1627,10 +1736,10 @@ lookup = lookuplinkrev_func(filerevlog) for chnk in filerevlog.group(nodeiter, lookup): self.ui.progress( - _('bundling files'), cnt, item=fname, unit=_('chunks')) - cnt += 1 + _('bundling'), idx, item=fname, + total=efiles, unit=_('files')) yield chnk - self.ui.progress(_('bundling files'), None) + self.ui.progress(_('bundling'), None) yield changegroup.closechunk() @@ -1643,6 +1752,8 @@ """Add the changegroup returned by source.read() to this repo. srctype is a string like 'push', 'pull', or 'unbundle'. url is the URL of the repo where this changegroup is coming from. + If lock is not None, the function takes ownership of the lock + and releases it after the changegroup is added. Return an integer summarizing the change to this repo: - nothing changed or no source: 0 @@ -1795,6 +1906,10 @@ self.hook("incoming", node=hex(cl.node(i)), source=srctype, url=url) + # FIXME - why does this care about tip? 
+ if newheads == oldheads: + bookmarks.update(self, self.dirstate.parents(), self['tip'].node()) + # never return 0 here: if newheads < oldheads: return newheads - oldheads - 1 @@ -1803,59 +1918,63 @@ def stream_in(self, remote, requirements): - fp = remote.stream_out() - l = fp.readline() + lock = self.lock() try: - resp = int(l) - except ValueError: - raise error.ResponseError( - _('Unexpected response from remote server:'), l) - if resp == 1: - raise util.Abort(_('operation forbidden by server')) - elif resp == 2: - raise util.Abort(_('locking the remote repository failed')) - elif resp != 0: - raise util.Abort(_('the server sent an unknown error code')) - self.ui.status(_('streaming all changes\n')) - l = fp.readline() - try: - total_files, total_bytes = map(int, l.split(' ', 1)) - except (ValueError, TypeError): - raise error.ResponseError( - _('Unexpected response from remote server:'), l) - self.ui.status(_('%d files to transfer, %s of data\n') % - (total_files, util.bytecount(total_bytes))) - start = time.time() - for i in xrange(total_files): - # XXX doesn't support '\n' or '\r' in filenames + fp = remote.stream_out() l = fp.readline() try: - name, size = l.split('\0', 1) - size = int(size) + resp = int(l) + except ValueError: + raise error.ResponseError( + _('Unexpected response from remote server:'), l) + if resp == 1: + raise util.Abort(_('operation forbidden by server')) + elif resp == 2: + raise util.Abort(_('locking the remote repository failed')) + elif resp != 0: + raise util.Abort(_('the server sent an unknown error code')) + self.ui.status(_('streaming all changes\n')) + l = fp.readline() + try: + total_files, total_bytes = map(int, l.split(' ', 1)) except (ValueError, TypeError): raise error.ResponseError( _('Unexpected response from remote server:'), l) - self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size))) - # for backwards compat, name was partially encoded - ofp = self.sopener(store.decodedir(name), 'w') - for chunk in util.filechunkiter(fp, limit=size): - ofp.write(chunk) - ofp.close() - elapsed = time.time() - start - if elapsed <= 0: - elapsed = 0.001 - self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') % - (util.bytecount(total_bytes), elapsed, - util.bytecount(total_bytes / elapsed))) + self.ui.status(_('%d files to transfer, %s of data\n') % + (total_files, util.bytecount(total_bytes))) + start = time.time() + for i in xrange(total_files): + # XXX doesn't support '\n' or '\r' in filenames + l = fp.readline() + try: + name, size = l.split('\0', 1) + size = int(size) + except (ValueError, TypeError): + raise error.ResponseError( + _('Unexpected response from remote server:'), l) + self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size))) + # for backwards compat, name was partially encoded + ofp = self.sopener(store.decodedir(name), 'w') + for chunk in util.filechunkiter(fp, limit=size): + ofp.write(chunk) + ofp.close() + elapsed = time.time() - start + if elapsed <= 0: + elapsed = 0.001 + self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') % + (util.bytecount(total_bytes), elapsed, + util.bytecount(total_bytes / elapsed))) - # new requirements = old non-format requirements + new format-related - # requirements from the streamed-in repository - requirements.update(set(self.requirements) - self.supportedformats) - self._applyrequirements(requirements) - self._writerequirements() + # new requirements = old non-format requirements + new format-related + # requirements from the streamed-in repository + 
requirements.update(set(self.requirements) - self.supportedformats) + self._applyrequirements(requirements) + self._writerequirements() - self.invalidate() - return len(self.heads()) + 1 + self.invalidate() + return len(self.heads()) + 1 + finally: + lock.release() def clone(self, remote, heads=[], stream=False): '''clone remote repository.
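The push hunk above only advances a bookmark on the remote when the local target descends from the node the remote currently records, i.e. when the move is a fast-forward. A minimal sketch of that rule over a plain {node: parents} map (made-up node names, not the repository API):

    def is_fast_forward(parents, old, new):
        '''Return True if `old` is an ancestor of (or equal to) `new`.'''
        seen, stack = set(), [new]
        while stack:
            n = stack.pop()
            if n == old:
                return True
            if n in seen:
                continue
            seen.add(n)
            stack.extend(parents.get(n, ()))
        return False

    # 'new' sits on top of 'old', so the bookmark may move; 'other' is a
    # sibling branch, so pushing it would not be a fast-forward.
    parents = {'a': ['old'], 'new': ['a'], 'other': ['old']}
    assert is_fast_forward(parents, 'old', 'new')
    assert not is_fast_forward(parents, 'new', 'other')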
--- a/mercurial/lock.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/lock.py Wed Feb 16 14:13:22 2011 -0600 @@ -113,7 +113,7 @@ # held, or can race and break valid lock. try: l = lock(self.f + '.break', timeout=0) - os.unlink(self.f) + util.unlink(self.f) l.release() except error.LockError: return locker @@ -126,7 +126,7 @@ if self.releasefn: self.releasefn() try: - os.unlink(self.f) + util.unlink(self.f) except OSError: pass
--- a/mercurial/mail.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/mail.py Wed Feb 16 14:13:22 2011 -0600 @@ -33,7 +33,17 @@ def _smtp(ui): '''build an smtp connection and return a function to send mail''' local_hostname = ui.config('smtp', 'local_hostname') - s = smtplib.SMTP(local_hostname=local_hostname) + tls = ui.config('smtp', 'tls', 'none') + # backward compatible: when tls = true, we use starttls. + starttls = tls == 'starttls' or util.parsebool(tls) + smtps = tls == 'smtps' + if (starttls or smtps) and not hasattr(socket, 'ssl'): + raise util.Abort(_("can't use TLS: Python SSL support not installed")) + if smtps: + ui.note(_('(using smtps)\n')) + s = smtplib.SMTP_SSL(local_hostname=local_hostname) + else: + s = smtplib.SMTP(local_hostname=local_hostname) mailhost = ui.config('smtp', 'host') if not mailhost: raise util.Abort(_('smtp.host not configured - cannot send mail')) @@ -41,11 +51,8 @@ ui.note(_('sending mail: smtp host %s, port %s\n') % (mailhost, mailport)) s.connect(host=mailhost, port=mailport) - if ui.configbool('smtp', 'tls'): - if not hasattr(socket, 'ssl'): - raise util.Abort(_("can't use TLS: Python SSL support " - "not installed")) - ui.note(_('(using tls)\n')) + if starttls: + ui.note(_('(using starttls)\n')) s.ehlo() s.starttls() s.ehlo()
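The smtp.tls option above now accepts 'starttls', 'smtps' or the old boolean values. A minimal standalone sketch of the same dispatch, with a plain string argument standing in for ui.config('smtp', 'tls') and a simple parser standing in for util.parsebool() (both substitutions are assumptions, for illustration only):

    import smtplib

    def smtp_connect(tls_setting, host, port, local_hostname=None):
        parsebool = lambda s: s.lower() in ('1', 'yes', 'true', 'on')
        starttls = tls_setting == 'starttls' or parsebool(tls_setting)
        smtps = tls_setting == 'smtps'
        if smtps:
            s = smtplib.SMTP_SSL(local_hostname=local_hostname)  # implicit SSL
        else:
            s = smtplib.SMTP(local_hostname=local_hostname)
        s.connect(host=host, port=port)
        if starttls:
            # upgrade the plain connection after the greeting
            s.ehlo()
            s.starttls()
            s.ehlo()
        return s

With tls = true the old behaviour (STARTTLS) is kept, which matches the "backward compatible" comment in the hunk.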
--- a/mercurial/manifest.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/manifest.py Wed Feb 16 14:13:22 2011 -0600 @@ -171,19 +171,19 @@ raise AssertionError( _("failed to remove %s from manifest") % f) l = "" - if dstart != None and dstart <= start and dend >= start: + if dstart is not None and dstart <= start and dend >= start: if dend < end: dend = end if l: dline.append(l) else: - if dstart != None: + if dstart is not None: delta.append([dstart, dend, "".join(dline)]) dstart = start dend = end dline = [l] - if dstart != None: + if dstart is not None: delta.append([dstart, dend, "".join(dline)]) # apply the delta to the addlist, and get a delta for addrevision cachedelta = (self.rev(p1), addlistdelta(addlist, delta))
--- a/mercurial/match.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/match.py Wed Feb 16 14:13:22 2011 -0600 @@ -39,11 +39,11 @@ self._anypats = bool(include or exclude) if include: - im = _buildmatch(_normalize(include, 'glob', root, cwd, auditor), - '(?:/|$)') + pats = _normalize(include, 'glob', root, cwd, auditor) + self.includepat, im = _buildmatch(pats, '(?:/|$)') if exclude: - em = _buildmatch(_normalize(exclude, 'glob', root, cwd, auditor), - '(?:/|$)') + pats = _normalize(exclude, 'glob', root, cwd, auditor) + self.excludepat, em = _buildmatch(pats, '(?:/|$)') if exact: self._files = patterns pm = self.exact @@ -51,7 +51,7 @@ pats = _normalize(patterns, default, root, cwd, auditor) self._files = _roots(pats) self._anypats = self._anypats or _anypats(pats) - pm = _buildmatch(pats, '$') + self.patternspat, pm = _buildmatch(pats, '$') if patterns or exact: if include: @@ -161,7 +161,8 @@ actual pattern.""" if ':' in pat: kind, val = pat.split(':', 1) - if kind in ('re', 'glob', 'path', 'relglob', 'relpath', 'relre'): + if kind in ('re', 'glob', 'path', 'relglob', 'relpath', 'relre', + 'listfile', 'listfile0'): return kind, val return default, pat @@ -245,7 +246,7 @@ pat = '(?:%s)' % '|'.join([_regex(k, p, tail) for (k, p) in pats]) if len(pat) > 20000: raise OverflowError() - return re.compile(pat).match + return pat, re.compile(pat).match except OverflowError: # We're using a Python with a tiny regex engine and we # made it explode, so we'll divide the pattern list in two @@ -253,8 +254,9 @@ l = len(pats) if l < 2: raise - a, b = _buildmatch(pats[:l//2], tail), _buildmatch(pats[l//2:], tail) - return lambda s: a(s) or b(s) + pata, a = _buildmatch(pats[:l//2], tail), + patb, b = _buildmatch(pats[l//2:], tail) + return pat, lambda s: a(s) or b(s) except re.error: for k, p in pats: try: @@ -270,6 +272,15 @@ name = util.canonpath(root, cwd, name, auditor) elif kind in ('relglob', 'path'): name = util.normpath(name) + elif kind in ('listfile', 'listfile0'): + delimiter = kind == 'listfile0' and '\0' or '\n' + try: + files = open(name, 'r').read().split(delimiter) + files = [f for f in files if f] + except EnvironmentError: + raise util.Abort(_("unable to read file list (%s)") % name) + pats += _normalize(files, default, root, cwd, auditor) + continue pats.append((kind, name)) return pats
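The new listfile: and listfile0: kinds above read additional patterns from a file, split on newlines or NUL bytes, and feed them back through _normalize. A repository-free sketch of just the file-reading step (readpatternfile is a made-up name):

    def readpatternfile(path, nul_separated=False):
        delimiter = nul_separated and '\0' or '\n'
        fp = open(path, 'r')
        try:
            entries = fp.read().split(delimiter)
        finally:
            fp.close()
        return [e for e in entries if e]

A file containing "glob:*.py" and "re:^tests/" on separate lines would therefore contribute those two patterns exactly as if they had been given on the command line.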
--- a/mercurial/merge.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/merge.py Wed Feb 16 14:13:22 2011 -0600 @@ -32,6 +32,7 @@ else: bits = l[:-1].split("\0") self._state[bits[0]] = bits[1:] + f.close() except IOError, err: if err.errno != errno.ENOENT: raise @@ -42,6 +43,7 @@ f.write(hex(self._local) + "\n") for d, v in self._state.iteritems(): f.write("\0".join([d] + v) + "\n") + f.close() self._dirty = False def add(self, fcl, fco, fca, fd, flags): hash = util.sha1(fcl.path()).hexdigest() @@ -67,6 +69,7 @@ state, hash, lfile, afile, anode, ofile, flags = self._state[dfile] f = self._repo.opener("merge/" + hash) self._repo.wwrite(dfile, f.read(), flags) + f.close() fcd = wctx[dfile] fco = octx[ofile] fca = self._repo.filectx(afile, fileid=anode) @@ -255,6 +258,9 @@ wctx is the working copy context mctx is the context to be merged into the working copy actx is the context of the common ancestor + + Return a tuple of counts (updated, merged, removed, unresolved) that + describes how many files were affected by the update. """ updated, merged, removed, unresolved = 0, 0, 0, 0 @@ -309,7 +315,7 @@ if f == '.hgsubstate': # subrepo states need updating subrepo.submerge(repo, wctx, mctx, wctx, overwrite) try: - util.unlink(repo.wjoin(f)) + util.unlinkpath(repo.wjoin(f)) except OSError, inst: if inst.errno != errno.ENOENT: repo.ui.warn(_("update failed to remove %s: %s!\n") % @@ -347,7 +353,7 @@ repo.ui.note(_("moving %s to %s\n") % (f, fd)) t = wctx.filectx(f).data() repo.wwrite(fd, t, flags) - util.unlink(repo.wjoin(f)) + util.unlinkpath(repo.wjoin(f)) if f2: repo.ui.note(_("getting %s to %s\n") % (f2, fd)) t = mctx.filectx(f2).data() @@ -462,6 +468,8 @@ use 'hg update -C' to discard changes) 3 = abort: uncommitted local changes 4 = incompatible options (checked in commands.py) + + Return the same tuple as applyupdates(). """ onode = node @@ -524,7 +532,7 @@ action += manifestmerge(repo, wc, p2, pa, overwrite, partial) ### apply phase - if not branchmerge: # just jump to the new rev + if not branchmerge or fastforward: # just jump to the new rev fp1, fp2, xp1, xp2 = fp2, nullid, xp2, '' if not partial: repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2) @@ -533,7 +541,7 @@ if not partial: repo.dirstate.setparents(fp1, fp2) - recordupdates(repo, action, branchmerge) + recordupdates(repo, action, branchmerge and not fastforward) if not branchmerge and not fastforward: repo.dirstate.setbranch(p2.branch()) finally:
--- a/mercurial/minirst.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/minirst.py Wed Feb 16 14:13:22 2011 -0600 @@ -14,27 +14,8 @@ are just indented blocks that look like they are nested. This relies on the user to keep the right indentation for the blocks. -It only supports a small subset of reStructuredText: - -- sections - -- paragraphs - -- literal blocks - -- definition lists - -- specific admonitions - -- bullet lists (items must start with '-') - -- enumerated lists (no autonumbering) - -- field lists (colons cannot be escaped) - -- option lists (supports only long options without arguments) - -- inline literals (no other inline markup is not recognized) +Remember to update http://mercurial.selenic.com/wiki/HelpStyleGuide +when adding support for new constructs. """ import re, sys @@ -118,7 +99,8 @@ return blocks _bulletre = re.compile(r'(-|[0-9A-Za-z]+\.|\(?[0-9A-Za-z]+\)|\|) ') -_optionre = re.compile(r'^(--[a-z-]+)((?:[ =][a-zA-Z][\w-]*)? +)(.*)$') +_optionre = re.compile(r'^(-([a-zA-Z0-9]), )?(--[a-z0-9-]+)' + r'((.*) +)(.*)$') _fieldre = re.compile(r':(?![: ])([^:]*)(?<! ):[ ]+(.*)') _definitionre = re.compile(r'[^ ]') @@ -192,6 +174,42 @@ return blocks +def updateoptionlists(blocks): + i = 0 + while i < len(blocks): + if blocks[i]['type'] != 'option': + i += 1 + continue + + optstrwidth = 0 + j = i + while j < len(blocks) and blocks[j]['type'] == 'option': + m = _optionre.match(blocks[j]['lines'][0]) + + shortoption = m.group(2) + group3 = m.group(3) + longoption = group3[2:].strip() + desc = m.group(6).strip() + longoptionarg = m.group(5).strip() + blocks[j]['lines'][0] = desc + + noshortop = '' + if not shortoption: + noshortop = ' ' + + opt = "%s%s" % (shortoption and "-%s " % shortoption or '', + ("%s--%s %s") % (noshortop, longoption, + longoptionarg)) + opt = opt.rstrip() + blocks[j]['optstr'] = opt + optstrwidth = max(optstrwidth, encoding.colwidth(opt)) + j += 1 + + for block in blocks[i:j]: + block['optstrwidth'] = optstrwidth + i = j + 1 + return blocks + def prunecontainers(blocks, keep): """Prune unwanted containers. @@ -297,8 +315,11 @@ i = 0 while i < len(blocks): b = blocks[i] - if b['type'] == 'paragraph' and b['lines'][0].startswith('.. '): + if b['type'] == 'paragraph' and (b['lines'][0].startswith('.. 
') or + b['lines'] == ['..']): del blocks[i] + if i < len(blocks) and blocks[i]['type'] == 'margin': + del blocks[i] else: i += 1 return blocks @@ -338,6 +359,17 @@ 'tip': _('Tip:'), 'warning': _('Warning!')} +def formatoption(block, width): + desc = ' '.join(map(str.strip, block['lines'])) + colwidth = encoding.colwidth(block['optstr']) + usablewidth = width - 1 + hanging = block['optstrwidth'] + initindent = '%s%s ' % (block['optstr'], ' ' * ((hanging - colwidth))) + hangindent = ' ' * (encoding.colwidth(initindent) + 1) + return ' %s' % (util.wrap(desc, usablewidth, + initindent=initindent, + hangindent=hangindent)) + def formatblock(block, width): """Format a block according to width.""" if width <= 0: @@ -394,9 +426,7 @@ key = key.ljust(_fieldwidth) block['lines'][0] = key + block['lines'][0] elif block['type'] == 'option': - m = _optionre.match(block['lines'][0]) - option, arg, rest = m.groups() - subindent = indent + (len(option) + len(arg)) * ' ' + return formatoption(block, width) text = ' '.join(map(str.strip, block['lines'])) return util.wrap(text, width=width, @@ -416,8 +446,9 @@ blocks = hgrole(blocks) blocks = splitparagraphs(blocks) blocks = updatefieldlists(blocks) + blocks = updateoptionlists(blocks) + blocks = addmargins(blocks) blocks = prunecomments(blocks) - blocks = addmargins(blocks) blocks = findadmonitions(blocks) text = '\n'.join(formatblock(b, width) for b in blocks) if keep is None: @@ -443,8 +474,9 @@ blocks = debug(inlineliterals, blocks) blocks = debug(splitparagraphs, blocks) blocks = debug(updatefieldlists, blocks) + blocks = debug(updateoptionlists, blocks) blocks = debug(findsections, blocks) + blocks = debug(addmargins, blocks) blocks = debug(prunecomments, blocks) - blocks = debug(addmargins, blocks) blocks = debug(findadmonitions, blocks) print '\n'.join(formatblock(b, 30) for b in blocks)
--- a/mercurial/osutil.c Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/osutil.c Wed Feb 16 14:13:22 2011 -0600 @@ -436,7 +436,14 @@ } else flags = _O_TEXT; - if (plus) { + if (m0 == 'r' && !plus) { + flags |= _O_RDONLY; + access = GENERIC_READ; + } else { + /* + work around http://support.microsoft.com/kb/899149 and + set _O_RDWR for 'w' and 'a', even if mode has no '+' + */ flags |= _O_RDWR; access = GENERIC_READ | GENERIC_WRITE; fpmode[fppos++] = '+'; @@ -446,25 +453,13 @@ switch (m0) { case 'r': creation = OPEN_EXISTING; - if (!plus) { - flags |= _O_RDONLY; - access = GENERIC_READ; - } break; case 'w': creation = CREATE_ALWAYS; - if (!plus) { - access = GENERIC_WRITE; - flags |= _O_WRONLY; - } break; case 'a': creation = OPEN_ALWAYS; flags |= _O_APPEND; - if (!plus) { - flags |= _O_WRONLY; - access = GENERIC_WRITE; - } break; default: PyErr_Format(PyExc_ValueError,
--- a/mercurial/parser.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/parser.py Wed Feb 16 14:13:22 2011 -0600 @@ -22,6 +22,7 @@ self._tokenizer = tokenizer self._elements = elements self._methods = methods + self.current = None def _advance(self): 'advance the tokenizer' t = self.current @@ -76,7 +77,7 @@ def parse(self, message): 'generate a parse tree from a message' self._iter = self._tokenizer(message) - self.current = self._iter.next() + self._advance() return self._parse() def eval(self, tree): 'recursively evaluate a parse tree using node methods'
--- a/mercurial/parsers.c Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/parsers.c Wed Feb 16 14:13:22 2011 -0600 @@ -244,41 +244,6 @@ const char nullid[20]; const int nullrev = -1; -/* create an index tuple, insert into the nodemap */ -static PyObject * _build_idx_entry(PyObject *nodemap, int n, uint64_t offset_flags, - int comp_len, int uncomp_len, int base_rev, - int link_rev, int parent_1, int parent_2, - const char *c_node_id) -{ - int err; - PyObject *entry, *node_id, *n_obj; - - node_id = PyBytes_FromStringAndSize(c_node_id, 20); - n_obj = PyInt_FromLong(n); - - if (!node_id || !n_obj) - err = -1; - else - err = PyDict_SetItem(nodemap, node_id, n_obj); - - Py_XDECREF(n_obj); - if (err) - goto error_dealloc; - - entry = Py_BuildValue("LiiiiiiN", offset_flags, comp_len, - uncomp_len, base_rev, link_rev, - parent_1, parent_2, node_id); - if (!entry) - goto error_dealloc; - PyObject_GC_UnTrack(entry); /* don't waste time with this */ - - return entry; - -error_dealloc: - Py_XDECREF(node_id); - return NULL; -} - /* RevlogNG format (all in big endian, data may be inlined): * 6 bytes: offset * 2 bytes: flags @@ -290,8 +255,8 @@ * 4 bytes: parent 2 revision * 32 bytes: nodeid (only 20 bytes used) */ -static int _parse_index_ng (const char *data, int size, int inlined, - PyObject *index, PyObject *nodemap) +static int _parse_index_ng(const char *data, int size, int inlined, + PyObject *index) { PyObject *entry; int n = 0, err; @@ -321,13 +286,15 @@ parent_2 = ntohl(*((uint32_t *)(decode + 28))); c_node_id = decode + 32; - entry = _build_idx_entry(nodemap, n, offset_flags, - comp_len, uncomp_len, base_rev, - link_rev, parent_1, parent_2, - c_node_id); + entry = Py_BuildValue("Liiiiiis#", offset_flags, comp_len, + uncomp_len, base_rev, link_rev, + parent_1, parent_2, c_node_id, 20); + if (!entry) return 0; + PyObject_GC_UnTrack(entry); /* don't waste time with this */ + if (inlined) { err = PyList_Append(index, entry); Py_DECREF(entry); @@ -348,12 +315,14 @@ return 0; } - /* create the nullid/nullrev entry in the nodemap and the - * magic nullid entry in the index at [-1] */ - entry = _build_idx_entry(nodemap, - nullrev, 0, 0, 0, -1, -1, -1, -1, nullid); + /* create the magic nullid entry in the index at [-1] */ + entry = Py_BuildValue("Liiiiiis#", (uint64_t)0, 0, 0, -1, -1, -1, -1, nullid, 20); + if (!entry) return 0; + + PyObject_GC_UnTrack(entry); /* don't waste time with this */ + if (inlined) { err = PyList_Append(index, entry); Py_DECREF(entry); @@ -366,17 +335,16 @@ } /* This function parses a index file and returns a Python tuple of the - * following format: (index, nodemap, cache) + * following format: (index, cache) * * index: a list of tuples containing the RevlogNG records - * nodemap: a dict mapping node ids to indices in the index list * cache: if data is inlined, a tuple (index_file_content, 0) else None */ -static PyObject *parse_index(PyObject *self, PyObject *args) +static PyObject *parse_index2(PyObject *self, PyObject *args) { const char *data; int size, inlined; - PyObject *rval = NULL, *index = NULL, *nodemap = NULL, *cache = NULL; + PyObject *rval = NULL, *index = NULL, *cache = NULL; PyObject *data_obj = NULL, *inlined_obj; if (!PyArg_ParseTuple(args, "s#O", &data, &size, &inlined_obj)) @@ -384,16 +352,12 @@ inlined = inlined_obj && PyObject_IsTrue(inlined_obj); /* If no data is inlined, we know the size of the index list in - * advance: size divided by size of one one revlog record (64 bytes) - * plus one for the nullid */ + * advance: size divided by the size of one 
revlog record (64 bytes) + * plus one for nullid */ index = inlined ? PyList_New(0) : PyList_New(size / 64 + 1); if (!index) goto quit; - nodemap = PyDict_New(); - if (!nodemap) - goto quit; - /* set up the cache return value */ if (inlined) { /* Note that the reference to data_obj is only borrowed */ @@ -406,18 +370,17 @@ Py_INCREF(Py_None); } - /* actually populate the index and the nodemap with data */ - if (!_parse_index_ng (data, size, inlined, index, nodemap)) + /* actually populate the index with data */ + if (!_parse_index_ng(data, size, inlined, index)) goto quit; - rval = Py_BuildValue("NNN", index, nodemap, cache); + rval = Py_BuildValue("NN", index, cache); if (!rval) goto quit; return rval; quit: Py_XDECREF(index); - Py_XDECREF(nodemap); Py_XDECREF(cache); Py_XDECREF(rval); return NULL; @@ -429,7 +392,7 @@ static PyMethodDef methods[] = { {"parse_manifest", parse_manifest, METH_VARARGS, "parse a manifest\n"}, {"parse_dirstate", parse_dirstate, METH_VARARGS, "parse a dirstate\n"}, - {"parse_index", parse_index, METH_VARARGS, "parse a revlog index\n"}, + {"parse_index2", parse_index2, METH_VARARGS, "parse a revlog index\n"}, {NULL, NULL} };
--- a/mercurial/patch.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/patch.py Wed Feb 16 14:13:22 2011 -0600 @@ -6,7 +6,7 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. -import cStringIO, email.Parser, os, re +import cStringIO, email.Parser, os, errno, re import tempfile, zlib from i18n import _ @@ -429,10 +429,16 @@ # Ensure supplied data ends in fname, being a regular file or # a symlink. cmdutil.updatedir will -too magically- take care # of setting it to the proper type afterwards. + st_mode = None islink = os.path.islink(fname) if islink: fp = cStringIO.StringIO() else: + try: + st_mode = os.lstat(fname).st_mode & 0777 + except OSError, e: + if e.errno != errno.ENOENT: + raise fp = self.opener(fname, 'w') try: if self.eolmode == 'auto': @@ -451,6 +457,8 @@ fp.writelines(lines) if islink: self.opener.symlink(fp.getvalue(), fname) + if st_mode is not None: + os.chmod(fname, st_mode) finally: fp.close() @@ -976,7 +984,7 @@ fp.seek(pos) return gitpatches -def iterhunks(ui, fp, sourcefile=None): +def iterhunks(ui, fp): """Read a patch and yield the following events: - ("file", afile, bfile, firsthunk): select a new target file. - ("hunk", hunk): a new hunk is ready to be applied, follows a @@ -997,10 +1005,6 @@ BFILE = 1 context = None lr = linereader(fp) - # gitworkdone is True if a git operation (copy, rename, ...) was - # performed already for the current file. Useful when the file - # section may have no hunk. - gitworkdone = False while True: newfile = newgitfile = False @@ -1012,7 +1016,7 @@ current_hunk.fix_newline() yield 'hunk', current_hunk current_hunk = None - if ((sourcefile or state == BFILE) and ((not context and x[0] == '@') or + if (state == BFILE and ((not context and x[0] == '@') or ((context is not False) and x.startswith('***************')))): if context is None and x.startswith('***************'): context = True @@ -1034,7 +1038,6 @@ elif x.startswith('diff --git'): # check for git diff, scanning the whole patch file if needed m = gitre.match(x) - gitworkdone = False if m: afile, bfile = m.group(1, 2) if not git: @@ -1049,7 +1052,6 @@ if gp and (gp.op in ('COPY', 'DELETE', 'RENAME', 'ADD') or gp.mode): afile = bfile - gitworkdone = True newgitfile = True elif x.startswith('---'): # check for a unified diff @@ -1077,9 +1079,6 @@ afile = parsefilename(x) bfile = parsefilename(l2) - if newfile: - gitworkdone = False - if newgitfile or newfile: emitfile = True state = BFILE @@ -1091,7 +1090,7 @@ raise PatchError(_("malformed patch %s %s") % (afile, current_hunk.desc)) -def applydiff(ui, fp, changed, strip=1, sourcefile=None, eolmode='strict'): +def applydiff(ui, fp, changed, strip=1, eolmode='strict'): """Reads a patch from fp and tries to apply it. The dict 'changed' is filled in with all of the filenames changed @@ -1105,13 +1104,10 @@ Callers probably want to call 'cmdutil.updatedir' after this to apply certain categories of changes not done by this function. 
""" - return _applydiff( - ui, fp, patchfile, copyfile, - changed, strip=strip, sourcefile=sourcefile, eolmode=eolmode) + return _applydiff(ui, fp, patchfile, copyfile, changed, strip=strip, + eolmode=eolmode) - -def _applydiff(ui, fp, patcher, copyfn, changed, strip=1, - sourcefile=None, eolmode='strict'): +def _applydiff(ui, fp, patcher, copyfn, changed, strip=1, eolmode='strict'): rejects = 0 err = 0 current_file = None @@ -1126,7 +1122,7 @@ current_file.write_rej() return len(current_file.rej) - for state, values in iterhunks(ui, fp, sourcefile): + for state, values in iterhunks(ui, fp): if state == 'hunk': if not current_file: continue @@ -1139,14 +1135,10 @@ rejects += closefile() afile, bfile, first_hunk = values try: - if sourcefile: - current_file = patcher(ui, sourcefile, opener, - eolmode=eolmode) - else: - current_file, missing = selectfile(afile, bfile, - first_hunk, strip) - current_file = patcher(ui, current_file, opener, - missing=missing, eolmode=eolmode) + current_file, missing = selectfile(afile, bfile, + first_hunk, strip) + current_file = patcher(ui, current_file, opener, + missing=missing, eolmode=eolmode) except PatchError, err: ui.warn(str(err) + '\n') current_file = None @@ -1537,6 +1529,8 @@ yield text def diffstatdata(lines): + diffre = re.compile('^diff .*-r [a-z0-9]+\s(.*)$') + filename, adds, removes = None, 0, 0 for line in lines: if line.startswith('diff'): @@ -1547,9 +1541,9 @@ adds, removes = 0, 0 if line.startswith('diff --git'): filename = gitre.search(line).group(1) - else: + elif line.startswith('diff -r'): # format: "diff -r ... -r ... filename" - filename = line.split(None, 5)[-1] + filename = diffre.search(line).group(1) elif line.startswith('+') and not line.startswith('+++'): adds += 1 elif line.startswith('-') and not line.startswith('---'):
--- a/mercurial/posix.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/posix.py Wed Feb 16 14:13:22 2011 -0600 @@ -13,6 +13,8 @@ nulldev = '/dev/null' normpath = os.path.normpath samestat = os.path.samestat +os_link = os.link +unlink = os.unlink rename = os.rename expandglobs = False @@ -23,6 +25,10 @@ '''return true if it is safe to hold open file handles to hardlinks''' return True +def nlinks(name): + '''return number of hardlinks for the given file''' + return os.lstat(name).st_nlink + def rcfiles(path): rcs = [os.path.join(path, 'hgrc')] rcdir = os.path.join(path, 'hgrc.d') @@ -71,20 +77,26 @@ if l: if not stat.S_ISLNK(s): # switch file to link - data = open(f).read() + fp = open(f) + data = fp.read() + fp.close() os.unlink(f) try: os.symlink(data, f) except: # failed to make a link, rewrite file - open(f, "w").write(data) + fp = open(f, "w") + fp.write(data) + fp.close() # no chmod needed at this point return if stat.S_ISLNK(s): # switch link to file data = os.readlink(f) os.unlink(f) - open(f, "w").write(data) + fp = open(f, "w") + fp.write(data) + fp.close() s = 0666 & ~umask # avoid restatting for chmod sx = s & 0100
--- a/mercurial/pure/parsers.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/pure/parsers.py Wed Feb 16 14:13:22 2011 -0600 @@ -24,7 +24,7 @@ else: mfdict[f] = bin(n) -def parse_index(data, inline): +def parse_index2(data, inline): def gettype(q): return int(q & 0xFFFF) @@ -36,16 +36,14 @@ s = struct.calcsize(indexformatng) index = [] cache = None - nodemap = {nullid: nullrev} n = off = 0 - # if we're not using lazymap, always read the whole index + l = len(data) - s append = index.append if inline: cache = (0, data) while off <= l: e = _unpack(indexformatng, data[off:off + s]) - nodemap[e[7]] = n append(e) n += 1 if e[1] < 0: @@ -54,7 +52,6 @@ else: while off <= l: e = _unpack(indexformatng, data[off:off + s]) - nodemap[e[7]] = n append(e) n += 1 off += s @@ -67,7 +64,7 @@ # add the magic null revision at -1 index.append((0, 0, 0, -1, -1, -1, -1, nullid)) - return index, nodemap, cache + return index, cache def parse_dirstate(dmap, copymap, st): parents = [st[:20], st[20: 40]]
--- a/mercurial/pushkey.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/pushkey.py Wed Feb 16 14:13:22 2011 -0600 @@ -5,13 +5,16 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. +import bookmarks + def _nslist(repo): n = {} for k in _namespaces: n[k] = "" return n -_namespaces = {"namespaces": (lambda *x: False, _nslist)} +_namespaces = {"namespaces": (lambda *x: False, _nslist), + "bookmarks": (bookmarks.pushbookmark, bookmarks.listbookmarks)} def register(namespace, pushkey, listkeys): _namespaces[namespace] = (pushkey, listkeys)
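The pushkey table above wires bookmarks up as a wire-protocol namespace. Any extension can do the same through register(); the sketch below uses a hypothetical 'notes' namespace purely to show the shape of the two callbacks (the names _pushnote and _listnotes are invented):

    from mercurial import pushkey

    def _pushnote(repo, key, old, new):
        # runs on the receiving repository; return True to report success
        repo.ui.note('note %s: %r -> %r\n' % (key, old, new))
        return True

    def _listnotes(repo):
        # must return a {key: value} dict for the whole namespace
        return {'example': 'value'}

    pushkey.register('notes', _pushnote, _listnotes)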
--- a/mercurial/repair.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/repair.py Wed Feb 16 14:13:22 2011 -0600 @@ -6,7 +6,7 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. -import changegroup +import changegroup, bookmarks from node import nullrev, short from i18n import _ import os @@ -105,6 +105,13 @@ saveheads.difference_update(parents) saveheads.add(r) + bm = repo._bookmarks + updatebm = [] + for m in bm: + rev = repo[bm[m]].rev() + if rev in tostrip: + updatebm.append(m) + saveheads = [cl.node(r) for r in saveheads] files = _collectfiles(repo, striprev) @@ -155,6 +162,11 @@ f.close() if not keeppartialbundle: os.unlink(chgrpfile) + + for m in updatebm: + bm[m] = repo['.'].node() + bookmarks.write(repo) + except: if backupfile: ui.warn(_("strip failed, full bundle stored in '%s'\n")
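The strip changes above remember which bookmarks point into the revisions being removed and re-point them at the working directory parent once the strip is done. The bookkeeping itself is simple; a repository-free sketch with plain dicts and sets in place of repo objects:

    def move_stripped_bookmarks(bookmarks, tostrip, newtarget):
        '''bookmarks: {name: rev}; tostrip: set of revs being removed.'''
        updated = dict(bookmarks)
        for name, rev in bookmarks.items():
            if rev in tostrip:
                updated[name] = newtarget
        return updated

    # 'wip' pointed at a stripped revision, so it moves to 7; 'stable' is kept.
    print move_stripped_bookmarks({'stable': 5, 'wip': 9}, set([8, 9, 10]), 7)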
--- a/mercurial/repo.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/repo.py Wed Feb 16 14:13:22 2011 -0600 @@ -35,3 +35,6 @@ def cancopy(self): return self.local() + + def close(self): + pass
--- a/mercurial/revlog.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/revlog.py Wed Feb 16 14:13:22 2011 -0600 @@ -38,11 +38,9 @@ REVIDX_PUNCHED_FLAG = 2 REVIDX_KNOWN_FLAGS = REVIDX_PUNCHED_FLAG | REVIDX_PARENTDELTA -# amount of data read unconditionally, should be >= 4 -# when not inline: threshold for using lazy index -_prereadsize = 1048576 # max size of revlog with inline data _maxinline = 131072 +_chunksize = 1048576 RevlogError = error.RevlogError LookupError = error.LookupError @@ -121,209 +119,6 @@ return bin[1:] raise RevlogError(_("unknown compression type %r") % t) -class lazyparser(object): - """ - this class avoids the need to parse the entirety of large indices - """ - - # lazyparser is not safe to use on windows if win32 extensions not - # available. it keeps file handle open, which make it not possible - # to break hardlinks on local cloned repos. - - def __init__(self, dataf): - try: - size = util.fstat(dataf).st_size - except AttributeError: - size = 0 - self.dataf = dataf - self.s = struct.calcsize(indexformatng) - self.datasize = size - self.l = size // self.s - self.index = [None] * self.l - self.map = {nullid: nullrev} - self.allmap = 0 - self.all = 0 - self.mapfind_count = 0 - - def loadmap(self): - """ - during a commit, we need to make sure the rev being added is - not a duplicate. This requires loading the entire index, - which is fairly slow. loadmap can load up just the node map, - which takes much less time. - """ - if self.allmap: - return - end = self.datasize - self.allmap = 1 - cur = 0 - count = 0 - blocksize = self.s * 256 - self.dataf.seek(0) - while cur < end: - data = self.dataf.read(blocksize) - off = 0 - for x in xrange(256): - n = data[off + ngshaoffset:off + ngshaoffset + 20] - self.map[n] = count - count += 1 - if count >= self.l: - break - off += self.s - cur += blocksize - - def loadblock(self, blockstart, blocksize, data=None): - if self.all: - return - if data is None: - self.dataf.seek(blockstart) - if blockstart + blocksize > self.datasize: - # the revlog may have grown since we've started running, - # but we don't have space in self.index for more entries. - # limit blocksize so that we don't get too much data. - blocksize = max(self.datasize - blockstart, 0) - data = self.dataf.read(blocksize) - lend = len(data) // self.s - i = blockstart // self.s - off = 0 - # lazyindex supports __delitem__ - if lend > len(self.index) - i: - lend = len(self.index) - i - for x in xrange(lend): - if self.index[i + x] is None: - b = data[off : off + self.s] - self.index[i + x] = b - n = b[ngshaoffset:ngshaoffset + 20] - self.map[n] = i + x - off += self.s - - def findnode(self, node): - """search backwards through the index file for a specific node""" - if self.allmap: - return None - - # hg log will cause many many searches for the manifest - # nodes. After we get called a few times, just load the whole - # thing. 
- if self.mapfind_count > 8: - self.loadmap() - if node in self.map: - return node - return None - self.mapfind_count += 1 - last = self.l - 1 - while self.index[last] != None: - if last == 0: - self.all = 1 - self.allmap = 1 - return None - last -= 1 - end = (last + 1) * self.s - blocksize = self.s * 256 - while end >= 0: - start = max(end - blocksize, 0) - self.dataf.seek(start) - data = self.dataf.read(end - start) - findend = end - start - while True: - # we're searching backwards, so we have to make sure - # we don't find a changeset where this node is a parent - off = data.find(node, 0, findend) - findend = off - if off >= 0: - i = off / self.s - off = i * self.s - n = data[off + ngshaoffset:off + ngshaoffset + 20] - if n == node: - self.map[n] = i + start / self.s - return node - else: - break - end -= blocksize - return None - - def loadindex(self, i=None, end=None): - if self.all: - return - all = False - if i is None: - blockstart = 0 - blocksize = (65536 / self.s) * self.s - end = self.datasize - all = True - else: - if end: - blockstart = i * self.s - end = end * self.s - blocksize = end - blockstart - else: - blockstart = (i & ~1023) * self.s - blocksize = self.s * 1024 - end = blockstart + blocksize - while blockstart < end: - self.loadblock(blockstart, blocksize) - blockstart += blocksize - if all: - self.all = True - -class lazyindex(object): - """a lazy version of the index array""" - def __init__(self, parser): - self.p = parser - def __len__(self): - return len(self.p.index) - def load(self, pos): - if pos < 0: - pos += len(self.p.index) - self.p.loadindex(pos) - return self.p.index[pos] - def __getitem__(self, pos): - return _unpack(indexformatng, self.p.index[pos] or self.load(pos)) - def __setitem__(self, pos, item): - self.p.index[pos] = _pack(indexformatng, *item) - def __delitem__(self, pos): - del self.p.index[pos] - def insert(self, pos, e): - self.p.index.insert(pos, _pack(indexformatng, *e)) - def append(self, e): - self.p.index.append(_pack(indexformatng, *e)) - -class lazymap(object): - """a lazy version of the node map""" - def __init__(self, parser): - self.p = parser - def load(self, key): - n = self.p.findnode(key) - if n is None: - raise KeyError(key) - def __contains__(self, key): - if key in self.p.map: - return True - self.p.loadmap() - return key in self.p.map - def __iter__(self): - yield nullid - for i, ret in enumerate(self.p.index): - if not ret: - self.p.loadindex(i) - ret = self.p.index[i] - if isinstance(ret, str): - ret = _unpack(indexformatng, ret) - yield ret[7] - def __getitem__(self, key): - try: - return self.p.map[key] - except KeyError: - try: - self.load(key) - return self.p.map[key] - except KeyError: - raise KeyError("node " + hex(key)) - def __setitem__(self, key, val): - self.p.map[key] = val - def __delitem__(self, key): - del self.p.map[key] - indexformatv0 = ">4l20s20s20s" v0shaoffset = 56 @@ -331,13 +126,11 @@ def __init__(self): self.size = struct.calcsize(indexformatv0) - def parseindex(self, fp, data, inline): + def parseindex(self, data, inline): s = self.size index = [] nodemap = {nullid: nullrev} n = off = 0 - if len(data) == _prereadsize: - data += fp.read() # read the rest l = len(data) while off + s <= l: cur = data[off:off + s] @@ -350,6 +143,9 @@ nodemap[e[6]] = n n += 1 + # add the magic null revision at -1 + index.append((0, 0, 0, -1, -1, -1, -1, nullid)) + return index, nodemap, None def packentry(self, entry, node, version, rev): @@ -377,24 +173,10 @@ def __init__(self): self.size = 
struct.calcsize(indexformatng) - def parseindex(self, fp, data, inline): - if len(data) == _prereadsize: - if util.openhardlinks() and not inline: - # big index, let's parse it on demand - parser = lazyparser(fp) - index = lazyindex(parser) - nodemap = lazymap(parser) - e = list(index[0]) - type = gettype(e[0]) - e[0] = offset_type(0, type) - index[0] = e - return index, nodemap, None - else: - data += fp.read() - + def parseindex(self, data, inline): # call the C implementation to parse the index data - index, nodemap, cache = parsers.parse_index(data, inline) - return index, nodemap, cache + index, cache = parsers.parse_index2(data, inline) + return index, None, cache def packentry(self, entry, node, version, rev): p = _pack(indexformatng, *entry) @@ -439,10 +221,12 @@ self.opener = opener self._cache = None self._chunkcache = (0, '') - self.nodemap = {nullid: nullrev} self.index = [] self._shallowroot = shallowroot self._parentdelta = 0 + self._pcache = {} + self._nodecache = {nullid: nullrev} + self._nodepos = None v = REVLOG_DEFAULT_VERSION if hasattr(opener, 'options') and 'defversion' in opener.options: @@ -458,10 +242,8 @@ i = '' try: f = self.opener(self.indexfile) - if "nonlazy" in getattr(self.opener, 'options', {}): - i = f.read() - else: - i = f.read(_prereadsize) + i = f.read() + f.close() if len(i) > 0: v = struct.unpack(versionformat, i[:4])[0] except IOError, inst: @@ -486,37 +268,15 @@ self._io = revlogio() if self.version == REVLOGV0: self._io = revlogoldio() - if i: - try: - d = self._io.parseindex(f, i, self._inline) - except (ValueError, IndexError): - raise RevlogError(_("index %s is corrupted") % (self.indexfile)) - self.index, self.nodemap, self._chunkcache = d - if not self._chunkcache: - self._chunkclear() - - # add the magic null revision at -1 (if it hasn't been done already) - if (self.index == [] or isinstance(self.index, lazyindex) or - self.index[-1][7] != nullid) : - self.index.append((0, 0, 0, -1, -1, -1, -1, nullid)) - - def _loadindex(self, start, end): - """load a block of indexes all at once from the lazy parser""" - if isinstance(self.index, lazyindex): - self.index.p.loadindex(start, end) - - def _loadindexmap(self): - """loads both the map and the index from the lazy parser""" - if isinstance(self.index, lazyindex): - p = self.index.p - p.loadindex() - self.nodemap = p.map - - def _loadmap(self): - """loads the map from the lazy parser""" - if isinstance(self.nodemap, lazymap): - self.nodemap.p.loadmap() - self.nodemap = self.nodemap.p.map + try: + d = self._io.parseindex(i, self._inline) + except (ValueError, IndexError): + raise RevlogError(_("index %s is corrupted") % (self.indexfile)) + self.index, nodemap, self._chunkcache = d + if nodemap is not None: + self.nodemap = self._nodecache = nodemap + if not self._chunkcache: + self._chunkclear() def tip(self): return self.node(len(self.index) - 2) @@ -525,11 +285,29 @@ def __iter__(self): for i in xrange(len(self)): yield i + + @util.propertycache + def nodemap(self): + n = self.rev(self.node(0)) + return self._nodecache + def rev(self, node): try: - return self.nodemap[node] + return self._nodecache[node] except KeyError: + n = self._nodecache + i = self.index + p = self._nodepos + if p is None: + p = len(i) - 2 + for r in xrange(p, -1, -1): + v = i[r][7] + n[v] = r + if v == node: + self._nodepos = r - 1 + return r raise LookupError(node, self.indexfile, _('no node')) + def node(self, rev): return self.index[rev][7] def linkrev(self, rev): @@ -937,15 +715,19 @@ pass def _partialmatch(self, id): 
+ if id in self._pcache: + return self._pcache[id] + if len(id) < 40: try: # hex(node)[:...] l = len(id) // 2 # grab an even number of digits - bin_id = bin(id[:l * 2]) - nl = [n for n in self.nodemap if n[:l] == bin_id] + prefix = bin(id[:l * 2]) + nl = [e[7] for e in self.index if e[7].startswith(prefix)] nl = [n for n in nl if hex(n).startswith(id)] if len(nl) > 0: if len(nl) == 1: + self._pcache[id] = nl[0] return nl[0] raise LookupError(id, self.indexfile, _('ambiguous identifier')) @@ -978,7 +760,7 @@ def _addchunk(self, offset, data): o, d = self._chunkcache # try to add to existing cache - if o + len(d) == offset and len(d) + len(data) < _prereadsize: + if o + len(d) == offset and len(d) + len(data) < _chunksize: self._chunkcache = o, d + data else: self._chunkcache = offset, data @@ -1060,7 +842,6 @@ (self.flags(rev) & ~REVIDX_KNOWN_FLAGS)) # build delta chain - self._loadindex(base, rev + 1) chain = [] index = self.index # for performance iterrev = rev @@ -1088,13 +869,18 @@ bins = [self._chunk(r) for r in chain] text = mdiff.patches(text, bins) + + text = self._checkhash(text, node, rev) + + self._cache = (node, rev, text) + return text + + def _checkhash(self, text, node, rev): p1, p2 = self.parents(node) if (node != hash(text, p1, p2) and not (self.flags(rev) & REVIDX_PUNCHED_FLAG)): raise RevlogError(_("integrity check failed on %s:%d") % (self.indexfile, rev)) - - self._cache = (node, rev, text) return text def checkinlinesize(self, tr, fp=None): @@ -1382,6 +1168,7 @@ if not dfh and not self._inline: # addrevision switched from inline to conventional # reopen the index + ifh.close() dfh = self.opener(self.datafile, "a") ifh = self.opener(self.indexfile, "a") finally: @@ -1408,9 +1195,6 @@ if len(self) == 0: return - if isinstance(self.index, lazyindex): - self._loadindexmap() - for rev in self: if self.index[rev][4] >= minlink: break @@ -1444,6 +1228,7 @@ f = self.opener(self.datafile) f.seek(0, 2) actual = f.tell() + f.close() dd = actual - expected except IOError, inst: if inst.errno != errno.ENOENT: @@ -1454,6 +1239,7 @@ f = self.opener(self.indexfile) f.seek(0, 2) actual = f.tell() + f.close() s = self._io.size i = max(0, actual // s) di = actual - (i * s)
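With lazyindex/lazymap gone, the revlog above resolves nodes by scanning the index backwards from _nodepos on demand and memoizing every node it passes in _nodecache. A minimal standalone sketch of that lookup strategy (LazyRevMap is an invented name; index entries here are plain tuples whose last element plays the role of index[r][7]):

    class LazyRevMap(object):
        def __init__(self, index):
            self._index = index            # index[r][-1] is the node of rev r
            self._cache = {}
            self._pos = len(index) - 1     # highest revision not yet scanned

        def rev(self, node):
            try:
                return self._cache[node]
            except KeyError:
                for r in xrange(self._pos, -1, -1):
                    n = self._index[r][-1]
                    self._cache[n] = r
                    if n == node:
                        self._pos = r - 1
                        return r
                raise LookupError('no node %r' % node)

    m = LazyRevMap([(0, 'a'), (0, 'b'), (0, 'c')])
    assert m.rev('b') == 1    # scans 'c' and 'b', caching both
    assert m.rev('c') == 2    # answered from the cache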
--- a/mercurial/revset.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/revset.py Wed Feb 16 14:13:22 2011 -0600 @@ -7,6 +7,7 @@ import re import parser, util, error, discovery +import bookmarks as bookmarksmod import match as matchmod from i18n import _, gettext @@ -202,9 +203,13 @@ return [r for r in subset if r == l] def p1(repo, subset, x): - """``p1(set)`` - First parent of changesets in set. + """``p1([set])`` + First parent of changesets in set, or the working directory. """ + if x is None: + p = repo[x].parents()[0].rev() + return [r for r in subset if r == p] + ps = set() cl = repo.changelog for r in getset(repo, range(len(repo)), x): @@ -212,9 +217,17 @@ return [r for r in subset if r in ps] def p2(repo, subset, x): - """``p2(set)`` - Second parent of changesets in set. + """``p2([set])`` + Second parent of changesets in set, or the working directory. """ + if x is None: + ps = repo[x].parents() + try: + p = ps[1].rev() + return [r for r in subset if r == p] + except IndexError: + return [] + ps = set() cl = repo.changelog for r in getset(repo, range(len(repo)), x): @@ -222,9 +235,13 @@ return [r for r in subset if r in ps] def parents(repo, subset, x): - """``parents(set)`` - The set of all parents for all changesets in set. + """``parents([set])`` + The set of all parents for all changesets in set, or the working directory. """ + if x is None: + ps = tuple(p.rev() for p in repo[x].parents()) + return [r for r in subset if r in ps] + ps = set() cl = repo.changelog for r in getset(repo, range(len(repo)), x): @@ -648,12 +665,31 @@ def tagged(repo, subset, x): return tag(repo, subset, x) +def bookmark(repo, subset, x): + """``bookmark([name])`` + The named bookmark or all bookmarks. + """ + # i18n: "bookmark" is a keyword + args = getargs(x, 0, 1, _('bookmark takes one or no arguments')) + if args: + bm = getstring(args[0], + # i18n: "bookmark" is a keyword + _('the argument to bookmark must be a string')) + bmrev = bookmarksmod.listbookmarks(repo).get(bm, None) + if bmrev: + bmrev = repo[bmrev].rev() + return [r for r in subset if r == bmrev] + bms = set([repo[r].rev() + for r in bookmarksmod.listbookmarks(repo).values()]) + return [r for r in subset if r in bms] + symbols = { "adds": adds, "all": getall, "ancestor": ancestor, "ancestors": ancestors, "author": author, + "bookmark": bookmark, "branch": branch, "children": children, "closed": closed, @@ -699,7 +735,7 @@ } def optimize(x, small): - if x == None: + if x is None: return 0, x smallbonus = 1
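Besides bookmark(), the hunk above lets p1(), p2() and parents() be called without an argument to mean the working directory. New predicates all follow the same shape as bookmark(): compute the matching revisions, then filter the incoming subset so its ordering is preserved. A sketch of a hypothetical predicate in that style, assuming revset.py's module helpers getargs and _ (draftexample and its behaviour are invented, only the shape mirrors the real code):

    def draftexample(repo, subset, x):
        """``draftexample()``
        Hypothetical predicate: changesets whose description contains 'draft'.
        """
        getargs(x, 0, 0, _('draftexample takes no arguments'))
        wanted = set(r for r in xrange(len(repo))
                     if 'draft' in repo[r].description())
        return [r for r in subset if r in wanted]

    # registering it is a one-line addition to the `symbols` table:
    # "draftexample": draftexample,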
--- a/mercurial/sshrepo.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/sshrepo.py Wed Feb 16 14:13:22 2011 -0600 @@ -91,10 +91,11 @@ size = util.fstat(self.pipee).st_size if size == 0: break - l = self.pipee.readline() - if not l: + s = self.pipee.read(size) + if not s: break - self.ui.status(_("remote: "), l) + for l in s.splitlines(): + self.ui.status(_("remote: "), l, '\n') def _abort(self, exception): self.cleanup()
--- a/mercurial/statichttprepo.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/statichttprepo.py Wed Feb 16 14:13:22 2011 -0600 @@ -77,7 +77,6 @@ return httprangereader(f, urlopener) return o - opener.options = {'nonlazy': 1} return opener class statichttprepository(localrepo.localrepository): @@ -99,7 +98,9 @@ raise # check if it is a non-empty old-style repository try: - self.opener("00changelog.i").read(1) + fp = self.opener("00changelog.i") + fp.read(1) + fp.close() except IOError, inst: if inst.errno != errno.ENOENT: raise @@ -114,9 +115,7 @@ raise error.RepoError(_("requirement '%s' not supported") % r) # setup store - def pjoin(a, b): - return a + '/' + b - self.store = store.store(requirements, self.path, opener, pjoin) + self.store = store.store(requirements, self.path, opener) self.spath = self.store.path self.sopener = self.store.opener self.sjoin = self.store.join
--- a/mercurial/store.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/store.py Wed Feb 16 14:13:22 2011 -0600 @@ -169,8 +169,7 @@ class basicstore(object): '''base class for local repository stores''' - def __init__(self, path, opener, pathjoiner): - self.pathjoiner = pathjoiner + def __init__(self, path, opener): self.path = path self.createmode = _calcmode(path) op = opener(self.path) @@ -178,19 +177,21 @@ self.opener = lambda f, *args, **kw: op(encodedir(f), *args, **kw) def join(self, f): - return self.pathjoiner(self.path, encodedir(f)) + return self.path + '/' + encodedir(f) def _walk(self, relpath, recurse): '''yields (unencoded, encoded, size)''' - path = self.pathjoiner(self.path, relpath) - striplen = len(self.path) + len(os.sep) + path = self.path + if relpath: + path += '/' + relpath + striplen = len(self.path) + 1 l = [] if os.path.isdir(path): visit = [path] while visit: p = visit.pop() for f, kind, st in osutil.listdir(p, stat=True): - fp = self.pathjoiner(p, f) + fp = p + '/' + f if kind == stat.S_IFREG and f[-2:] in ('.d', '.i'): n = util.pconvert(fp[striplen:]) l.append((decodedir(n), n, st.st_size)) @@ -213,10 +214,12 @@ def copylist(self): return ['requires'] + _data.split() + def write(self): + pass + class encodedstore(basicstore): - def __init__(self, path, opener, pathjoiner): - self.pathjoiner = pathjoiner - self.path = self.pathjoiner(path, 'store') + def __init__(self, path, opener): + self.path = path + '/store' self.createmode = _calcmode(self.path) op = opener(self.path) op.createmode = self.createmode @@ -231,11 +234,11 @@ yield a, b, size def join(self, f): - return self.pathjoiner(self.path, encodefilename(f)) + return self.path + '/' + encodefilename(f) def copylist(self): return (['requires', '00changelog.i'] + - [self.pathjoiner('store', f) for f in _data.split()]) + ['store/' + f for f in _data.split()]) class fncache(object): # the filename used to be partially encoded @@ -243,10 +246,12 @@ def __init__(self, opener): self.opener = opener self.entries = None + self._dirty = False def _load(self): '''fill the entries from the fncache file''' self.entries = set() + self._dirty = False try: fp = self.opener('fncache', mode='rb') except IOError: @@ -265,12 +270,22 @@ fp.write(encodedir(p) + '\n') fp.close() self.entries = set(files) + self._dirty = False + + def write(self): + if not self._dirty: + return + fp = self.opener('fncache', mode='wb', atomictemp=True) + for p in self.entries: + fp.write(encodedir(p) + '\n') + fp.rename() + self._dirty = False def add(self, fn): if self.entries is None: self._load() if fn not in self.entries: - self.opener('fncache', 'ab').write(encodedir(fn) + '\n') + self._dirty = True self.entries.add(fn) def __contains__(self, fn): @@ -284,10 +299,9 @@ return iter(self.entries) class fncachestore(basicstore): - def __init__(self, path, opener, pathjoiner, encode): + def __init__(self, path, opener, encode): self.encode = encode - self.pathjoiner = pathjoiner - self.path = self.pathjoiner(path, 'store') + self.path = path + '/store' self.createmode = _calcmode(self.path) op = opener(self.path) op.createmode = self.createmode @@ -301,17 +315,16 @@ self.opener = fncacheopener def join(self, f): - return self.pathjoiner(self.path, self.encode(f)) + return self.path + '/' + self.encode(f) def datafiles(self): rewrite = False existing = [] - pjoin = self.pathjoiner spath = self.path for f in self.fncache: ef = self.encode(f) try: - st = os.stat(pjoin(spath, ef)) + st = os.stat(spath + '/' + ef) yield f, ef, st.st_size 
existing.append(f) except OSError: @@ -326,14 +339,16 @@ d = ('data dh fncache' ' 00manifest.d 00manifest.i 00changelog.d 00changelog.i') return (['requires', '00changelog.i'] + - [self.pathjoiner('store', f) for f in d.split()]) + ['store/' + f for f in d.split()]) -def store(requirements, path, opener, pathjoiner=None): - pathjoiner = pathjoiner or os.path.join + def write(self): + self.fncache.write() + +def store(requirements, path, opener): if 'store' in requirements: if 'fncache' in requirements: auxencode = lambda f: _auxencode(f, 'dotencode' in requirements) encode = lambda f: _hybridencode(f, auxencode) - return fncachestore(path, opener, pathjoiner, encode) - return encodedstore(path, opener, pathjoiner) - return basicstore(path, opener, pathjoiner) + return fncachestore(path, opener, encode) + return encodedstore(path, opener) + return basicstore(path, opener)
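fncache above stops appending to the cache file on every add() and instead marks itself dirty, with store.write() rewriting the file once (through an atomictemp file in the real code). The pattern in isolation, with a plain file instead of an opener (BatchedSet is an invented name):

    class BatchedSet(object):
        def __init__(self, path):
            self.path = path
            self.entries = set()
            self._dirty = False

        def add(self, entry):
            if entry not in self.entries:
                self.entries.add(entry)
                self._dirty = True

        def write(self):
            if not self._dirty:
                return                       # nothing changed, skip the I/O
            fp = open(self.path, 'wb')
            try:
                for entry in self.entries:
                    fp.write(entry + '\n')
            finally:
                fp.close()
            self._dirty = False

Callers add as many entries as they like and pay for a single rewrite when write() is finally called.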
--- a/mercurial/subrepo.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/subrepo.py Wed Feb 16 14:13:22 2011 -0600 @@ -6,7 +6,7 @@ # GNU General Public License version 2 or any later version. import errno, os, re, xml.dom.minidom, shutil, urlparse, posixpath -import stat, subprocess +import stat, subprocess, tarfile from i18n import _ import config, util, node, error, cmdutil hg = None @@ -163,6 +163,17 @@ # record merged .hgsubstate writestate(repo, sm) +def _updateprompt(ui, sub, dirty, local, remote): + if dirty: + msg = (_(' subrepository sources for %s differ\n' + 'use (l)ocal source (%s) or (r)emote source (%s)?\n') + % (subrelpath(sub), local, remote)) + else: + msg = (_(' subrepository sources for %s differ (in checked out version)\n' + 'use (l)ocal source (%s) or (r)emote source (%s)?\n') + % (subrelpath(sub), local, remote)) + return ui.promptchoice(msg, (_('&Local'), _('&Remote')), 0) + def reporelpath(repo): """return path to this (sub)repo as seen from outermost repo""" parent = repo @@ -172,6 +183,8 @@ def subrelpath(sub): """return path to this subrepo as seen from outermost repo""" + if hasattr(sub, '_relpath'): + return sub._relpath if not hasattr(sub, '_repo'): return sub._path return reporelpath(sub._repo) @@ -236,9 +249,10 @@ class abstractsubrepo(object): - def dirty(self): - """returns true if the dirstate of the subrepo does not match - current stored state + def dirty(self, ignoreupdate=False): + """returns true if the dirstate of the subrepo is dirty or does not + match current stored state. If ignoreupdate is true, only check + whether the subrepo has uncommitted changes in its dirstate. """ raise NotImplementedError @@ -266,7 +280,7 @@ """ raise NotImplementedError - def merge(self, state, overwrite=False): + def merge(self, state): """merge currently-saved state with the new state.""" raise NotImplementedError @@ -304,13 +318,21 @@ """return file flags""" return '' - def archive(self, archiver, prefix): - for name in self.files(): + def archive(self, ui, archiver, prefix): + files = self.files() + total = len(files) + relpath = subrelpath(self) + ui.progress(_('archiving (%s)') % relpath, 0, + unit=_('files'), total=total) + for i, name in enumerate(files): flags = self.fileflags(name) mode = 'x' in flags and 0755 or 0644 symlink = 'l' in flags archiver.addfile(os.path.join(prefix, self._path, name), mode, symlink, self.filedata(name)) + ui.progress(_('archiving (%s)') % relpath, i + 1, + unit=_('files'), total=total) + ui.progress(_('archiving (%s)') % relpath, None) class hgsubrepo(abstractsubrepo): @@ -373,21 +395,22 @@ self._repo.ui.warn(_('warning: error "%s" in subrepository "%s"\n') % (inst, subrelpath(self))) - def archive(self, archiver, prefix): - abstractsubrepo.archive(self, archiver, prefix) + def archive(self, ui, archiver, prefix): + abstractsubrepo.archive(self, ui, archiver, prefix) rev = self._state[1] ctx = self._repo[rev] for subpath in ctx.substate: s = subrepo(ctx, subpath) - s.archive(archiver, os.path.join(prefix, self._path)) + s.archive(ui, archiver, os.path.join(prefix, self._path)) - def dirty(self): + def dirty(self, ignoreupdate=False): r = self._state[1] - if r == '': + if r == '' and not ignoreupdate: # no state recorded return True w = self._repo[None] - if w.p1() != self._repo[r]: # version checked out change + if w.p1() != self._repo[r] and not ignoreupdate: + # different version checked out return True return w.dirty() # working directory changed @@ -430,14 +453,26 @@ cur = self._repo['.'] dst = self._repo[state[1]] anc = 
dst.ancestor(cur) - if anc == cur: - self._repo.ui.debug("updating subrepo %s\n" % subrelpath(self)) - hg.update(self._repo, state[1]) - elif anc == dst: - self._repo.ui.debug("skipping subrepo %s\n" % subrelpath(self)) + + def mergefunc(): + if anc == cur: + self._repo.ui.debug("updating subrepo %s\n" % subrelpath(self)) + hg.update(self._repo, state[1]) + elif anc == dst: + self._repo.ui.debug("skipping subrepo %s\n" % subrelpath(self)) + else: + self._repo.ui.debug("merging subrepo %s\n" % subrelpath(self)) + hg.merge(self._repo, state[1], remind=False) + + wctx = self._repo[None] + if self.dirty(): + if anc != dst: + if _updateprompt(self._repo.ui, self, wctx.dirty(), cur, dst): + mergefunc() + else: + mergefunc() else: - self._repo.ui.debug("merging subrepo %s\n" % subrelpath(self)) - hg.merge(self._repo, state[1], remind=False) + mergefunc() def push(self, force): # push subrepos depth-first for coherent ordering @@ -484,13 +519,10 @@ def _svncommand(self, commands, filename=''): path = os.path.join(self._ctx._repo.origroot, self._path, filename) cmd = ['svn'] + commands + [path] - cmd = [util.shellquote(arg) for arg in cmd] - cmd = util.quotecommand(' '.join(cmd)) env = dict(os.environ) # Avoid localized output, preserve current locale for everything else. env['LC_MESSAGES'] = 'C' - p = subprocess.Popen(cmd, shell=True, bufsize=-1, - close_fds=util.closefds, + p = subprocess.Popen(cmd, bufsize=-1, close_fds=util.closefds, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True, env=env) stdout, stderr = p.communicate() @@ -543,9 +575,10 @@ return True, True return bool(changes), False - def dirty(self): - if self._state[1] in self._wcrevs() and not self._wcchanged()[0]: - return False + def dirty(self, ignoreupdate=False): + if not self._wcchanged()[0]: + if self._state[1] in self._wcrevs() or ignoreupdate: + return False return True def commit(self, text, user, date): @@ -598,10 +631,12 @@ self._ui.status(status) def merge(self, state): - old = int(self._state[1]) - new = int(state[1]) - if new > old: - self.get(state) + old = self._state[1] + new = state[1] + if new != self._wcrev(): + dirty = old == self._wcrev() or self._wcchanged()[0] + if _updateprompt(self._ui, self, dirty, self._wcrev(), new): + self.get(state, False) def push(self, force): # push is a no-op for SVN @@ -616,7 +651,347 @@ return self._svncommand(['cat'], name) +class gitsubrepo(abstractsubrepo): + def __init__(self, ctx, path, state): + # TODO add git version check. + self._state = state + self._ctx = ctx + self._path = path + self._relpath = os.path.join(reporelpath(ctx._repo), path) + self._abspath = ctx._repo.wjoin(path) + self._ui = ctx._repo.ui + + def _gitcommand(self, commands, env=None, stream=False): + return self._gitdir(commands, env=env, stream=stream)[0] + + def _gitdir(self, commands, env=None, stream=False): + return self._gitnodir(commands, env=env, stream=stream, + cwd=self._abspath) + + def _gitnodir(self, commands, env=None, stream=False, cwd=None): + """Calls the git command + + The methods tries to call the git command. versions previor to 1.6.0 + are not supported and very probably fail. 
+ """ + self._ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands))) + # unless ui.quiet is set, print git's stderr, + # which is mostly progress and useful info + errpipe = None + if self._ui.quiet: + errpipe = open(os.devnull, 'w') + p = subprocess.Popen(['git'] + commands, bufsize=-1, cwd=cwd, env=env, + close_fds=util.closefds, + stdout=subprocess.PIPE, stderr=errpipe) + if stream: + return p.stdout, None + + retdata = p.stdout.read().strip() + # wait for the child to exit to avoid race condition. + p.wait() + + if p.returncode != 0 and p.returncode != 1: + # there are certain error codes that are ok + command = commands[0] + if command in ('cat-file', 'symbolic-ref'): + return retdata, p.returncode + # for all others, abort + raise util.Abort('git %s error %d in %s' % + (command, p.returncode, self._relpath)) + + return retdata, p.returncode + + def _gitstate(self): + return self._gitcommand(['rev-parse', 'HEAD']) + + def _gitcurrentbranch(self): + current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet']) + if err: + current = None + return current + + def _githavelocally(self, revision): + out, code = self._gitdir(['cat-file', '-e', revision]) + return code == 0 + + def _gitisancestor(self, r1, r2): + base = self._gitcommand(['merge-base', r1, r2]) + return base == r1 + + def _gitbranchmap(self): + '''returns 2 things: + a map from git branch to revision + a map from revision to branches''' + branch2rev = {} + rev2branch = {} + + out = self._gitcommand(['for-each-ref', '--format', + '%(objectname) %(refname)']) + for line in out.split('\n'): + revision, ref = line.split(' ') + if ref.startswith('refs/tags/'): + continue + if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'): + continue # ignore remote/HEAD redirects + branch2rev[ref] = revision + rev2branch.setdefault(revision, []).append(ref) + return branch2rev, rev2branch + + def _gittracking(self, branches): + 'return map of remote branch to local tracking branch' + # assumes no more than one local tracking branch for each remote + tracking = {} + for b in branches: + if b.startswith('refs/remotes/'): + continue + remote = self._gitcommand(['config', 'branch.%s.remote' % b]) + if remote: + ref = self._gitcommand(['config', 'branch.%s.merge' % b]) + tracking['refs/remotes/%s/%s' % + (remote, ref.split('/', 2)[2])] = b + return tracking + + def _fetch(self, source, revision): + if not os.path.exists(os.path.join(self._abspath, '.git')): + self._ui.status(_('cloning subrepo %s\n') % self._relpath) + self._gitnodir(['clone', source, self._abspath]) + if self._githavelocally(revision): + return + self._ui.status(_('pulling subrepo %s\n') % self._relpath) + # first try from origin + self._gitcommand(['fetch']) + if self._githavelocally(revision): + return + # then try from known subrepo source + self._gitcommand(['fetch', source]) + if not self._githavelocally(revision): + raise util.Abort(_("revision %s does not exist in subrepo %s\n") % + (revision, self._relpath)) + + def dirty(self, ignoreupdate=False): + if not ignoreupdate and self._state[1] != self._gitstate(): + # different version checked out + return True + # check for staged changes or modified files; ignore untracked files + out, code = self._gitdir(['diff-index', '--quiet', 'HEAD']) + return code == 1 + + def get(self, state, overwrite=False): + source, revision, kind = state + self._fetch(source, revision) + # if the repo was set to be bare, unbare it + if self._gitcommand(['config', '--bool', 'core.bare']) == 'true': + self._gitcommand(['config', 
'core.bare', 'false']) + if self._gitstate() == revision: + self._gitcommand(['reset', '--hard', 'HEAD']) + return + elif self._gitstate() == revision: + if overwrite: + # first reset the index to unmark new files for commit, because + # reset --hard will otherwise throw away files added for commit, + # not just unmark them. + self._gitcommand(['reset', 'HEAD']) + self._gitcommand(['reset', '--hard', 'HEAD']) + return + branch2rev, rev2branch = self._gitbranchmap() + + def checkout(args): + cmd = ['checkout'] + if overwrite: + # first reset the index to unmark new files for commit, because + # the -f option will otherwise throw away files added for + # commit, not just unmark them. + self._gitcommand(['reset', 'HEAD']) + cmd.append('-f') + self._gitcommand(cmd + args) + + def rawcheckout(): + # no branch to checkout, check it out with no branch + self._ui.warn(_('checking out detached HEAD in subrepo %s\n') % + self._relpath) + self._ui.warn(_('check out a git branch if you intend ' + 'to make changes\n')) + checkout(['-q', revision]) + + if revision not in rev2branch: + rawcheckout() + return + branches = rev2branch[revision] + firstlocalbranch = None + for b in branches: + if b == 'refs/heads/master': + # master trumps all other branches + checkout(['refs/heads/master']) + return + if not firstlocalbranch and not b.startswith('refs/remotes/'): + firstlocalbranch = b + if firstlocalbranch: + checkout([firstlocalbranch]) + return + + tracking = self._gittracking(branch2rev.keys()) + # choose a remote branch already tracked if possible + remote = branches[0] + if remote not in tracking: + for b in branches: + if b in tracking: + remote = b + break + + if remote not in tracking: + # create a new local tracking branch + local = remote.split('/', 2)[2] + checkout(['-b', local, remote]) + elif self._gitisancestor(branch2rev[tracking[remote]], remote): + # When updating to a tracked remote branch, + # if the local tracking branch is downstream of it, + # a normal `git pull` would have performed a "fast-forward merge" + # which is equivalent to updating the local branch to the remote. + # Since we are only looking at branching at update, we need to + # detect this situation and perform this action lazily. 
+ if tracking[remote] != self._gitcurrentbranch(): + checkout([tracking[remote]]) + self._gitcommand(['merge', '--ff', remote]) + else: + # a real merge would be required, just checkout the revision + rawcheckout() + + def commit(self, text, user, date): + cmd = ['commit', '-a', '-m', text] + env = os.environ.copy() + if user: + cmd += ['--author', user] + if date: + # git's date parser silently ignores when seconds < 1e9 + # convert to ISO8601 + env['GIT_AUTHOR_DATE'] = util.datestr(date, + '%Y-%m-%dT%H:%M:%S %1%2') + self._gitcommand(cmd, env=env) + # make sure commit works otherwise HEAD might not exist under certain + # circumstances + return self._gitstate() + + def merge(self, state): + source, revision, kind = state + self._fetch(source, revision) + base = self._gitcommand(['merge-base', revision, self._state[1]]) + out, code = self._gitdir(['diff-index', '--quiet', 'HEAD']) + + def mergefunc(): + if base == revision: + self.get(state) # fast forward merge + elif base != self._state[1]: + self._gitcommand(['merge', '--no-commit', revision]) + + if self.dirty(): + if self._gitstate() != revision: + dirty = self._gitstate() == self._state[1] or code != 0 + if _updateprompt(self._ui, self, dirty, self._state[1], revision): + mergefunc() + else: + mergefunc() + + def push(self, force): + # if a branch in origin contains the revision, nothing to do + branch2rev, rev2branch = self._gitbranchmap() + if self._state[1] in rev2branch: + for b in rev2branch[self._state[1]]: + if b.startswith('refs/remotes/origin/'): + return True + for b, revision in branch2rev.iteritems(): + if b.startswith('refs/remotes/origin/'): + if self._gitisancestor(self._state[1], revision): + return True + # otherwise, try to push the currently checked out branch + cmd = ['push'] + if force: + cmd.append('--force') + + current = self._gitcurrentbranch() + if current: + # determine if the current branch is even useful + if not self._gitisancestor(self._state[1], current): + self._ui.warn(_('unrelated git branch checked out ' + 'in subrepo %s\n') % self._relpath) + return False + self._ui.status(_('pushing branch %s of subrepo %s\n') % + (current.split('/', 2)[2], self._relpath)) + self._gitcommand(cmd + ['origin', current]) + return True + else: + self._ui.warn(_('no branch checked out in subrepo %s\n' + 'cannot push revision %s') % + (self._relpath, self._state[1])) + return False + + def remove(self): + if self.dirty(): + self._ui.warn(_('not removing repo %s because ' + 'it has changes.\n') % self._relpath) + return + # we can't fully delete the repository as it may contain + # local-only history + self._ui.note(_('removing subrepo %s\n') % self._relpath) + self._gitcommand(['config', 'core.bare', 'true']) + for f in os.listdir(self._abspath): + if f == '.git': + continue + path = os.path.join(self._abspath, f) + if os.path.isdir(path) and not os.path.islink(path): + shutil.rmtree(path) + else: + os.remove(path) + + def archive(self, ui, archiver, prefix): + source, revision = self._state + self._fetch(source, revision) + + # Parse git's native archive command. + # This should be much faster than manually traversing the trees + # and objects with many subprocess calls. 
+ tarstream = self._gitcommand(['archive', revision], stream=True) + tar = tarfile.open(fileobj=tarstream, mode='r|') + relpath = subrelpath(self) + ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files')) + for i, info in enumerate(tar): + if info.isdir(): + continue + if info.issym(): + data = info.linkname + else: + data = tar.extractfile(info).read() + archiver.addfile(os.path.join(prefix, self._path, info.name), + info.mode, info.issym(), data) + ui.progress(_('archiving (%s)') % relpath, i + 1, + unit=_('files')) + ui.progress(_('archiving (%s)') % relpath, None) + + + def status(self, rev2, **opts): + rev1 = self._state[1] + modified, added, removed = [], [], [] + if rev2: + command = ['diff-tree', rev1, rev2] + else: + command = ['diff-index', rev1] + out = self._gitcommand(command) + for line in out.split('\n'): + tab = line.find('\t') + if tab == -1: + continue + status, f = line[tab - 1], line[tab + 1:] + if status == 'M': + modified.append(f) + elif status == 'A': + added.append(f) + elif status == 'D': + removed.append(f) + + deleted = unknown = ignored = clean = [] + return modified, added, removed, deleted, unknown, ignored, clean + types = { 'hg': hgsubrepo, 'svn': svnsubrepo, + 'git': gitsubrepo, }
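The new gitsubrepo class above derives its notion of git branches from `git for-each-ref --format '%(objectname) %(refname)'`. As a quick illustration of that one step, here is a standalone sketch of the parsing done in _gitbranchmap, run against made-up for-each-ref output instead of a real subprocess call (the sample hashes and the helper name are inventions for this note):

# Standalone sketch of the parsing in gitsubrepo._gitbranchmap; the sample
# output and the helper name are made up for illustration.
sample = '\n'.join([
    'a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2 refs/heads/master',
    'a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2 refs/remotes/origin/master',
    'ffeeddccbbaaffeeddccbbaaffeeddccbbaaffee refs/remotes/origin/HEAD',
    '0123456789012345678901234567890123456789 refs/tags/v1.0',
])

def parsebranchmap(out):
    branch2rev, rev2branch = {}, {}
    for line in out.split('\n'):
        revision, ref = line.split(' ')
        if ref.startswith('refs/tags/'):
            continue                 # tags are not branches
        if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
            continue                 # ignore remote HEAD redirects
        branch2rev[ref] = revision
        rev2branch.setdefault(revision, []).append(ref)
    return branch2rev, rev2branch

branch2rev, rev2branch = parsebranchmap(sample)
print sorted(branch2rev)
# ['refs/heads/master', 'refs/remotes/origin/master']
print rev2branch['a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2']
# ['refs/heads/master', 'refs/remotes/origin/master']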
--- a/mercurial/tags.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/tags.py Wed Feb 16 14:13:22 2011 -0600 @@ -12,6 +12,7 @@ from node import nullid, bin, hex, short from i18n import _ +import os.path import encoding import error @@ -99,9 +100,6 @@ except TypeError: warn(_("node '%s' is not well formed") % nodehex) continue - if nodebin not in repo.changelog.nodemap: - # silently ignore as pull -r might cause this - continue # update filetags hist = [] @@ -154,7 +152,7 @@ set, caller is responsible for reading tag info from each head.''' try: - cachefile = repo.opener('tags.cache', 'r') + cachefile = repo.opener('cache/tags', 'r') # force reading the file for static-http cachelines = iter(cachefile) except IOError: @@ -188,8 +186,8 @@ fnode = bin(line[2]) cachefnode[headnode] = fnode except (ValueError, TypeError): - # corruption of tags.cache, just recompute it - ui.warn(_('.hg/tags.cache is corrupt, rebuilding it\n')) + # corruption of the tags cache, just recompute it + ui.warn(_('.hg/cache/tags is corrupt, rebuilding it\n')) cacheheads = [] cacherevs = [] cachefnode = {} @@ -251,7 +249,7 @@ def _writetagcache(ui, repo, heads, tagfnode, cachetags): try: - cachefile = repo.opener('tags.cache', 'w', atomictemp=True) + cachefile = repo.opener('cache/tags', 'w', atomictemp=True) except (OSError, IOError): return
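The tags fnode cache moves from .hg/tags.cache to .hg/cache/tags, and a corrupt cache is still simply discarded and recomputed. A hedged sketch of how an external script might locate the cache after this change; the repository path is an assumption:

import os

# Hedged sketch only; the repository path is made up, and older repos may
# still carry the pre-1.8 .hg/tags.cache file.
repopath = '/path/to/repo'
oldcache = os.path.join(repopath, '.hg', 'tags.cache')
newcache = os.path.join(repopath, '.hg', 'cache', 'tags')

for p in (newcache, oldcache):
    if os.path.exists(p):
        print 'tags cache found at', p
        break
else:
    print 'no tags cache yet (it is rebuilt on demand)'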
--- a/mercurial/templatekw.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/templatekw.py Wed Feb 16 14:13:22 2011 -0600 @@ -145,12 +145,18 @@ def showauthor(repo, ctx, templ, **args): return ctx.user() +def showbranch(**args): + return args['ctx'].branch() + def showbranches(**args): branch = args['ctx'].branch() if branch != 'default': - branch = encoding.tolocal(branch) return showlist('branch', [branch], plural='branches', **args) +def showbookmarks(**args): + bookmarks = args['ctx'].bookmarks() + return showlist('bookmark', bookmarks, **args) + def showchildren(**args): ctx = args['ctx'] childrevs = ['%d:%s' % (cctx, cctx) for cctx in ctx.children()] @@ -163,9 +169,8 @@ return ctx.description().strip() def showdiffstat(repo, ctx, templ, **args): - diff = patch.diff(repo, ctx.parents()[0].node(), ctx.node()) files, adds, removes = 0, 0, 0 - for i in patch.diffstatdata(util.iterlines(diff)): + for i in patch.diffstatdata(util.iterlines(ctx.diff())): files += 1 adds += i[1] removes += i[2] @@ -249,7 +254,9 @@ # revcache - a cache dictionary for the current revision keywords = { 'author': showauthor, + 'branch': showbranch, 'branches': showbranches, + 'bookmarks': showbookmarks, 'children': showchildren, 'date': showdate, 'desc': showdescription,
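The new 'branch' and 'bookmarks' keywords follow the existing convention: a keyword function takes the template arguments (repo, ctx, templ plus the caches) as keyword arguments and returns either a string or a showlist() generator. Purely as an illustration of that convention, and not part of this changeset, a hypothetical extra keyword could be written like this:

# Hypothetical keyword, shown only to illustrate the convention used by
# showbranch/showbookmarks above; it is not part of this changeset.
def showshortnode(repo, ctx, templ, **args):
    # short hexadecimal form of the changeset hash
    return ctx.hex()[:12]

# it would then be registered next to the others:
#   keywords['shortnode'] = showshortnode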
--- a/mercurial/templater.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/templater.py Wed Feb 16 14:13:22 2011 -0600 @@ -7,7 +7,192 @@ from i18n import _ import sys, os -import util, config, templatefilters +import util, config, templatefilters, parser, error + +# template parsing + +elements = { + "(": (20, ("group", 1, ")"), ("func", 1, ")")), + ",": (2, None, ("list", 2)), + "|": (5, None, ("|", 5)), + "%": (6, None, ("%", 6)), + ")": (0, None, None), + "symbol": (0, ("symbol",), None), + "string": (0, ("string",), None), + "end": (0, None, None), +} + +def tokenizer(data): + program, start, end = data + pos = start + while pos < end: + c = program[pos] + if c.isspace(): # skip inter-token whitespace + pass + elif c in "(,)%|": # handle simple operators + yield (c, None, pos) + elif (c in '"\'' or c == 'r' and + program[pos:pos + 2] in ("r'", 'r"')): # handle quoted strings + if c == 'r': + pos += 1 + c = program[pos] + decode = lambda x: x + else: + decode = lambda x: x.decode('string-escape') + pos += 1 + s = pos + while pos < end: # find closing quote + d = program[pos] + if d == '\\': # skip over escaped characters + pos += 2 + continue + if d == c: + yield ('string', decode(program[s:pos]), s) + break + pos += 1 + else: + raise error.ParseError(_("unterminated string"), s) + elif c.isalnum() or c in '_': + s = pos + pos += 1 + while pos < end: # find end of symbol + d = program[pos] + if not (d.isalnum() or d == "_"): + break + pos += 1 + sym = program[s:pos] + yield ('symbol', sym, s) + pos -= 1 + elif c == '}': + pos += 1 + break + else: + raise error.ParseError(_("syntax error"), pos) + pos += 1 + data[2] = pos + yield ('end', None, pos) + +def compiletemplate(tmpl, context): + parsed = [] + pos, stop = 0, len(tmpl) + p = parser.parser(tokenizer, elements) + + while pos < stop: + n = tmpl.find('{', pos) + if n < 0: + parsed.append(("string", tmpl[pos:])) + break + if n > 0 and tmpl[n - 1] == '\\': + # escaped + parsed.append(("string", tmpl[pos:n - 1] + "{")) + pos = n + 1 + continue + if n > pos: + parsed.append(("string", tmpl[pos:n])) + + pd = [tmpl, n + 1, stop] + parsed.append(p.parse(pd)) + pos = pd[2] + + return [compileexp(e, context) for e in parsed] + +def compileexp(exp, context): + t = exp[0] + if t in methods: + return methods[t](exp, context) + raise error.ParseError(_("unknown method '%s'") % t) + +# template evaluation + +def getsymbol(exp): + if exp[0] == 'symbol': + return exp[1] + raise error.ParseError(_("expected a symbol")) + +def getlist(x): + if not x: + return [] + if x[0] == 'list': + return getlist(x[1]) + [x[2]] + return [x] + +def getfilter(exp, context): + f = getsymbol(exp) + if f not in context._filters: + raise error.ParseError(_("unknown function '%s'") % f) + return context._filters[f] + +def gettemplate(exp, context): + if exp[0] == 'string': + return compiletemplate(exp[1], context) + if exp[0] == 'symbol': + return context._load(exp[1]) + raise error.ParseError(_("expected template specifier")) + +def runstring(context, mapping, data): + return data + +def runsymbol(context, mapping, key): + v = mapping.get(key) + if v is None: + v = context._defaults.get(key, '') + if hasattr(v, '__call__'): + return v(**mapping) + return v + +def buildfilter(exp, context): + func, data = compileexp(exp[1], context) + filt = getfilter(exp[2], context) + return (runfilter, (func, data, filt)) + +def runfilter(context, mapping, data): + func, data, filt = data + return filt(func(context, mapping, data)) + +def buildmap(exp, context): + func, data = 
compileexp(exp[1], context) + ctmpl = gettemplate(exp[2], context) + return (runmap, (func, data, ctmpl)) + +def runmap(context, mapping, data): + func, data, ctmpl = data + d = func(context, mapping, data) + lm = mapping.copy() + + for i in d: + if isinstance(i, dict): + lm.update(i) + for f, d in ctmpl: + yield f(context, lm, d) + else: + # v is not an iterable of dicts, this happen when 'key' + # has been fully expanded already and format is useless. + # If so, return the expanded value. + yield i + +def buildfunc(exp, context): + n = getsymbol(exp[1]) + args = [compileexp(x, context) for x in getlist(exp[2])] + if n in context._filters: + if len(args) != 1: + raise error.ParseError(_("filter %s expects one argument") % n) + f = context._filters[n] + return (runfilter, (args[0][0], args[0][1], f)) + elif n in context._funcs: + f = context._funcs[n] + return (f, args) + +methods = { + "string": lambda e, c: (runstring, e[1]), + "symbol": lambda e, c: (runsymbol, e[1]), + "group": lambda e, c: compileexp(e[1], c), +# ".": buildmember, + "|": buildfilter, + "%": buildmap, + "func": buildfunc, + } + +# template engine path = ['templates', '../templates'] stringify = templatefilters.stringify @@ -66,104 +251,18 @@ self._defaults = defaults self._cache = {} + def _load(self, t): + '''load, parse, and cache a template''' + if t not in self._cache: + self._cache[t] = compiletemplate(self._loader(t), self) + return self._cache[t] + def process(self, t, mapping): '''Perform expansion. t is name of map element to expand. mapping contains added elements for use during expansion. Is a generator.''' - return _flatten(self._process(self._load(t), mapping)) - - def _load(self, t): - '''load, parse, and cache a template''' - if t not in self._cache: - self._cache[t] = self._parse(self._loader(t)) - return self._cache[t] - - def _get(self, mapping, key): - v = mapping.get(key) - if v is None: - v = self._defaults.get(key, '') - if hasattr(v, '__call__'): - v = v(**mapping) - return v - - def _filter(self, mapping, parts): - filters, val = parts - x = self._get(mapping, val) - for f in filters: - x = f(x) - return x - - def _format(self, mapping, args): - key, parsed = args - v = self._get(mapping, key) - if not hasattr(v, '__iter__'): - raise SyntaxError(_("error expanding '%s%%%s'") - % (key, parsed)) - lm = mapping.copy() - for i in v: - if isinstance(i, dict): - lm.update(i) - yield self._process(parsed, lm) - else: - # v is not an iterable of dicts, this happen when 'key' - # has been fully expanded already and format is useless. - # If so, return the expanded value. 
- yield i - - def _parse(self, tmpl): - '''preparse a template''' - parsed = [] - pos, stop = 0, len(tmpl) - while pos < stop: - n = tmpl.find('{', pos) - if n < 0: - parsed.append((None, tmpl[pos:stop])) - break - if n > 0 and tmpl[n - 1] == '\\': - # escaped - parsed.append((None, tmpl[pos:n - 1] + "{")) - pos = n + 1 - continue - if n > pos: - parsed.append((None, tmpl[pos:n])) - - pos = n - n = tmpl.find('}', pos) - if n < 0: - # no closing - parsed.append((None, tmpl[pos:stop])) - break - - expr = tmpl[pos + 1:n] - pos = n + 1 - - if '%' in expr: - # the keyword should be formatted with a template - key, t = expr.split('%') - parsed.append((self._format, (key.strip(), - self._load(t.strip())))) - elif '|' in expr: - # process the keyword value with one or more filters - parts = expr.split('|') - val = parts[0].strip() - try: - filters = [self._filters[f.strip()] for f in parts[1:]] - except KeyError, i: - raise SyntaxError(_("unknown filter '%s'") % i[0]) - parsed.append((self._filter, (filters, val))) - else: - # just get the keyword - parsed.append((self._get, expr.strip())) - - return parsed - - def _process(self, parsed, mapping): - '''Render a template. Returns a generator.''' - for f, e in parsed: - if f: - yield f(mapping, e) - else: - yield e + return _flatten(func(self, mapping, data) for func, data in + self._load(t)) engines = {'default': engine} @@ -183,7 +282,7 @@ self.filters.update(filters) self.defaults = defaults self.minchunk, self.maxchunk = minchunk, maxchunk - self.engines = {} + self.ecache = {} if not mapfile: return @@ -214,6 +313,8 @@ if not t in self.cache: try: self.cache[t] = open(self.map[t][1]).read() + except KeyError, inst: + raise util.Abort(_('"%s" not in template map') % inst.args[0]) except IOError, inst: raise IOError(inst.args[0], _('template file %s: %s') % (self.map[t][1], inst.args[1])) @@ -221,10 +322,10 @@ def __call__(self, t, **mapping): ttype = t in self.map and self.map[t][0] or 'default' - proc = self.engines.get(ttype) - if proc is None: - proc = engines[ttype](self.load, self.filters, self.defaults) - self.engines[ttype] = proc + if ttype not in self.ecache: + self.ecache[ttype] = engines[ttype](self.load, + self.filters, self.defaults) + proc = self.ecache[ttype] stream = proc.process(t, mapping) if self.minchunk:
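This rewrite replaces the old string-splitting engine, which only understood `{keyword}`, `{keyword|filter}` and `{keyword%template}`, with a real tokenizer plus the operator-precedence parser from mercurial's parser module, driven by the `elements` table. That is what makes nested forms such as `{tags % '<span class="tag">{name|escape}</span> '}` and function application such as `{age(date)}` in the template changes below possible. The sketch below re-implements only a tiny slice of the tokenizer (symbols and single-character operators, no quoted strings) to show the kind of token stream fed to the parser; it is an illustration, not the real code path:

# Simplified, standalone illustration of the token stream produced for a
# template expression body.
def toytokenize(expr):
    pos, end = 0, len(expr)
    while pos < end:
        c = expr[pos]
        if c.isspace():
            pass                              # skip inter-token whitespace
        elif c in '(,)%|':
            yield (c, None, pos)
        elif c.isalnum() or c == '_':
            s = pos
            while pos + 1 < end and (expr[pos + 1].isalnum() or expr[pos + 1] == '_'):
                pos += 1
            yield ('symbol', expr[s:pos + 1], s)
        else:
            raise SyntaxError('unexpected character %r' % c)
        pos += 1
    yield ('end', None, pos)

print list(toytokenize('author|person'))
# [('symbol', 'author', 0), ('|', None, 6), ('symbol', 'person', 7), ('end', None, 13)]
print list(toytokenize('age(date)'))
# [('symbol', 'age', 0), ('(', None, 3), ('symbol', 'date', 4), (')', None, 8), ('end', None, 9)]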
--- a/mercurial/templates/map-cmdline.default Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/templates/map-cmdline.default Wed Feb 16 14:13:22 2011 -0600 @@ -1,7 +1,7 @@ -changeset = 'changeset: {rev}:{node|short}\n{branches}{tags}{parents}user: {author}\ndate: {date|date}\nsummary: {desc|firstline}\n\n' +changeset = 'changeset: {rev}:{node|short}\n{branches}{bookmarks}{tags}{parents}user: {author}\ndate: {date|date}\nsummary: {desc|firstline}\n\n' changeset_quiet = '{rev}:{node|short}\n' -changeset_verbose = 'changeset: {rev}:{node|short}\n{branches}{tags}{parents}user: {author}\ndate: {date|date}\n{files}{file_copies_switch}description:\n{desc|strip}\n\n\n' -changeset_debug = 'changeset: {rev}:{node}\n{branches}{tags}{parents}{manifest}user: {author}\ndate: {date|date}\n{file_mods}{file_adds}{file_dels}{file_copies_switch}{extras}description:\n{desc|strip}\n\n\n' +changeset_verbose = 'changeset: {rev}:{node|short}\n{branches}{bookmarks}{tags}{parents}user: {author}\ndate: {date|date}\n{files}{file_copies_switch}description:\n{desc|strip}\n\n\n' +changeset_debug = 'changeset: {rev}:{node}\n{branches}{bookmarks}{tags}{parents}{manifest}user: {author}\ndate: {date|date}\n{file_mods}{file_adds}{file_dels}{file_copies_switch}{extras}description:\n{desc|strip}\n\n\n' start_files = 'files: ' file = ' {file}' end_files = '\n' @@ -21,4 +21,5 @@ manifest = 'manifest: {rev}:{node}\n' branch = 'branch: {branch}\n' tag = 'tag: {tag}\n' +bookmark = 'bookmark: {bookmark}\n' extra = 'extra: {key}={value|stringescape}\n'
--- a/mercurial/templates/map-cmdline.xml Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/templates/map-cmdline.xml Wed Feb 16 14:13:22 2011 -0600 @@ -1,9 +1,9 @@ header = '<?xml version="1.0"?>\n<log>\n' footer = '</log>\n' -changeset = '<logentry revision="{rev}" node="{node}">\n{branches}{tags}{parents}<author email="{author|email|xmlescape}">{author|person|xmlescape}</author>\n<date>{date|rfc3339date}</date>\n<msg xml:space="preserve">{desc|xmlescape}</msg>\n</logentry>\n' -changeset_verbose = '<logentry revision="{rev}" node="{node}">\n{branches}{tags}{parents}<author email="{author|email|xmlescape}">{author|person|xmlescape}</author>\n<date>{date|rfc3339date}</date>\n<msg xml:space="preserve">{desc|xmlescape}</msg>\n<paths>\n{file_adds}{file_dels}{file_mods}</paths>\n{file_copies}</logentry>\n' -changeset_debug = '<logentry revision="{rev}" node="{node}">\n{branches}{tags}{parents}<author email="{author|email|xmlescape}">{author|person|xmlescape}</author>\n<date>{date|rfc3339date}</date>\n<msg xml:space="preserve">{desc|xmlescape}</msg>\n<paths>\n{file_adds}{file_dels}{file_mods}</paths>\n{file_copies}{extras}</logentry>\n' +changeset = '<logentry revision="{rev}" node="{node}">\n{branches}{bookmarks}{tags}{parents}<author email="{author|email|xmlescape}">{author|person|xmlescape}</author>\n<date>{date|rfc3339date}</date>\n<msg xml:space="preserve">{desc|xmlescape}</msg>\n</logentry>\n' +changeset_verbose = '<logentry revision="{rev}" node="{node}">\n{branches}{bookmarks}{tags}{parents}<author email="{author|email|xmlescape}">{author|person|xmlescape}</author>\n<date>{date|rfc3339date}</date>\n<msg xml:space="preserve">{desc|xmlescape}</msg>\n<paths>\n{file_adds}{file_dels}{file_mods}</paths>\n{file_copies}</logentry>\n' +changeset_debug = '<logentry revision="{rev}" node="{node}">\n{branches}{bookmarks}{tags}{parents}<author email="{author|email|xmlescape}">{author|person|xmlescape}</author>\n<date>{date|rfc3339date}</date>\n<msg xml:space="preserve">{desc|xmlescape}</msg>\n<paths>\n{file_adds}{file_dels}{file_mods}</paths>\n{file_copies}{extras}</logentry>\n' file_add = '<path action="A">{file_add|xmlescape}</path>\n' file_mod = '<path action="M">{file_mod|xmlescape}</path>\n' @@ -16,4 +16,5 @@ parent = '<parent revision="{rev}" node="{node}" />\n' branch = '<branch>{branch|xmlescape}</branch>\n' tag = '<tag>{tag|xmlescape}</tag>\n' +bookmark = '<bookmark>{bookmark|xmlescape}</bookmark>\n' extra = '<extra key="{key|xmlescape}">{value|xmlescape}</extra>\n'
--- a/mercurial/templates/paper/branches.tmpl Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/templates/paper/branches.tmpl Wed Feb 16 14:13:22 2011 -0600 @@ -40,7 +40,18 @@ <th>branch</th> <th>node</th> </tr> -{entries%branchentry} +{entries % +' <tr class="tagEntry parity{parity}"> + <td> + <a href="{url}shortlog/{node|short}{sessionvars%urlparameter}" class="{status}"> + {branch|escape} + </a> + </td> + <td class="node"> + {node|short} + </td> + </tr>' +} </table> </div> </div>
--- a/mercurial/templates/paper/shortlogentry.tmpl Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/templates/paper/shortlogentry.tmpl Wed Feb 16 14:13:22 2011 -0600 @@ -1,5 +1,5 @@ <tr class="parity{parity}"> - <td class="age">{date|age}</td> + <td class="age">{age(date)}</td> <td class="author">{author|person}</td> - <td class="description"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{desc|strip|firstline|escape|nonempty}</a>{inbranch%changelogbranchname}{branches%changelogbranchhead}{tags%changelogtag}</td> + <td class="description"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{desc|strip|firstline|escape|nonempty}</a>{inbranch%changelogbranchname}{branches%changelogbranchhead}{tags % '<span class="tag">{name|escape}</span> '}</td> </tr>
--- a/mercurial/transaction.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/transaction.py Wed Feb 16 14:13:22 2011 -0600 @@ -13,7 +13,7 @@ from i18n import _ import os, errno -import error +import error, util def active(func): def _active(self, *args, **kwds): @@ -27,18 +27,22 @@ for f, o, ignore in entries: if o or not unlink: try: - opener(f, 'a').truncate(o) + fp = opener(f, 'a') + fp.truncate(o) + fp.close() except IOError: report(_("failed to truncate %s\n") % f) raise else: try: - fn = opener(f).name - os.unlink(fn) + fp = opener(f) + fn = fp.name + fp.close() + util.unlink(fn) except (IOError, OSError), inst: if inst.errno != errno.ENOENT: raise - os.unlink(journal) + util.unlink(journal) class transaction(object): def __init__(self, report, opener, journal, after=None, createmode=None): @@ -52,7 +56,7 @@ self.journal = journal self._queue = [] - self.file = open(self.journal, "w") + self.file = util.posixfile(self.journal, "w") if createmode is not None: os.chmod(self.journal, createmode & 0666) @@ -133,7 +137,7 @@ if self.after: self.after() if os.path.isfile(self.journal): - os.unlink(self.journal) + util.unlink(self.journal) self.journal = None @active @@ -151,7 +155,7 @@ try: if not self.entries: if self.journal: - os.unlink(self.journal) + util.unlink(self.journal) return self.report(_("transaction abort!\n")) @@ -169,7 +173,10 @@ def rollback(opener, file, report): entries = [] - for l in open(file).readlines(): + fp = util.posixfile(file) + lines = fp.readlines() + fp.close() + for l in lines: f, o = l.split('\0') entries.append((f, int(o), None))
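Beyond switching to util.posixfile and util.unlink so journals behave on Windows, where an open file cannot be unlinked, the change replaces implicit file-handle cleanup with explicit open/read/close. The journal format itself is visible in rollback(): one NUL-separated "file\0offset" record per line. A standalone sketch of writing and re-reading that format, with made-up entry names and offsets:

# Standalone sketch of the journal format parsed by rollback(); the entry
# names and offsets are made up.
import os, tempfile

entries_in = [('store/00changelog.i', 12345), ('store/00manifest.i', 6789)]

fd, journal = tempfile.mkstemp()
fp = os.fdopen(fd, 'w')
for f, o in entries_in:
    fp.write('%s\0%d\n' % (f, o))
fp.close()

entries = []
fp = open(journal)
lines = fp.readlines()
fp.close()                        # close explicitly, as the new code does
for l in lines:
    f, o = l.split('\0')
    entries.append((f, int(o), None))
os.unlink(journal)

print entries
# [('store/00changelog.i', 12345, None), ('store/00manifest.i', 6789, None)]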
--- a/mercurial/ui.py Sat Feb 12 16:08:41 2011 +0800
+++ b/mercurial/ui.py Wed Feb 16 14:13:22 2011 -0600
@@ -153,6 +153,16 @@
                             "%s.%s = %s\n") % (section, name, uvalue))
         return value
 
+    def configpath(self, section, name, default=None, untrusted=False):
+        'get a path config item, expanded relative to config file'
+        v = self.config(section, name, default, untrusted)
+        if not os.path.isabs(v) or "://" not in v:
+            src = self.configsource(section, name, untrusted)
+            if ':' in src:
+                base = os.path.dirname(src.rsplit(':')[0])
+                v = os.path.join(base, os.path.expanduser(v))
+        return v
+
     def configbool(self, section, name, default=False, untrusted=False):
         v = self.config(section, name, None, untrusted)
         if v is None:
@@ -589,7 +599,7 @@
         termination.
         '''
 
-        if pos == None or not self.debugflag:
+        if pos is None or not self.debugflag:
             return
 
         if unit:
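configpath() turns a relative path option into one anchored at the directory of the hgrc that defined it, using the "filename:line" source string returned by configsource(). A standalone sketch of that resolution, with a made-up source and value:

import os

# Standalone sketch of the resolution configpath() performs; the source
# string and the option value are made up.
src = '/home/user/.hgrc:3'            # as ui.configsource() would report it
value = 'certs/cacert.pem'            # as ui.config() would return it

if not os.path.isabs(value):
    base = os.path.dirname(src.rsplit(':')[0])     # directory of the hgrc
    value = os.path.join(base, os.path.expanduser(value))

print value
# /home/user/certs/cacert.pem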
--- a/mercurial/url.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/url.py Wed Feb 16 14:13:22 2011 -0600 @@ -71,6 +71,38 @@ return userpass + '@' + hostport return hostport +def readauthforuri(ui, uri): + # Read configuration + config = dict() + for key, val in ui.configitems('auth'): + if '.' not in key: + ui.warn(_("ignoring invalid [auth] key '%s'\n") % key) + continue + group, setting = key.rsplit('.', 1) + gdict = config.setdefault(group, dict()) + if setting in ('username', 'cert', 'key'): + val = util.expandpath(val) + gdict[setting] = val + + # Find the best match + scheme, hostpath = uri.split('://', 1) + bestlen = 0 + bestauth = None + for group, auth in config.iteritems(): + prefix = auth.get('prefix') + if not prefix: + continue + p = prefix.split('://', 1) + if len(p) > 1: + schemes, prefix = [p[0]], p[1] + else: + schemes = (auth.get('schemes') or 'https').split() + if (prefix == '*' or hostpath.startswith(prefix)) and \ + len(prefix) > bestlen and scheme in schemes: + bestlen = len(prefix) + bestauth = group, auth + return bestauth + _safe = ('abcdefghijklmnopqrstuvwxyz' 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' '0123456789' '_.-/') @@ -123,9 +155,11 @@ return (user, passwd) if not user: - auth = self.readauthtoken(authuri) - if auth: + res = readauthforuri(self.ui, authuri) + if res: + group, auth = res user, passwd = auth.get('username'), auth.get('password') + self.ui.debug("using auth.%s.* for authentication\n" % group) if not user or not passwd: if not self.ui.interactive(): raise util.Abort(_('http authorization required')) @@ -148,38 +182,6 @@ msg = _('http auth: user %s, password %s\n') self.ui.debug(msg % (user, passwd and '*' * len(passwd) or 'not set')) - def readauthtoken(self, uri): - # Read configuration - config = dict() - for key, val in self.ui.configitems('auth'): - if '.' not in key: - self.ui.warn(_("ignoring invalid [auth] key '%s'\n") % key) - continue - group, setting = key.split('.', 1) - gdict = config.setdefault(group, dict()) - if setting in ('username', 'cert', 'key'): - val = util.expandpath(val) - gdict[setting] = val - - # Find the best match - scheme, hostpath = uri.split('://', 1) - bestlen = 0 - bestauth = None - for auth in config.itervalues(): - prefix = auth.get('prefix') - if not prefix: - continue - p = prefix.split('://', 1) - if len(p) > 1: - schemes, prefix = [p[0]], p[1] - else: - schemes = (auth.get('schemes') or 'https').split() - if (prefix == '*' or hostpath.startswith(prefix)) and \ - len(prefix) > bestlen and scheme in schemes: - bestlen = len(prefix) - bestauth = auth - return bestauth - class proxyhandler(urllib2.ProxyHandler): def __init__(self, ui): proxyurl = ui.config("http_proxy", "host") or os.getenv('http_proxy') @@ -258,29 +260,47 @@ defines a __len__ attribute to feed the Content-Length header. 
""" - def __init__(self, *args, **kwargs): + def __init__(self, ui, *args, **kwargs): # We can't just "self._data = open(*args, **kwargs)" here because there # is an "open" function defined in this module that shadows the global # one + self.ui = ui self._data = __builtin__.open(*args, **kwargs) - self.read = self._data.read self.seek = self._data.seek self.close = self._data.close self.write = self._data.write + self._len = os.fstat(self._data.fileno()).st_size + self._pos = 0 + self._total = len(self) / 1024 * 2 + + def read(self, *args, **kwargs): + try: + ret = self._data.read(*args, **kwargs) + except EOFError: + self.ui.progress(_('sending'), None) + self._pos += len(ret) + # We pass double the max for total because we currently have + # to send the bundle twice in the case of a server that + # requires authentication. Since we can't know until we try + # once whether authentication will be required, just lie to + # the user and maybe the push succeeds suddenly at 50%. + self.ui.progress(_('sending'), self._pos / 1024, + unit=_('kb'), total=self._total) + return ret def __len__(self): - return os.fstat(self._data.fileno()).st_size + return self._len -def _gen_sendfile(connection): +def _gen_sendfile(orgsend): def _sendfile(self, data): # send a file if isinstance(data, httpsendfile): # if auth required, some data sent twice, so rewind here data.seek(0) for chunk in util.filechunkiter(data): - connection.send(self, chunk) + orgsend(self, chunk) else: - connection.send(self, data) + orgsend(self, data) return _sendfile has_https = hasattr(urllib2, 'HTTPSHandler') @@ -333,7 +353,7 @@ class httpconnection(keepalive.HTTPConnection): # must be able to send big bundle as stream. - send = _gen_sendfile(keepalive.HTTPConnection) + send = _gen_sendfile(keepalive.HTTPConnection.send) def connect(self): if has_https and self.realhostport: # use CONNECT proxy @@ -522,32 +542,36 @@ return _('no commonName or subjectAltName found in certificate') if has_https: - class BetterHTTPS(httplib.HTTPSConnection): - send = keepalive.safesend + class httpsconnection(httplib.HTTPSConnection): + response_class = keepalive.HTTPResponse + # must be able to send big bundle as stream. 
+ send = _gen_sendfile(keepalive.safesend) + getresponse = keepalive.wrapgetresponse(httplib.HTTPSConnection) def connect(self): - if hasattr(self, 'ui'): - cacerts = self.ui.config('web', 'cacerts') - if cacerts: - cacerts = util.expandpath(cacerts) - else: - cacerts = None + self.sock = _create_connection((self.host, self.port)) + + host = self.host + if self.realhostport: # use CONNECT proxy + something = _generic_proxytunnel(self) + host = self.realhostport.rsplit(':', 1)[0] - hostfingerprint = self.ui.config('hostfingerprints', self.host) + cacerts = self.ui.config('web', 'cacerts') + hostfingerprint = self.ui.config('hostfingerprints', host) + if cacerts and not hostfingerprint: - sock = _create_connection((self.host, self.port)) - self.sock = _ssl_wrap_socket(sock, self.key_file, - self.cert_file, cert_reqs=CERT_REQUIRED, - ca_certs=cacerts) - msg = _verifycert(self.sock.getpeercert(), self.host) + self.sock = _ssl_wrap_socket(self.sock, self.key_file, + self.cert_file, cert_reqs=CERT_REQUIRED, + ca_certs=util.expandpath(cacerts)) + msg = _verifycert(self.sock.getpeercert(), host) if msg: raise util.Abort(_('%s certificate error: %s ' '(use --insecure to connect ' - 'insecurely)') % (self.host, msg)) - self.ui.debug('%s certificate successfully verified\n' % - self.host) + 'insecurely)') % (host, msg)) + self.ui.debug('%s certificate successfully verified\n' % host) else: - httplib.HTTPSConnection.connect(self) + self.sock = _ssl_wrap_socket(self.sock, self.key_file, + self.cert_file) if hasattr(self.sock, 'getpeercert'): peercert = self.sock.getpeercert(True) peerfingerprint = util.sha1(peercert).hexdigest() @@ -558,38 +582,22 @@ hostfingerprint.replace(':', '').lower(): raise util.Abort(_('invalid certificate for %s ' 'with fingerprint %s') % - (self.host, nicefingerprint)) + (host, nicefingerprint)) self.ui.debug('%s certificate matched fingerprint %s\n' % - (self.host, nicefingerprint)) + (host, nicefingerprint)) else: self.ui.warn(_('warning: %s certificate ' 'with fingerprint %s not verified ' '(check hostfingerprints or web.cacerts ' 'config setting)\n') % - (self.host, nicefingerprint)) + (host, nicefingerprint)) else: # python 2.5 ? if hostfingerprint: - raise util.Abort(_('no certificate for %s ' - 'with fingerprint') % self.host) + raise util.Abort(_('no certificate for %s with ' + 'configured hostfingerprint') % host) self.ui.warn(_('warning: %s certificate not verified ' '(check web.cacerts config setting)\n') % - self.host) - - class httpsconnection(BetterHTTPS): - response_class = keepalive.HTTPResponse - # must be able to send big bundle as stream. 
- send = _gen_sendfile(BetterHTTPS) - getresponse = keepalive.wrapgetresponse(httplib.HTTPSConnection) - - def connect(self): - if self.realhostport: # use CONNECT proxy - self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - self.sock.connect((self.host, self.port)) - if _generic_proxytunnel(self): - self.sock = _ssl_wrap_socket(self.sock, self.key_file, - self.cert_file) - else: - BetterHTTPS.connect(self) + host) class httpshandler(keepalive.KeepAliveHandler, urllib2.HTTPSHandler): def __init__(self, ui): @@ -603,7 +611,13 @@ return keepalive.KeepAliveHandler._start_transaction(self, h, req) def https_open(self, req): - self.auth = self.pwmgr.readauthtoken(req.get_full_url()) + res = readauthforuri(self.ui, req.get_full_url()) + if res: + group, auth = res + self.auth = auth + self.ui.debug("using auth.%s.* for authentication\n" % group) + else: + self.auth = None return self.do_open(self._makeconnection, req) def _makeconnection(self, host, port=None, *args, **kwargs):
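Hoisting the [auth] matching into readauthforuri() lets both the password manager and the HTTPS handler log which auth.<group>.* block they picked. The rule itself is unchanged: a group's prefix may carry its own scheme (https://host/path) or rely on the schemes setting (default https), and among the groups whose prefix matches the request, the longest prefix wins. A standalone sketch of that selection over a toy config; the group names, hosts and URIs are invented:

# Standalone sketch of the [auth] best-match rule used by readauthforuri();
# the groups and URIs below are invented for illustration.
config = {
    'corp':   {'prefix': 'hg.example.com', 'username': 'alice'},
    'corphg': {'prefix': 'https://hg.example.com/private', 'username': 'bob'},
    'other':  {'prefix': '*', 'schemes': 'http https', 'username': 'guest'},
}

def bestmatch(uri):
    scheme, hostpath = uri.split('://', 1)
    bestlen, bestauth = 0, None
    for group, auth in config.iteritems():
        prefix = auth.get('prefix')
        if not prefix:
            continue
        p = prefix.split('://', 1)
        if len(p) > 1:
            schemes, prefix = [p[0]], p[1]
        else:
            schemes = (auth.get('schemes') or 'https').split()
        if (prefix == '*' or hostpath.startswith(prefix)) and \
           len(prefix) > bestlen and scheme in schemes:
            bestlen = len(prefix)
            bestauth = group, auth
    return bestauth

print bestmatch('https://hg.example.com/private/repo')[0]
# corphg (longest matching prefix wins)
print bestmatch('http://hg.example.com/public')[0]
# other ('*' matches everything and its schemes allow http)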
--- a/mercurial/util.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/util.py Wed Feb 16 14:13:22 2011 -0600 @@ -198,7 +198,10 @@ if code: raise Abort(_("command '%s' failed: %s") % (cmd, explain_exit(code))) - return open(outname, 'rb').read() + fp = open(outname, 'rb') + r = fp.read() + fp.close() + return r finally: try: if inname: @@ -431,7 +434,7 @@ return check -def unlink(f): +def unlinkpath(f): """unlink and remove the directory if it is empty""" os.unlink(f) # try removing directories that might now be empty @@ -451,7 +454,7 @@ else: try: shutil.copyfile(src, dest) - shutil.copystat(src, dest) + shutil.copymode(src, dest) except shutil.Error, inst: raise Abort(str(inst)) @@ -487,6 +490,7 @@ '''ensure that a filesystem path contains no banned components. the following properties of a path are checked: + - ends with a directory separator - under top-level .hg - starts at the root of a windows drive - contains ".." @@ -504,6 +508,9 @@ def __call__(self, path): if path in self.audited: return + # AIX ignores "/" at end of path, others raise EISDIR. + if endswithsep(path): + raise Abort(_("path ends in directory separator: %s") % path) normpath = os.path.normcase(path) parts = splitpath(normpath) if (os.path.splitdrive(path)[0] @@ -550,16 +557,6 @@ # want to add "foo/bar/baz" before checking if there's a "foo/.hg" self.auditeddir.update(prefixes) -def nlinks(pathname): - """Return number of hardlinks for the given file.""" - return os.lstat(pathname).st_nlink - -if hasattr(os, 'link'): - os_link = os.link -else: - def os_link(src, dst): - raise OSError(0, _("Hardlinks not supported")) - def lookup_reg(key, name=None, scope=None): return None @@ -597,7 +594,10 @@ raise except AttributeError: # no symlink in os pass - return posixfile(pathname).read() + fp = posixfile(pathname) + r = fp.read() + fp.close() + return r def fstat(fp): '''stat file object that may not have fileno method.''' @@ -738,7 +738,7 @@ # nlinks() may behave differently for files on Windows shares if # the file is open. - fd = open(f2) + fd = posixfile(f2) return nlinks(f2) > 1 finally: if fd is not None: @@ -837,7 +837,7 @@ self._fp.close() rename(self.temp, localpath(self.__name)) - def __del__(self): + def close(self): if not self._fp: return if not self._fp.closed: @@ -846,6 +846,9 @@ except: pass self._fp.close() + def __del__(self): + self.close() + def makedirs(name, mode=None): """recursive directory creation with parent mode inheritance""" parent = os.path.abspath(os.path.dirname(name)) @@ -894,7 +897,6 @@ mode += "b" # for that other OS nlink = -1 - st_mode = None dirname, basename = os.path.split(f) # If basename is empty, then the path is malformed because it points # to a directory. Let the posixfile() call below raise IOError. @@ -905,18 +907,19 @@ return atomictempfile(f, mode, self.createmode) try: if 'w' in mode: - st_mode = os.lstat(f).st_mode & 0777 - os.unlink(f) + unlink(f) nlink = 0 else: # nlinks() may behave differently for files on Windows # shares if the file is open. 
- fd = open(f) + fd = posixfile(f) nlink = nlinks(f) if nlink < 1: nlink = 2 # force mktempcopy (issue1922) fd.close() - except (OSError, IOError): + except (OSError, IOError), e: + if e.errno != errno.ENOENT: + raise nlink = 0 if not os.path.isdir(dirname): makedirs(dirname, self.createmode) @@ -927,10 +930,7 @@ rename(mktempcopy(f), f) fp = posixfile(f, mode) if nlink == 0: - if st_mode is None: - self._fixfilemode(f) - else: - os.chmod(f, st_mode) + self._fixfilemode(f) return fp def symlink(self, src, dst): @@ -1075,7 +1075,7 @@ # NOTE: unixtime = localunixtime + offset offset, date = timezone(string), string - if offset != None: + if offset is not None: date = " ".join(string.split()[:-1]) # add missing elements from defaults @@ -1120,7 +1120,7 @@ now = makedate() defaults = {} nowmap = {} - for part in "d mb yY HI M S".split(): + for part in ("d", "mb", "yY", "HI", "M", "S"): # this piece is for rounding the specific end of unknowns b = bias.get(part) if b is None: @@ -1190,7 +1190,7 @@ def upper(date): d = dict(mb="12", HI="23", M="59", S="59") - for days in "31 30 29".split(): + for days in ("31", "30", "29"): try: d["d"] = days return parsedate(date, extendeddateformats, d)[0] @@ -1387,37 +1387,48 @@ # Avoid double backslash in Windows path repr() return repr(s).replace('\\\\', '\\') -#### naming convention of below implementation follows 'textwrap' module +# delay import of textwrap +def MBTextWrapper(**kwargs): + class tw(textwrap.TextWrapper): + """ + Extend TextWrapper for double-width characters. -class MBTextWrapper(textwrap.TextWrapper): - def __init__(self, **kwargs): - textwrap.TextWrapper.__init__(self, **kwargs) + Some Asian characters use two terminal columns instead of one. + A good example of this behavior can be seen with u'\u65e5\u672c', + the two Japanese characters for "Japan": + len() returns 2, but when printed to a terminal, they eat 4 columns. 
+ + (Note that this has nothing to do whatsoever with unicode + representation, or encoding of the underlying string) + """ + def __init__(self, **kwargs): + textwrap.TextWrapper.__init__(self, **kwargs) - def _cutdown(self, str, space_left): - l = 0 - ucstr = unicode(str, encoding.encoding) - w = unicodedata.east_asian_width - for i in xrange(len(ucstr)): - l += w(ucstr[i]) in 'WFA' and 2 or 1 - if space_left < l: - return (ucstr[:i].encode(encoding.encoding), - ucstr[i:].encode(encoding.encoding)) - return str, '' + def _cutdown(self, str, space_left): + l = 0 + ucstr = unicode(str, encoding.encoding) + colwidth = unicodedata.east_asian_width + for i in xrange(len(ucstr)): + l += colwidth(ucstr[i]) in 'WFA' and 2 or 1 + if space_left < l: + return (ucstr[:i].encode(encoding.encoding), + ucstr[i:].encode(encoding.encoding)) + return str, '' - # ---------------------------------------- - # overriding of base class - - def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width): - space_left = max(width - cur_len, 1) + # overriding of base class + def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width): + space_left = max(width - cur_len, 1) - if self.break_long_words: - cut, res = self._cutdown(reversed_chunks[-1], space_left) - cur_line.append(cut) - reversed_chunks[-1] = res - elif not cur_line: - cur_line.append(reversed_chunks.pop()) + if self.break_long_words: + cut, res = self._cutdown(reversed_chunks[-1], space_left) + cur_line.append(cut) + reversed_chunks[-1] = res + elif not cur_line: + cur_line.append(reversed_chunks.pop()) -#### naming convention of above implementation follows 'textwrap' module + global MBTextWrapper + MBTextWrapper = tw + return tw(**kwargs) def wrap(line, width, initindent='', hangindent=''): maxindent = max(len(hangindent), len(initindent)) @@ -1497,7 +1508,7 @@ return False return True -def interpolate(prefix, mapping, s, fn=None): +def interpolate(prefix, mapping, s, fn=None, escape_prefix=False): """Return the result of interpolating items in the mapping into string s. prefix is a single character string, or a two character string with @@ -1506,9 +1517,20 @@ fn is an optional function that will be applied to the replacement text just before replacement. + + escape_prefix is an optional flag that allows using doubled prefix for + its escaping. """ fn = fn or (lambda s: s) - r = re.compile(r'%s(%s)' % (prefix, '|'.join(mapping.keys()))) + patterns = '|'.join(mapping.keys()) + if escape_prefix: + patterns += '|' + prefix + if len(prefix) > 1: + prefix_char = prefix[1:] + else: + prefix_char = prefix + mapping[prefix_char] = prefix_char + r = re.compile(r'%s(%s)' % (prefix, patterns)) return r.sub(lambda x: fn(mapping[x.group()[1:]]), s) def getport(port):
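Among the util changes, interpolate() gains an escape_prefix flag so a doubled prefix can stand for a literal prefix character (e.g. '%%' expanding to '%'). A standalone sketch that mirrors the regex construction for the common single-character-prefix case, with a made-up mapping and input:

import re

# Standalone sketch of interpolate(..., escape_prefix=True) for a
# single-character prefix; the mapping and input string are made up.
def interpolate(prefix, mapping, s, escape_prefix=False):
    patterns = '|'.join(mapping.keys())
    if escape_prefix:
        patterns += '|' + prefix
        mapping[prefix] = prefix          # doubled prefix -> literal prefix
    r = re.compile(r'%s(%s)' % (prefix, patterns))
    return r.sub(lambda x: mapping[x.group()[1:]], s)

print interpolate('%', {'user': 'alice'}, 'hello %user, 100%% done',
                  escape_prefix=True)
# hello alice, 100% done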
--- a/mercurial/verify.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/verify.py Wed Feb 16 14:13:22 2011 -0600 @@ -34,7 +34,7 @@ raise util.Abort(_("cannot verify bundle or remote repos")) def err(linkrev, msg, filename=None): - if linkrev != None: + if linkrev is not None: badrevs.add(linkrev) else: linkrev = '?'
--- a/mercurial/win32.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/win32.py Wed Feb 16 14:13:22 2011 -0600 @@ -5,73 +5,173 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. -"""Utility functions that use win32 API. +import encoding +import ctypes, errno, os, struct, subprocess + +_kernel32 = ctypes.windll.kernel32 + +_BOOL = ctypes.c_long +_WORD = ctypes.c_ushort +_DWORD = ctypes.c_ulong +_LPCSTR = _LPSTR = ctypes.c_char_p +_HANDLE = ctypes.c_void_p +_HWND = _HANDLE + +_INVALID_HANDLE_VALUE = -1 + +# GetLastError +_ERROR_SUCCESS = 0 +_ERROR_INVALID_PARAMETER = 87 +_ERROR_INSUFFICIENT_BUFFER = 122 + +# WPARAM is defined as UINT_PTR (unsigned type) +# LPARAM is defined as LONG_PTR (signed type) +if ctypes.sizeof(ctypes.c_long) == ctypes.sizeof(ctypes.c_void_p): + _WPARAM = ctypes.c_ulong + _LPARAM = ctypes.c_long +elif ctypes.sizeof(ctypes.c_longlong) == ctypes.sizeof(ctypes.c_void_p): + _WPARAM = ctypes.c_ulonglong + _LPARAM = ctypes.c_longlong + +class _FILETIME(ctypes.Structure): + _fields_ = [('dwLowDateTime', _DWORD), + ('dwHighDateTime', _DWORD)] + +class _BY_HANDLE_FILE_INFORMATION(ctypes.Structure): + _fields_ = [('dwFileAttributes', _DWORD), + ('ftCreationTime', _FILETIME), + ('ftLastAccessTime', _FILETIME), + ('ftLastWriteTime', _FILETIME), + ('dwVolumeSerialNumber', _DWORD), + ('nFileSizeHigh', _DWORD), + ('nFileSizeLow', _DWORD), + ('nNumberOfLinks', _DWORD), + ('nFileIndexHigh', _DWORD), + ('nFileIndexLow', _DWORD)] + +# CreateFile +_FILE_SHARE_READ = 0x00000001 +_FILE_SHARE_WRITE = 0x00000002 +_FILE_SHARE_DELETE = 0x00000004 + +_OPEN_EXISTING = 3 + +# Process Security and Access Rights +_PROCESS_QUERY_INFORMATION = 0x0400 + +# GetExitCodeProcess +_STILL_ACTIVE = 259 + +# registry +_HKEY_CURRENT_USER = 0x80000001L +_HKEY_LOCAL_MACHINE = 0x80000002L +_KEY_READ = 0x20019 +_REG_SZ = 1 +_REG_DWORD = 4 -Mark Hammond's win32all package allows better functionality on -Windows. This module overrides definitions in util.py. If not -available, import of this module will fail, and generic code will be -used. 
-""" +class _STARTUPINFO(ctypes.Structure): + _fields_ = [('cb', _DWORD), + ('lpReserved', _LPSTR), + ('lpDesktop', _LPSTR), + ('lpTitle', _LPSTR), + ('dwX', _DWORD), + ('dwY', _DWORD), + ('dwXSize', _DWORD), + ('dwYSize', _DWORD), + ('dwXCountChars', _DWORD), + ('dwYCountChars', _DWORD), + ('dwFillAttribute', _DWORD), + ('dwFlags', _DWORD), + ('wShowWindow', _WORD), + ('cbReserved2', _WORD), + ('lpReserved2', ctypes.c_char_p), + ('hStdInput', _HANDLE), + ('hStdOutput', _HANDLE), + ('hStdError', _HANDLE)] + +class _PROCESS_INFORMATION(ctypes.Structure): + _fields_ = [('hProcess', _HANDLE), + ('hThread', _HANDLE), + ('dwProcessId', _DWORD), + ('dwThreadId', _DWORD)] + +_DETACHED_PROCESS = 0x00000008 +_STARTF_USESHOWWINDOW = 0x00000001 +_SW_HIDE = 0 -import win32api +class _COORD(ctypes.Structure): + _fields_ = [('X', ctypes.c_short), + ('Y', ctypes.c_short)] + +class _SMALL_RECT(ctypes.Structure): + _fields_ = [('Left', ctypes.c_short), + ('Top', ctypes.c_short), + ('Right', ctypes.c_short), + ('Bottom', ctypes.c_short)] + +class _CONSOLE_SCREEN_BUFFER_INFO(ctypes.Structure): + _fields_ = [('dwSize', _COORD), + ('dwCursorPosition', _COORD), + ('wAttributes', _WORD), + ('srWindow', _SMALL_RECT), + ('dwMaximumWindowSize', _COORD)] -import errno, os, sys, pywintypes, win32con, win32file, win32process -import winerror, win32gui, win32console -import osutil, encoding -from win32com.shell import shell, shellcon +_STD_ERROR_HANDLE = 0xfffffff4L # (DWORD)-12 + +def _raiseoserror(name): + err = ctypes.WinError() + raise OSError(err.errno, '%s: %s' % (name, err.strerror)) + +def _getfileinfo(name): + fh = _kernel32.CreateFileA(name, 0, + _FILE_SHARE_READ | _FILE_SHARE_WRITE | _FILE_SHARE_DELETE, + None, _OPEN_EXISTING, 0, None) + if fh == _INVALID_HANDLE_VALUE: + _raiseoserror(name) + try: + fi = _BY_HANDLE_FILE_INFORMATION() + if not _kernel32.GetFileInformationByHandle(fh, ctypes.byref(fi)): + _raiseoserror(name) + return fi + finally: + _kernel32.CloseHandle(fh) def os_link(src, dst): - try: - win32file.CreateHardLink(dst, src) - except pywintypes.error: - raise OSError(errno.EINVAL, 'target implements hardlinks improperly') - except NotImplementedError: # Another fake error win Win98 - raise OSError(errno.EINVAL, 'Hardlinking not supported') + if not _kernel32.CreateHardLinkA(dst, src, None): + _raiseoserror(src) -def _getfileinfo(pathname): - """Return number of hardlinks for the given file.""" - try: - fh = win32file.CreateFile(pathname, - win32file.GENERIC_READ, win32file.FILE_SHARE_READ, - None, win32file.OPEN_EXISTING, 0, None) - except pywintypes.error: - raise OSError(errno.ENOENT, 'The system cannot find the file specified') - try: - return win32file.GetFileInformationByHandle(fh) - finally: - fh.Close() - -def nlinks(pathname): - """Return number of hardlinks for the given file.""" - return _getfileinfo(pathname)[7] +def nlinks(name): + '''return number of hardlinks for the given file''' + return _getfileinfo(name).nNumberOfLinks def samefile(fpath1, fpath2): - """Returns whether fpath1 and fpath2 refer to the same file. This is only - guaranteed to work for files, not directories.""" + '''Returns whether fpath1 and fpath2 refer to the same file. 
This is only + guaranteed to work for files, not directories.''' res1 = _getfileinfo(fpath1) res2 = _getfileinfo(fpath2) - # Index 4 is the volume serial number, and 8 and 9 contain the file ID - return res1[4] == res2[4] and res1[8] == res2[8] and res1[9] == res2[9] + return (res1.dwVolumeSerialNumber == res2.dwVolumeSerialNumber + and res1.nFileIndexHigh == res2.nFileIndexHigh + and res1.nFileIndexLow == res2.nFileIndexLow) def samedevice(fpath1, fpath2): - """Returns whether fpath1 and fpath2 are on the same device. This is only - guaranteed to work for files, not directories.""" + '''Returns whether fpath1 and fpath2 are on the same device. This is only + guaranteed to work for files, not directories.''' res1 = _getfileinfo(fpath1) res2 = _getfileinfo(fpath2) - return res1[4] == res2[4] + return res1.dwVolumeSerialNumber == res2.dwVolumeSerialNumber def testpid(pid): '''return True if pid is still running or unable to determine, False otherwise''' - try: - handle = win32api.OpenProcess( - win32con.PROCESS_QUERY_INFORMATION, False, pid) - if handle: - status = win32process.GetExitCodeProcess(handle) - return status == win32con.STILL_ACTIVE - except pywintypes.error, details: - return details[0] != winerror.ERROR_INVALID_PARAMETER - return True + h = _kernel32.OpenProcess(_PROCESS_QUERY_INFORMATION, False, pid) + if h: + try: + status = _DWORD() + if _kernel32.GetExitCodeProcess(h, ctypes.byref(status)): + return status.value == _STILL_ACTIVE + finally: + _kernel32.CloseHandle(h) + return _kernel32.GetLastError() != _ERROR_INVALID_PARAMETER def lookup_reg(key, valname=None, scope=None): ''' Look up a key/value name in the Windows registry. @@ -82,101 +182,137 @@ a sequence of scopes to look up in order. Default (CURRENT_USER, LOCAL_MACHINE). ''' - try: - from _winreg import HKEY_CURRENT_USER, HKEY_LOCAL_MACHINE, \ - QueryValueEx, OpenKey - except ImportError: - return None - + adv = ctypes.windll.advapi32 + byref = ctypes.byref if scope is None: - scope = (HKEY_CURRENT_USER, HKEY_LOCAL_MACHINE) + scope = (_HKEY_CURRENT_USER, _HKEY_LOCAL_MACHINE) elif not isinstance(scope, (list, tuple)): scope = (scope,) for s in scope: + kh = _HANDLE() + res = adv.RegOpenKeyExA(s, key, 0, _KEY_READ, ctypes.byref(kh)) + if res != _ERROR_SUCCESS: + continue try: - val = QueryValueEx(OpenKey(s, key), valname)[0] - # never let a Unicode string escape into the wild - return encoding.tolocal(val.encode('UTF-8')) - except EnvironmentError: - pass + size = _DWORD(600) + type = _DWORD() + buf = ctypes.create_string_buffer(size.value + 1) + res = adv.RegQueryValueExA(kh.value, valname, None, + byref(type), buf, byref(size)) + if res != _ERROR_SUCCESS: + continue + if type.value == _REG_SZ: + # never let a Unicode string escape into the wild + return encoding.tolocal(buf.value.encode('UTF-8')) + elif type.value == _REG_DWORD: + fmt = '<L' + s = ctypes.string_at(byref(buf), struct.calcsize(fmt)) + return struct.unpack(fmt, s)[0] + finally: + adv.RegCloseKey(kh.value) -def system_rcpath_win32(): - '''return default os-specific hgrc search path''' - filename = win32api.GetModuleFileName(0) - # Use mercurial.ini found in directory with hg.exe - progrc = os.path.join(os.path.dirname(filename), 'mercurial.ini') - if os.path.isfile(progrc): - return [progrc] - # Use hgrc.d found in directory with hg.exe - progrcd = os.path.join(os.path.dirname(filename), 'hgrc.d') - if os.path.isdir(progrcd): - rcpath = [] - for f, kind in osutil.listdir(progrcd): - if f.endswith('.rc'): - rcpath.append(os.path.join(progrcd, f)) - return 
rcpath - # else look for a system rcpath in the registry - try: - value = win32api.RegQueryValue( - win32con.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Mercurial') - rcpath = [] - for p in value.split(os.pathsep): - if p.lower().endswith('mercurial.ini'): - rcpath.append(p) - elif os.path.isdir(p): - for f, kind in osutil.listdir(p): - if f.endswith('.rc'): - rcpath.append(os.path.join(p, f)) - return rcpath - except pywintypes.error: - return [] - -def user_rcpath_win32(): - '''return os-specific hgrc search path to the user dir''' - userdir = os.path.expanduser('~') - if sys.getwindowsversion()[3] != 2 and userdir == '~': - # We are on win < nt: fetch the APPDATA directory location and use - # the parent directory as the user home dir. - appdir = shell.SHGetPathFromIDList( - shell.SHGetSpecialFolderLocation(0, shellcon.CSIDL_APPDATA)) - userdir = os.path.dirname(appdir) - return [os.path.join(userdir, 'mercurial.ini'), - os.path.join(userdir, '.hgrc')] +def executable_path(): + '''return full path of hg.exe''' + size = 600 + buf = ctypes.create_string_buffer(size + 1) + len = _kernel32.GetModuleFileNameA(None, ctypes.byref(buf), size) + if len == 0: + raise ctypes.WinError() + elif len == size: + raise ctypes.WinError(_ERROR_INSUFFICIENT_BUFFER) + return buf.value def getuser(): '''return name of current user''' - return win32api.GetUserName() + adv = ctypes.windll.advapi32 + size = _DWORD(300) + buf = ctypes.create_string_buffer(size.value + 1) + if not adv.GetUserNameA(ctypes.byref(buf), ctypes.byref(size)): + raise ctypes.WinError() + return buf.value -def set_signal_handler_win32(): - """Register a termination handler for console events including +_SIGNAL_HANDLER = ctypes.WINFUNCTYPE(_BOOL, _DWORD) +_signal_handler = [] + +def set_signal_handler(): + '''Register a termination handler for console events including CTRL+C. python signal handlers do not work well with socket operations. - """ + ''' def handler(event): - win32process.ExitProcess(1) - win32api.SetConsoleCtrlHandler(handler) + _kernel32.ExitProcess(1) + + if _signal_handler: + return # already registered + h = _SIGNAL_HANDLER(handler) + _signal_handler.append(h) # needed to prevent garbage collection + if not _kernel32.SetConsoleCtrlHandler(h, True): + raise ctypes.WinError() + +_WNDENUMPROC = ctypes.WINFUNCTYPE(_BOOL, _HWND, _LPARAM) def hidewindow(): - def callback(*args, **kwargs): - hwnd, pid = args - wpid = win32process.GetWindowThreadProcessId(hwnd)[1] - if pid == wpid: - win32gui.ShowWindow(hwnd, win32con.SW_HIDE) + user32 = ctypes.windll.user32 - pid = win32process.GetCurrentProcessId() - win32gui.EnumWindows(callback, pid) + def callback(hwnd, pid): + wpid = _DWORD() + user32.GetWindowThreadProcessId(hwnd, ctypes.byref(wpid)) + if pid == wpid.value: + user32.ShowWindow(hwnd, _SW_HIDE) + return False # stop enumerating windows + return True + + pid = _kernel32.GetCurrentProcessId() + user32.EnumWindows(_WNDENUMPROC(callback), pid) def termwidth(): - try: - # Query stderr to avoid problems with redirections - screenbuf = win32console.GetStdHandle(win32console.STD_ERROR_HANDLE) - if screenbuf is None: - return 79 - try: - window = screenbuf.GetConsoleScreenBufferInfo()['Window'] - width = window.Right - window.Left - return width - finally: - screenbuf.Detach() - except pywintypes.error: - return 79 + # cmd.exe does not handle CR like a unix console, the CR is + # counted in the line length. On 80 columns consoles, if 80 + # characters are written, the following CR won't apply on the + # current line but on the new one. 
Keep room for it. + width = 79 + # Query stderr to avoid problems with redirections + screenbuf = _kernel32.GetStdHandle( + _STD_ERROR_HANDLE) # don't close the handle returned + if screenbuf is None or screenbuf == _INVALID_HANDLE_VALUE: + return width + csbi = _CONSOLE_SCREEN_BUFFER_INFO() + if not _kernel32.GetConsoleScreenBufferInfo( + screenbuf, ctypes.byref(csbi)): + return width + width = csbi.srWindow.Right - csbi.srWindow.Left + return width + +def spawndetached(args): + # No standard library function really spawns a fully detached + # process under win32 because they allocate pipes or other objects + # to handle standard streams communications. Passing these objects + # to the child process requires handle inheritance to be enabled + # which makes really detached processes impossible. + si = _STARTUPINFO() + si.cb = ctypes.sizeof(_STARTUPINFO) + si.dwFlags = _STARTF_USESHOWWINDOW + si.wShowWindow = _SW_HIDE + + pi = _PROCESS_INFORMATION() + + env = '' + for k in os.environ: + env += "%s=%s\0" % (k, os.environ[k]) + if not env: + env = '\0' + env += '\0' + + args = subprocess.list2cmdline(args) + # Not running the command in shell mode makes python26 hang when + # writing to hgweb output socket. + comspec = os.environ.get("COMSPEC", "cmd.exe") + args = comspec + " /c " + args + + res = _kernel32.CreateProcessA( + None, args, None, None, False, _DETACHED_PROCESS, + env, os.getcwd(), ctypes.byref(si), ctypes.byref(pi)) + if not res: + raise ctypes.WinError() + + return pi.dwProcessId
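
The whole of win32.py is rewritten here to call kernel32/advapi32/user32 through ctypes instead of depending on pywin32. As a rough standalone illustration of the pattern (Windows-only; the structure and constant names below are local to this sketch, not the ones win32.py defines elsewhere), the termwidth() logic above boils down to:

    import ctypes

    STD_ERROR_HANDLE = -12           # query stderr, as termwidth() above does

    class COORD(ctypes.Structure):
        _fields_ = [('X', ctypes.c_short), ('Y', ctypes.c_short)]

    class SMALL_RECT(ctypes.Structure):
        _fields_ = [('Left', ctypes.c_short), ('Top', ctypes.c_short),
                    ('Right', ctypes.c_short), ('Bottom', ctypes.c_short)]

    class CONSOLE_SCREEN_BUFFER_INFO(ctypes.Structure):
        _fields_ = [('dwSize', COORD),
                    ('dwCursorPosition', COORD),
                    ('wAttributes', ctypes.c_ushort),
                    ('srWindow', SMALL_RECT),
                    ('dwMaximumWindowSize', COORD)]

    def console_width(default=79):
        kernel32 = ctypes.windll.kernel32
        kernel32.GetStdHandle.restype = ctypes.c_void_p    # HANDLE, not int
        handle = kernel32.GetStdHandle(STD_ERROR_HANDLE)
        csbi = CONSOLE_SCREEN_BUFFER_INFO()
        if not handle or not kernel32.GetConsoleScreenBufferInfo(
                handle, ctypes.byref(csbi)):
            return default      # redirected or no console: keep the fallback
        return csbi.srWindow.Right - csbi.srWindow.Left

The real module additionally guards against INVALID_HANDLE_VALUE; returning Right - Left rather than the full window width is deliberate, leaving the spare column the CR comment above asks for.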
--- a/mercurial/windows.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/windows.py Wed Feb 16 14:13:22 2011 -0600 @@ -71,22 +71,45 @@ return 'command' in os.environ.get('comspec', '') def openhardlinks(): - return not _is_win_9x() and "win32api" in globals() + return not _is_win_9x() + +_HKEY_LOCAL_MACHINE = 0x80000002L def system_rcpath(): - try: - return system_rcpath_win32() - except: - return [r'c:\mercurial\mercurial.ini'] + '''return default os-specific hgrc search path''' + rcpath = [] + filename = executable_path() + # Use mercurial.ini found in directory with hg.exe + progrc = os.path.join(os.path.dirname(filename), 'mercurial.ini') + if os.path.isfile(progrc): + rcpath.append(progrc) + return rcpath + # Use hgrc.d found in directory with hg.exe + progrcd = os.path.join(os.path.dirname(filename), 'hgrc.d') + if os.path.isdir(progrcd): + for f, kind in osutil.listdir(progrcd): + if f.endswith('.rc'): + rcpath.append(os.path.join(progrcd, f)) + return rcpath + # else look for a system rcpath in the registry + value = lookup_reg('SOFTWARE\\Mercurial', None, _HKEY_LOCAL_MACHINE) + if not isinstance(value, str) or not value: + return rcpath + value = value.replace('/', os.sep) + for p in value.split(os.pathsep): + if p.lower().endswith('mercurial.ini'): + rcpath.append(p) + elif os.path.isdir(p): + for f, kind in osutil.listdir(p): + if f.endswith('.rc'): + rcpath.append(os.path.join(p, f)) + return rcpath def user_rcpath(): '''return os-specific hgrc search path to the user dir''' - try: - path = user_rcpath_win32() - except: - home = os.path.expanduser('~') - path = [os.path.join(home, 'mercurial.ini'), - os.path.join(home, '.hgrc')] + home = os.path.expanduser('~') + path = [os.path.join(home, 'mercurial.ini'), + os.path.join(home, '.hgrc')] userprofile = os.environ.get('USERPROFILE') if userprofile: path.append(os.path.join(userprofile, 'mercurial.ini')) @@ -106,10 +129,6 @@ args = user and ("%s@%s" % (user, host)) or host return port and ("%s %s %s" % (args, pflag, port)) or args -def testpid(pid): - '''return False if pid dead, True if running or not known''' - return True - def set_flags(f, l, x): pass @@ -208,12 +227,6 @@ return executable return findexisting(os.path.expanduser(os.path.expandvars(command))) -def set_signal_handler(): - try: - set_signal_handler_win32() - except NameError: - pass - def statfiles(files): '''Stat each file in files and yield stat or None if file does not exist. Cluster and cache stat per directory to minimize number of OS stat calls.''' @@ -241,11 +254,6 @@ cache = dircache.setdefault(dir, dmap) yield cache.get(base, None) -def getuser(): - '''return name of current user''' - raise error.Abort(_('user name not available - set USERNAME ' - 'environment variable')) - def username(uid=None): """Return the name of the user with the given uid. @@ -276,7 +284,7 @@ break head, tail = os.path.split(head) -def unlink(f): +def unlinkpath(f): """unlink and remove the directory if it is empty""" os.unlink(f) # try removing directories that might now be empty @@ -285,73 +293,56 @@ except OSError: pass +def unlink(f): + '''try to implement POSIX' unlink semantics on Windows''' + + # POSIX allows to unlink and rename open files. Windows has serious + # problems with doing that: + # - Calling os.unlink (or os.rename) on a file f fails if f or any + # hardlinked copy of f has been opened with Python's open(). There is no + # way such a file can be deleted or renamed on Windows (other than + # scheduling the delete or rename for the next reboot). 
+ # - Calling os.unlink on a file that has been opened with Mercurial's + # posixfile (or comparable methods) will delay the actual deletion of + # the file for as long as the file is held open. The filename is blocked + # during that time and cannot be used for recreating a new file under + # that same name ("zombie file"). Directories containing such zombie files + # cannot be removed or moved. + # A file that has been opened with posixfile can be renamed, so we rename + # f to a random temporary name before calling os.unlink on it. This allows + # callers to recreate f immediately while having other readers do their + # implicit zombie filename blocking on a temporary name. + + for tries in xrange(10): + temp = '%s-%08x' % (f, random.randint(0, 0xffffffff)) + try: + os.rename(f, temp) # raises OSError EEXIST if temp exists + break + except OSError, e: + if e.errno != errno.EEXIST: + raise + else: + raise IOError, (errno.EEXIST, "No usable temporary filename found") + + try: + os.unlink(temp) + except: + # Some very rude AV-scanners on Windows may cause this unlink to fail. + # Not aborting here just leaks the temp file, whereas aborting at this + # point may leave serious inconsistencies. Ideally, we would notify + # the user in this case here. + pass + def rename(src, dst): '''atomically rename file src to dst, replacing dst if it exists''' try: os.rename(src, dst) - except OSError: # FIXME: check err (EEXIST ?) - - # On windows, rename to existing file is not allowed, so we - # must delete destination first. But if a file is open, unlink - # schedules it for delete but does not delete it. Rename - # happens immediately even for open files, so we rename - # destination to a temporary name, then delete that. Then - # rename is safe to do. - # The temporary name is chosen at random to avoid the situation - # where a file is left lying around from a previous aborted run. - - for tries in xrange(10): - temp = '%s-%08x' % (dst, random.randint(0, 0xffffffff)) - try: - os.rename(dst, temp) # raises OSError EEXIST if temp exists - break - except OSError, e: - if e.errno != errno.EEXIST: - raise - else: - raise IOError, (errno.EEXIST, "No usable temporary filename found") - - try: - os.unlink(temp) - except: - # Some rude AV-scanners on Windows may cause the unlink to - # fail. Not aborting here just leaks the temp file, whereas - # aborting at this point may leave serious inconsistencies. - # Ideally, we would notify the user here. - pass + except OSError, e: + if e.errno != errno.EEXIST: + raise + unlink(dst) os.rename(src, dst) -def spawndetached(args): - # No standard library function really spawns a fully detached - # process under win32 because they allocate pipes or other objects - # to handle standard streams communications. Passing these objects - # to the child process requires handle inheritance to be enabled - # which makes really detached processes impossible. - class STARTUPINFO: - dwFlags = subprocess.STARTF_USESHOWWINDOW - hStdInput = None - hStdOutput = None - hStdError = None - wShowWindow = subprocess.SW_HIDE - - args = subprocess.list2cmdline(args) - # Not running the command in shell mode makes python26 hang when - # writing to hgweb output socket. 
- comspec = os.environ.get("COMSPEC", "cmd.exe") - args = comspec + " /c " + args - hp, ht, pid, tid = subprocess.CreateProcess( - None, args, - # no special security - None, None, - # Do not inherit handles - 0, - # DETACHED_PROCESS - 0x00000008, - os.environ, - os.getcwd(), - STARTUPINFO()) - return pid - def gethgcmd(): return [sys.executable] + sys.argv[:1] @@ -366,10 +357,6 @@ # Don't support groups on Windows for now raise KeyError() -try: - # override functions with win32 versions if possible - from win32 import * -except ImportError: - pass +from win32 import * expandglobs = True
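
With the pywin32 fallbacks gone, windows.py now imports the ctypes-based win32 module unconditionally, and the interesting new piece is unlink(): as the comment block above explains, an open file on Windows can be renamed but not deleted, so renaming it to a random temporary name first frees the original name immediately. Stripped of Mercurial specifics, and with the retry and AV-scanner handling simplified, the trick is roughly:

    import errno
    import os
    import random

    def posixish_unlink(path):
        # move the file out of the way first: an open file can be renamed on
        # Windows even when it cannot be deleted, so 'path' is reusable at once
        for _ in range(10):
            temp = '%s-%08x' % (path, random.randint(0, 0xffffffff))
            try:
                os.rename(path, temp)    # fails with EEXIST if temp exists
                break
            except OSError as e:
                if e.errno != errno.EEXIST:
                    raise
        else:
            raise IOError(errno.EEXIST, "No usable temporary filename found")
        try:
            os.unlink(temp)              # may be delayed while readers hold it
        except OSError:
            pass                         # worst case: a leaked temporary file

The new rename() above then reduces to: try os.rename, and on EEXIST unlink the destination this way and rename again.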
--- a/mercurial/wireproto.py Sat Feb 12 16:08:41 2011 +0800 +++ b/mercurial/wireproto.py Wed Feb 16 14:13:22 2011 -0600 @@ -25,7 +25,7 @@ class wirerepository(repo.repository): def lookup(self, key): self.requirecap('lookup', _('look up remote revision')) - d = self._call("lookup", key=key) + d = self._call("lookup", key=encoding.fromlocal(key)) success, data = d[:-1].split(" ", 1) if int(success): return bin(data) @@ -44,14 +44,7 @@ branchmap = {} for branchpart in d.splitlines(): branchname, branchheads = branchpart.split(' ', 1) - branchname = urllib.unquote(branchname) - # Earlier servers (1.3.x) send branch names in (their) local - # charset. The best we can do is assume it's identical to our - # own local charset, in case it's not utf-8. - try: - branchname.decode('utf-8') - except UnicodeDecodeError: - branchname = encoding.fromlocal(branchname) + branchname = encoding.tolocal(urllib.unquote(branchname)) branchheads = decodelist(branchheads) branchmap[branchname] = branchheads return branchmap @@ -83,17 +76,20 @@ if not self.capable('pushkey'): return False d = self._call("pushkey", - namespace=namespace, key=key, old=old, new=new) + namespace=encoding.fromlocal(namespace), + key=encoding.fromlocal(key), + old=encoding.fromlocal(old), + new=encoding.fromlocal(new)) return bool(int(d)) def listkeys(self, namespace): if not self.capable('pushkey'): return {} - d = self._call("listkeys", namespace=namespace) + d = self._call("listkeys", namespace=encoding.fromlocal(namespace)) r = {} for l in d.splitlines(): k, v = l.split('\t') - r[k.decode('string-escape')] = v.decode('string-escape') + r[encoding.tolocal(k)] = encoding.tolocal(v) return r def stream_out(self): @@ -162,7 +158,7 @@ branchmap = repo.branchmap() heads = [] for branch, nodes in branchmap.iteritems(): - branchname = urllib.quote(branch) + branchname = urllib.quote(encoding.fromlocal(branch)) branchnodes = encodelist(nodes) heads.append('%s %s' % (branchname, branchnodes)) return '\n'.join(heads) @@ -213,14 +209,14 @@ return "capabilities: %s\n" % (capabilities(repo, proto)) def listkeys(repo, proto, namespace): - d = pushkeymod.list(repo, namespace).items() - t = '\n'.join(['%s\t%s' % (k.encode('string-escape'), - v.encode('string-escape')) for k, v in d]) + d = pushkeymod.list(repo, encoding.tolocal(namespace)).items() + t = '\n'.join(['%s\t%s' % (encoding.fromlocal(k), encoding.fromlocal(v)) + for k, v in d]) return t def lookup(repo, proto, key): try: - r = hex(repo.lookup(key)) + r = hex(repo.lookup(encoding.tolocal(key))) success = 1 except Exception, inst: r = str(inst) @@ -228,7 +224,21 @@ return "%s %s\n" % (success, r) def pushkey(repo, proto, namespace, key, old, new): - r = pushkeymod.push(repo, namespace, key, old, new) + # compatibility with pre-1.8 clients which were accidentally + # sending raw binary nodes rather than utf-8-encoded hex + if len(new) == 20 and new.encode('string-escape') != new: + # looks like it could be a binary node + try: + u = new.decode('utf-8') + new = encoding.tolocal(new) # but cleanly decodes as UTF-8 + except UnicodeDecodeError: + pass # binary, leave unmodified + else: + new = encoding.tolocal(new) # normal path + + r = pushkeymod.push(repo, + encoding.tolocal(namespace), encoding.tolocal(key), + encoding.tolocal(old), new) return '%s\n' % int(r) def _allowstream(ui):
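
The wireproto.py changes all enforce one convention: strings cross the wire as UTF-8, so every outgoing value passes through encoding.fromlocal() and every incoming one through encoding.tolocal() (plus the compatibility shim above for pre-1.8 clients that pushed raw binary nodes). A toy version of that boundary for the listkeys payload, one "key<TAB>value" pair per line, might look like the following; fromlocal/tolocal here are crude UTF-8 stand-ins for Mercurial's encoding module, which also copes with non-UTF-8 local charsets and undecodable bytes:

    def fromlocal(s):
        # local text -> UTF-8 bytes for the wire (stand-in for encoding.fromlocal)
        return s.encode('utf-8')

    def tolocal(b):
        # UTF-8 bytes off the wire -> local text (stand-in for encoding.tolocal)
        return b.decode('utf-8')

    def encode_listkeys(mapping):
        # the listkeys wire format: one "key<TAB>value" pair per line
        return b'\n'.join(fromlocal(k) + b'\t' + fromlocal(v)
                          for k, v in sorted(mapping.items()))

    def decode_listkeys(payload):
        result = {}
        for line in payload.splitlines():
            k, v = line.split(b'\t', 1)
            result[tolocal(k)] = tolocal(v)
        return result

    bookmarks = {'staging-ünïcode': '6675d58eff77'}
    assert decode_listkeys(encode_listkeys(bookmarks)) == bookmarks

Because both ends agree on UTF-8, the old string-escape round trip and the guesswork about 1.3.x branch-name charsets can both be dropped.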
--- a/setup.py Sat Feb 12 16:08:41 2011 +0800 +++ b/setup.py Wed Feb 16 14:13:22 2011 -0600 @@ -294,14 +294,18 @@ libdir = uplevel * ('..' + os.sep) + self.install_lib[len(common):] for outfile in self.outfiles: - data = open(outfile, 'rb').read() + fp = open(outfile, 'rb') + data = fp.read() + fp.close() # skip binary files if '\0' in data: continue data = data.replace('@LIBDIR@', libdir.encode('string_escape')) - open(outfile, 'wb').write(data) + fp = open(outfile, 'wb') + fp.write(data) + fp.close() cmdclass = {'build_mo': hgbuildmo, 'build_ext': hgbuildext,
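
The setup.py hunk replaces the open(...).read() one-liners with explicitly closed file objects, so the files are not left for the garbage collector to close (and not left open on Windows) while @LIBDIR@ is substituted. In isolation the pattern is just read, substitute, write back, closing each handle; a hypothetical helper doing the same thing:

    def replace_placeholder(path, placeholder, value):
        # read, substitute, write back -- closing each file object explicitly
        fp = open(path, 'rb')
        try:
            data = fp.read()
        finally:
            fp.close()
        if b'\0' in data:            # skip binary files, as the hunk above does
            return
        fp = open(path, 'wb')
        try:
            fp.write(data.replace(placeholder, value))
        finally:
            fp.close()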
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/cgienv Wed Feb 16 14:13:22 2011 -0600 @@ -0,0 +1,29 @@ +DOCUMENT_ROOT="/var/www/hg"; export DOCUMENT_ROOT +GATEWAY_INTERFACE="CGI/1.1"; export GATEWAY_INTERFACE +HTTP_ACCEPT="text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5"; export HTTP_ACCEPT +HTTP_ACCEPT_CHARSET="ISO-8859-1,utf-8;q=0.7,*;q=0.7"; export HTTP_ACCEPT_CHARSET +HTTP_ACCEPT_ENCODING="gzip,deflate"; export HTTP_ACCEPT_ENCODING +HTTP_ACCEPT_LANGUAGE="en-us,en;q=0.5"; export HTTP_ACCEPT_LANGUAGE +HTTP_CACHE_CONTROL="max-age=0"; export HTTP_CACHE_CONTROL +HTTP_CONNECTION="keep-alive"; export HTTP_CONNECTION +HTTP_HOST="hg.omnifarious.org"; export HTTP_HOST +HTTP_KEEP_ALIVE="300"; export HTTP_KEEP_ALIVE +HTTP_USER_AGENT="Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.8.0.4) Gecko/20060608 Ubuntu/dapper-security Firefox/1.5.0.4"; export HTTP_USER_AGENT +PATH_INFO="/"; export PATH_INFO +PATH_TRANSLATED="/var/www/hg/index.html"; export PATH_TRANSLATED +QUERY_STRING=""; export QUERY_STRING +REMOTE_ADDR="127.0.0.2"; export REMOTE_ADDR +REMOTE_PORT="44703"; export REMOTE_PORT +REQUEST_METHOD="GET"; export REQUEST_METHOD +REQUEST_URI="/test/"; export REQUEST_URI +SCRIPT_FILENAME="/home/hopper/hg_public/test.cgi"; export SCRIPT_FILENAME +SCRIPT_NAME="/test"; export SCRIPT_NAME +SCRIPT_URI="http://hg.omnifarious.org/test/"; export SCRIPT_URI +SCRIPT_URL="/test/"; export SCRIPT_URL +SERVER_ADDR="127.0.0.1"; export SERVER_ADDR +SERVER_ADMIN="eric@localhost"; export SERVER_ADMIN +SERVER_NAME="hg.omnifarious.org"; export SERVER_NAME +SERVER_PORT="80"; export SERVER_PORT +SERVER_PROTOCOL="HTTP/1.1"; export SERVER_PROTOCOL +SERVER_SIGNATURE="<address>Apache/2.0.53 (Fedora) Server at hg.omnifarious.org Port 80</address>"; export SERVER_SIGNATURE +SERVER_SOFTWARE="Apache/2.0.53 (Fedora)"; export SERVER_SOFTWARE
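
tests/cgienv is a plain Bourne-shell fragment of variable exports; web tests can source it to fake the environment an Apache CGI request would supply and then drive Mercurial's WSGI application from that environment. As a rough, self-contained illustration of the idea (with a trivial stand-in app rather than hgweb), those variables map directly onto a CGI/WSGI environ:

    import io

    # a handful of the variables cgienv exports, shaped as a CGI/WSGI environ
    environ = {
        'REQUEST_METHOD': 'GET',
        'PATH_INFO': '/',
        'QUERY_STRING': '',
        'SERVER_NAME': 'hg.omnifarious.org',
        'SERVER_PORT': '80',
        'SERVER_PROTOCOL': 'HTTP/1.1',
        'wsgi.url_scheme': 'http',
        'wsgi.input': io.BytesIO(b''),
    }

    def app(environ, start_response):
        # trivial stand-in for the hgweb application the real tests exercise
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return [('%(REQUEST_METHOD)s %(PATH_INFO)s\n' % environ).encode('ascii')]

    responses = []
    body = app(environ,
               lambda status, headers: responses.append((status, headers)))
    print(b''.join(body))                 # b'GET /\n'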
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/filtercr.py	Wed Feb 16 14:13:22 2011 -0600
@@ -0,0 +1,10 @@
+#!/usr/bin/env python
+
+# Filter output by the progress extension to make it readable in tests
+
+import sys, re
+
+for line in sys.stdin:
+    line = re.sub(r'\r+[^\n]', lambda m: '\n' + m.group()[-1:], line)
+    sys.stdout.write(line)
+print
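
The progress extension redraws its output with carriage returns, which the unified test runner cannot diff; filtercr.py turns each CR-driven redraw into a line of its own. A quick demonstration of what the substitution does to a captured progress run:

    import re

    def filtercr(line):
        # same substitution as the script: every run of CRs followed by a
        # character becomes a newline plus that character
        return re.sub(r'\r+[^\n]', lambda m: '\n' + m.group()[-1:], line)

    sample = ('bundling: 1/3 manifests (33.33%)\r'
              'bundling: 2/3 manifests (66.67%)\r'
              'bundling: 3/3 manifests (100.00%)\n')
    print(filtercr(sample))
    # bundling: 1/3 manifests (33.33%)
    # bundling: 2/3 manifests (66.67%)
    # bundling: 3/3 manifests (100.00%)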
--- a/tests/hghave Sat Feb 12 16:08:41 2011 +0800 +++ b/tests/hghave Wed Feb 16 14:13:22 2011 -0600 @@ -101,15 +101,6 @@ def has_fifo(): return hasattr(os, "mkfifo") -def has_hotshot(): - try: - # hotshot.stats tests hotshot and many problematic dependencies - # like profile. - import hotshot.stats - return True - except ImportError: - return False - def has_lsprof(): try: import _lsprof @@ -182,6 +173,8 @@ def has_ssl(): try: import ssl + import OpenSSL + OpenSSL.SSL.Context return True except ImportError: return False @@ -198,7 +191,6 @@ "fifo": (has_fifo, "named pipes"), "git": (has_git, "git command line client"), "gpg": (has_gpg, "gpg client"), - "hotshot": (has_hotshot, "python hotshot module"), "icasefs": (has_icasefs, "case insensitive file system"), "inotify": (has_inotify, "inotify extension support"), "lsprof": (has_lsprof, "python lsprof module"),
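
hghave's feature tests are plain nullary functions returning a boolean, registered in the checks dictionary as (function, description) under the name test scripts ask for; dropping "hotshot" means deleting both pieces, and tightening "ssl" means importing one more dependency inside the check. A made-up check ("json" here is only an illustration, not something the file defines) would follow the same shape:

    def has_json():
        # made-up example: report whether the json module is importable
        try:
            import json
            json.dumps                    # reference it so it counts as used
            return True
        except ImportError:
            return False

    # registered next to the real entries, keyed by the name tests ask for
    checks = {
        "json": (has_json, "python json module"),
    }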
--- a/tests/printenv.py Sat Feb 12 16:08:41 2011 +0800 +++ b/tests/printenv.py Wed Feb 16 14:13:22 2011 -0600 @@ -1,12 +1,9 @@ # simple script to be used in hooks -# copy it to the current directory when the test starts: -# -# cp "$TESTDIR"/printenv.py . # # put something like this in the repo .hg/hgrc: # # [hooks] -# changegroup = python ../printenv.py <hookname> [exit] [output] +# changegroup = python "$TESTDIR"/printenv.py <hookname> [exit] [output] # # - <hookname> is a mandatory argument (e.g. "changegroup") # - [exit] is the exit code of the hook (default: 0) @@ -39,13 +36,6 @@ if k.startswith("HG_") and v] env.sort() -# edit the variable part of the variable -url = os.environ.get("HG_URL", "") -if url.startswith("file:"): - os.environ["HG_URL"] = "file:" -elif url.startswith("remote:http"): - os.environ["HG_URL"] = "remote:http" - out.write("%s hook: " % name) for v in env: out.write("%s=%s " % (v, os.environ[v]))
--- a/tests/run-tests.py Sat Feb 12 16:08:41 2011 +0800 +++ b/tests/run-tests.py Wed Feb 16 14:13:22 2011 -0600 @@ -231,6 +231,8 @@ if line and not line.startswith('#'): blacklist[line] = filename + f.close() + options.blacklist = blacklist return (options, args) @@ -491,6 +493,8 @@ # non-command/result - queue up for merged output after.setdefault(pos, []).append(l) + t.close() + script.append('echo %s %s $?\n' % (salt, n + 1)) fd, name = tempfile.mkstemp(suffix='hg-tst') @@ -504,7 +508,8 @@ vlog("# Running", cmd) exitcode, output = run(cmd, options, replacements) # do not merge output if skipped, return hghave message instead - if exitcode == SKIPPED_STATUS: + # similarly, with --debug, output is None + if exitcode == SKIPPED_STATUS or output is None: return exitcode, output finally: os.remove(name) @@ -579,6 +584,7 @@ return exitcode, postout +wifexited = getattr(os, "WIFEXITED", lambda x: False) def run(cmd, options, replacements): """Run command in a sub-process, capturing the output (stdout and stderr). Return a tuple (exitcode, output). output is None in debug mode.""" @@ -593,7 +599,7 @@ tochild.close() output = fromchild.read() ret = fromchild.close() - if ret == None: + if ret is None: ret = 0 else: proc = Popen4(cmd) @@ -610,7 +616,7 @@ proc.tochild.close() output = proc.fromchild.read() ret = proc.wait() - if os.WIFEXITED(ret): + if wifexited(ret): ret = os.WEXITSTATUS(ret) except Timeout: vlog('# Process %d timed out - killing it' % proc.pid) @@ -713,7 +719,7 @@ # If we're not in --debug mode and reference output file exists, # check test output against it. if options.debug: - refout = None # to match out == None + refout = None # to match "out is None" elif os.path.exists(ref): f = open(ref, "r") refout = splitnewlines(f.read()) @@ -925,7 +931,9 @@ continue if options.keywords: - t = open(test).read().lower() + test.lower() + fp = open(test) + t = fp.read().lower() + test.lower() + fp.close() for k in options.keywords.lower().split(): if k in t: break @@ -1108,4 +1116,5 @@ time.sleep(1) cleanup(options) -main() +if __name__ == '__main__': + main()
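
Besides closing file handles and guarding main() with __name__ == '__main__', run-tests.py picks up a small portability idiom: os.WIFEXITED does not exist on Windows, so the attribute is resolved once with getattr and a stub default instead of being guarded at every call site. In isolation:

    import os

    # resolve the platform-specific helper once; Windows has no WIFEXITED,
    # so fall back to a stub meaning "not a wait status we understand"
    wifexited = getattr(os, "WIFEXITED", lambda status: False)

    def exitcode(status):
        # turn a raw wait status into an exit code where the platform allows
        if wifexited(status):
            return os.WEXITSTATUS(status)
        return status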
--- a/tests/test-acl.t Sat Feb 12 16:08:41 2011 +0800 +++ b/tests/test-acl.t Wed Feb 16 14:13:22 2011 -0600 @@ -90,38 +90,38 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling changes: 0 chunks - bundling changes: 1 chunks - bundling changes: 2 chunks - bundling changes: 3 chunks - bundling changes: 4 chunks - bundling changes: 5 chunks - bundling changes: 6 chunks - bundling changes: 7 chunks - bundling changes: 8 chunks - bundling changes: 9 chunks - bundling manifests: 0 chunks - bundling manifests: 1 chunks - bundling manifests: 2 chunks - bundling manifests: 3 chunks - bundling manifests: 4 chunks - bundling manifests: 5 chunks - bundling manifests: 6 chunks - bundling manifests: 7 chunks - bundling manifests: 8 chunks - bundling manifests: 9 chunks - bundling files: foo/Bar/file.txt 0 chunks - bundling files: foo/Bar/file.txt 1 chunks - bundling files: foo/Bar/file.txt 2 chunks - bundling files: foo/Bar/file.txt 3 chunks - bundling files: foo/file.txt 4 chunks - bundling files: foo/file.txt 5 chunks - bundling files: foo/file.txt 6 chunks - bundling files: foo/file.txt 7 chunks - bundling files: quux/file.py 8 chunks - bundling files: quux/file.py 9 chunks - bundling files: quux/file.py 10 chunks - bundling files: quux/file.py 11 chunks + bundling: 0 changesets + bundling: 0 changesets + bundling: 0 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 3 changesets + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 3/3 manifests (100.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 changesets: 2 chunks @@ -141,6 +141,7 @@ files: 3/3 chunks (100.00%) added 3 changesets with 3 changes to 3 files updating the branch cache + checking for updated bookmarks rolling back to revision 0 (undo push) 0:6675d58eff77 @@ -166,38 +167,38 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling changes: 0 chunks - bundling changes: 1 chunks - bundling changes: 2 chunks - bundling changes: 3 chunks - bundling changes: 4 chunks - bundling changes: 5 chunks - bundling changes: 6 chunks - bundling changes: 7 chunks - bundling changes: 8 chunks - bundling changes: 9 chunks - bundling manifests: 0 chunks - bundling manifests: 1 chunks - bundling manifests: 2 chunks - bundling manifests: 3 chunks - bundling manifests: 4 chunks - bundling manifests: 5 chunks - bundling manifests: 6 chunks - bundling manifests: 7 chunks - bundling manifests: 8 chunks - bundling manifests: 9 chunks - bundling files: foo/Bar/file.txt 0 chunks - bundling files: foo/Bar/file.txt 1 chunks - 
bundling files: foo/Bar/file.txt 2 chunks - bundling files: foo/Bar/file.txt 3 chunks - bundling files: foo/file.txt 4 chunks - bundling files: foo/file.txt 5 chunks - bundling files: foo/file.txt 6 chunks - bundling files: foo/file.txt 7 chunks - bundling files: quux/file.py 8 chunks - bundling files: quux/file.py 9 chunks - bundling files: quux/file.py 10 chunks - bundling files: quux/file.py 11 chunks + bundling: 0 changesets + bundling: 0 changesets + bundling: 0 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 3 changesets + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 3/3 manifests (100.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 changesets: 2 chunks @@ -219,6 +220,7 @@ calling hook pretxnchangegroup.acl: hgext.acl.hook acl: changes have source "push" - skipping updating the branch cache + checking for updated bookmarks rolling back to revision 0 (undo push) 0:6675d58eff77 @@ -245,38 +247,38 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling changes: 0 chunks - bundling changes: 1 chunks - bundling changes: 2 chunks - bundling changes: 3 chunks - bundling changes: 4 chunks - bundling changes: 5 chunks - bundling changes: 6 chunks - bundling changes: 7 chunks - bundling changes: 8 chunks - bundling changes: 9 chunks - bundling manifests: 0 chunks - bundling manifests: 1 chunks - bundling manifests: 2 chunks - bundling manifests: 3 chunks - bundling manifests: 4 chunks - bundling manifests: 5 chunks - bundling manifests: 6 chunks - bundling manifests: 7 chunks - bundling manifests: 8 chunks - bundling manifests: 9 chunks - bundling files: foo/Bar/file.txt 0 chunks - bundling files: foo/Bar/file.txt 1 chunks - bundling files: foo/Bar/file.txt 2 chunks - bundling files: foo/Bar/file.txt 3 chunks - bundling files: foo/file.txt 4 chunks - bundling files: foo/file.txt 5 chunks - bundling files: foo/file.txt 6 chunks - bundling files: foo/file.txt 7 chunks - bundling files: quux/file.py 8 chunks - bundling files: quux/file.py 9 chunks - bundling files: quux/file.py 10 chunks - bundling files: quux/file.py 11 chunks + bundling: 0 changesets + bundling: 0 changesets + bundling: 0 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 3 changesets + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 2/3 manifests (66.67%) 
+ bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 3/3 manifests (100.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 changesets: 2 chunks @@ -307,6 +309,7 @@ acl: branch access granted: "911600dab2ae" on branch "default" acl: allowing changeset 911600dab2ae updating the branch cache + checking for updated bookmarks rolling back to revision 0 (undo push) 0:6675d58eff77 @@ -333,38 +336,38 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling changes: 0 chunks - bundling changes: 1 chunks - bundling changes: 2 chunks - bundling changes: 3 chunks - bundling changes: 4 chunks - bundling changes: 5 chunks - bundling changes: 6 chunks - bundling changes: 7 chunks - bundling changes: 8 chunks - bundling changes: 9 chunks - bundling manifests: 0 chunks - bundling manifests: 1 chunks - bundling manifests: 2 chunks - bundling manifests: 3 chunks - bundling manifests: 4 chunks - bundling manifests: 5 chunks - bundling manifests: 6 chunks - bundling manifests: 7 chunks - bundling manifests: 8 chunks - bundling manifests: 9 chunks - bundling files: foo/Bar/file.txt 0 chunks - bundling files: foo/Bar/file.txt 1 chunks - bundling files: foo/Bar/file.txt 2 chunks - bundling files: foo/Bar/file.txt 3 chunks - bundling files: foo/file.txt 4 chunks - bundling files: foo/file.txt 5 chunks - bundling files: foo/file.txt 6 chunks - bundling files: foo/file.txt 7 chunks - bundling files: quux/file.py 8 chunks - bundling files: quux/file.py 9 chunks - bundling files: quux/file.py 10 chunks - bundling files: quux/file.py 11 chunks + bundling: 0 changesets + bundling: 0 changesets + bundling: 0 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 3 changesets + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 3/3 manifests (100.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 changesets: 2 chunks @@ -420,38 +423,38 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling changes: 0 chunks - bundling changes: 1 chunks 
- bundling changes: 2 chunks - bundling changes: 3 chunks - bundling changes: 4 chunks - bundling changes: 5 chunks - bundling changes: 6 chunks - bundling changes: 7 chunks - bundling changes: 8 chunks - bundling changes: 9 chunks - bundling manifests: 0 chunks - bundling manifests: 1 chunks - bundling manifests: 2 chunks - bundling manifests: 3 chunks - bundling manifests: 4 chunks - bundling manifests: 5 chunks - bundling manifests: 6 chunks - bundling manifests: 7 chunks - bundling manifests: 8 chunks - bundling manifests: 9 chunks - bundling files: foo/Bar/file.txt 0 chunks - bundling files: foo/Bar/file.txt 1 chunks - bundling files: foo/Bar/file.txt 2 chunks - bundling files: foo/Bar/file.txt 3 chunks - bundling files: foo/file.txt 4 chunks - bundling files: foo/file.txt 5 chunks - bundling files: foo/file.txt 6 chunks - bundling files: foo/file.txt 7 chunks - bundling files: quux/file.py 8 chunks - bundling files: quux/file.py 9 chunks - bundling files: quux/file.py 10 chunks - bundling files: quux/file.py 11 chunks + bundling: 0 changesets + bundling: 0 changesets + bundling: 0 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 3 changesets + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 3/3 manifests (100.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 changesets: 2 chunks @@ -512,38 +515,38 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling changes: 0 chunks - bundling changes: 1 chunks - bundling changes: 2 chunks - bundling changes: 3 chunks - bundling changes: 4 chunks - bundling changes: 5 chunks - bundling changes: 6 chunks - bundling changes: 7 chunks - bundling changes: 8 chunks - bundling changes: 9 chunks - bundling manifests: 0 chunks - bundling manifests: 1 chunks - bundling manifests: 2 chunks - bundling manifests: 3 chunks - bundling manifests: 4 chunks - bundling manifests: 5 chunks - bundling manifests: 6 chunks - bundling manifests: 7 chunks - bundling manifests: 8 chunks - bundling manifests: 9 chunks - bundling files: foo/Bar/file.txt 0 chunks - bundling files: foo/Bar/file.txt 1 chunks - bundling files: foo/Bar/file.txt 2 chunks - bundling files: foo/Bar/file.txt 3 chunks - bundling files: foo/file.txt 4 chunks - bundling files: foo/file.txt 5 chunks - bundling files: foo/file.txt 6 chunks - bundling files: foo/file.txt 7 chunks - bundling files: quux/file.py 8 chunks - bundling files: quux/file.py 9 chunks - bundling files: quux/file.py 10 chunks - bundling files: quux/file.py 11 chunks + bundling: 0 changesets + bundling: 0 changesets + bundling: 0 changesets + bundling: 
1 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 3 changesets + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 3/3 manifests (100.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 changesets: 2 chunks @@ -601,38 +604,38 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling changes: 0 chunks - bundling changes: 1 chunks - bundling changes: 2 chunks - bundling changes: 3 chunks - bundling changes: 4 chunks - bundling changes: 5 chunks - bundling changes: 6 chunks - bundling changes: 7 chunks - bundling changes: 8 chunks - bundling changes: 9 chunks - bundling manifests: 0 chunks - bundling manifests: 1 chunks - bundling manifests: 2 chunks - bundling manifests: 3 chunks - bundling manifests: 4 chunks - bundling manifests: 5 chunks - bundling manifests: 6 chunks - bundling manifests: 7 chunks - bundling manifests: 8 chunks - bundling manifests: 9 chunks - bundling files: foo/Bar/file.txt 0 chunks - bundling files: foo/Bar/file.txt 1 chunks - bundling files: foo/Bar/file.txt 2 chunks - bundling files: foo/Bar/file.txt 3 chunks - bundling files: foo/file.txt 4 chunks - bundling files: foo/file.txt 5 chunks - bundling files: foo/file.txt 6 chunks - bundling files: foo/file.txt 7 chunks - bundling files: quux/file.py 8 chunks - bundling files: quux/file.py 9 chunks - bundling files: quux/file.py 10 chunks - bundling files: quux/file.py 11 chunks + bundling: 0 changesets + bundling: 0 changesets + bundling: 0 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 3 changesets + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 3/3 manifests (100.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 changesets: 2 chunks @@ 
-695,38 +698,38 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling changes: 0 chunks - bundling changes: 1 chunks - bundling changes: 2 chunks - bundling changes: 3 chunks - bundling changes: 4 chunks - bundling changes: 5 chunks - bundling changes: 6 chunks - bundling changes: 7 chunks - bundling changes: 8 chunks - bundling changes: 9 chunks - bundling manifests: 0 chunks - bundling manifests: 1 chunks - bundling manifests: 2 chunks - bundling manifests: 3 chunks - bundling manifests: 4 chunks - bundling manifests: 5 chunks - bundling manifests: 6 chunks - bundling manifests: 7 chunks - bundling manifests: 8 chunks - bundling manifests: 9 chunks - bundling files: foo/Bar/file.txt 0 chunks - bundling files: foo/Bar/file.txt 1 chunks - bundling files: foo/Bar/file.txt 2 chunks - bundling files: foo/Bar/file.txt 3 chunks - bundling files: foo/file.txt 4 chunks - bundling files: foo/file.txt 5 chunks - bundling files: foo/file.txt 6 chunks - bundling files: foo/file.txt 7 chunks - bundling files: quux/file.py 8 chunks - bundling files: quux/file.py 9 chunks - bundling files: quux/file.py 10 chunks - bundling files: quux/file.py 11 chunks + bundling: 0 changesets + bundling: 0 changesets + bundling: 0 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 3 changesets + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 3/3 manifests (100.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 changesets: 2 chunks @@ -786,38 +789,38 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling changes: 0 chunks - bundling changes: 1 chunks - bundling changes: 2 chunks - bundling changes: 3 chunks - bundling changes: 4 chunks - bundling changes: 5 chunks - bundling changes: 6 chunks - bundling changes: 7 chunks - bundling changes: 8 chunks - bundling changes: 9 chunks - bundling manifests: 0 chunks - bundling manifests: 1 chunks - bundling manifests: 2 chunks - bundling manifests: 3 chunks - bundling manifests: 4 chunks - bundling manifests: 5 chunks - bundling manifests: 6 chunks - bundling manifests: 7 chunks - bundling manifests: 8 chunks - bundling manifests: 9 chunks - bundling files: foo/Bar/file.txt 0 chunks - bundling files: foo/Bar/file.txt 1 chunks - bundling files: foo/Bar/file.txt 2 chunks - bundling files: foo/Bar/file.txt 3 chunks - bundling files: foo/file.txt 4 chunks - bundling files: foo/file.txt 5 chunks - bundling files: foo/file.txt 6 chunks - bundling files: foo/file.txt 7 chunks - bundling files: quux/file.py 8 chunks - bundling files: quux/file.py 9 
chunks - bundling files: quux/file.py 10 chunks - bundling files: quux/file.py 11 chunks + bundling: 0 changesets + bundling: 0 changesets + bundling: 0 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 3 changesets + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 3/3 manifests (100.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 changesets: 2 chunks @@ -879,38 +882,38 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling changes: 0 chunks - bundling changes: 1 chunks - bundling changes: 2 chunks - bundling changes: 3 chunks - bundling changes: 4 chunks - bundling changes: 5 chunks - bundling changes: 6 chunks - bundling changes: 7 chunks - bundling changes: 8 chunks - bundling changes: 9 chunks - bundling manifests: 0 chunks - bundling manifests: 1 chunks - bundling manifests: 2 chunks - bundling manifests: 3 chunks - bundling manifests: 4 chunks - bundling manifests: 5 chunks - bundling manifests: 6 chunks - bundling manifests: 7 chunks - bundling manifests: 8 chunks - bundling manifests: 9 chunks - bundling files: foo/Bar/file.txt 0 chunks - bundling files: foo/Bar/file.txt 1 chunks - bundling files: foo/Bar/file.txt 2 chunks - bundling files: foo/Bar/file.txt 3 chunks - bundling files: foo/file.txt 4 chunks - bundling files: foo/file.txt 5 chunks - bundling files: foo/file.txt 6 chunks - bundling files: foo/file.txt 7 chunks - bundling files: quux/file.py 8 chunks - bundling files: quux/file.py 9 chunks - bundling files: quux/file.py 10 chunks - bundling files: quux/file.py 11 chunks + bundling: 0 changesets + bundling: 0 changesets + bundling: 0 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 3 changesets + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 3/3 manifests (100.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 
files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 changesets: 2 chunks @@ -941,6 +944,7 @@ acl: branch access granted: "911600dab2ae" on branch "default" acl: allowing changeset 911600dab2ae updating the branch cache + checking for updated bookmarks rolling back to revision 0 (undo push) 0:6675d58eff77 @@ -974,38 +978,38 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling changes: 0 chunks - bundling changes: 1 chunks - bundling changes: 2 chunks - bundling changes: 3 chunks - bundling changes: 4 chunks - bundling changes: 5 chunks - bundling changes: 6 chunks - bundling changes: 7 chunks - bundling changes: 8 chunks - bundling changes: 9 chunks - bundling manifests: 0 chunks - bundling manifests: 1 chunks - bundling manifests: 2 chunks - bundling manifests: 3 chunks - bundling manifests: 4 chunks - bundling manifests: 5 chunks - bundling manifests: 6 chunks - bundling manifests: 7 chunks - bundling manifests: 8 chunks - bundling manifests: 9 chunks - bundling files: foo/Bar/file.txt 0 chunks - bundling files: foo/Bar/file.txt 1 chunks - bundling files: foo/Bar/file.txt 2 chunks - bundling files: foo/Bar/file.txt 3 chunks - bundling files: foo/file.txt 4 chunks - bundling files: foo/file.txt 5 chunks - bundling files: foo/file.txt 6 chunks - bundling files: foo/file.txt 7 chunks - bundling files: quux/file.py 8 chunks - bundling files: quux/file.py 9 chunks - bundling files: quux/file.py 10 chunks - bundling files: quux/file.py 11 chunks + bundling: 0 changesets + bundling: 0 changesets + bundling: 0 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 3 changesets + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 3/3 manifests (100.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 changesets: 2 chunks @@ -1074,38 +1078,38 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling changes: 0 chunks - bundling changes: 1 chunks - bundling changes: 2 chunks - bundling changes: 3 chunks - bundling changes: 4 chunks - bundling changes: 5 chunks - bundling changes: 6 chunks - bundling changes: 7 chunks - bundling changes: 8 chunks - bundling changes: 9 chunks - bundling manifests: 0 chunks - bundling manifests: 1 chunks - bundling manifests: 2 chunks - bundling manifests: 3 chunks - bundling manifests: 4 chunks - bundling manifests: 5 chunks - bundling manifests: 6 chunks - bundling manifests: 7 chunks - bundling manifests: 8 chunks - bundling manifests: 9 
chunks - bundling files: foo/Bar/file.txt 0 chunks - bundling files: foo/Bar/file.txt 1 chunks - bundling files: foo/Bar/file.txt 2 chunks - bundling files: foo/Bar/file.txt 3 chunks - bundling files: foo/file.txt 4 chunks - bundling files: foo/file.txt 5 chunks - bundling files: foo/file.txt 6 chunks - bundling files: foo/file.txt 7 chunks - bundling files: quux/file.py 8 chunks - bundling files: quux/file.py 9 chunks - bundling files: quux/file.py 10 chunks - bundling files: quux/file.py 11 chunks + bundling: 0 changesets + bundling: 0 changesets + bundling: 0 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 3 changesets + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 3/3 manifests (100.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 changesets: 2 chunks @@ -1168,38 +1172,38 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling changes: 0 chunks - bundling changes: 1 chunks - bundling changes: 2 chunks - bundling changes: 3 chunks - bundling changes: 4 chunks - bundling changes: 5 chunks - bundling changes: 6 chunks - bundling changes: 7 chunks - bundling changes: 8 chunks - bundling changes: 9 chunks - bundling manifests: 0 chunks - bundling manifests: 1 chunks - bundling manifests: 2 chunks - bundling manifests: 3 chunks - bundling manifests: 4 chunks - bundling manifests: 5 chunks - bundling manifests: 6 chunks - bundling manifests: 7 chunks - bundling manifests: 8 chunks - bundling manifests: 9 chunks - bundling files: foo/Bar/file.txt 0 chunks - bundling files: foo/Bar/file.txt 1 chunks - bundling files: foo/Bar/file.txt 2 chunks - bundling files: foo/Bar/file.txt 3 chunks - bundling files: foo/file.txt 4 chunks - bundling files: foo/file.txt 5 chunks - bundling files: foo/file.txt 6 chunks - bundling files: foo/file.txt 7 chunks - bundling files: quux/file.py 8 chunks - bundling files: quux/file.py 9 chunks - bundling files: quux/file.py 10 chunks - bundling files: quux/file.py 11 chunks + bundling: 0 changesets + bundling: 0 changesets + bundling: 0 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 3 changesets + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 3/3 manifests (100.00%) + bundling: 
foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 changesets: 2 chunks @@ -1274,38 +1278,38 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling changes: 0 chunks - bundling changes: 1 chunks - bundling changes: 2 chunks - bundling changes: 3 chunks - bundling changes: 4 chunks - bundling changes: 5 chunks - bundling changes: 6 chunks - bundling changes: 7 chunks - bundling changes: 8 chunks - bundling changes: 9 chunks - bundling manifests: 0 chunks - bundling manifests: 1 chunks - bundling manifests: 2 chunks - bundling manifests: 3 chunks - bundling manifests: 4 chunks - bundling manifests: 5 chunks - bundling manifests: 6 chunks - bundling manifests: 7 chunks - bundling manifests: 8 chunks - bundling manifests: 9 chunks - bundling files: foo/Bar/file.txt 0 chunks - bundling files: foo/Bar/file.txt 1 chunks - bundling files: foo/Bar/file.txt 2 chunks - bundling files: foo/Bar/file.txt 3 chunks - bundling files: foo/file.txt 4 chunks - bundling files: foo/file.txt 5 chunks - bundling files: foo/file.txt 6 chunks - bundling files: foo/file.txt 7 chunks - bundling files: quux/file.py 8 chunks - bundling files: quux/file.py 9 chunks - bundling files: quux/file.py 10 chunks - bundling files: quux/file.py 11 chunks + bundling: 0 changesets + bundling: 0 changesets + bundling: 0 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 3 changesets + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 3/3 manifests (100.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 changesets: 2 chunks @@ -1336,6 +1340,7 @@ acl: branch access granted: "911600dab2ae" on branch "default" acl: allowing changeset 911600dab2ae updating the branch cache + checking for updated bookmarks rolling back to revision 0 (undo push) 0:6675d58eff77 @@ -1370,38 +1375,38 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling changes: 0 chunks - bundling changes: 1 chunks - bundling changes: 2 chunks - bundling changes: 3 chunks - bundling changes: 4 chunks - bundling changes: 5 
chunks - bundling changes: 6 chunks - bundling changes: 7 chunks - bundling changes: 8 chunks - bundling changes: 9 chunks - bundling manifests: 0 chunks - bundling manifests: 1 chunks - bundling manifests: 2 chunks - bundling manifests: 3 chunks - bundling manifests: 4 chunks - bundling manifests: 5 chunks - bundling manifests: 6 chunks - bundling manifests: 7 chunks - bundling manifests: 8 chunks - bundling manifests: 9 chunks - bundling files: foo/Bar/file.txt 0 chunks - bundling files: foo/Bar/file.txt 1 chunks - bundling files: foo/Bar/file.txt 2 chunks - bundling files: foo/Bar/file.txt 3 chunks - bundling files: foo/file.txt 4 chunks - bundling files: foo/file.txt 5 chunks - bundling files: foo/file.txt 6 chunks - bundling files: foo/file.txt 7 chunks - bundling files: quux/file.py 8 chunks - bundling files: quux/file.py 9 chunks - bundling files: quux/file.py 10 chunks - bundling files: quux/file.py 11 chunks + bundling: 0 changesets + bundling: 0 changesets + bundling: 0 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 3 changesets + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 3/3 manifests (100.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 changesets: 2 chunks @@ -1432,6 +1437,7 @@ acl: branch access granted: "911600dab2ae" on branch "default" acl: allowing changeset 911600dab2ae updating the branch cache + checking for updated bookmarks rolling back to revision 0 (undo push) 0:6675d58eff77 @@ -1462,38 +1468,38 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling changes: 0 chunks - bundling changes: 1 chunks - bundling changes: 2 chunks - bundling changes: 3 chunks - bundling changes: 4 chunks - bundling changes: 5 chunks - bundling changes: 6 chunks - bundling changes: 7 chunks - bundling changes: 8 chunks - bundling changes: 9 chunks - bundling manifests: 0 chunks - bundling manifests: 1 chunks - bundling manifests: 2 chunks - bundling manifests: 3 chunks - bundling manifests: 4 chunks - bundling manifests: 5 chunks - bundling manifests: 6 chunks - bundling manifests: 7 chunks - bundling manifests: 8 chunks - bundling manifests: 9 chunks - bundling files: foo/Bar/file.txt 0 chunks - bundling files: foo/Bar/file.txt 1 chunks - bundling files: foo/Bar/file.txt 2 chunks - bundling files: foo/Bar/file.txt 3 chunks - bundling files: foo/file.txt 4 chunks - bundling files: foo/file.txt 5 chunks - bundling files: foo/file.txt 6 chunks - bundling files: foo/file.txt 7 chunks - bundling files: quux/file.py 8 chunks - bundling files: quux/file.py 9 chunks - bundling files: quux/file.py 10 chunks 
- bundling files: quux/file.py 11 chunks + bundling: 0 changesets + bundling: 0 changesets + bundling: 0 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 3 changesets + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 3/3 manifests (100.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 changesets: 2 chunks @@ -1558,38 +1564,38 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling changes: 0 chunks - bundling changes: 1 chunks - bundling changes: 2 chunks - bundling changes: 3 chunks - bundling changes: 4 chunks - bundling changes: 5 chunks - bundling changes: 6 chunks - bundling changes: 7 chunks - bundling changes: 8 chunks - bundling changes: 9 chunks - bundling manifests: 0 chunks - bundling manifests: 1 chunks - bundling manifests: 2 chunks - bundling manifests: 3 chunks - bundling manifests: 4 chunks - bundling manifests: 5 chunks - bundling manifests: 6 chunks - bundling manifests: 7 chunks - bundling manifests: 8 chunks - bundling manifests: 9 chunks - bundling files: foo/Bar/file.txt 0 chunks - bundling files: foo/Bar/file.txt 1 chunks - bundling files: foo/Bar/file.txt 2 chunks - bundling files: foo/Bar/file.txt 3 chunks - bundling files: foo/file.txt 4 chunks - bundling files: foo/file.txt 5 chunks - bundling files: foo/file.txt 6 chunks - bundling files: foo/file.txt 7 chunks - bundling files: quux/file.py 8 chunks - bundling files: quux/file.py 9 chunks - bundling files: quux/file.py 10 chunks - bundling files: quux/file.py 11 chunks + bundling: 0 changesets + bundling: 0 changesets + bundling: 0 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 3 changesets + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 3/3 manifests (100.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files 
(66.67%) + bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 changesets: 2 chunks @@ -1621,6 +1627,7 @@ acl: branch access granted: "911600dab2ae" on branch "default" acl: allowing changeset 911600dab2ae updating the branch cache + checking for updated bookmarks rolling back to revision 0 (undo push) 0:6675d58eff77 @@ -1651,38 +1658,38 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 adding changesets - bundling changes: 0 chunks - bundling changes: 1 chunks - bundling changes: 2 chunks - bundling changes: 3 chunks - bundling changes: 4 chunks - bundling changes: 5 chunks - bundling changes: 6 chunks - bundling changes: 7 chunks - bundling changes: 8 chunks - bundling changes: 9 chunks - bundling manifests: 0 chunks - bundling manifests: 1 chunks - bundling manifests: 2 chunks - bundling manifests: 3 chunks - bundling manifests: 4 chunks - bundling manifests: 5 chunks - bundling manifests: 6 chunks - bundling manifests: 7 chunks - bundling manifests: 8 chunks - bundling manifests: 9 chunks - bundling files: foo/Bar/file.txt 0 chunks - bundling files: foo/Bar/file.txt 1 chunks - bundling files: foo/Bar/file.txt 2 chunks - bundling files: foo/Bar/file.txt 3 chunks - bundling files: foo/file.txt 4 chunks - bundling files: foo/file.txt 5 chunks - bundling files: foo/file.txt 6 chunks - bundling files: foo/file.txt 7 chunks - bundling files: quux/file.py 8 chunks - bundling files: quux/file.py 9 chunks - bundling files: quux/file.py 10 chunks - bundling files: quux/file.py 11 chunks + bundling: 0 changesets + bundling: 0 changesets + bundling: 0 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 1 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 2 changesets + bundling: 3 changesets + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 0/3 manifests (0.00%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 1/3 manifests (33.33%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 2/3 manifests (66.67%) + bundling: 3/3 manifests (100.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/Bar/file.txt 0/3 files (0.00%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: foo/file.txt 1/3 files (33.33%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) + bundling: quux/file.py 2/3 files (66.67%) changesets: 1 chunks add changeset ef1ea85a6374 changesets: 2 chunks
--- a/tests/test-archive.t Sat Feb 12 16:08:41 2011 +0800
+++ b/tests/test-archive.t Wed Feb 16 14:13:22 2011 -0600
@@ -30,7 +30,6 @@
check http return codes
- $ test_archtype gz tar.gz tar.bz2 zip
% gz allowed should give 200
200 Script output follows
@@ -150,9 +149,8 @@
> print h1 == h2 or "md5 differ: " + repr((h1, h2))
> EOF
-archive name is stored in the archive, so create similar
-
-archives and rename them afterwards.
+archive name is stored in the archive, so create similar archives and
+rename them afterwards.
$ hg archive -t tgz tip.tar.gz
$ mv tip.tar.gz tip1.tar.gz
@@ -208,6 +206,38 @@
abort: unknown archive type 'bogus'
[255]
+enable progress extension:
+
+ $ cp $HGRCPATH $HGRCPATH.no-progress
+ $ cat >> $HGRCPATH <<EOF
+ > [extensions]
+ > progress =
+ > [progress]
+ > assume-tty = 1
+ > format = topic bar number
+ > delay = 0
+ > refresh = 0
+ > width = 60
+ > EOF
+
+ $ hg archive ../with-progress 2>&1 | $TESTDIR/filtercr.py
+
+ archiving [ ] 0/4
+ archiving [ ] 0/4
+ archiving [=========> ] 1/4
+ archiving [=========> ] 1/4
+ archiving [====================> ] 2/4
+ archiving [====================> ] 2/4
+ archiving [===============================> ] 3/4
+ archiving [===============================> ] 3/4
+ archiving [==========================================>] 4/4
+ archiving [==========================================>] 4/4
+ \r (esc)
+
+cleanup after progress extension test:
+
+ $ cp $HGRCPATH.no-progress $HGRCPATH
+
server errors
$ cat errors.log
@@ -219,6 +249,7 @@
$ hg archive ../test-empty
abort: no working directory: please specify a revision
[255]
+
old file -- date clamped to 1980
$ touch -t 197501010000 old
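Note on the progress assertions added above: the archive test pipes hg's output through $TESTDIR/filtercr.py so that carriage-return redraws of the progress bar become separate, diffable lines. A minimal sketch of such a filter, modelled on the inline filtercr.py that tests/test-convert-svn-move.t defined before this changeset (illustrative, not necessarily the shipped helper):

    import re
    import sys

    # Progress bars redraw in place using '\r'; turn each redraw into its
    # own line so test output is stable under diff.
    for line in sys.stdin:
        line = re.sub(r'\r+[^\n]', lambda m: '\n' + m.group()[-1:], line)
        sys.stdout.write(line)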
--- a/tests/test-bookmarks-current.t Sat Feb 12 16:08:41 2011 +0800
+++ b/tests/test-bookmarks-current.t Wed Feb 16 14:13:22 2011 -0600
@@ -2,7 +2,6 @@
$ echo "bookmarks=" >> $HGRCPATH
$ echo "[bookmarks]" >> $HGRCPATH
- $ echo "track.current = True" >> $HGRCPATH
$ hg init
--- a/tests/test-bookmarks-pushpull.t Sat Feb 12 16:08:41 2011 +0800
+++ b/tests/test-bookmarks-pushpull.t Wed Feb 16 14:13:22 2011 -0600
@@ -1,9 +1,6 @@
$ echo "[extensions]" >> $HGRCPATH
$ echo "bookmarks=" >> $HGRCPATH
- $ echo "[bookmarks]" >> $HGRCPATH
- $ echo "track.current = True" >> $HGRCPATH
-
initialize
$ hg init a
@@ -48,8 +45,8 @@
no changes found
importing bookmark X
$ hg bookmark
+ X 0:4e3505fd9583
Y 0:4e3505fd9583
- X 0:4e3505fd9583
export bookmark by name
@@ -62,23 +59,30 @@
no changes found
exporting bookmark W
$ hg -R ../a bookmarks
- Y 0:4e3505fd9583
+ W -1:000000000000
X 0:4e3505fd9583
+ Y 0:4e3505fd9583
* Z 0:4e3505fd9583
- W -1:000000000000
delete a remote bookmark
$ hg book -d W
$ hg push -B W ../a
+ pushing to ../a
+ searching for changes
+ no changes found
deleting remote bookmark W
push/pull name that doesn't exist
$ hg push -B badname ../a
+ pushing to ../a
+ searching for changes
+ no changes found
bookmark badname does not exist on the local or remote repository!
[2]
$ hg pull -B anotherbadname ../a
+ pulling from ../a
abort: remote bookmark anotherbadname not found!
[255]
@@ -90,8 +94,8 @@
adding f1
$ hg book -f X
$ hg book
+ * X 1:0d2164f0ce0d
Y 0:4e3505fd9583
- * X 1:0d2164f0ce0d
Z 1:0d2164f0ce0d
$ cd ../b
@@ -102,8 +106,8 @@
adding f2
$ hg book -f X
$ hg book
+ * X 1:9b140be10808
Y 0:4e3505fd9583
- * X 1:9b140be10808
foo -1:000000000000
foobar -1:000000000000
@@ -117,8 +121,8 @@
not updating divergent bookmark X
(run 'hg heads' to see heads, 'hg merge' to merge)
$ hg book
+ * X 1:9b140be10808
Y 0:4e3505fd9583
- * X 1:9b140be10808
foo -1:000000000000
foobar -1:000000000000
$ hg push -f ../a
@@ -129,8 +133,8 @@
adding file changes
added 1 changesets with 1 changes to 1 files (+1 heads)
$ hg -R ../a book
+ * X 1:0d2164f0ce0d
Y 0:4e3505fd9583
- * X 1:0d2164f0ce0d
Z 1:0d2164f0ce0d
hgweb
--- a/tests/test-bookmarks-rebase.t Sat Feb 12 16:08:41 2011 +0800
+++ b/tests/test-bookmarks-rebase.t Wed Feb 16 14:13:22 2011 -0600
@@ -31,8 +31,8 @@
bookmark list
$ hg bookmark
+ one 1:925d80f479bb
* two 3:2ae46b1d99a7
- one 1:925d80f479bb
rebase
@@ -41,9 +41,8 @@
$ hg log
changeset: 3:9163974d1cb5
- tag: one
+ bookmark: two
tag: tip
- tag: two
parent: 1:925d80f479bb
parent: 2:db815d6d32e6
user: test
@@ -57,6 +56,7 @@
summary: 2
changeset: 1:925d80f479bb
+ bookmark: one
user: test
date: Thu Jan 01 00:00:00 1970 +0000
summary: 1
--- a/tests/test-bookmarks-strip.t Sat Feb 12 16:08:41 2011 +0800
+++ b/tests/test-bookmarks-strip.t Wed Feb 16 14:13:22 2011 -0600
@@ -61,7 +61,7 @@
list bookmarks
$ hg book
- * test 1:8cf31af87a2b
+ test 1:8cf31af87a2b
* test2 1:8cf31af87a2b
immediate rollback and reentrancy issue
@@ -93,6 +93,8 @@
rollback dry run with rollback information
$ hg rollback -n
+ no rollback information available
+ [1]
$ hg bookmarks
* markb 0:07f494440405
--- a/tests/test-bookmarks.t Sat Feb 12 16:08:41 2011 +0800 +++ b/tests/test-bookmarks.t Wed Feb 16 14:13:22 2011 -0600 @@ -36,7 +36,7 @@ $ hg log -r X changeset: 0:f7b1eb17ad24 - tag: X + bookmark: X tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 @@ -54,8 +54,8 @@ list bookmarks $ hg bookmarks - * X2 0:f7b1eb17ad24 - * X 0:f7b1eb17ad24 + X 0:f7b1eb17ad24 + X2 0:f7b1eb17ad24 Y -1:000000000000 $ echo b > b @@ -65,23 +65,21 @@ bookmarks revset $ hg log -r 'bookmark()' - changeset: 1:925d80f479bb - tag: X - tag: X2 - tag: tip + changeset: 0:f7b1eb17ad24 + bookmark: X + bookmark: X2 user: test date: Thu Jan 01 00:00:00 1970 +0000 - summary: 1 + summary: 0 $ hg log -r 'bookmark(Y)' $ hg log -r 'bookmark(X2)' - changeset: 1:925d80f479bb - tag: X - tag: X2 - tag: tip + changeset: 0:f7b1eb17ad24 + bookmark: X + bookmark: X2 user: test date: Thu Jan 01 00:00:00 1970 +0000 - summary: 1 + summary: 0 $ hg help revsets | grep 'bookmark(' "bookmark([name])" @@ -89,25 +87,28 @@ bookmarks X and X2 moved to rev 1, Y at rev -1 $ hg bookmarks - * X2 1:925d80f479bb - * X 1:925d80f479bb + X 0:f7b1eb17ad24 + X2 0:f7b1eb17ad24 Y -1:000000000000 bookmark rev 0 again $ hg bookmark -r 0 Z + $ hg update X + 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo c > c $ hg add c $ hg commit -m 2 + created new head -bookmarks X and X2 moved to rev 2, Y at rev -1, Z at rev 0 +bookmarks X moved to rev 2, Y at rev -1, Z at rev 0 $ hg bookmarks - * X2 2:0316ce92851d - * X 2:0316ce92851d + * X 2:db815d6d32e6 + X2 0:f7b1eb17ad24 + Y -1:000000000000 Z 0:f7b1eb17ad24 - Y -1:000000000000 rename nonexistent bookmark @@ -128,8 +129,8 @@ list bookmarks $ hg bookmark - * X2 2:0316ce92851d - * Y 2:0316ce92851d + X2 0:f7b1eb17ad24 + * Y 2:db815d6d32e6 Z 0:f7b1eb17ad24 rename without new name @@ -157,19 +158,19 @@ list bookmarks $ hg bookmarks - * X2 2:0316ce92851d - * Y 2:0316ce92851d + X2 0:f7b1eb17ad24 + Y 2:db815d6d32e6 Z 0:f7b1eb17ad24 - * x y 2:0316ce92851d + * x y 2:db815d6d32e6 look up stripped bookmark name $ hg log -r '"x y"' - changeset: 2:0316ce92851d - tag: X2 - tag: Y + changeset: 2:db815d6d32e6 + bookmark: Y + bookmark: x y tag: tip - tag: x y + parent: 0:f7b1eb17ad24 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 2 @@ -195,10 +196,10 @@ list bookmarks $ hg bookmark - * X2 2:0316ce92851d - * Y 2:0316ce92851d - * Z 2:0316ce92851d - * x y 2:0316ce92851d + X2 0:f7b1eb17ad24 + Y 2:db815d6d32e6 + * Z 2:db815d6d32e6 + x y 2:db815d6d32e6 revision but no bookmark name @@ -211,3 +212,10 @@ $ hg bookmark ' ' abort: bookmark names cannot consist entirely of whitespace [255] + +invalid bookmark + + $ hg bookmark 'foo:bar' + abort: bookmark 'foo:bar' contains illegal character + [255] +
--- a/tests/test-bundle.t Sat Feb 12 16:08:41 2011 +0800
+++ b/tests/test-bundle.t Wed Feb 16 14:13:22 2011 -0600
@@ -1,5 +1,3 @@
- $ cp "$TESTDIR"/printenv.py .
-
Setting up test
$ hg init test
@@ -188,11 +186,18 @@
date: Thu Jan 01 00:00:00 1970 +0000
summary: 0.0
+Make sure bundlerepo doesn't leak tempfiles (issue2491)
+
+ $ ls .hg
+ 00changelog.i
+ cache
+ requires
+ store
Pull ../full.hg into empty (with hook)
$ echo '[hooks]' >> .hg/hgrc
- $ echo 'changegroup = python ../printenv.py changegroup' >> .hg/hgrc
+ $ echo 'changegroup = python "$TESTDIR"/printenv.py changegroup' >> .hg/hgrc
doesn't work (yet ?)
@@ -543,26 +548,26 @@
list of changesets:
d2ae7f538514cd87c17547b0de4cea71fe1af9fb
5ece8e77363e2b5269e27c66828b72da29e4341a
- bundling changes: 0 chunks
- bundling changes: 1 chunks
- bundling changes: 2 chunks
- bundling changes: 3 chunks
- bundling changes: 4 chunks
- bundling changes: 5 chunks
- bundling changes: 6 chunks
- bundling manifests: 0 chunks
- bundling manifests: 1 chunks
- bundling manifests: 2 chunks
- bundling manifests: 3 chunks
- bundling manifests: 4 chunks
- bundling manifests: 5 chunks
- bundling manifests: 6 chunks
- bundling files: b 0 chunks
- bundling files: b 1 chunks
- bundling files: b 2 chunks
- bundling files: b 3 chunks
- bundling files: b1 4 chunks
- bundling files: b1 5 chunks
- bundling files: b1 6 chunks
- bundling files: b1 7 chunks
+ bundling: 0 changesets
+ bundling: 0 changesets
+ bundling: 0 changesets
+ bundling: 1 changesets
+ bundling: 1 changesets
+ bundling: 1 changesets
+ bundling: 2 changesets
+ bundling: 0/2 manifests (0.00%)
+ bundling: 0/2 manifests (0.00%)
+ bundling: 0/2 manifests (0.00%)
+ bundling: 1/2 manifests (50.00%)
+ bundling: 1/2 manifests (50.00%)
+ bundling: 1/2 manifests (50.00%)
+ bundling: 2/2 manifests (100.00%)
+ bundling: b 0/2 files (0.00%)
+ bundling: b 0/2 files (0.00%)
+ bundling: b 0/2 files (0.00%)
+ bundling: b 0/2 files (0.00%)
+ bundling: b1 1/2 files (50.00%)
+ bundling: b1 1/2 files (50.00%)
+ bundling: b1 1/2 files (50.00%)
+ bundling: b1 1/2 files (50.00%)
--- a/tests/test-check-code.t Sat Feb 12 16:08:41 2011 +0800
+++ b/tests/test-check-code.t Wed Feb 16 14:13:22 2011 -0600
@@ -34,7 +34,7 @@
gratuitous whitespace in () or []
./wrong.py:2:
> del(arg2)
- del isn't a function
+ Python keyword is not a function
./wrong.py:3:
> return ( 5+6, 9)
missing whitespace in expression
@@ -52,3 +52,44 @@
> y = format(x)
any/all/format not available in Python 2.4
[1]
+
+ $ cat > is-op.py <<EOF
+ > # is-operator comparing number or string literal
+ > x = None
+ > y = x is 'foo'
+ > y = x is "foo"
+ > y = x is 5346
+ > y = x is -6
+ > y = x is not 'foo'
+ > y = x is not "foo"
+ > y = x is not 5346
+ > y = x is not -6
+ > EOF
+
+ $ "$check_code" ./is-op.py
+ ./is-op.py:3:
+ > y = x is 'foo'
+ object comparison with literal
+ ./is-op.py:4:
+ > y = x is "foo"
+ object comparison with literal
+ ./is-op.py:5:
+ > y = x is 5346
+ object comparison with literal
+ ./is-op.py:6:
+ > y = x is -6
+ object comparison with literal
+ ./is-op.py:7:
+ > y = x is not 'foo'
+ object comparison with literal
+ ./is-op.py:8:
+ > y = x is not "foo"
+ object comparison with literal
+ ./is-op.py:9:
+ > y = x is not 5346
+ object comparison with literal
+ ./is-op.py:10:
+ > y = x is not -6
+ object comparison with literal
+ [1]
+
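Background for the new is-op.py checks: 'is' compares object identity rather than value, so comparing against a literal is almost always a bug. A small illustration (assumed example, not part of the test suite):

    # ''.join builds a new string object, so identity and equality differ.
    x = ''.join(['f', 'o', 'o'])
    print(x == 'foo')   # True  - value comparison
    print(x is 'foo')   # False in general - identity comparison
    # Singletons are the exception: prefer 'is'/'is not' for None/True/False,
    # which is what the companion check-code singleton rule now enforces.
    y = None
    print(y is None)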
--- a/tests/test-clone-cgi.t Sat Feb 12 16:08:41 2011 +0800 +++ b/tests/test-clone-cgi.t Wed Feb 16 14:13:22 2011 -0600 @@ -19,37 +19,10 @@ > wsgicgi.launch(application) > HGWEB $ chmod 755 hgweb.cgi - $ DOCUMENT_ROOT="/var/www/hg"; export DOCUMENT_ROOT - $ GATEWAY_INTERFACE="CGI/1.1"; export GATEWAY_INTERFACE - $ HTTP_ACCEPT="text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5"; export HTTP_ACCEPT - $ HTTP_ACCEPT_CHARSET="ISO-8859-1,utf-8;q=0.7,*;q=0.7"; export HTTP_ACCEPT_CHARSET - $ HTTP_ACCEPT_ENCODING="gzip,deflate"; export HTTP_ACCEPT_ENCODING - $ HTTP_ACCEPT_LANGUAGE="en-us,en;q=0.5"; export HTTP_ACCEPT_LANGUAGE - $ HTTP_CACHE_CONTROL="max-age=0"; export HTTP_CACHE_CONTROL - $ HTTP_CONNECTION="keep-alive"; export HTTP_CONNECTION - $ HTTP_HOST="hg.omnifarious.org"; export HTTP_HOST - $ HTTP_KEEP_ALIVE="300"; export HTTP_KEEP_ALIVE - $ HTTP_USER_AGENT="Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.8.0.4) Gecko/20060608 Ubuntu/dapper-security Firefox/1.5.0.4"; export HTTP_USER_AGENT - $ PATH_INFO="/"; export PATH_INFO - $ PATH_TRANSLATED="/var/www/hg/index.html"; export PATH_TRANSLATED - $ REMOTE_ADDR="127.0.0.2"; export REMOTE_ADDR - $ REMOTE_PORT="44703"; export REMOTE_PORT - $ REQUEST_METHOD="GET"; export REQUEST_METHOD - $ REQUEST_URI="/test/"; export REQUEST_URI - $ SCRIPT_FILENAME="/home/hopper/hg_public/test.cgi"; export SCRIPT_FILENAME - $ SCRIPT_NAME="/test"; export SCRIPT_NAME - $ SCRIPT_URI="http://hg.omnifarious.org/test/"; export SCRIPT_URI - $ SCRIPT_URL="/test/"; export SCRIPT_URL - $ SERVER_ADDR="127.0.0.1"; export SERVER_ADDR - $ SERVER_ADMIN="eric@localhost"; export SERVER_ADMIN - $ SERVER_NAME="hg.omnifarious.org"; export SERVER_NAME - $ SERVER_PORT="80"; export SERVER_PORT - $ SERVER_PROTOCOL="HTTP/1.1"; export SERVER_PROTOCOL - $ SERVER_SIGNATURE="<address>Apache/2.0.53 (Fedora) Server at hg.omnifarious.org Port 80</address>"; export SERVER_SIGNATURE - $ SERVER_SOFTWARE="Apache/2.0.53 (Fedora)"; export SERVER_SOFTWARE try hgweb request + $ . "$TESTDIR/cgienv" $ QUERY_STRING="cmd=changegroup&roots=0000000000000000000000000000000000000000"; export QUERY_STRING $ python hgweb.cgi >page1 2>&1 $ python "$TESTDIR/md5sum.py" page1
--- a/tests/test-clone-failure.t Sat Feb 12 16:08:41 2011 +0800
+++ b/tests/test-clone-failure.t Wed Feb 16 14:13:22 2011 -0600
@@ -39,7 +39,6 @@
> rm a
> else
> echo "abort: repository a not found!"
- > echo 255
> fi
abort: repository a not found!
--- a/tests/test-command-template.t Sat Feb 12 16:08:41 2011 +0800
+++ b/tests/test-command-template.t Wed Feb 16 14:13:22 2011 -0600
@@ -449,7 +449,7 @@
$ echo 'q = q' > t
$ hg log --style ./t
- abort: ./t: no key named 'changeset'
+ abort: "changeset" not in template map
[255]
Error if include fails:
@@ -570,7 +570,7 @@
Keys work:
- $ for key in author branches date desc file_adds file_dels file_mods \
+ $ for key in author branch branches date desc file_adds file_dels file_mods \
> file_copies file_copies_switch files \
> manifest node parents rev tags diffstat extras; do
> for mode in '' --verbose --debug; do
@@ -604,6 +604,33 @@
author--debug: other@place
author--debug: A. N. Other <other@place>
author--debug: User Name <user@hostname>
+ branch: default
+ branch: default
+ branch: default
+ branch: default
+ branch: foo
+ branch: default
+ branch: default
+ branch: default
+ branch: default
+ branch--verbose: default
+ branch--verbose: default
+ branch--verbose: default
+ branch--verbose: default
+ branch--verbose: foo
+ branch--verbose: default
+ branch--verbose: default
+ branch--verbose: default
+ branch--verbose: default
+ branch--debug: default
+ branch--debug: default
+ branch--debug: default
+ branch--debug: default
+ branch--debug: foo
+ branch--debug: default
+ branch--debug: default
+ branch--debug: default
+ branch--debug: default
branches:
branches:
branches:
--- a/tests/test-confused-revert.t Sat Feb 12 16:08:41 2011 +0800
+++ b/tests/test-confused-revert.t Wed Feb 16 14:13:22 2011 -0600
@@ -58,8 +58,8 @@
Revert should fail:
- $ hg revert --all
- abort: uncommitted merge - please provide a specific revision
+ $ hg revert
+ abort: uncommitted merge - use "hg update", see "hg help revert"
[255]
Revert should be ok now:
--- a/tests/test-convert-svn-move.t Sat Feb 12 16:08:41 2011 +0800 +++ b/tests/test-convert-svn-move.t Wed Feb 16 14:13:22 2011 -0600 @@ -167,83 +167,73 @@ > [progress] > assume-tty = 1 > delay = 0 + > format = topic bar number > refresh = 0 - > EOF - $ cat > filtercr.py <<EOF - > import sys, re - > for line in sys.stdin: - > line = re.sub(r'\r+[^\n]', lambda m: '\n' + m.group()[-1:], line) - > sys.stdout.write(line) + > width = 60 > EOF - $ hg convert svn-repo hg-progress 2>&1 | python filtercr.py + $ hg convert svn-repo hg-progress 2>&1 | $TESTDIR/filtercr.py - scanning [ <=> ] 1 - scanning [ <=> ] 2 - scanning [ <=> ] 3 - scanning [ <=> ] 4 - scanning [ <=> ] 5 - scanning [ <=> ] 6 - scanning [ <=> ] 7 - - converting [ ] 0/7 - getting files [========> ] 1/6 - getting files [==================> ] 2/6 - getting files [============================> ] 3/6 - getting files [======================================> ] 4/6 - getting files [================================================> ] 5/6 - getting files [==========================================================>] 6/6 - - converting [=======> ] 1/7 - scanning paths [ ] 0/1 - - getting files [==========================================================>] 1/1 - - converting [================> ] 2/7 - scanning paths [ ] 0/2 - scanning paths [============================> ] 1/2 - - getting files [=============> ] 1/4 - getting files [============================> ] 2/4 - getting files [===========================================> ] 3/4 - getting files [==========================================================>] 4/4 - - converting [=========================> ] 3/7 - scanning paths [ ] 0/1 - - getting files [==========================================================>] 1/1 - - converting [==================================> ] 4/7 - scanning paths [ ] 0/1 - - getting files [==========================================================>] 1/1 - - converting [===========================================> ] 5/7 - scanning paths [ ] 0/3 - scanning paths [==================> ] 1/3 - scanning paths [=====================================> ] 2/3 - - getting files [======> ] 1/8 - getting files [=============> ] 2/8 - getting files [=====================> ] 3/8 - getting files [============================> ] 4/8 - getting files [===================================> ] 5/8 - getting files [===========================================> ] 6/8 - getting files [==================================================> ] 7/8 - getting files [==========================================================>] 8/8 - - converting [====================================================> ] 6/7 - scanning paths [ ] 0/1 - - getting files [======> ] 1/8 - getting files [=============> ] 2/8 - getting files [=====================> ] 3/8 - getting files [============================> ] 4/8 - getting files [===================================> ] 5/8 - getting files [===========================================> ] 6/8 - getting files [==================================================> ] 7/8 - getting files [==========================================================>] 8/8 - + scanning [ <=> ] 1 + scanning [ <=> ] 2 + scanning [ <=> ] 3 + scanning [ <=> ] 4 + scanning [ <=> ] 5 + scanning [ <=> ] 6 + scanning [ <=> ] 7 + + converting [ ] 0/7 + getting files [=====> ] 1/6 + getting files [============> ] 2/6 + getting files [==================> ] 3/6 + getting files [=========================> ] 4/6 + getting files [===============================> ] 5/6 + getting files 
[======================================>] 6/6 + + converting [=====> ] 1/7 + scanning paths [ ] 0/1 + getting files [======================================>] 1/1 + + converting [===========> ] 2/7 + scanning paths [ ] 0/2 + scanning paths [==================> ] 1/2 + getting files [========> ] 1/4 + getting files [==================> ] 2/4 + getting files [============================> ] 3/4 + getting files [======================================>] 4/4 + + converting [=================> ] 3/7 + scanning paths [ ] 0/1 + getting files [======================================>] 1/1 + + converting [=======================> ] 4/7 + scanning paths [ ] 0/1 + getting files [======================================>] 1/1 + + converting [=============================> ] 5/7 + scanning paths [ ] 0/3 + scanning paths [===========> ] 1/3 + scanning paths [========================> ] 2/3 + getting files [===> ] 1/8 + getting files [========> ] 2/8 + getting files [=============> ] 3/8 + getting files [==================> ] 4/8 + getting files [=======================> ] 5/8 + getting files [============================> ] 6/8 + getting files [=================================> ] 7/8 + getting files [======================================>] 8/8 + + converting [===================================> ] 6/7 + scanning paths [ ] 0/1 + getting files [===> ] 1/8 + getting files [========> ] 2/8 + getting files [=============> ] 3/8 + getting files [==================> ] 4/8 + getting files [=======================> ] 5/8 + getting files [============================> ] 6/8 + getting files [=================================> ] 7/8 + getting files [======================================>] 8/8 + initializing destination hg-progress repository scanning source... sorting... @@ -255,3 +245,4 @@ 2 adddb 1 branch 0 clobberdir +
--- a/tests/test-convert.t Sat Feb 12 16:08:41 2011 +0800 +++ b/tests/test-convert.t Wed Feb 16 14:13:22 2011 -0600 @@ -40,16 +40,16 @@ have the following effects: --branchsort convert from parent to child revision when possible, which - means branches are usually converted one after the other. It - generates more compact repositories. + means branches are usually converted one after the other. + It generates more compact repositories. --datesort sort revisions by date. Converted repositories have good- looking changelogs but are often an order of magnitude larger than the same ones generated by --branchsort. --sourcesort try to preserve source revisions order, only supported by Mercurial sources. - If <REVMAP> isn't given, it will be put in a default location - (<dest>/.hg/shamap by default). The <REVMAP> is a simple text file that + If "REVMAP" isn't given, it will be put in a default location + ("<dest>/.hg/shamap" by default). The "REVMAP" is a simple text file that maps each source commit ID to the destination ID for that revision, like so: @@ -123,16 +123,19 @@ Mercurial Source '''''''''''''''' - --config convert.hg.ignoreerrors=False (boolean) - ignore integrity errors when reading. Use it to fix Mercurial - repositories with missing revlogs, by converting from and to - Mercurial. + The Mercurial source recognizes the following configuration options, which + you can set on the command line with "--config": - --config convert.hg.saverev=False (boolean) - store original revision ID in changeset (forces target IDs to change) - - --config convert.hg.startrev=0 (hg revision identifier) - convert start revision and its descendants + convert.hg.ignoreerrors + ignore integrity errors when reading. Use it to fix Mercurial + repositories with missing revlogs, by converting from and to + Mercurial. Default is False. + convert.hg.saverev + store original. revision ID in changeset (forces target IDs to + change). It takes and boolean argument and defaults to False. + convert.hg.startrev + convert start revision and its descendants. It takes a hg + revision identifier and defaults to 0. CVS Source '''''''''' @@ -140,46 +143,45 @@ CVS source will use a sandbox (i.e. a checked-out copy) from CVS to indicate the starting point of what will be converted. Direct access to the repository files is not needed, unless of course the repository is - :local:. The conversion uses the top level directory in the sandbox to + ":local:". The conversion uses the top level directory in the sandbox to find the CVS repository, and then uses CVS rlog commands to find files to convert. This means that unless a filemap is given, all files under the starting directory will be converted, and that any directory reorganization in the CVS sandbox is ignored. - The options shown are the defaults. - - --config convert.cvsps.cache=True (boolean) - Set to False to disable remote log caching, for testing and debugging - purposes. - - --config convert.cvsps.fuzz=60 (integer) - Specify the maximum time (in seconds) that is allowed between commits - with identical user and log message in a single changeset. When very - large files were checked in as part of a changeset then the default - may not be long enough. + The following options can be used with "--config": - --config convert.cvsps.mergeto='{{mergetobranch ([-\w]+)}}' - Specify a regular expression to which commit log messages are matched. 
- If a match occurs, then the conversion process will insert a dummy - revision merging the branch on which this log message occurs to the - branch indicated in the regex. - - --config convert.cvsps.mergefrom='{{mergefrombranch ([-\w]+)}}' - Specify a regular expression to which commit log messages are matched. - If a match occurs, then the conversion process will add the most - recent revision on the branch indicated in the regex as the second - parent of the changeset. - - --config hook.cvslog - Specify a Python function to be called at the end of gathering the CVS - log. The function is passed a list with the log entries, and can - modify the entries in-place, or add or delete them. - - --config hook.cvschangesets - Specify a Python function to be called after the changesets are - calculated from the the CVS log. The function is passed a list with - the changeset entries, and can modify the changesets in-place, or add - or delete them. + convert.cvsps.cache + Set to False to disable remote log caching, for testing and + debugging purposes. Default is True. + convert.cvsps.fuzz + Specify the maximum time (in seconds) that is allowed between + commits with identical user and log message in a single + changeset. When very large files were checked in as part of a + changeset then the default may not be long enough. The default + is 60. + convert.cvsps.mergeto + Specify a regular expression to which commit log messages are + matched. If a match occurs, then the conversion process will + insert a dummy revision merging the branch on which this log + message occurs to the branch indicated in the regex. Default + is "{{mergetobranch ([-\w]+)}}" + convert.cvsps.mergefrom + Specify a regular expression to which commit log messages are + matched. If a match occurs, then the conversion process will + add the most recent revision on the branch indicated in the + regex as the second parent of the changeset. Default is + "{{mergefrombranch ([-\w]+)}}" + hook.cvslog + Specify a Python function to be called at the end of gathering + the CVS log. The function is passed a list with the log + entries, and can modify the entries in-place, or add or delete + them. + hook.cvschangesets + Specify a Python function to be called after the changesets + are calculated from the the CVS log. The function is passed a + list with the changeset entries, and can modify the changesets + in-place, or add or delete them. An additional "debugcvsps" Mercurial command allows the builtin changeset merging code to be run without doing a conversion. Its parameters and @@ -199,21 +201,22 @@ them to paths relative to the source URL, or leave them blank to disable auto detection. - --config convert.svn.branches=branches (directory name) - specify the directory containing branches + The following options can be set with "--config": - --config convert.svn.tags=tags (directory name) - specify the directory containing tags - - --config convert.svn.trunk=trunk (directory name) - specify the name of the trunk branch + convert.svn.branches + specify the directory containing branches. The defaults is + "branches". + convert.svn.tags + specify the directory containing tags. The default is "tags". + convert.svn.trunk + specify the name of the trunk branch The defauls is "trunk". Source history can be retrieved starting at a specific revision, instead of being integrally converted. Only single branch conversions are supported. - --config convert.svn.startrev=0 (svn revision number) - specify start Subversion revision. 
+ convert.svn.startrev + specify start Subversion revision number. The default is 0. Perforce Source ''''''''''''''' @@ -222,25 +225,27 @@ specification as source. It will convert all files in the source to a flat Mercurial repository, ignoring labels, branches and integrations. Note that when a depot path is given you then usually should specify a target - directory, because otherwise the target may be named ...-hg. + directory, because otherwise the target may be named "...-hg". It is possible to limit the amount of source history to be converted by - specifying an initial Perforce revision. + specifying an initial Perforce revision: - --config convert.p4.startrev=0 (perforce changelist number) - specify initial Perforce revision. + convert.p4.startrev + specify initial Perforce revision, a Perforce changelist + number). Mercurial Destination ''''''''''''''''''''' - --config convert.hg.clonebranches=False (boolean) - dispatch source branches in separate clones. + The following options are supported: - --config convert.hg.tagsbranch=default (branch name) - tag revisions branch name - - --config convert.hg.usebranchnames=True (boolean) - preserve branch names + convert.hg.clonebranches + dispatch source branches in separate clones. The default is + False. + convert.hg.tagsbranch + branch name for tag revisions, defaults to "default". + convert.hg.usebranchnames + preserve branch names. The default is True options: @@ -376,7 +381,7 @@ contents of fncache file: - $ cat b/.hg/store/fncache + $ cat b/.hg/store/fncache | sort data/a.i data/b.i
--- a/tests/test-debugcomplete.t Sat Feb 12 16:08:41 2011 +0800 +++ b/tests/test-debugcomplete.t Wed Feb 16 14:13:22 2011 -0600 @@ -6,6 +6,7 @@ archive backout bisect + bookmarks branch branches bundle @@ -74,6 +75,7 @@ debugdata debugdate debugfsinfo + debugignore debugindex debugindexdot debuginstall @@ -187,8 +189,8 @@ init: ssh, remotecmd, insecure log: follow, follow-first, date, copies, keyword, rev, removed, only-merges, user, only-branch, branch, prune, patch, git, limit, no-merges, stat, style, template, include, exclude merge: force, tool, rev, preview - pull: update, force, rev, branch, ssh, remotecmd, insecure - push: force, rev, branch, new-branch, ssh, remotecmd, insecure + pull: update, force, rev, bookmark, branch, ssh, remotecmd, insecure + push: force, rev, bookmark, branch, new-branch, ssh, remotecmd, insecure remove: after, force, include, exclude serve: accesslog, daemon, daemon-pipefds, errorlog, port, address, prefix, name, web-conf, webdir-conf, pid-file, stdio, templates, style, ipv6, certificate status: all, modified, added, removed, deleted, clean, unknown, ignored, no-status, copies, print0, rev, change, include, exclude, subrepos @@ -198,6 +200,7 @@ archive: no-decode, prefix, rev, type, subrepos, include, exclude backout: merge, parent, tool, rev, include, exclude, message, logfile, date, user bisect: reset, good, bad, skip, command, noupdate + bookmarks: force, rev, delete, rename branch: force, clean branches: active, closed bundle: force, rev, branch, base, all, type, ssh, remotecmd, insecure @@ -212,6 +215,7 @@ debugdata: debugdate: extended debugfsinfo: + debugignore: debugindex: format debugindexdot: debuginstall: @@ -228,10 +232,10 @@ help: identify: rev, num, id, branch, tags import: strip, base, force, no-commit, exact, import-branch, message, logfile, date, user, similarity - incoming: force, newest-first, bundle, rev, branch, patch, git, limit, no-merges, stat, style, template, ssh, remotecmd, insecure, subrepos + incoming: force, newest-first, bundle, rev, bookmarks, branch, patch, git, limit, no-merges, stat, style, template, ssh, remotecmd, insecure, subrepos locate: rev, print0, fullpath, include, exclude manifest: rev - outgoing: force, rev, newest-first, branch, patch, git, limit, no-merges, stat, style, template, ssh, remotecmd, insecure, subrepos + outgoing: force, rev, newest-first, bookmarks, branch, patch, git, limit, no-merges, stat, style, template, ssh, remotecmd, insecure, subrepos parents: rev, style, template paths: recover:
--- a/tests/test-demandimport.py Sat Feb 12 16:08:41 2011 +0800
+++ b/tests/test-demandimport.py Wed Feb 16 14:13:22 2011 -0600
@@ -8,6 +8,7 @@
l = repr(obj)
l = rsub("0x[0-9a-fA-F]+", "0x?", l)
l = rsub("from '.*'", "from '?'", l)
+ l = rsub("'<[a-z]*>'", "'<whatever>'", l)
return l
import os
--- a/tests/test-demandimport.py.out Sat Feb 12 16:08:41 2011 +0800
+++ b/tests/test-demandimport.py.out Wed Feb 16 14:13:22 2011 -0600
@@ -11,5 +11,5 @@
fred.sub = <function sub at 0x?>
fred = <proxied module 're'>
re = <unloaded module 'sys'>
-re.stderr = <open file '<stderr>', mode 'w' at 0x?>
+re.stderr = <open file '<whatever>', mode 'w' at 0x?>
re = <proxied module 'sys'>
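The extra rsub() call added to test-demandimport.py exists so the repr of sys.stderr matches regardless of how the underlying stream names itself, which is why the expected output above now says '<whatever>'. A standalone sketch of the normalization chain (the real test applies it to repr(obj); the sample string here is an assumption):

    import re

    def rsub(pattern, replacement, text):
        return re.sub(pattern, replacement, text)

    l = "<open file '<stderr>', mode 'w' at 0x7f3a2c0>"
    l = rsub("0x[0-9a-fA-F]+", "0x?", l)        # hide memory addresses
    l = rsub("'<[a-z]*>'", "'<whatever>'", l)   # hide the stream's repr name
    print(l)  # <open file '<whatever>', mode 'w' at 0x?>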
--- a/tests/test-diffstat.t Sat Feb 12 16:08:41 2011 +0800
+++ b/tests/test-diffstat.t Wed Feb 16 14:13:22 2011 -0600
@@ -46,3 +46,20 @@
b | Bin
1 files changed, 0 insertions(+), 0 deletions(-)
+ $ hg ci -m createb
+
+ $ printf '\0' > "file with spaces"
+ $ hg add "file with spaces"
+
+Filename with spaces diffstat:
+
+ $ hg diff --stat
+ file with spaces | 0
+ 1 files changed, 0 insertions(+), 0 deletions(-)
+
+Filename with spaces git diffstat:
+
+ $ hg diff --stat --git
+ file with spaces | Bin
+ 1 files changed, 0 insertions(+), 0 deletions(-)
+
--- a/tests/test-doctest.py Sat Feb 12 16:08:41 2011 +0800
+++ b/tests/test-doctest.py Wed Feb 16 14:13:22 2011 -0600
@@ -19,5 +19,11 @@
import mercurial.util
doctest.testmod(mercurial.util)
+import mercurial.encoding
+doctest.testmod(mercurial.encoding)
+
+import mercurial.hgweb.hgwebdir_mod
+doctest.testmod(mercurial.hgweb.hgwebdir_mod)
+
import hgext.convert.cvsps
doctest.testmod(hgext.convert.cvsps)
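doctest.testmod() runs every interactive example embedded in a module's docstrings, which is why importing a module and calling testmod on it, as above, is enough to pick up the new doctests in mercurial.encoding and hgwebdir_mod. A self-contained sketch of the pattern (the module and function here are hypothetical, not Mercurial code):

    import doctest

    def upper(s):
        """Return s upper-cased.

        >>> upper('hg')
        'HG'
        """
        return s.upper()

    if __name__ == '__main__':
        doctest.testmod()  # runs the >>> example above and reports failures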
--- a/tests/test-encoding.t Sat Feb 12 16:08:41 2011 +0800
+++ b/tests/test-encoding.t Wed Feb 16 14:13:22 2011 -0600
@@ -240,6 +240,4 @@
abort: decoding near '\xe9': 'ascii' codec can't decode byte 0xe9 in position 0: ordinal not in range(128)! (esc)
[255]
$ cp latin-1-tag .hg/branch
- $ HGENCODING=latin-1 hg ci -m 'should fail'
- abort: branch name not in UTF-8!
- [255]
+ $ HGENCODING=latin-1 hg ci -m 'auto-promote legacy name'
--- a/tests/test-extension.t Sat Feb 12 16:08:41 2011 +0800
+++ b/tests/test-extension.t Wed Feb 16 14:13:22 2011 -0600
@@ -315,6 +315,11 @@
use "hg help extensions" for information on enabling extensions
+ $ cat > hgext/forest.py <<EOF
+ > cmdtable = None
+ > EOF
$ hg --config extensions.path=./path.py help foo > /dev/null
+ warning: error finding commands in $TESTTMP/hgext/forest.py
hg: unknown command 'foo'
+ warning: error finding commands in $TESTTMP/hgext/forest.py
[255]
--- a/tests/test-fncache.t Sat Feb 12 16:08:41 2011 +0800
+++ b/tests/test-fncache.t Wed Feb 16 14:13:22 2011 -0600
@@ -6,7 +6,7 @@
$ hg add
adding a
$ hg ci -m first
- $ cat .hg/store/fncache
+ $ cat .hg/store/fncache | sort
data/a.i
Testing a.i/b:
@@ -16,7 +16,7 @@
$ hg add
adding a.i/b
$ hg ci -m second
- $ cat .hg/store/fncache
+ $ cat .hg/store/fncache | sort
data/a.i
data/a.i.hg/b.i
@@ -27,10 +27,10 @@
$ hg add
adding a.i.hg/c
$ hg ci -m third
- $ cat .hg/store/fncache
+ $ cat .hg/store/fncache | sort
data/a.i
+ data/a.i.hg.hg/c.i
data/a.i.hg/b.i
- data/a.i.hg.hg/c.i
Testing verify:
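The `| sort` added above reflects that fncache is a plain newline-separated list of store files (e.g. data/a.i) whose on-disk order is not guaranteed. A small sketch of reading it in a stable order (the helper name and path handling are assumptions for illustration):

    # Read .hg/store/fncache and return its entries sorted, mirroring
    # what piping through sort does in the test.
    def sorted_fncache(repo_root):
        fp = open(repo_root + '/.hg/store/fncache')
        try:
            return sorted(line.strip() for line in fp if line.strip())
        finally:
            fp.close()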
--- a/tests/test-globalopts.t Sat Feb 12 16:08:41 2011 +0800
+++ b/tests/test-globalopts.t Wed Feb 16 14:13:22 2011 -0600
@@ -284,6 +284,7 @@
archive create an unversioned archive of a repository revision
backout reverse effect of earlier changeset
bisect subdivision search of changesets
+ bookmarks track a line of development with movable markers
branch set or show the current branch name
branches list repository named branches
bundle create a changegroup file
@@ -360,6 +361,7 @@
archive create an unversioned archive of a repository revision
backout reverse effect of earlier changeset
bisect subdivision search of changesets
+ bookmarks track a line of development with movable markers
branch set or show the current branch name
branches list repository named branches
bundle create a changegroup file
--- a/tests/test-hardlinks-safety.t Sat Feb 12 16:08:41 2011 +0800 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,106 +0,0 @@ -some implementations of cp can't create hardlinks - - $ cat > cp.py <<EOF - > from mercurial import util - > import sys - > util.copyfiles(sys.argv[1], sys.argv[2], hardlink=True) - > EOF - -Test hardlinking outside hg: - - $ mkdir x - $ echo foo > x/a - - $ python cp.py x y - $ echo bar >> y/a - -No diff if hardlink: - - $ diff x/a y/a - -Test mq hardlinking: - - $ echo "[extensions]" >> $HGRCPATH - $ echo "mq=" >> $HGRCPATH - - $ hg init a - $ cd a - - $ hg qimport -n foo - << EOF - > # HG changeset patch - > # Date 1 0 - > diff -r 2588a8b53d66 a - > --- /dev/null Thu Jan 01 00:00:00 1970 +0000 - > +++ b/a Wed Jul 23 15:54:29 2008 +0200 - > @@ -0,0 +1,1 @@ - > +a - > EOF - adding foo to series file - - $ hg qpush - applying foo - now at: foo - - $ cd .. - $ python cp.py a b - $ cd b - - $ hg qimport -n bar - << EOF - > # HG changeset patch - > # Date 2 0 - > diff -r 2588a8b53d66 a - > --- /dev/null Thu Jan 01 00:00:00 1970 +0000 - > +++ b/b Wed Jul 23 15:54:29 2008 +0200 - > @@ -0,0 +1,1 @@ - > +b - > EOF - adding bar to series file - - $ hg qpush - applying bar - now at: bar - - $ cat .hg/patches/status - 430ed4828a74fa4047bc816a25500f7472ab4bfe:foo - 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c:bar - - $ cat .hg/patches/series - foo - bar - - $ cat ../a/.hg/patches/status - 430ed4828a74fa4047bc816a25500f7472ab4bfe:foo - - $ cat ../a/.hg/patches/series - foo - -Test tags hardlinking: - - $ hg qdel -r qbase:qtip - patch foo finalized without changeset message - patch bar finalized without changeset message - - $ hg tag -l lfoo - $ hg tag foo - - $ cd .. - $ python cp.py b c - $ cd c - - $ hg tag -l -r 0 lbar - $ hg tag -r 0 bar - - $ cat .hgtags - 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c foo - 430ed4828a74fa4047bc816a25500f7472ab4bfe bar - - $ cat .hg/localtags - 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c lfoo - 430ed4828a74fa4047bc816a25500f7472ab4bfe lbar - - $ cat ../b/.hgtags - 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c foo - - $ cat ../b/.hg/localtags - 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c lfoo -
--- a/tests/test-hardlinks.t Sat Feb 12 16:08:41 2011 +0800 +++ b/tests/test-hardlinks.t Wed Feb 16 14:13:22 2011 -0600 @@ -10,6 +10,19 @@ > find $1 -type f | python $TESTTMP/nlinks.py > } +Some implementations of cp can't create hardlinks (replaces 'cp -al' on Linux): + + $ cat > linkcp.py <<EOF + > from mercurial import util + > import sys + > util.copyfiles(sys.argv[1], sys.argv[2], hardlink=True) + > EOF + + $ linkcp() + > { + > python $TESTTMP/linkcp.py $1 $2 + > } + Prepare repo r1: $ mkdir r1 @@ -152,3 +165,167 @@ 1 r2/.hg/store/data/f1.i 1 r2/.hg/store/fncache + + $ cd r3 + $ hg tip --template '{rev}:{node|short}\n' + 11:a6451b6bc41f + $ echo bla > f1 + $ hg ci -m1 + $ cd .. + +Create hardlinked copy r4 of r3 (on Linux, we would call 'cp -al'): + + $ linkcp r3 r4 + +r4 has hardlinks in the working dir (not just inside .hg): + + $ nlinksdir r4 + 2 r4/.hg/00changelog.i + 2 r4/.hg/branch + 2 r4/.hg/cache/branchheads + 2 r4/.hg/cache/tags + 2 r4/.hg/dirstate + 2 r4/.hg/hgrc + 2 r4/.hg/last-message.txt + 2 r4/.hg/requires + 2 r4/.hg/store/00changelog.i + 2 r4/.hg/store/00manifest.i + 2 r4/.hg/store/data/d1/f2.d + 2 r4/.hg/store/data/d1/f2.i + 2 r4/.hg/store/data/f1.i + 2 r4/.hg/store/fncache + 2 r4/.hg/store/undo + 2 r4/.hg/undo.branch + 2 r4/.hg/undo.desc + 2 r4/.hg/undo.dirstate + 2 r4/d1/data1 + 2 r4/d1/f2 + 2 r4/f1 + +Update back to revision 11 in r4 should break hardlink of file f1: + + $ hg -R r4 up 11 + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + + $ nlinksdir r4 + 2 r4/.hg/00changelog.i + 1 r4/.hg/branch + 2 r4/.hg/cache/branchheads + 2 r4/.hg/cache/tags + 1 r4/.hg/dirstate + 2 r4/.hg/hgrc + 2 r4/.hg/last-message.txt + 2 r4/.hg/requires + 2 r4/.hg/store/00changelog.i + 2 r4/.hg/store/00manifest.i + 2 r4/.hg/store/data/d1/f2.d + 2 r4/.hg/store/data/d1/f2.i + 2 r4/.hg/store/data/f1.i + 2 r4/.hg/store/fncache + 2 r4/.hg/store/undo + 2 r4/.hg/undo.branch + 2 r4/.hg/undo.desc + 2 r4/.hg/undo.dirstate + 2 r4/d1/data1 + 2 r4/d1/f2 + 1 r4/f1 + + +Test hardlinking outside hg: + + $ mkdir x + $ echo foo > x/a + + $ linkcp x y + $ echo bar >> y/a + +No diff if hardlink: + + $ diff x/a y/a + +Test mq hardlinking: + + $ echo "[extensions]" >> $HGRCPATH + $ echo "mq=" >> $HGRCPATH + + $ hg init a + $ cd a + + $ hg qimport -n foo - << EOF + > # HG changeset patch + > # Date 1 0 + > diff -r 2588a8b53d66 a + > --- /dev/null Thu Jan 01 00:00:00 1970 +0000 + > +++ b/a Wed Jul 23 15:54:29 2008 +0200 + > @@ -0,0 +1,1 @@ + > +a + > EOF + adding foo to series file + + $ hg qpush + applying foo + now at: foo + + $ cd .. + $ linkcp a b + $ cd b + + $ hg qimport -n bar - << EOF + > # HG changeset patch + > # Date 2 0 + > diff -r 2588a8b53d66 a + > --- /dev/null Thu Jan 01 00:00:00 1970 +0000 + > +++ b/b Wed Jul 23 15:54:29 2008 +0200 + > @@ -0,0 +1,1 @@ + > +b + > EOF + adding bar to series file + + $ hg qpush + applying bar + now at: bar + + $ cat .hg/patches/status + 430ed4828a74fa4047bc816a25500f7472ab4bfe:foo + 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c:bar + + $ cat .hg/patches/series + foo + bar + + $ cat ../a/.hg/patches/status + 430ed4828a74fa4047bc816a25500f7472ab4bfe:foo + + $ cat ../a/.hg/patches/series + foo + +Test tags hardlinking: + + $ hg qdel -r qbase:qtip + patch foo finalized without changeset message + patch bar finalized without changeset message + + $ hg tag -l lfoo + $ hg tag foo + + $ cd .. 
+ $ linkcp b c + $ cd c + + $ hg tag -l -r 0 lbar + $ hg tag -r 0 bar + + $ cat .hgtags + 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c foo + 430ed4828a74fa4047bc816a25500f7472ab4bfe bar + + $ cat .hg/localtags + 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c lfoo + 430ed4828a74fa4047bc816a25500f7472ab4bfe lbar + + $ cat ../b/.hgtags + 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c foo + + $ cat ../b/.hg/localtags + 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c lfoo +
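The linkcp helper introduced in test-hardlinks.t wraps mercurial.util.copyfiles(hardlink=True) as a portable stand-in for 'cp -al'. As a rough illustration of what a hardlink-preserving copy does, and why nlinksdir then reports link counts of 2, here is a standard-library-only sketch; the recursion and copy fallback are assumptions for illustration, not the util.copyfiles implementation:

    import os
    import shutil
    import sys

    def linkcp(src, dst):
        # Recursively mirror src into dst, hardlinking regular files when
        # the filesystem allows it and copying them otherwise.
        os.makedirs(dst)
        for name in os.listdir(src):
            s, d = os.path.join(src, name), os.path.join(dst, name)
            if os.path.isdir(s):
                linkcp(s, d)
            else:
                try:
                    os.link(s, d)       # share the inode: link count becomes 2
                except OSError:
                    shutil.copy2(s, d)  # fall back to a plain copy

    if __name__ == '__main__':
        linkcp(sys.argv[1], sys.argv[2])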
--- a/tests/test-help.t Sat Feb 12 16:08:41 2011 +0800
+++ b/tests/test-help.t Wed Feb 16 14:13:22 2011 -0600
@@ -55,6 +55,7 @@
archive create an unversioned archive of a repository revision
backout reverse effect of earlier changeset
bisect subdivision search of changesets
+ bookmarks track a line of development with movable markers
branch set or show the current branch name
branches list repository named branches
bundle create a changegroup file
@@ -127,6 +128,7 @@
archive create an unversioned archive of a repository revision
backout reverse effect of earlier changeset
bisect subdivision search of changesets
+ bookmarks track a line of development with movable markers
branch set or show the current branch name
branches list repository named branches
bundle create a changegroup file
@@ -649,6 +651,7 @@
archive create an unversioned archive of a repository revision
backout reverse effect of earlier changeset
bisect subdivision search of changesets
+ bookmarks track a line of development with movable markers
branch set or show the current branch name
branches list repository named branches
bundle create a changegroup file
--- a/tests/test-hgignore.t Sat Feb 12 16:08:41 2011 +0800
+++ b/tests/test-hgignore.t Wed Feb 16 14:13:22 2011 -0600
@@ -120,3 +120,5 @@
$ hg status .
A b.o
+ $ hg debugignore
+ (?:(?:|.*/)[^/]*(?:/|$))
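hg debugignore prints the repository's combined ignore rules as a single regular expression. To experiment with whatever pattern it prints, the regex can be compiled directly; the sample paths below are assumptions, not part of the test:

    import re

    # Pattern taken from the debugignore output shown above.
    ignore_re = re.compile(r'(?:(?:|.*/)[^/]*(?:/|$))')

    for path in ('b.o', 'sub/b.o'):
        print('%s %s' % (path, bool(ignore_re.match(path))))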
--- a/tests/test-hgwebdir.t Sat Feb 12 16:08:41 2011 +0800 +++ b/tests/test-hgwebdir.t Wed Feb 16 14:13:22 2011 -0600 @@ -99,6 +99,7 @@ > rcoll=$root/** > star=* > starstar=** + > astar=webdir/a/* > EOF $ hg serve -p $HGPORT1 -d --pid-file=hg.pid --webdir-conf paths.conf \ > -A access-paths.log -E error-paths-2.log @@ -130,6 +131,8 @@ /starstar/webdir/b/ /starstar/webdir/b/d/ /starstar/webdir/c/ + /astar/ + /astar/.hg/patches/ $ "$TESTDIR/get-with-headers.py" localhost:$HGPORT1 '/?style=paper' 200 Script output follows @@ -322,6 +325,22 @@ <td class="indexlinks"></td> </tr> + <tr class="parity0"> + <td><a href="/astar/?style=paper">astar</a></td> + <td>unknown</td> + <td>Foo Bar <foo.bar@example.com></td> + <td class="age">* ago</td> (glob) + <td class="indexlinks"></td> + </tr> + + <tr class="parity1"> + <td><a href="/astar/.hg/patches/?style=paper">astar/.hg/patches</a></td> + <td>unknown</td> + <td>Foo Bar <foo.bar@example.com></td> + <td class="age">* ago</td> (glob) + <td class="indexlinks"></td> + </tr> + </table> </div> </div> @@ -470,7 +489,7 @@ a -est [paths] '**' extension +Test [paths] '**' extension $ "$TESTDIR/get-with-headers.py" localhost:$HGPORT1 '/rcoll/?style=raw' 200 Script output follows @@ -486,6 +505,12 @@ 200 Script output follows d + +Test [paths] '*' in a repo root + + $ hg id http://localhost:$HGPORT1/astar + 8580ff50825a + $ "$TESTDIR/killdaemons.py" $ cat > paths.conf <<EOF > [paths]
--- a/tests/test-hook.t Sat Feb 12 16:08:41 2011 +0800 +++ b/tests/test-hook.t Wed Feb 16 14:13:22 2011 -0600 @@ -1,18 +1,16 @@ - $ cp "$TESTDIR"/printenv.py . - commit hooks can see env vars $ hg init a $ cd a $ echo "[hooks]" > .hg/hgrc - $ echo 'commit = unset HG_LOCAL HG_TAG; python ../printenv.py commit' >> .hg/hgrc - $ echo 'commit.b = unset HG_LOCAL HG_TAG; python ../printenv.py commit.b' >> .hg/hgrc - $ echo 'precommit = unset HG_LOCAL HG_NODE HG_TAG; python ../printenv.py precommit' >> .hg/hgrc - $ echo 'pretxncommit = unset HG_LOCAL HG_TAG; python ../printenv.py pretxncommit' >> .hg/hgrc + $ echo 'commit = unset HG_LOCAL HG_TAG; python "$TESTDIR"/printenv.py commit' >> .hg/hgrc + $ echo 'commit.b = unset HG_LOCAL HG_TAG; python "$TESTDIR"/printenv.py commit.b' >> .hg/hgrc + $ echo 'precommit = unset HG_LOCAL HG_NODE HG_TAG; python "$TESTDIR"/printenv.py precommit' >> .hg/hgrc + $ echo 'pretxncommit = unset HG_LOCAL HG_TAG; python "$TESTDIR"/printenv.py pretxncommit' >> .hg/hgrc $ echo 'pretxncommit.tip = hg -q tip' >> .hg/hgrc - $ echo 'pre-identify = python ../printenv.py pre-identify 1' >> .hg/hgrc - $ echo 'pre-cat = python ../printenv.py pre-cat' >> .hg/hgrc - $ echo 'post-cat = python ../printenv.py post-cat' >> .hg/hgrc + $ echo 'pre-identify = python "$TESTDIR"/printenv.py pre-identify 1' >> .hg/hgrc + $ echo 'pre-cat = python "$TESTDIR"/printenv.py pre-cat' >> .hg/hgrc + $ echo 'post-cat = python "$TESTDIR"/printenv.py post-cat' >> .hg/hgrc $ echo a > a $ hg add a $ hg commit -m a @@ -30,9 +28,9 @@ changegroup hooks can see env vars $ echo '[hooks]' > .hg/hgrc - $ echo 'prechangegroup = python ../printenv.py prechangegroup' >> .hg/hgrc - $ echo 'changegroup = python ../printenv.py changegroup' >> .hg/hgrc - $ echo 'incoming = python ../printenv.py incoming' >> .hg/hgrc + $ echo 'prechangegroup = python "$TESTDIR"/printenv.py prechangegroup' >> .hg/hgrc + $ echo 'changegroup = python "$TESTDIR"/printenv.py changegroup' >> .hg/hgrc + $ echo 'incoming = python "$TESTDIR"/printenv.py incoming' >> .hg/hgrc pretxncommit and commit hooks can see both parents of merge @@ -68,21 +66,21 @@ test generic hooks $ hg id - pre-identify hook: HG_ARGS=id HG_OPTS={'tags': None, 'rev': '', 'num': None, 'branch': None, 'id': None} HG_PATS=[] + pre-identify hook: HG_ARGS=id HG_OPTS={'branch': None, 'id': None, 'num': None, 'rev': '', 'tags': None} HG_PATS=[] warning: pre-identify hook exited with status 1 [1] $ hg cat b - pre-cat hook: HG_ARGS=cat b HG_OPTS={'rev': '', 'decode': None, 'exclude': [], 'output': '', 'include': []} HG_PATS=['b'] - post-cat hook: HG_ARGS=cat b HG_OPTS={'rev': '', 'decode': None, 'exclude': [], 'output': '', 'include': []} HG_PATS=['b'] HG_RESULT=0 + pre-cat hook: HG_ARGS=cat b HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': ''} HG_PATS=['b'] b + post-cat hook: HG_ARGS=cat b HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': ''} HG_PATS=['b'] HG_RESULT=0 $ cd ../b $ hg pull ../a - prechangegroup hook: HG_SOURCE=pull HG_URL=file: - changegroup hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_SOURCE=pull HG_URL=file: - incoming hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_SOURCE=pull HG_URL=file: - incoming hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_SOURCE=pull HG_URL=file: - incoming hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_SOURCE=pull HG_URL=file: + prechangegroup hook: HG_SOURCE=pull HG_URL=file:$TESTTMP/a + changegroup hook: 
HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_SOURCE=pull HG_URL=file:$TESTTMP/a + incoming hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_SOURCE=pull HG_URL=file:$TESTTMP/a + incoming hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_SOURCE=pull HG_URL=file:$TESTTMP/a + incoming hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_SOURCE=pull HG_URL=file:$TESTTMP/a pulling from ../a searching for changes adding changesets @@ -94,8 +92,8 @@ tag hooks can see env vars $ cd ../a - $ echo 'pretag = python ../printenv.py pretag' >> .hg/hgrc - $ echo 'tag = unset HG_PARENT1 HG_PARENT2; python ../printenv.py tag' >> .hg/hgrc + $ echo 'pretag = python "$TESTDIR"/printenv.py pretag' >> .hg/hgrc + $ echo 'tag = unset HG_PARENT1 HG_PARENT2; python "$TESTDIR"/printenv.py tag' >> .hg/hgrc $ hg tag -d '3 0' a pretag hook: HG_LOCAL=0 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_TAG=a precommit hook: HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2 @@ -110,7 +108,7 @@ pretag hook can forbid tagging - $ echo 'pretag.forbid = python ../printenv.py pretag.forbid 1' >> .hg/hgrc + $ echo 'pretag.forbid = python "$TESTDIR"/printenv.py pretag.forbid 1' >> .hg/hgrc $ hg tag -d '4 0' fa pretag hook: HG_LOCAL=0 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fa pretag.forbid hook: HG_LOCAL=0 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fa @@ -126,7 +124,7 @@ more there after $ echo 'pretxncommit.forbid0 = hg tip -q' >> .hg/hgrc - $ echo 'pretxncommit.forbid1 = python ../printenv.py pretxncommit.forbid 1' >> .hg/hgrc + $ echo 'pretxncommit.forbid1 = python "$TESTDIR"/printenv.py pretxncommit.forbid 1' >> .hg/hgrc $ echo z > z $ hg add z $ hg -q tip @@ -146,7 +144,7 @@ precommit hook can prevent commit - $ echo 'precommit.forbid = python ../printenv.py precommit.forbid 1' >> .hg/hgrc + $ echo 'precommit.forbid = python "$TESTDIR"/printenv.py precommit.forbid 1' >> .hg/hgrc $ hg commit -m 'fail' -d '4 0' precommit hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 precommit.forbid hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 @@ -157,14 +155,14 @@ preupdate hook can prevent update - $ echo 'preupdate = python ../printenv.py preupdate' >> .hg/hgrc + $ echo 'preupdate = python "$TESTDIR"/printenv.py preupdate' >> .hg/hgrc $ hg update 1 preupdate hook: HG_PARENT1=ab228980c14d 0 files updated, 0 files merged, 2 files removed, 0 files unresolved update hook - $ echo 'update = python ../printenv.py update' >> .hg/hgrc + $ echo 'update = python "$TESTDIR"/printenv.py update' >> .hg/hgrc $ hg update preupdate hook: HG_PARENT1=539e4b31b6dc update hook: HG_ERROR=0 HG_PARENT1=539e4b31b6dc @@ -176,9 +174,9 @@ $ hg -q tip 3:07f3376c1e65 $ echo '[hooks]' > .hg/hgrc - $ echo 'prechangegroup.forbid = python ../printenv.py prechangegroup.forbid 1' >> .hg/hgrc + $ echo 'prechangegroup.forbid = python "$TESTDIR"/printenv.py prechangegroup.forbid 1' >> .hg/hgrc $ hg pull ../a - prechangegroup.forbid hook: HG_SOURCE=pull HG_URL=file: + prechangegroup.forbid hook: HG_SOURCE=pull HG_URL=file:$TESTTMP/a pulling from ../a searching for changes abort: prechangegroup.forbid hook exited with status 1 @@ -189,10 +187,10 @@ $ echo '[hooks]' > .hg/hgrc $ echo 'pretxnchangegroup.forbid0 = hg tip -q' >> .hg/hgrc - $ echo 'pretxnchangegroup.forbid1 = python ../printenv.py pretxnchangegroup.forbid 1' >> .hg/hgrc + $ echo 'pretxnchangegroup.forbid1 = python "$TESTDIR"/printenv.py pretxnchangegroup.forbid 1' >> .hg/hgrc $ hg pull ../a 4:539e4b31b6dc - pretxnchangegroup.forbid 
hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/b HG_SOURCE=pull HG_URL=file: + pretxnchangegroup.forbid hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/b HG_SOURCE=pull HG_URL=file:$TESTTMP/a pulling from ../a searching for changes adding changesets @@ -210,8 +208,8 @@ $ rm .hg/hgrc $ echo '[hooks]' > ../a/.hg/hgrc - $ echo 'preoutgoing = python ../printenv.py preoutgoing' >> ../a/.hg/hgrc - $ echo 'outgoing = python ../printenv.py outgoing' >> ../a/.hg/hgrc + $ echo 'preoutgoing = python "$TESTDIR"/printenv.py preoutgoing' >> ../a/.hg/hgrc + $ echo 'outgoing = python "$TESTDIR"/printenv.py outgoing' >> ../a/.hg/hgrc $ hg pull ../a preoutgoing hook: HG_SOURCE=pull outgoing hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_SOURCE=pull @@ -227,7 +225,7 @@ preoutgoing hook can prevent outgoing changes - $ echo 'preoutgoing.forbid = python ../printenv.py preoutgoing.forbid 1' >> ../a/.hg/hgrc + $ echo 'preoutgoing.forbid = python "$TESTDIR"/printenv.py preoutgoing.forbid 1' >> ../a/.hg/hgrc $ hg pull ../a preoutgoing hook: HG_SOURCE=pull preoutgoing.forbid hook: HG_SOURCE=pull @@ -240,8 +238,8 @@ $ cd .. $ echo '[hooks]' > a/.hg/hgrc - $ echo 'preoutgoing = python ../printenv.py preoutgoing' >> a/.hg/hgrc - $ echo 'outgoing = python ../printenv.py outgoing' >> a/.hg/hgrc + $ echo 'preoutgoing = python "$TESTDIR"/printenv.py preoutgoing' >> a/.hg/hgrc + $ echo 'outgoing = python "$TESTDIR"/printenv.py outgoing' >> a/.hg/hgrc $ hg clone a c preoutgoing hook: HG_SOURCE=clone outgoing hook: HG_NODE=0000000000000000000000000000000000000000 HG_SOURCE=clone @@ -251,7 +249,7 @@ preoutgoing hook can prevent outgoing changes for local clones - $ echo 'preoutgoing.forbid = python ../printenv.py preoutgoing.forbid 1' >> a/.hg/hgrc + $ echo 'preoutgoing.forbid = python "$TESTDIR"/printenv.py preoutgoing.forbid 1' >> a/.hg/hgrc $ hg clone a zzz preoutgoing hook: HG_SOURCE=clone preoutgoing.forbid hook: HG_SOURCE=clone
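The change repeated throughout this hooks test is mechanical: hook commands in .hg/hgrc now locate the printenv.py helper via "$TESTDIR" instead of a copy placed next to the repository. A minimal sketch of the resulting hook wiring, assuming the test harness exports TESTDIR and provides printenv.py (illustrative only, not part of the changeset):

  $ cat >> .hg/hgrc <<'EOF'
  > [hooks]
  > # dump the HG_* variables Mercurial passes to the hook
  > changegroup = python "$TESTDIR"/printenv.py changegroup
  > # same helper, but exit 1 so the incoming changegroup is rejected
  > pretxnchangegroup.forbid = python "$TESTDIR"/printenv.py pretxnchangegroup.forbid 1
  > EOF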
--- a/tests/test-http-proxy.t Sat Feb 12 16:08:41 2011 +0800 +++ b/tests/test-http-proxy.t Wed Feb 16 14:13:22 2011 -0600 @@ -104,13 +104,21 @@ * - - [*] "GET http://localhost:$HGPORT/?pairs=0000000000000000000000000000000000000000-0000000000000000000000000000000000000000&cmd=between HTTP/1.1" - - (glob) * - - [*] "GET http://localhost:$HGPORT/?cmd=heads HTTP/1.1" - - (glob) * - - [*] "GET http://localhost:$HGPORT/?cmd=changegroup&roots=0000000000000000000000000000000000000000 HTTP/1.1" - - (glob) + * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob) + * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys&namespace=bookmarks HTTP/1.1" - - (glob) * - - [*] "GET http://localhost:$HGPORT/?pairs=0000000000000000000000000000000000000000-0000000000000000000000000000000000000000&cmd=between HTTP/1.1" - - (glob) * - - [*] "GET http://localhost:$HGPORT/?cmd=heads HTTP/1.1" - - (glob) * - - [*] "GET http://localhost:$HGPORT/?cmd=changegroup&roots=0000000000000000000000000000000000000000 HTTP/1.1" - - (glob) + * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob) + * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys&namespace=bookmarks HTTP/1.1" - - (glob) * - - [*] "GET http://localhost:$HGPORT/?pairs=0000000000000000000000000000000000000000-0000000000000000000000000000000000000000&cmd=between HTTP/1.1" - - (glob) * - - [*] "GET http://localhost:$HGPORT/?cmd=heads HTTP/1.1" - - (glob) * - - [*] "GET http://localhost:$HGPORT/?cmd=changegroup&roots=0000000000000000000000000000000000000000 HTTP/1.1" - - (glob) + * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob) + * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys&namespace=bookmarks HTTP/1.1" - - (glob) * - - [*] "GET http://localhost:$HGPORT/?pairs=0000000000000000000000000000000000000000-0000000000000000000000000000000000000000&cmd=between HTTP/1.1" - - (glob) * - - [*] "GET http://localhost:$HGPORT/?cmd=heads HTTP/1.1" - - (glob) * - - [*] "GET http://localhost:$HGPORT/?cmd=changegroup&roots=0000000000000000000000000000000000000000 HTTP/1.1" - - (glob) + * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob) + * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys&namespace=bookmarks HTTP/1.1" - - (glob)
--- a/tests/test-http.t Sat Feb 12 16:08:41 2011 +0800
+++ b/tests/test-http.t Wed Feb 16 14:13:22 2011 -0600
@@ -1,5 +1,4 @@
 
-  $ cp "$TESTDIR"/printenv.py .
   $ hg init test
   $ cd test
   $ echo foo>foo
@@ -75,7 +74,7 @@
 
   $ cd copy-pull
   $ echo '[hooks]' >> .hg/hgrc
-  $ echo 'changegroup = python ../printenv.py changegroup' >> .hg/hgrc
+  $ echo 'changegroup = python "$TESTDIR"/printenv.py changegroup' >> .hg/hgrc
   $ hg pull
   changegroup hook: HG_NODE=5fed3813f7f5e1824344fdc9cf8f63bb662c292d HG_SOURCE=pull HG_URL=http://localhost:$HGPORT1/
   pulling from http://localhost:$HGPORT1/
--- a/tests/test-https.t Sat Feb 12 16:08:41 2011 +0800 +++ b/tests/test-https.t Wed Feb 16 14:13:22 2011 -0600 @@ -112,6 +112,7 @@ adding manifests adding file changes added 1 changesets with 4 changes to 4 files + warning: localhost certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting) updating to branch default 4 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg verify -R copy-pull @@ -140,6 +141,7 @@ adding manifests adding file changes added 1 changesets with 1 changes to 1 files + warning: localhost certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting) (run 'hg update' to get a working copy) $ cd .. @@ -222,3 +224,45 @@ - ignores that certificate doesn't match hostname $ hg -R copy-pull id https://127.0.0.1:$HGPORT/ 5fed3813f7f5 + +Prepare for connecting through proxy + + $ kill `cat hg1.pid` + $ sleep 1 + + $ ("$TESTDIR/tinyproxy.py" $HGPORT1 localhost >proxy.log 2>&1 </dev/null & + $ echo $! > proxy.pid) + $ cat proxy.pid >> $DAEMON_PIDS + $ sleep 2 + + $ echo "[http_proxy]" >> copy-pull/.hg/hgrc + $ echo "always=True" >> copy-pull/.hg/hgrc + $ echo "[hostfingerprints]" >> copy-pull/.hg/hgrc + $ echo "localhost =" >> copy-pull/.hg/hgrc + +Test unvalidated https through proxy + + $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull --insecure --traceback + pulling from https://localhost:$HGPORT/ + searching for changes + no changes found + +Test https with cacert and fingerprint through proxy + + $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull --config web.cacerts=pub.pem + pulling from https://localhost:$HGPORT/ + searching for changes + no changes found + $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull https://127.0.0.1:$HGPORT/ + pulling from https://127.0.0.1:$HGPORT/ + searching for changes + no changes found + +Test https with cert problems through proxy + + $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull --config web.cacerts=pub-other.pem + abort: error: _ssl.c:499: error:14090086:SSL routines:SSL3_GET_SERVER_CERTIFICATE:certificate verify failed + [255] + $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull --config web.cacerts=pub-expired.pem https://localhost:$HGPORT2/ + abort: error: _ssl.c:499: error:14090086:SSL routines:SSL3_GET_SERVER_CERTIFICATE:certificate verify failed + [255]
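The new warning lines above point at the two client-side settings that silence it: pinning the server certificate's fingerprint, or supplying a CA bundle to verify against. A hedged sketch of both (the fingerprint is the test certificate's, taken from the warning; the cacerts path is a placeholder):

  $ cat >> $HOME/.hgrc <<EOF
  > [hostfingerprints]
  > localhost = 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca
  > [web]
  > cacerts = /path/to/ca-bundle.pem
  > EOF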
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-i18n.t Wed Feb 16 14:13:22 2011 -0600
@@ -0,0 +1,20 @@
+Test that translations are compiled and installed correctly.
+
+Default encoding in tests is "ascii" and the translation is encoded
+using the "replace" error handler:
+
+  $ LANGUAGE=pt_BR hg tip
+  abortado: N?o h? um reposit?rio do Mercurial aqui (.hg n?o encontrado)!
+  [255]
+
+Using a more accomodating encoding:
+
+  $ HGENCODING=UTF-8 LANGUAGE=pt_BR hg tip
+  abortado: N\xc3\xa3o h\xc3\xa1 um reposit\xc3\xb3rio do Mercurial aqui (.hg n\xc3\xa3o encontrado)! (esc)
+  [255]
+
+Different encoding:
+
+  $ HGENCODING=Latin-1 LANGUAGE=pt_BR hg tip
+  abortado: N\xe3o h\xe1 um reposit\xf3rio do Mercurial aqui (.hg n\xe3o encontrado)! (esc)
+  [255]
--- a/tests/test-import.t Sat Feb 12 16:08:41 2011 +0800
+++ b/tests/test-import.t Wed Feb 16 14:13:22 2011 -0600
@@ -437,6 +437,13 @@
   $ hg revert -a
   reverting a
 
+
+import with --no-commit should have written .hg/last-message.txt
+
+  $ cat .hg/last-message.txt
+  change (no-eol)
+
+
 test fuzziness with eol=auto
 
   $ hg --config patch.eol=auto import --no-commit -v tip.patch
--- a/tests/test-inherit-mode.t Sat Feb 12 16:08:41 2011 +0800
+++ b/tests/test-inherit-mode.t Wed Feb 16 14:13:22 2011 -0600
@@ -105,7 +105,8 @@
   $ python ../printmodes.py ../push
   00770 ../push/.hg/
   00660 ../push/.hg/00changelog.i
-  00660 ../push/.hg/branchheads.cache
+  00770 ../push/.hg/cache/
+  00660 ../push/.hg/cache/branchheads
   00660 ../push/.hg/requires
   00770 ../push/.hg/store/
   00660 ../push/.hg/store/00changelog.i
--- a/tests/test-issue619.t Sat Feb 12 16:08:41 2011 +0800
+++ b/tests/test-issue619.t Wed Feb 16 14:13:22 2011 -0600
@@ -19,7 +19,12 @@
   $ hg merge b
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
+  $ hg branch
+  default
+  $ hg parent --template '{rev}:{node|short} {branches}: {desc}\n'
+  1:06c2121185be b: b
   $ hg ci -Ammerge
+  created new head
 
 Bogus fast-forward should fail:
 
--- a/tests/test-keyword.t Sat Feb 12 16:08:41 2011 +0800 +++ b/tests/test-keyword.t Wed Feb 16 14:13:22 2011 -0600 @@ -17,6 +17,8 @@ keyword = [keyword] demo.txt = + [keywordset] + svn = False [keywordmaps] Author = {author|user} Date = {date|utcdate} @@ -40,6 +42,8 @@ keyword = [keyword] demo.txt = + [keywordset] + svn = False [keywordmaps] Branch = {branches} $Branch: demobranch $ @@ -633,6 +637,8 @@ b = ignore demo.txt = i = ignore + [keywordset] + svn = False [keywordmaps] Xinfo = {author}: {desc} $Xinfo: test: hg keyword configuration and expansion example $
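The extra [keywordset] lines in the kwdemo output above expose a new switch between the built-in keyword maps. A brief, hedged example of turning on the svn-flavoured set in a repository (the file pattern is illustrative only):

  $ cat >> .hg/hgrc <<EOF
  > [extensions]
  > keyword =
  > [keyword]
  > **.py =
  > [keywordset]
  > svn = True
  > EOF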
--- a/tests/test-minirst.py Sat Feb 12 16:08:41 2011 +0800 +++ b/tests/test-minirst.py Wed Feb 16 14:13:22 2011 -0600 @@ -120,16 +120,19 @@ There is support for simple option lists, but only with long options: ---all Output all. ---both Output both (this description is - quite long). ---long Output all day long. +-X, --exclude filter an option with a short and long option with an argument +-I, --include an option with both a short option and a long option +--all Output all. +--both Output both (this description is + quite long). +--long Output all day long. ---par This option has two paragraphs in its description. - This is the first. +--par This option has two paragraphs in its description. + This is the first. - This is the second. Blank lines may be omitted between - options (as above) or left in (as here). + This is the second. Blank lines may be omitted between + options (as above) or left in (as here). + The next paragraph looks like an option list, but lacks the two-space marker after the option. It is treated as a normal paragraph: @@ -221,6 +224,10 @@ .. An indented comment Some indented text. + +.. + +Empty comment above """ debugformat('comments', comments, 30)
--- a/tests/test-minirst.py.out Sat Feb 12 16:08:41 2011 +0800 +++ b/tests/test-minirst.py.out Wed Feb 16 14:13:22 2011 -0600 @@ -180,14 +180,20 @@ There is support for simple option lists, but only with long options: ---all Output all. ---both Output both (this description is quite long). ---long Output all day long. ---par This option has two paragraphs in its - description. This is the first. + -X --exclude filter an option with a short and long option + with an argument + -I --include an option with both a short option and + a long option + --all Output all. + --both Output both (this description is quite + long). + --long Output all day long. + --par This option has two paragraphs in its + description. This is the first. - This is the second. Blank lines may be omitted - between options (as above) or left in (as here). + This is the second. Blank lines may + be omitted between options (as above) + or left in (as here). The next paragraph looks like an option list, but lacks the two-space marker after the option. It is treated as a normal @@ -202,23 +208,62 @@ option lists, but only with long options: ---all Output all. ---both Output both (this - description is - quite long). ---long Output all day - long. ---par This option has two - paragraphs in its - description. This - is the first. + -X --exclude filter an + option + with a + short + and + long + option + with an + argumen + t + -I --include an + option + with + both a + short + option + and a + long + option + --all Output + all. + --both Output + both + (this d + escript + ion is + quite + long). + --long Output + all day + long. + --par This + option + has two + paragra + phs in + its des + criptio + n. This + is the + first. - This is the second. - Blank lines may be - omitted between - options (as above) - or left in (as - here). + This is + the + second. + Blank + lines + may be + omitted + between + options + (as + above) + or left + in (as + here). The next paragraph looks like an option list, but lacks the @@ -339,5 +384,7 @@ Some text. Some indented text. + +Empty comment above ----------------------------------------------------------------------
--- a/tests/test-mq-caches.t Sat Feb 12 16:08:41 2011 +0800
+++ b/tests/test-mq-caches.t Wed Feb 16 14:13:22 2011 -0600
@@ -1,4 +1,4 @@
-  $ branches=.hg/branchheads.cache
+  $ branches=.hg/cache/branchheads
 
   $ echo '[extensions]' >> $HGRCPATH
   $ echo 'mq =' >> $HGRCPATH
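Together with the test-inherit-mode.t hunk above, this reflects the relocation of the cache files into a dedicated .hg/cache/ directory; after the change a repository's caches sit together, for example (illustrative listing; the file names are taken from the diffs in this changeset):

  $ ls .hg/cache
  branchheads
  tags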
--- a/tests/test-mq-qnew.t Sat Feb 12 16:08:41 2011 +0800
+++ b/tests/test-mq-qnew.t Wed Feb 16 14:13:22 2011 -0600
@@ -107,7 +107,7 @@
   abort: "foo#bar" cannot be used as the name of a patch
   abort: "foo:bar" cannot be used as the name of a patch
   % qnew with name containing slash
-  abort: cannot write patch "foo/": (Is a|No such file or) directory (re)
+  abort: path ends in directory separator: foo/
   abort: "foo" already exists as a directory
   foo/bar.patch
   popping foo/bar.patch
@@ -172,7 +172,7 @@
   abort: "foo#bar" cannot be used as the name of a patch
   abort: "foo:bar" cannot be used as the name of a patch
   % qnew with name containing slash
-  abort: cannot write patch "foo/": (Is a|No such file or) directory (re)
+  abort: path ends in directory separator: foo/
   abort: "foo" already exists as a directory
   foo/bar.patch
   popping foo/bar.patch
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-mq-qpush-exact.t Wed Feb 16 14:13:22 2011 -0600 @@ -0,0 +1,290 @@ + $ echo "[extensions]" >> $HGRCPATH + $ echo "mq=" >> $HGRCPATH + $ echo "graphlog=" >> $HGRCPATH + +make a test repository that looks like this: + +o 2:28bc7b1afd6a +| +| @ 1:d7fe2034f71b +|/ +o 0/62ecad8b70e5 + + $ hg init r0 + $ cd r0 + $ touch f0 + $ hg ci -m0 -Aq + $ touch f1 + $ hg ci -m1 -Aq + + $ hg update 0 -q + $ touch f2 + $ hg ci -m2 -Aq + $ hg update 1 -q + +make some patches with a parent: 1:d7fe2034f71b -> p0 -> p1 + + $ echo cp0 >> fp0 + $ hg add fp0 + $ hg qnew p0 -d "0 0" + + $ echo cp1 >> fp1 + $ hg add fp1 + $ hg qnew p1 -d "0 0" + + $ hg qpop -aq + patch queue now empty + +qpush --exact when at the parent + + $ hg update 1 -q + $ hg qpush -e + applying p0 + now at: p0 + $ hg parents -qr qbase + 1:d7fe2034f71b + $ hg qpop -aq + patch queue now empty + + $ hg qpush -e p0 + applying p0 + now at: p0 + $ hg parents -qr qbase + 1:d7fe2034f71b + $ hg qpop -aq + patch queue now empty + + $ hg qpush -e p1 + applying p0 + applying p1 + now at: p1 + $ hg parents -qr qbase + 1:d7fe2034f71b + $ hg qpop -aq + patch queue now empty + + $ hg qpush -ea + applying p0 + applying p1 + now at: p1 + $ hg parents -qr qbase + 1:d7fe2034f71b + $ hg qpop -aq + patch queue now empty + +qpush --exact when at another rev + + $ hg update 0 -q + $ hg qpush -e + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + applying p0 + now at: p0 + $ hg parents -qr qbase + 1:d7fe2034f71b + $ hg qpop -aq + patch queue now empty + + $ hg update 0 -q + $ hg qpush -e p0 + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + applying p0 + now at: p0 + $ hg parents -qr qbase + 1:d7fe2034f71b + $ hg qpop -aq + patch queue now empty + + $ hg update 0 -q + $ hg qpush -e p1 + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + applying p0 + applying p1 + now at: p1 + $ hg parents -qr qbase + 1:d7fe2034f71b + $ hg qpop -aq + patch queue now empty + + $ hg update 0 -q + $ hg qpush -ea + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + applying p0 + applying p1 + now at: p1 + $ hg parents -qr qbase + 1:d7fe2034f71b + $ hg qpop -aq + patch queue now empty + +qpush --exact while crossing branches + + $ hg update 2 -q + $ hg qpush -e + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + applying p0 + now at: p0 + $ hg parents -qr qbase + 1:d7fe2034f71b + $ hg qpop -aq + patch queue now empty + + $ hg update 2 -q + $ hg qpush -e p0 + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + applying p0 + now at: p0 + $ hg parents -qr qbase + 1:d7fe2034f71b + $ hg qpop -aq + patch queue now empty + + $ hg update 2 -q + $ hg qpush -e p1 + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + applying p0 + applying p1 + now at: p1 + $ hg parents -qr qbase + 1:d7fe2034f71b + $ hg qpop -aq + patch queue now empty + + $ hg update 2 -q + $ hg qpush -ea + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + applying p0 + applying p1 + now at: p1 + $ hg parents -qr qbase + 1:d7fe2034f71b + $ hg qpop -aq + patch queue now empty + +qpush --exact --force with changes to an unpatched file + + $ hg update 1 -q + $ echo c0 >> f0 + $ hg qpush -e + abort: local changes found, refresh first + [255] + $ hg qpush -ef + applying p0 + now at: p0 + $ cat f0 + c0 + $ rm f0 + $ touch f0 + $ hg qpop -aq + patch queue now empty + + $ hg update 1 -q + $ echo c0 >> f0 + $ hg qpush -e 
p1 + abort: local changes found, refresh first + [255] + $ hg qpush -e p1 -f + applying p0 + applying p1 + now at: p1 + $ cat f0 + c0 + $ rm f0 + $ touch f0 + $ hg qpop -aq + patch queue now empty + +qpush --exact --force with changes to a patched file + + $ hg update 1 -q + $ echo cp0-bad >> fp0 + $ hg add fp0 + $ hg qpush -e + abort: local changes found, refresh first + [255] + $ hg qpush -ef + applying p0 + file fp0 already exists + 1 out of 1 hunks FAILED -- saving rejects to file fp0.rej + patch failed, unable to continue (try -v) + patch failed, rejects left in working dir + errors during apply, please fix and refresh p0 + [2] + $ cat fp0 + cp0-bad + $ cat fp0.rej + --- fp0 + +++ fp0 + @@ -0,0 +1,1 @@ + +cp0 + $ hg qpop -aqf + patch queue now empty + $ rm fp0 + $ rm fp0.rej + + $ hg update 1 -q + $ echo cp1-bad >> fp1 + $ hg add fp1 + $ hg qpush -e p1 + abort: local changes found, refresh first + [255] + $ hg qpush -e p1 -f + applying p0 + applying p1 + file fp1 already exists + 1 out of 1 hunks FAILED -- saving rejects to file fp1.rej + patch failed, unable to continue (try -v) + patch failed, rejects left in working dir + errors during apply, please fix and refresh p1 + [2] + $ cat fp1 + cp1-bad + $ cat fp1.rej + --- fp1 + +++ fp1 + @@ -0,0 +1,1 @@ + +cp1 + $ hg qpop -aqf + patch queue now empty + $ rm fp1 + $ rm fp1.rej + +qpush --exact when already at a patch + + $ hg update 1 + 0 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg qpush -e p0 + applying p0 + now at: p0 + $ hg qpush -e p1 + abort: cannot push --exact with applied patches + [255] + $ hg qpop -aq + patch queue now empty + +qpush --exact --move should fail + + $ hg qpush -e --move p1 + abort: cannot use --exact and --move together + [255] + +qpush --exact a patch without a parent recorded + + $ hg qpush -q + now at: p0 + $ grep -v '# Parent' .hg/patches/p0 > p0.new + $ mv p0.new .hg/patches/p0 + $ hg qpop -aq + patch queue now empty + $ hg qpush -e + abort: p0 does not have a parent recorded + [255] + $ hg qpush -e p0 + abort: p0 does not have a parent recorded + [255] + $ hg qpush -e p1 + abort: p0 does not have a parent recorded + [255] + $ hg qpush -ea + abort: p0 does not have a parent recorded + [255] +
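Condensed from the scenarios above: qpush --exact (-e) first updates the working directory to the parent revision recorded in the patch, then applies it, and it refuses to run with patches already applied, together with --move, or when no parent is recorded. A short usage sketch (patch names as in the test):

  $ hg qpush --exact p1   # update to the parent recorded in the first patch, then apply up to p1
  $ hg qpush -ea          # same, but apply the entire series
  $ hg qpush -ef          # --force still tolerates local changes to unpatched files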
--- a/tests/test-mq-qrefresh.t Sat Feb 12 16:08:41 2011 +0800 +++ b/tests/test-mq-qrefresh.t Wed Feb 16 14:13:22 2011 -0600 @@ -487,74 +487,3 @@ $ cd .. - -Issue2499: refuse to add .hgsub{,state} to a patch - - $ hg init repo-2499 - $ cd repo-2499 - $ hg qinit - $ hg qnew -m 0 0.diff - $ echo a > a - $ hg init sub - $ cd sub - $ echo b > b - $ hg ci -Am 0sub - adding b - $ cd .. - -test when adding - $ echo sub = sub > .hgsub - $ echo `hg id -i --debug sub` sub > .hgsubstate - $ hg add - adding .hgsub - adding .hgsubstate - adding a - $ hg qrefresh - warning: not adding .hgsub - warning: not adding .hgsubstate - $ hg qfinish -a - $ hg status - A .hgsub - A .hgsubstate - $ hg forget .hgsubstate - $ rm .hgsubstate - -add subrepo with a real commit - $ hg ci -m 1 - committing subrepository sub - $ hg qnew -m 2 2.diff - -test when modifying - $ echo sub2 = sub2 >> .hgsub - $ hg qrefresh - warning: not refreshing .hgsub - $ echo 0000000000000000000000000000000000000000 sub2 >> .hgsubstate - $ hg qrefresh - warning: not refreshing .hgsub - warning: not refreshing .hgsubstate - $ hg revert --no-backup .hgsub .hgsubstate - -test when removing - $ hg rm .hgsub - $ hg rm .hgsubstate - $ hg qrefresh - warning: not removing .hgsub - warning: not removing .hgsubstate - $ hg status - R .hgsub - R .hgsubstate - $ hg revert --no-backup .hgsub .hgsubstate - -test when deleting - $ rm .hgsub .hgsubstate - $ hg qrefresh - warning: not removing .hgsub - warning: not removing .hgsubstate - warning: subrepo spec file .hgsub not found - $ hg status - ! .hgsub - ! .hgsubstate - $ hg cat -r1 .hgsub > .hgsub - $ hg revert --no-backup .hgsubstate - - $ cd ..
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-mq-subrepo-svn.t Wed Feb 16 14:13:22 2011 -0600 @@ -0,0 +1,52 @@ + $ "$TESTDIR/hghave" svn || exit 80 + + $ echo "[extensions]" >> $HGRCPATH + $ echo "mq=" >> $HGRCPATH + $ echo "[diff]" >> $HGRCPATH + $ echo "nodates=1" >> $HGRCPATH + +fn to create new repository, and cd into it + $ mkrepo() { + > hg init $1 + > cd $1 + > hg qinit + > } + + +handle svn subrepos safely + + $ svnadmin create svn-repo-2499 + $ curpath=`pwd | tr '\\\\' /` + $ expr "$svnpath" : "\/" > /dev/null + > if [ $? -ne 0 ]; then + > curpath="/$curpath" + > fi + $ svnurl="file://$curpath/svn-repo-2499/project" + $ mkdir -p svn-project-2499/trunk + $ svn import -m 'init project' svn-project-2499 "$svnurl" + Adding svn-project-2499/trunk + + Committed revision 1. + +qnew on repo w/svn subrepo + $ mkrepo repo-2499-svn-subrepo + $ svn co "$svnurl"/trunk sub + Checked out revision 1. + $ echo 'sub = [svn]sub' >> .hgsub + $ hg add .hgsub + $ hg status -S -X '**/format' + A .hgsub + ? sub/.svn/entries + $ hg qnew -m0 0.diff + committing subrepository sub + $ cd sub + $ echo a > a + $ svn add a + A a + $ svn st + A* a (glob) + $ cd .. + $ hg status -S # doesn't show status for svn subrepos (yet) + $ hg qnew -m1 1.diff + abort: uncommitted changes in subrepository sub + [255]
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-mq-subrepo.t Wed Feb 16 14:13:22 2011 -0600 @@ -0,0 +1,347 @@ + $ echo "[extensions]" >> $HGRCPATH + $ echo "mq=" >> $HGRCPATH + $ echo "record=" >> $HGRCPATH + $ echo "[diff]" >> $HGRCPATH + $ echo "nodates=1" >> $HGRCPATH + + $ stdin=`pwd`/stdin.tmp + +fn to create new repository w/dirty subrepo, and cd into it + $ mkrepo() { + > hg init $1 + > cd $1 + > hg qinit + > } + +fn to create dirty subrepo + $ mksubrepo() { + > hg init $1 + > cd $1 + > echo a > a + > hg add + > cd .. + > } + + $ testadd() { + > cat - > "$stdin" + > mksubrepo sub + > echo sub = sub >> .hgsub + > hg add .hgsub + > echo % abort when adding .hgsub w/dirty subrepo + > hg status -S + > echo '%' $* + > cat "$stdin" | hg $* + > echo [$?] + > hg -R sub ci -m0sub + > echo % update substate when adding .hgsub w/clean updated subrepo + > hg status -S + > echo '%' $* + > cat "$stdin" | hg $* + > hg debugsub + > } + + $ testmod() { + > cat - > "$stdin" + > mksubrepo sub2 + > echo sub2 = sub2 >> .hgsub + > echo % abort when modifying .hgsub w/dirty subrepo + > hg status -S + > echo '%' $* + > cat "$stdin" | hg $* + > echo [$?] + > hg -R sub2 ci -m0sub2 + > echo % update substate when modifying .hgsub w/clean updated subrepo + > hg status -S + > echo '%' $* + > cat "$stdin" | hg $* + > hg debugsub + > } + + $ testrm1() { + > cat - > "$stdin" + > mksubrepo sub3 + > echo sub3 = sub3 >> .hgsub + > hg ci -Aqmsub3 + > $EXTRA + > echo b >> sub3/a + > hg rm .hgsub + > echo % update substate when removing .hgsub w/dirty subrepo + > hg status -S + > echo '%' $* + > cat "$stdin" | hg $* + > echo % debugsub should be empty + > hg debugsub + > } + + $ testrm2() { + > cat - > "$stdin" + > mksubrepo sub4 + > echo sub4 = sub4 >> .hgsub + > hg ci -Aqmsub4 + > $EXTRA + > hg rm .hgsub + > echo % update substate when removing .hgsub w/clean updated subrepo + > hg status -S + > echo '%' $* + > cat "$stdin" | hg $* + > echo % debugsub should be empty + > hg debugsub + > } + + +handle subrepos safely on qnew + + $ mkrepo repo-2499-qnew + $ testadd qnew -m0 0.diff + adding a + % abort when adding .hgsub w/dirty subrepo + A .hgsub + A sub/a + % qnew -m0 0.diff + abort: uncommitted changes in subrepository sub + [255] + % update substate when adding .hgsub w/clean updated subrepo + A .hgsub + % qnew -m0 0.diff + committing subrepository sub + path sub + source sub + revision b2fdb12cd82b021c3b7053d67802e77b6eeaee31 + + $ testmod qnew -m1 1.diff + adding a + % abort when modifying .hgsub w/dirty subrepo + M .hgsub + A sub2/a + % qnew -m1 1.diff + abort: uncommitted changes in subrepository sub2 + [255] + % update substate when modifying .hgsub w/clean updated subrepo + M .hgsub + % qnew -m1 1.diff + committing subrepository sub2 + path sub + source sub + revision b2fdb12cd82b021c3b7053d67802e77b6eeaee31 + path sub2 + source sub2 + revision 1f94c7611cc6b74f5a17b16121a1170d44776845 + + $ hg qpop -qa + patch queue now empty + $ testrm1 qnew -m2 2.diff + adding a + % update substate when removing .hgsub w/dirty subrepo + M sub3/a + R .hgsub + % qnew -m2 2.diff + % debugsub should be empty + + $ hg qpop -qa + patch queue now empty + $ testrm2 qnew -m3 3.diff + adding a + % update substate when removing .hgsub w/clean updated subrepo + R .hgsub + % qnew -m3 3.diff + % debugsub should be empty + + $ cd .. 
+ + +handle subrepos safely on qrefresh + + $ mkrepo repo-2499-qrefresh + $ hg qnew -m0 0.diff + $ testadd qrefresh + adding a + % abort when adding .hgsub w/dirty subrepo + A .hgsub + A sub/a + % qrefresh + abort: uncommitted changes in subrepository sub + [255] + % update substate when adding .hgsub w/clean updated subrepo + A .hgsub + % qrefresh + committing subrepository sub + path sub + source sub + revision b2fdb12cd82b021c3b7053d67802e77b6eeaee31 + + $ hg qnew -m1 1.diff + $ testmod qrefresh + adding a + % abort when modifying .hgsub w/dirty subrepo + M .hgsub + A sub2/a + % qrefresh + abort: uncommitted changes in subrepository sub2 + [255] + % update substate when modifying .hgsub w/clean updated subrepo + M .hgsub + % qrefresh + committing subrepository sub2 + path sub + source sub + revision b2fdb12cd82b021c3b7053d67802e77b6eeaee31 + path sub2 + source sub2 + revision 1f94c7611cc6b74f5a17b16121a1170d44776845 + + $ hg qpop -qa + patch queue now empty + $ EXTRA='hg qnew -m2 2.diff' testrm1 qrefresh + adding a + % update substate when removing .hgsub w/dirty subrepo + M sub3/a + R .hgsub + % qrefresh + % debugsub should be empty + + $ hg qpop -qa + patch queue now empty + $ EXTRA='hg qnew -m3 3.diff' testrm2 qrefresh + adding a + % update substate when removing .hgsub w/clean updated subrepo + R .hgsub + % qrefresh + % debugsub should be empty + + $ cd .. + + +handle subrepos safely on qpush/qpop + + $ mkrepo repo-2499-qpush + $ mksubrepo sub + adding a + $ hg -R sub ci -m0sub + $ echo sub = sub > .hgsub + $ hg add .hgsub + $ hg qnew -m0 0.diff + committing subrepository sub + $ hg debugsub + path sub + source sub + revision b2fdb12cd82b021c3b7053d67802e77b6eeaee31 + +qpop + $ hg qpop + popping 0.diff + patch queue now empty + $ hg status -AS + $ hg debugsub + +qpush + $ hg qpush + applying 0.diff + now at: 0.diff + $ hg status -AS + C .hgsub + C .hgsubstate + C sub/a + $ hg debugsub + path sub + source sub + revision b2fdb12cd82b021c3b7053d67802e77b6eeaee31 + + $ cd .. + + +handle subrepos safely on qrecord + + $ mkrepo repo-2499-qrecord + $ testadd qrecord --config ui.interactive=1 -m0 0.diff <<EOF + > y + > y + > EOF + adding a + % abort when adding .hgsub w/dirty subrepo + A .hgsub + A sub/a + % qrecord --config ui.interactive=1 -m0 0.diff + diff --git a/.hgsub b/.hgsub + new file mode 100644 + examine changes to '.hgsub'? [Ynsfdaq?] + abort: uncommitted changes in subrepository sub + [255] + % update substate when adding .hgsub w/clean updated subrepo + A .hgsub + % qrecord --config ui.interactive=1 -m0 0.diff + diff --git a/.hgsub b/.hgsub + new file mode 100644 + examine changes to '.hgsub'? [Ynsfdaq?] + committing subrepository sub + path sub + source sub + revision b2fdb12cd82b021c3b7053d67802e77b6eeaee31 + + $ testmod qrecord --config ui.interactive=1 -m1 1.diff <<EOF + > y + > y + > EOF + adding a + % abort when modifying .hgsub w/dirty subrepo + M .hgsub + A sub2/a + % qrecord --config ui.interactive=1 -m1 1.diff + diff --git a/.hgsub b/.hgsub + 1 hunks, 1 lines changed + examine changes to '.hgsub'? [Ynsfdaq?] + @@ -1,1 +1,2 @@ + sub = sub + +sub2 = sub2 + record this change to '.hgsub'? [Ynsfdaq?] + abort: uncommitted changes in subrepository sub2 + [255] + % update substate when modifying .hgsub w/clean updated subrepo + M .hgsub + % qrecord --config ui.interactive=1 -m1 1.diff + diff --git a/.hgsub b/.hgsub + 1 hunks, 1 lines changed + examine changes to '.hgsub'? [Ynsfdaq?] + @@ -1,1 +1,2 @@ + sub = sub + +sub2 = sub2 + record this change to '.hgsub'? [Ynsfdaq?] 
+ committing subrepository sub2 + path sub + source sub + revision b2fdb12cd82b021c3b7053d67802e77b6eeaee31 + path sub2 + source sub2 + revision 1f94c7611cc6b74f5a17b16121a1170d44776845 + + $ hg qpop -qa + patch queue now empty + $ EXTRA= testrm1 qrecord --config ui.interactive=1 -m2 2.diff <<EOF + > y + > y + > EOF + adding a + % update substate when removing .hgsub w/dirty subrepo + M sub3/a + R .hgsub + % qrecord --config ui.interactive=1 -m2 2.diff + diff --git a/.hgsub b/.hgsub + deleted file mode 100644 + examine changes to '.hgsub'? [Ynsfdaq?] + % debugsub should be empty + + $ hg qpop -qa + patch queue now empty + $ EXTRA= testrm2 qrecord --config ui.interactive=1 -m3 3.diff <<EOF + > y + > y + > EOF + adding a + % update substate when removing .hgsub w/clean updated subrepo + R .hgsub + % qrecord --config ui.interactive=1 -m3 3.diff + diff --git a/.hgsub b/.hgsub + deleted file mode 100644 + examine changes to '.hgsub'? [Ynsfdaq?] + % debugsub should be empty + + $ cd ..
--- a/tests/test-mq.t Sat Feb 12 16:08:41 2011 +0800 +++ b/tests/test-mq.t Wed Feb 16 14:13:22 2011 -0600 @@ -284,12 +284,12 @@ qpush with dump of tag cache Dump the tag cache to ensure that it has exactly one head after qpush. - $ rm -f .hg/tags.cache + $ rm -f .hg/cache/tags $ hg tags > /dev/null -.hg/tags.cache (pre qpush): +.hg/cache/tags (pre qpush): - $ cat .hg/tags.cache + $ cat .hg/cache/tags 1 [\da-f]{40} (re) $ hg qpush @@ -297,9 +297,9 @@ now at: test.patch $ hg tags > /dev/null -.hg/tags.cache (post qpush): +.hg/cache/tags (post qpush): - $ cat .hg/tags.cache + $ cat .hg/cache/tags 2 [\da-f]{40} (re) $ checkundo qpush
--- a/tests/test-newbranch.t Sat Feb 12 16:08:41 2011 +0800
+++ b/tests/test-newbranch.t Wed Feb 16 14:13:22 2011 -0600
@@ -1,4 +1,4 @@
-  $ branchcache=.hg/branchheads.cache
+  $ branchcache=.hg/cache/branchheads
 
   $ hg init t
   $ cd t
@@ -208,12 +208,11 @@
   $ hg branch foo
   $ hg commit -m'Merge ff into foo'
+  created new head
   $ hg parents
-  changeset:   6:917eb54e1b4b
+  changeset:   6:6af8030670c9
   branch:      foo
   tag:         tip
-  parent:      4:98d14f698afe
-  parent:      5:6683a60370cb
   user:        test
   date:        Thu Jan 01 00:00:00 1970 +0000
   summary:     Merge ff into foo
 
--- a/tests/test-newcgi.t Sat Feb 12 16:08:41 2011 +0800 +++ b/tests/test-newcgi.t Wed Feb 16 14:13:22 2011 -0600 @@ -49,36 +49,7 @@ $ chmod 755 hgwebdir.cgi - $ DOCUMENT_ROOT="/var/www/hg"; export DOCUMENT_ROOT - $ GATEWAY_INTERFACE="CGI/1.1"; export GATEWAY_INTERFACE - $ HTTP_ACCEPT="text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5"; export HTTP_ACCEPT - $ HTTP_ACCEPT_CHARSET="ISO-8859-1,utf-8;q=0.7,*;q=0.7"; export HTTP_ACCEPT_CHARSET - $ HTTP_ACCEPT_ENCODING="gzip,deflate"; export HTTP_ACCEPT_ENCODING - $ HTTP_ACCEPT_LANGUAGE="en-us,en;q=0.5"; export HTTP_ACCEPT_LANGUAGE - $ HTTP_CACHE_CONTROL="max-age=0"; export HTTP_CACHE_CONTROL - $ HTTP_CONNECTION="keep-alive"; export HTTP_CONNECTION - $ HTTP_HOST="hg.omnifarious.org"; export HTTP_HOST - $ HTTP_KEEP_ALIVE="300"; export HTTP_KEEP_ALIVE - $ HTTP_USER_AGENT="Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.8.0.4) Gecko/20060608 Ubuntu/dapper-security Firefox/1.5.0.4"; export HTTP_USER_AGENT - $ PATH_INFO="/"; export PATH_INFO - $ PATH_TRANSLATED="/var/www/hg/index.html"; export PATH_TRANSLATED - $ QUERY_STRING=""; export QUERY_STRING - $ REMOTE_ADDR="127.0.0.2"; export REMOTE_ADDR - $ REMOTE_PORT="44703"; export REMOTE_PORT - $ REQUEST_METHOD="GET"; export REQUEST_METHOD - $ REQUEST_URI="/test/"; export REQUEST_URI - $ SCRIPT_FILENAME="/home/hopper/hg_public/test.cgi"; export SCRIPT_FILENAME - $ SCRIPT_NAME="/test"; export SCRIPT_NAME - $ SCRIPT_URI="http://hg.omnifarious.org/test/"; export SCRIPT_URI - $ SCRIPT_URL="/test/"; export SCRIPT_URL - $ SERVER_ADDR="127.0.0.1"; export SERVER_ADDR - $ SERVER_ADMIN="eric@localhost"; export SERVER_ADMIN - $ SERVER_NAME="hg.omnifarious.org"; export SERVER_NAME - $ SERVER_PORT="80"; export SERVER_PORT - $ SERVER_PROTOCOL="HTTP/1.1"; export SERVER_PROTOCOL - $ SERVER_SIGNATURE="<address>Apache/2.0.53 (Fedora) Server at hg.omnifarious.org Port 80</address>"; export SERVER_SIGNATURE - $ SERVER_SOFTWARE="Apache/2.0.53 (Fedora)"; export SERVER_SOFTWARE - + $ . "$TESTDIR/cgienv" $ python hgweb.cgi > page1 $ python hgwebdir.cgi > page2
--- a/tests/test-newercgi.t Sat Feb 12 16:08:41 2011 +0800 +++ b/tests/test-newercgi.t Wed Feb 16 14:13:22 2011 -0600 @@ -43,36 +43,7 @@ $ chmod 755 hgwebdir.cgi - $ DOCUMENT_ROOT="/var/www/hg"; export DOCUMENT_ROOT - $ GATEWAY_INTERFACE="CGI/1.1"; export GATEWAY_INTERFACE - $ HTTP_ACCEPT="text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5"; export HTTP_ACCEPT - $ HTTP_ACCEPT_CHARSET="ISO-8859-1,utf-8;q=0.7,*;q=0.7"; export HTTP_ACCEPT_CHARSET - $ HTTP_ACCEPT_ENCODING="gzip,deflate"; export HTTP_ACCEPT_ENCODING - $ HTTP_ACCEPT_LANGUAGE="en-us,en;q=0.5"; export HTTP_ACCEPT_LANGUAGE - $ HTTP_CACHE_CONTROL="max-age=0"; export HTTP_CACHE_CONTROL - $ HTTP_CONNECTION="keep-alive"; export HTTP_CONNECTION - $ HTTP_HOST="hg.omnifarious.org"; export HTTP_HOST - $ HTTP_KEEP_ALIVE="300"; export HTTP_KEEP_ALIVE - $ HTTP_USER_AGENT="Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.8.0.4) Gecko/20060608 Ubuntu/dapper-security Firefox/1.5.0.4"; export HTTP_USER_AGENT - $ PATH_INFO="/"; export PATH_INFO - $ PATH_TRANSLATED="/var/www/hg/index.html"; export PATH_TRANSLATED - $ QUERY_STRING=""; export QUERY_STRING - $ REMOTE_ADDR="127.0.0.2"; export REMOTE_ADDR - $ REMOTE_PORT="44703"; export REMOTE_PORT - $ REQUEST_METHOD="GET"; export REQUEST_METHOD - $ REQUEST_URI="/test/"; export REQUEST_URI - $ SCRIPT_FILENAME="/home/hopper/hg_public/test.cgi"; export SCRIPT_FILENAME - $ SCRIPT_NAME="/test"; export SCRIPT_NAME - $ SCRIPT_URI="http://hg.omnifarious.org/test/"; export SCRIPT_URI - $ SCRIPT_URL="/test/"; export SCRIPT_URL - $ SERVER_ADDR="127.0.0.1"; export SERVER_ADDR - $ SERVER_ADMIN="eric@localhost"; export SERVER_ADMIN - $ SERVER_NAME="hg.omnifarious.org"; export SERVER_NAME - $ SERVER_PORT="80"; export SERVER_PORT - $ SERVER_PROTOCOL="HTTP/1.1"; export SERVER_PROTOCOL - $ SERVER_SIGNATURE="<address>Apache/2.0.53 (Fedora) Server at hg.omnifarious.org Port 80</address>"; export SERVER_SIGNATURE - $ SERVER_SOFTWARE="Apache/2.0.53 (Fedora)"; export SERVER_SOFTWARE - + $ . "$TESTDIR/cgienv" $ python hgweb.cgi > page1 $ python hgwebdir.cgi > page2 @@ -81,7 +52,6 @@ $ REQUEST_URI="/test/test/" $ SCRIPT_URI="http://hg.omnifarious.org/test/test/" $ SCRIPT_URL="/test/test/" - $ python hgwebdir.cgi > page3 $ grep -i error page1 page2 page3
--- a/tests/test-no-symlinks Sat Feb 12 16:08:41 2011 +0800 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,47 +0,0 @@ -#!/bin/sh - -"$TESTDIR/hghave" no-symlink || exit 80 - -# The following script was used to create the bundle: -# -# hg init symlinks -# cd symlinks -# echo a > a -# mkdir d -# echo b > d/b -# ln -s a a.lnk -# ln -s d/b d/b.lnk -# hg ci -Am t -# hg bundle --base null ../test-no-symlinks.hg - -# Extract a symlink on a platform not supporting them -echo % unbundle -hg init t -cd t -hg pull -q "$TESTDIR/test-no-symlinks.hg" -hg update - -cat a.lnk && echo -cat d/b.lnk && echo - -# Copy a symlink and move another -echo % move and copy -hg copy a.lnk d/a2.lnk -hg mv d/b.lnk b2.lnk -hg ci -Am copy -cat d/a2.lnk && echo -cat b2.lnk && echo - -# Bundle and extract again -echo % bundle -hg bundle --base null ../symlinks.hg -cd .. - -hg init t2 -cd t2 -hg pull ../symlinks.hg -hg update - -cat a.lnk && echo -cat d/a2.lnk && echo -cat b2.lnk && echo
--- a/tests/test-no-symlinks.out Sat Feb 12 16:08:41 2011 +0800 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,20 +0,0 @@ -% unbundle -4 files updated, 0 files merged, 0 files removed, 0 files unresolved -a -d/b -% move and copy -a -d/b -% bundle -2 changesets found -pulling from ../symlinks.hg -requesting all changes -adding changesets -adding manifests -adding file changes -added 2 changesets with 6 changes to 6 files -(run 'hg update' to get a working copy) -5 files updated, 0 files merged, 0 files removed, 0 files unresolved -a -a -d/b
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-no-symlinks.t Wed Feb 16 14:13:22 2011 -0600 @@ -0,0 +1,59 @@ + $ "$TESTDIR/hghave" no-symlink || exit 80 + +# The following script was used to create the bundle: +# +# hg init symlinks +# cd symlinks +# echo a > a +# mkdir d +# echo b > d/b +# ln -s a a.lnk +# ln -s d/b d/b.lnk +# hg ci -Am t +# hg bundle --base null ../test-no-symlinks.hg + +Extract a symlink on a platform not supporting them + + $ hg init t + $ cd t + $ hg pull -q "$TESTDIR/test-no-symlinks.hg" + $ hg update + 4 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ cat a.lnk && echo + a + $ cat d/b.lnk && echo + d/b + +Copy a symlink and move another + + $ hg copy a.lnk d/a2.lnk + $ hg mv d/b.lnk b2.lnk + $ hg ci -Am copy + $ cat d/a2.lnk && echo + a + $ cat b2.lnk && echo + d/b + +Bundle and extract again + + $ hg bundle --base null ../symlinks.hg + 2 changesets found + $ cd .. + $ hg init t2 + $ cd t2 + $ hg pull ../symlinks.hg + pulling from ../symlinks.hg + requesting all changes + adding changesets + adding manifests + adding file changes + added 2 changesets with 6 changes to 6 files + (run 'hg update' to get a working copy) + $ hg update + 5 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ cat a.lnk && echo + a + $ cat d/a2.lnk && echo + a + $ cat b2.lnk && echo + d/b
--- a/tests/test-oldcgi.t Sat Feb 12 16:08:41 2011 +0800 +++ b/tests/test-oldcgi.t Wed Feb 16 14:13:22 2011 -0600 @@ -59,36 +59,7 @@ $ chmod 755 hgwebdir.cgi - $ DOCUMENT_ROOT="/var/www/hg"; export DOCUMENT_ROOT - $ GATEWAY_INTERFACE="CGI/1.1"; export GATEWAY_INTERFACE - $ HTTP_ACCEPT="text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5"; export HTTP_ACCEPT - $ HTTP_ACCEPT_CHARSET="ISO-8859-1,utf-8;q=0.7,*;q=0.7"; export HTTP_ACCEPT_CHARSET - $ HTTP_ACCEPT_ENCODING="gzip,deflate"; export HTTP_ACCEPT_ENCODING - $ HTTP_ACCEPT_LANGUAGE="en-us,en;q=0.5"; export HTTP_ACCEPT_LANGUAGE - $ HTTP_CACHE_CONTROL="max-age=0"; export HTTP_CACHE_CONTROL - $ HTTP_CONNECTION="keep-alive"; export HTTP_CONNECTION - $ HTTP_HOST="hg.omnifarious.org"; export HTTP_HOST - $ HTTP_KEEP_ALIVE="300"; export HTTP_KEEP_ALIVE - $ HTTP_USER_AGENT="Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.8.0.4) Gecko/20060608 Ubuntu/dapper-security Firefox/1.5.0.4"; export HTTP_USER_AGENT - $ PATH_INFO="/"; export PATH_INFO - $ PATH_TRANSLATED="/var/www/hg/index.html"; export PATH_TRANSLATED - $ QUERY_STRING=""; export QUERY_STRING - $ REMOTE_ADDR="127.0.0.2"; export REMOTE_ADDR - $ REMOTE_PORT="44703"; export REMOTE_PORT - $ REQUEST_METHOD="GET"; export REQUEST_METHOD - $ REQUEST_URI="/test/"; export REQUEST_URI - $ SCRIPT_FILENAME="/home/hopper/hg_public/test.cgi"; export SCRIPT_FILENAME - $ SCRIPT_NAME="/test"; export SCRIPT_NAME - $ SCRIPT_URI="http://hg.omnifarious.org/test/"; export SCRIPT_URI - $ SCRIPT_URL="/test/"; export SCRIPT_URL - $ SERVER_ADDR="127.0.0.1"; export SERVER_ADDR - $ SERVER_ADMIN="eric@localhost"; export SERVER_ADMIN - $ SERVER_NAME="hg.omnifarious.org"; export SERVER_NAME - $ SERVER_PORT="80"; export SERVER_PORT - $ SERVER_PROTOCOL="HTTP/1.1"; export SERVER_PROTOCOL - $ SERVER_SIGNATURE="<address>Apache/2.0.53 (Fedora) Server at hg.omnifarious.org Port 80</address>"; export SERVER_SIGNATURE - $ SERVER_SOFTWARE="Apache/2.0.53 (Fedora)"; export SERVER_SOFTWARE - + $ . "$TESTDIR/cgienv" $ python hgweb.cgi > page1 $ python hgwebdir.cgi > page2
--- a/tests/test-parentrevspec.t Sat Feb 12 16:08:41 2011 +0800
+++ b/tests/test-parentrevspec.t Wed Feb 16 14:13:22 2011 -0600
@@ -69,12 +69,12 @@
   6^^^^^: 0
   6^^^^^^: -1
   6^1: 5
-  6^2: abort: unknown revision '6^2'!
+  6^2: hg: parse error at 1: syntax error
   6^^2: 4
   6^1^2: 4
-  6^^3: abort: unknown revision '6^^3'!
+  6^^3: hg: parse error at 1: syntax error
   $ lookup "6~" "6~1" "6~2" "6~3" "6~4" "6~5" "6~42" "6~1^2" "6~1^2~2"
-  6~: abort: unknown revision '6~'!
+  6~: hg: parse error at 1: syntax error
   6~1: 5
   6~2: 3
   6~3: 2
@@ -102,4 +102,4 @@
   $ hg tag -l -r 2 "foo^bar"
   $ lookup "foo^bar" "foo^bar^"
   foo^bar: 2
-  foo^bar^: abort: unknown revision 'foo^bar^'!
+  foo^bar^: hg: parse error at 3: syntax error
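For reference, the '^' and '~' suffixes exercised here are handled by the revset parser, which is why malformed forms now yield a parse error rather than an unknown-revision abort. A couple of ordinary uses (any revision identifier works in place of tip):

  $ hg log -r "tip^"    # first parent of tip
  $ hg log -r "tip^2"   # second parent (only meaningful for a merge)
  $ hg log -r "tip~3"   # third first-parent ancestor, i.e. tip^^^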
--- a/tests/test-parseindex2.py Sat Feb 12 16:08:41 2011 +0800 +++ b/tests/test-parseindex2.py Wed Feb 16 14:13:22 2011 -0600 @@ -21,7 +21,7 @@ index = [] nodemap = {nullid: nullrev} n = off = 0 - # if we're not using lazymap, always read the whole index + l = len(data) - s append = index.append if inline: @@ -50,7 +50,7 @@ # add the magic null revision at -1 index.append((0, 0, 0, -1, -1, -1, -1, nullid)) - return index, nodemap, cache + return index, cache data_inlined = '\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x01\x8c' \ @@ -97,10 +97,10 @@ def runtest() : py_res_1 = py_parseindex(data_inlined, True) - c_res_1 = parsers.parse_index(data_inlined, True) + c_res_1 = parsers.parse_index2(data_inlined, True) py_res_2 = py_parseindex(data_non_inlined, False) - c_res_2 = parsers.parse_index(data_non_inlined, False) + c_res_2 = parsers.parse_index2(data_non_inlined, False) if py_res_1 != c_res_1: print "Parse index result (with inlined data) differs!"
--- a/tests/test-patchbomb.t Sat Feb 12 16:08:41 2011 +0800 +++ b/tests/test-patchbomb.t Wed Feb 16 14:13:22 2011 -0600 @@ -145,6 +145,17 @@ +b +.hg/last-email.txt + + $ cat > editor << '__EOF__' + > #!/bin/sh + > echo "a precious introductory message" > "$1" + > __EOF__ + $ chmod +x editor + $ HGEDITOR="'`pwd`'"/editor hg email -n -t foo -s test -r 0:tip > /dev/null + $ cat .hg/last-email.txt + a precious introductory message + $ hg email -m test.mbox -f quux -t foo -c bar -s test 0:tip \ > --config extensions.progress= --config progress.assume-tty=1 \ > --config progress.delay=0 --config progress.refresh=0
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-pending.t Wed Feb 16 14:13:22 2011 -0600 @@ -0,0 +1,117 @@ +Verify that pending changesets are seen by pretxn* hooks but not by other +processes that access the destination repo while the hooks are running. + +The hooks (python and external) both reject changesets after some think time, +during which another process runs pull. Each hook creates a file ('notify') to +indicate to the controlling process that it is running; the process removes the +file to indicate the hook can terminate. + +init env vars + + $ d=`pwd` + $ maxwait=20 + +utility to run the test - start a push in the background and run pull + + $ dotest() { + > rm -f notify + > printf 'push '; hg -R child-push tip --template '{node}\n' + > hg -R child-push -q push > push.out 2>&1 & + > + > # wait for hook to create the notify file + > i=$maxwait + > while [ ! -f notify -a $i != 0 ]; do + > sleep 1 + > i=`expr $i - 1` + > done + > + > # run pull + > hg -R child-pull -q pull + > rc=$? + > + > # tell hook to finish; notify should exist. + > rm notify + > wait + > + > cat push.out + > printf 'pull '; hg -R child-pull tip --template '{node}\n' + > return $rc + > } + +python hook + + $ cat <<EOF > reject.py + > import os, time + > from mercurial import ui, localrepo + > def rejecthook(ui, repo, hooktype, node, **opts): + > ui.write('hook %s\\n' % repo['tip'].hex()) + > # create the notify file so caller knows we're running + > fpath = os.path.join('$d', 'notify') + > f = open(fpath, 'w') + > f.close() + > # wait for ack - caller should delete the notify file + > i = $maxwait + > while os.path.exists(fpath) and i > 0: + > time.sleep(1) + > i -= 1 + > return True # reject the changesets + > EOF + +external hook + + $ cat <<EOF > reject.sh + > #! /bin/sh + > printf 'hook '; hg tip --template '{node}\\n' + > # create the notify file so caller knows we're running + > fpath=$d/notify + > touch \$fpath + > # wait for ack - caller should delete the notify file + > i=$maxwait + > while [ -f \$fpath -a \$i != 0 ]; do + > sleep 1 + > i=\`expr \$i - 1\` + > done + > exit 1 # reject the changesets + > EOF + $ chmod +x reject.sh + +create repos + + $ hg init parent + $ hg clone -q parent child-push + $ hg clone -q parent child-pull + $ echo a > child-push/a + $ hg -R child-push add child-push/a + $ hg -R child-push commit -m a -d '1000000 0' + +test python hook + + $ cat <<EOF > parent/.hg/hgrc + > [extensions] + > reject = $d/reject.py + > [hooks] + > pretxnchangegroup = python:reject.rejecthook + > EOF + + $ dotest + push 29b62aeb769fdf78d8d9c5f28b017f76d7ef824b + hook 29b62aeb769fdf78d8d9c5f28b017f76d7ef824b + transaction abort! + rollback completed + abort: pretxnchangegroup hook failed + pull 0000000000000000000000000000000000000000 + +test external hook + + $ cat <<EOF > parent/.hg/hgrc + > [hooks] + > pretxnchangegroup = $d/reject.sh + > EOF + + $ dotest + push 29b62aeb769fdf78d8d9c5f28b017f76d7ef824b + hook 29b62aeb769fdf78d8d9c5f28b017f76d7ef824b + transaction abort! + rollback completed + abort: pretxnchangegroup hook exited with status 1 + pull 0000000000000000000000000000000000000000
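As the introduction to this new test explains, a pretxnchangegroup hook runs while the incoming changesets are staged but not yet committed, so commands run by the hook itself can see them (the hook environment carries HG_PENDING and HG_NODE), while concurrent readers cannot. A minimal sketch of such a hook, modelled on reject.sh above (illustrative; a non-zero exit vetoes the changegroup):

  $ cat > check-pending.sh <<'EOF'
  > #!/bin/sh
  > # runs in the repository being pushed to and sees the pending tip
  > echo "pending tip: `hg tip --template '{node}\n'`"
  > exit 0
  > EOF
  $ cat >> .hg/hgrc <<'EOF'
  > [hooks]
  > pretxnchangegroup.check = sh check-pending.sh
  > EOF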
--- a/tests/test-progress.t Sat Feb 12 16:08:41 2011 +0800 +++ b/tests/test-progress.t Wed Feb 16 14:13:22 2011 -0600 @@ -23,78 +23,144 @@ > } > EOF - $ cat > filtercr.py <<EOF - > import sys, re - > for line in sys.stdin: - > line = re.sub(r'\r+[^\n]', lambda m: '\n' + m.group()[-1:], line) - > sys.stdout.write(line) - > print - > EOF - $ echo "[extensions]" >> $HGRCPATH $ echo "progress=" >> $HGRCPATH $ echo "loop=`pwd`/loop.py" >> $HGRCPATH $ echo "[progress]" >> $HGRCPATH + $ echo "format = topic bar number" >> $HGRCPATH $ echo "assume-tty=1" >> $HGRCPATH + $ echo "width=60" >> $HGRCPATH test default params, display nothing because of delay - $ hg -y loop 3 2>&1 | python filtercr.py + $ hg -y loop 3 2>&1 | $TESTDIR/filtercr.py $ echo "delay=0" >> $HGRCPATH $ echo "refresh=0" >> $HGRCPATH test with delay=0, refresh=0 - $ hg -y loop 3 2>&1 | python filtercr.py + $ hg -y loop 3 2>&1 | $TESTDIR/filtercr.py - loop [ ] 0/3 - loop [=====================> ] 1/3 - loop [============================================> ] 2/3 - \r (esc) + loop [ ] 0/3 + loop [===============> ] 1/3 + loop [===============================> ] 2/3 + \r (esc) test refresh is taken in account - $ hg -y --config progress.refresh=100 loop 3 2>&1 | python filtercr.py + $ hg -y --config progress.refresh=100 loop 3 2>&1 | $TESTDIR/filtercr.py test format options 1 - $ hg -y --config 'progress.format=number topic item+2' loop 2 2>&1 | python filtercr.py + $ hg -y --config 'progress.format=number topic item+2' loop 2 2>&1 \ + > | $TESTDIR/filtercr.py 0/2 loop lo 1/2 loop lo - \r (esc) + \r (esc) test format options 2 - $ hg -y --config 'progress.format=number item-3 bar' loop 2 2>&1 | python filtercr.py + $ hg -y --config 'progress.format=number item-3 bar' loop 2 2>&1 \ + > | $TESTDIR/filtercr.py - 0/2 p.0 [ ] - 1/2 p.1 [=================================> ] - \r (esc) + 0/2 p.0 [ ] + 1/2 p.1 [=======================> ] + \r (esc) test format options and indeterminate progress - $ hg -y --config 'progress.format=number item bar' loop -- -2 2>&1 | python filtercr.py + $ hg -y --config 'progress.format=number item bar' loop -- -2 2>&1 \ + > | $TESTDIR/filtercr.py - 0 loop.0 [ <=> ] - 1 loop.1 [ <=> ] - \r (esc) + 0 loop.0 [ <=> ] + 1 loop.1 [ <=> ] + \r (esc) make sure things don't fall over if count > total - $ hg -y loop --total 4 6 2>&1 | python filtercr.py + $ hg -y loop --total 4 6 2>&1 | $TESTDIR/filtercr.py - loop [ ] 0/4 - loop [================> ] 1/4 - loop [=================================> ] 2/4 - loop [==================================================> ] 3/4 - loop [===================================================================>] 4/4 - loop [ <=> ] 5/4 - \r (esc) + loop [ ] 0/4 + loop [===========> ] 1/4 + loop [=======================> ] 2/4 + loop [===================================> ] 3/4 + loop [===============================================>] 4/4 + loop [ <=> ] 5/4 + \r (esc) test immediate progress completion - $ hg -y loop 0 2>&1 | python filtercr.py + $ hg -y loop 0 2>&1 | $TESTDIR/filtercr.py + + +test delay time estimates + + $ cat > mocktime.py <<EOF + > import os + > import time + > + > class mocktime(object): + > def __init__(self, increment): + > self.time = 0 + > self.increment = increment + > def __call__(self): + > self.time += self.increment + > return self.time + > + > def uisetup(ui): + > time.time = mocktime(int(os.environ.get('MOCKTIME', '11'))) + > EOF + + $ echo "[extensions]" > $HGRCPATH + $ echo "mocktime=`pwd`/mocktime.py" >> $HGRCPATH + $ echo "progress=" >> $HGRCPATH + $ echo 
"loop=`pwd`/loop.py" >> $HGRCPATH + $ echo "[progress]" >> $HGRCPATH + $ echo "assume-tty=1" >> $HGRCPATH + $ echo "delay=25" >> $HGRCPATH + $ echo "width=60" >> $HGRCPATH + + $ hg -y loop 8 2>&1 | python $TESTDIR/filtercr.py + loop [=========> ] 2/8 1m07s + loop [===============> ] 3/8 56s + loop [=====================> ] 4/8 45s + loop [==========================> ] 5/8 34s + loop [================================> ] 6/8 23s + loop [=====================================> ] 7/8 12s + \r (esc) + + $ MOCKTIME=10000 hg -y loop 4 2>&1 | python $TESTDIR/filtercr.py + + loop [ ] 0/4 + loop [=========> ] 1/4 8h21m + loop [====================> ] 2/4 5h34m + loop [==============================> ] 3/4 2h47m + \r (esc) + + $ MOCKTIME=1000000 hg -y loop 4 2>&1 | python $TESTDIR/filtercr.py + + loop [ ] 0/4 + loop [=========> ] 1/4 5w00d + loop [====================> ] 2/4 3w03d + loop [=============================> ] 3/4 11d14h + \r (esc) + + + $ MOCKTIME=14000000 hg -y loop 4 2>&1 | python $TESTDIR/filtercr.py + + loop [ ] 0/4 + loop [=========> ] 1/4 1y18w + loop [===================> ] 2/4 46w03d + loop [=============================> ] 3/4 23w02d + \r (esc) + +Time estimates should not fail when there's no end point: + $ hg -y loop -- -4 2>&1 | python $TESTDIR/filtercr.py + + loop [ <=> ] 2 + loop [ <=> ] 3 + \r (esc)
--- a/tests/test-pull-http.t Sat Feb 12 16:08:41 2011 +0800
+++ b/tests/test-pull-http.t Wed Feb 16 14:13:22 2011 -0600
@@ -1,5 +1,4 @@
 
-  $ cp "$TESTDIR"/printenv.py .
   $ hg init test
   $ cd test
   $ echo a > a
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-push-cgi.t Wed Feb 16 14:13:22 2011 -0600 @@ -0,0 +1,74 @@ +This is a test of the push wire protocol over CGI-based hgweb. + +initialize repository + + $ hg init r + $ cd r + $ echo a > a + $ hg ci -A -m "0" + adding a + $ echo '[web]' > .hg/hgrc + $ echo 'allow_push = *' >> .hg/hgrc + $ echo 'push_ssl = false' >> .hg/hgrc + +create hgweb invocation script + + $ cat >hgweb.cgi <<HGWEB + > import cgitb + > cgitb.enable() + > from mercurial import demandimport; demandimport.enable() + > from mercurial.hgweb import hgweb + > from mercurial.hgweb import wsgicgi + > application = hgweb('.', 'test repository') + > wsgicgi.launch(application) + > HGWEB + $ chmod 755 hgweb.cgi + +test preparation + + $ . "$TESTDIR/cgienv" + $ REQUEST_METHOD="POST"; export REQUEST_METHOD + $ CONTENT_TYPE="application/octet-stream"; export CONTENT_TYPE + $ hg bundle --all bundle.hg + 1 changesets found + $ CONTENT_LENGTH=279; export CONTENT_LENGTH; + +expect unsynced changes + + $ QUERY_STRING="cmd=unbundle&heads=0000000000000000000000000000000000000000"; export QUERY_STRING + $ python hgweb.cgi <bundle.hg >page1 2>&1 + $ cat page1 + Status: 200 Script output follows\r (esc) + Content-Type: application/mercurial-0.1\r (esc) + Content-Length: 19\r (esc) + \r (esc) + 0 + unsynced changes + +successful force push + + $ QUERY_STRING="cmd=unbundle&heads=666f726365"; export QUERY_STRING + $ python hgweb.cgi <bundle.hg >page2 2>&1 + $ cat page2 + Status: 200 Script output follows\r (esc) + Content-Type: application/mercurial-0.1\r (esc) + \r (esc) + 1 + adding changesets + adding manifests + adding file changes + added 0 changesets with 0 changes to 1 files + +successful push + + $ QUERY_STRING="cmd=unbundle&heads=f7b1eb17ad24730a1651fccd46c43826d1bbc2ac"; export QUERY_STRING + $ python hgweb.cgi <bundle.hg >page3 2>&1 + $ cat page3 + Status: 200 Script output follows\r (esc) + Content-Type: application/mercurial-0.1\r (esc) + \r (esc) + 1 + adding changesets + adding manifests + adding file changes + added 0 changesets with 0 changes to 1 files
--- a/tests/test-push-http.t Sat Feb 12 16:08:41 2011 +0800
+++ b/tests/test-push-http.t Wed Feb 16 14:13:22 2011 -0600
@@ -1,5 +1,4 @@
 
-  $ cp "$TESTDIR"/printenv.py .
   $ hg init test
   $ cd test
   $ echo a > a
@@ -53,7 +52,7 @@
 
   $ echo 'allow_push = *' >> .hg/hgrc
   $ echo '[hooks]' >> .hg/hgrc
-  $ echo 'changegroup = python ../printenv.py changegroup 0' >> .hg/hgrc
+  $ echo 'changegroup = python "$TESTDIR"/printenv.py changegroup 0' >> .hg/hgrc
   $ req
   pushing to http://localhost:$HGPORT/
   searching for changes
@@ -61,7 +60,7 @@
   remote: adding manifests
   remote: adding file changes
   remote: added 1 changesets with 1 changes to 1 files
-  remote: changegroup hook: HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_SOURCE=serve HG_URL=remote:http
+  remote: changegroup hook: HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_SOURCE=serve HG_URL=remote:http:*: (glob)
   % serve errors
   $ hg rollback
   rolling back to revision 0 (undo serve)
--- a/tests/test-push-warn.t Sat Feb 12 16:08:41 2011 +0800 +++ b/tests/test-push-warn.t Wed Feb 16 14:13:22 2011 -0600 @@ -30,6 +30,23 @@ (you should pull and merge or use push -f to force) [255] + $ hg push --debug ../a + pushing to ../a + searching for changes + examining 1c9246a22a0a:d8d565842d04 + found incomplete branch 1c9246a22a0a:d8d565842d04 + searching: 1 queries + narrowing 1:1 d8d565842d04 + found new branch changeset 1c9246a22a0a + found new changesets starting at 1c9246a22a0a + 1 total queries + common changesets up to d8d565842d04 + new remote heads on branch 'default' + new remote head 1e108cc5548c + abort: push creates new remote heads on branch 'default'! + (you should pull and merge or use push -f to force) + [255] + $ hg pull ../a pulling from ../a searching for changes @@ -396,6 +413,7 @@ (branch merge, don't forget to commit) $ hg -R k ci -m merge + created new head $ hg -R k push -r a j pushing to j
--- a/tests/test-qrecord.t Sat Feb 12 16:08:41 2011 +0800 +++ b/tests/test-qrecord.t Wed Feb 16 14:13:22 2011 -0600 @@ -158,13 +158,13 @@ -2 +2 2 3 - record change 1/6 to '1.txt'? [Ynsfdaq?] + record change 1/4 to '1.txt'? [Ynsfdaq?] @@ -3,3 +3,3 @@ 3 -4 +4 4 5 - record change 2/6 to '1.txt'? [Ynsfdaq?] + record change 2/4 to '1.txt'? [Ynsfdaq?] diff --git a/2.txt b/2.txt 1 hunks, 1 lines changed examine changes to '2.txt'? [Ynsfdaq?] @@ -175,7 +175,7 @@ c d e - record change 4/6 to '2.txt'? [Ynsfdaq?] + record change 3/4 to '2.txt'? [Ynsfdaq?] diff --git a/dir/a.txt b/dir/a.txt 1 hunks, 1 lines changed examine changes to 'dir/a.txt'? [Ynsfdaq?] @@ -255,7 +255,7 @@ -4 +4 4 5 - record change 1/3 to '1.txt'? [Ynsfdaq?] + record change 1/2 to '1.txt'? [Ynsfdaq?] diff --git a/dir/a.txt b/dir/a.txt 1 hunks, 1 lines changed examine changes to 'dir/a.txt'? [Ynsfdaq?] @@ -265,7 +265,7 @@ someone up - record change 3/3 to 'dir/a.txt'? [Ynsfdaq?] + record change 2/2 to 'dir/a.txt'? [Ynsfdaq?] After qrecord b.patch 'tip'
--- a/tests/test-record.t Sat Feb 12 16:08:41 2011 +0800 +++ b/tests/test-record.t Wed Feb 16 14:13:22 2011 -0600 @@ -285,7 +285,9 @@ Modify end of plain file, add EOL $ echo >> plain - $ hg record -d '10 0' -m eol plain <<EOF + $ echo 1 > plain2 + $ hg add plain2 + $ hg record -d '10 0' -m eol plain plain2 <<EOF > y > y > y @@ -300,16 +302,23 @@ -7264f99c5f5ff3261504828afa4fb4d406c3af54 \ No newline at end of file +7264f99c5f5ff3261504828afa4fb4d406c3af54 - record this change to 'plain'? [Ynsfdaq?] + record change 1/2 to 'plain'? [Ynsfdaq?] + diff --git a/plain2 b/plain2 + new file mode 100644 + examine changes to 'plain2'? [Ynsfdaq?] -Modify beginning, trim end, record both +Modify beginning, trim end, record both, add another file to test +changes numbering $ rm plain $ for i in 2 2 3 4 5 6 7 8 9 10; do > echo $i >> plain > done + $ echo 2 >> plain2 - $ hg record -d '10 0' -m begin-and-end plain <<EOF + $ hg record -d '10 0' -m begin-and-end plain plain2 <<EOF + > y + > y > y > y > y @@ -323,23 +332,30 @@ 2 3 4 - record change 1/2 to 'plain'? [Ynsfdaq?] + record change 1/3 to 'plain'? [Ynsfdaq?] @@ -8,5 +8,3 @@ 8 9 10 -11 -7264f99c5f5ff3261504828afa4fb4d406c3af54 - record change 2/2 to 'plain'? [Ynsfdaq?] + record change 2/3 to 'plain'? [Ynsfdaq?] + diff --git a/plain2 b/plain2 + 1 hunks, 1 lines changed + examine changes to 'plain2'? [Ynsfdaq?] + @@ -1,1 +1,2 @@ + 1 + +2 + record change 3/3 to 'plain2'? [Ynsfdaq?] $ hg tip -p - changeset: 11:efca65c9b09e + changeset: 11:21df83db12b8 tag: tip user: test date: Thu Jan 01 00:00:10 1970 +0000 summary: begin-and-end - diff -r cd07d48e8cbe -r efca65c9b09e plain + diff -r ddb8b281c3ff -r 21df83db12b8 plain --- a/plain Thu Jan 01 00:00:10 1970 +0000 +++ b/plain Thu Jan 01 00:00:10 1970 +0000 @@ -1,4 +1,4 @@ @@ -354,6 +370,12 @@ 10 -11 -7264f99c5f5ff3261504828afa4fb4d406c3af54 + diff -r ddb8b281c3ff -r 21df83db12b8 plain2 + --- a/plain2 Thu Jan 01 00:00:10 1970 +0000 + +++ b/plain2 Thu Jan 01 00:00:10 1970 +0000 + @@ -1,1 +1,2 @@ + 1 + +2 Trim beginning, modify end @@ -396,13 +418,13 @@ record change 2/2 to 'plain'? [Ynsfdaq?] $ hg tip -p - changeset: 12:7d1e66983c15 + changeset: 12:99337501826f tag: tip user: test date: Thu Jan 01 00:00:11 1970 +0000 summary: end-only - diff -r efca65c9b09e -r 7d1e66983c15 plain + diff -r 21df83db12b8 -r 99337501826f plain --- a/plain Thu Jan 01 00:00:10 1970 +0000 +++ b/plain Thu Jan 01 00:00:11 1970 +0000 @@ -7,4 +7,4 @@ @@ -432,13 +454,13 @@ record this change to 'plain'? [Ynsfdaq?] $ hg tip -p - changeset: 13:a09fc62a0e61 + changeset: 13:bbd45465d540 tag: tip user: test date: Thu Jan 01 00:00:12 1970 +0000 summary: begin-only - diff -r 7d1e66983c15 -r a09fc62a0e61 plain + diff -r 99337501826f -r bbd45465d540 plain --- a/plain Thu Jan 01 00:00:11 1970 +0000 +++ b/plain Thu Jan 01 00:00:12 1970 +0000 @@ -1,6 +1,3 @@ @@ -533,13 +555,13 @@ record change 3/3 to 'plain'? [Ynsfdaq?] $ hg tip -p - changeset: 15:7d137997f3a6 + changeset: 15:f34a7937ec33 tag: tip user: test date: Thu Jan 01 00:00:14 1970 +0000 summary: middle-only - diff -r c0b8e5fb0be6 -r 7d137997f3a6 plain + diff -r 82c065d0b850 -r f34a7937ec33 plain --- a/plain Thu Jan 01 00:00:13 1970 +0000 +++ b/plain Thu Jan 01 00:00:14 1970 +0000 @@ -1,5 +1,10 @@ @@ -573,13 +595,13 @@ record this change to 'plain'? [Ynsfdaq?] 
$ hg tip -p - changeset: 16:4959e3ff13eb + changeset: 16:f9900b71a04c tag: tip user: test date: Thu Jan 01 00:00:15 1970 +0000 summary: end-only - diff -r 7d137997f3a6 -r 4959e3ff13eb plain + diff -r f34a7937ec33 -r f9900b71a04c plain --- a/plain Thu Jan 01 00:00:14 1970 +0000 +++ b/plain Thu Jan 01 00:00:15 1970 +0000 @@ -9,3 +9,5 @@ @@ -610,13 +632,13 @@ record this change to 'subdir/a'? [Ynsfdaq?] $ hg tip -p - changeset: 18:40698cd490b2 + changeset: 18:61be427a9deb tag: tip user: test date: Thu Jan 01 00:00:16 1970 +0000 summary: subdir-change - diff -r 661eacdc08b9 -r 40698cd490b2 subdir/a + diff -r a7ffae4d61cb -r 61be427a9deb subdir/a --- a/subdir/a Thu Jan 01 00:00:16 1970 +0000 +++ b/subdir/a Thu Jan 01 00:00:16 1970 +0000 @@ -1,1 +1,2 @@ @@ -709,13 +731,13 @@ examine changes to 'subdir/f2'? [Ynsfdaq?] $ hg tip -p - changeset: 20:d2d8c25276a8 + changeset: 20:b3df3dda369a tag: tip user: test date: Thu Jan 01 00:00:18 1970 +0000 summary: x - diff -r 25eb2a7694fb -r d2d8c25276a8 subdir/f2 + diff -r 6e02d6c9906d -r b3df3dda369a subdir/f2 --- a/subdir/f2 Thu Jan 01 00:00:17 1970 +0000 +++ b/subdir/f2 Thu Jan 01 00:00:18 1970 +0000 @@ -1,1 +1,2 @@ @@ -733,13 +755,13 @@ examine changes to 'subdir/f1'? [Ynsfdaq?] $ hg tip -p - changeset: 21:1013f51ce32f + changeset: 21:38ec577f126b tag: tip user: test date: Thu Jan 01 00:00:19 1970 +0000 summary: y - diff -r d2d8c25276a8 -r 1013f51ce32f subdir/f1 + diff -r b3df3dda369a -r 38ec577f126b subdir/f1 --- a/subdir/f1 Thu Jan 01 00:00:18 1970 +0000 +++ b/subdir/f1 Thu Jan 01 00:00:19 1970 +0000 @@ -1,1 +1,2 @@ @@ -768,7 +790,7 @@ record this change to 'subdir/f1'? [Ynsfdaq?] $ hg tip --config diff.git=True -p - changeset: 22:5df857735621 + changeset: 22:3261adceb075 tag: tip user: test date: Thu Jan 01 00:00:20 1970 +0000 @@ -804,7 +826,7 @@ record this change to 'subdir/f1'? [Ynsfdaq?] $ hg tip --config diff.git=True -p - changeset: 23:a4ae36a78715 + changeset: 23:b429867550db tag: tip user: test date: Thu Jan 01 00:00:21 1970 +0000 @@ -842,7 +864,7 @@ record this change to 'subdir/f1'? [Ynsfdaq?] $ hg tip --config diff.git=True -p - changeset: 24:1460f6e47966 + changeset: 24:0b082130c20a tag: tip user: test date: Thu Jan 01 00:00:22 1970 +0000 @@ -865,7 +887,7 @@ Abort early when a merge is in progress $ hg up 4 - 1 files updated, 0 files merged, 5 files removed, 0 files unresolved + 1 files updated, 0 files merged, 6 files removed, 0 files unresolved $ touch iwillmergethat $ hg add iwillmergethat @@ -876,14 +898,14 @@ $ hg ci -m'new head' $ hg up default - 5 files updated, 0 files merged, 2 files removed, 0 files unresolved + 6 files updated, 0 files merged, 2 files removed, 0 files unresolved $ hg merge thatbranch 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg record -m'will abort' - abort: cannot partially commit a merge (use hg commit instead) + abort: cannot partially commit a merge (use "hg commit" instead) [255] $ hg up -C @@ -921,14 +943,14 @@ record this change to 'subdir/f1'? [Ynsfdaq?] $ hg tip -p - changeset: 26:5bacc1f6e9cf + changeset: 26:b8306e70edc4 tag: tip - parent: 24:1460f6e47966 + parent: 24:0b082130c20a user: test date: Thu Jan 01 00:00:23 1970 +0000 summary: w1 - diff -r 1460f6e47966 -r 5bacc1f6e9cf subdir/f1 + diff -r 0b082130c20a -r b8306e70edc4 subdir/f1 --- a/subdir/f1 Thu Jan 01 00:00:22 1970 +0000 +++ b/subdir/f1 Thu Jan 01 00:00:23 1970 +0000 @@ -3,3 +3,4 @@
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-revset-dirstate-parents.t Wed Feb 16 14:13:22 2011 -0600 @@ -0,0 +1,52 @@ + $ HGENCODING=utf-8 + $ export HGENCODING + + $ try() { + > hg debugrevspec --debug $@ + > } + + $ log() { + > hg log --template '{rev}\n' -r "$1" + > } + + $ hg init repo + $ cd repo + + $ try 'p1()' + ('func', ('symbol', 'p1'), None) + $ try 'p2()' + ('func', ('symbol', 'p2'), None) + $ try 'parents()' + ('func', ('symbol', 'parents'), None) + +null revision + $ log 'p1()' + $ log 'p2()' + $ log 'parents()' + +working dir with a single parent + $ echo a > a + $ hg ci -Aqm0 + $ log 'p1()' + 0 + $ log 'tag() and p1()' + $ log 'p2()' + $ log 'parents()' + 0 + $ log 'tag() and parents()' + +merge in progress + $ echo b > b + $ hg ci -Aqm1 + $ hg up -q 0 + $ echo c > c + $ hg ci -Aqm2 + $ hg merge -q + $ log 'p1()' + 2 + $ log 'p2()' + 1 + $ log 'tag() and p2()' + $ log 'parents()' + 1 + 2
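test-revset-dirstate-parents.t is new: it checks the p1(), p2() and parents() revsets against an empty repository, a working directory with a single parent, and an in-progress merge. Outside the test harness the same queries can be scripted with a thin wrapper around the hg command line (a sketch only; any revset expression works the same way):

    import subprocess

    def revs(expr, repo='.'):
        # Return the revision numbers matched by a revset expression,
        # using the same log template as the test's log() helper.
        cmd = ['hg', '-R', repo, 'log', '--template', '{rev}\n', '-r', expr]
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
        out = proc.communicate()[0]
        return [int(line) for line in out.splitlines() if line.strip()]

    # revs('p1()'), revs('p2()'), revs('parents()')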
--- a/tests/test-rollback.t Sat Feb 12 16:08:41 2011 +0800 +++ b/tests/test-rollback.t Wed Feb 16 14:13:22 2011 -0600 @@ -72,8 +72,9 @@ $ cat .hg/last-message.txt ; echo precious commit message - $ echo '% same thing, but run $EDITOR' - % same thing, but run $EDITOR + +same thing, but run $EDITOR + $ cat > editor << '__EOF__' > #!/bin/sh > echo "another precious commit message" > "$1" @@ -88,5 +89,3 @@ $ cat .hg/last-message.txt another precious commit message -.hg/last-message.txt: -
--- a/tests/test-ssh.t Sat Feb 12 16:08:41 2011 +0800 +++ b/tests/test-ssh.t Wed Feb 16 14:13:22 2011 -0600 @@ -1,5 +1,4 @@ - $ cp "$TESTDIR"/printenv.py . This test tries to exercise the ssh functionality with a dummy script @@ -45,7 +44,7 @@ > bookmarks = > > [hooks] - > changegroup = python ../printenv.py changegroup-in-remote 0 ../dummylog + > changegroup = python "$TESTDIR"/printenv.py changegroup-in-remote 0 ../dummylog > EOF $ cd .. @@ -101,7 +100,7 @@ checking files 2 files, 1 changesets, 2 total revisions $ echo '[hooks]' >> .hg/hgrc - $ echo 'changegroup = python ../printenv.py changegroup-in-local 0 ../dummylog' >> .hg/hgrc + $ echo 'changegroup = python "$TESTDIR"/printenv.py changegroup-in-local 0 ../dummylog' >> .hg/hgrc empty default pull @@ -214,7 +213,7 @@ $ hg debugpushkey --config ui.ssh="python ../dummyssh" ssh://user@dummy/remote bookmarks foo 1160648e36cec0054048a7edc4110c6f84fde594 $ hg book -f foo - $ hg push + $ hg push --traceback pushing to ssh://user@dummy/remote searching for changes no changes found @@ -233,6 +232,9 @@ importing bookmark foo $ hg book -d foo $ hg push -B foo + pushing to ssh://user@dummy/remote + searching for changes + no changes found deleting remote bookmark foo a bad, evil hook that prints to stdout @@ -287,5 +289,3 @@ Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio - Got arguments 1:user@dummy 2:hg -R remote serve --stdio - Got arguments 1:user@dummy 2:hg -R remote serve --stdio
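Here, too, the hooks call printenv.py from $TESTDIR, and the ssh transport itself is faked with ui.ssh="python ../dummyssh". Conceptually the fake only has to log its arguments in the "Got arguments 1:... 2:..." form checked at the end of the test and then run the requested command locally; the sketch below is a guess at that shape, not the real dummyssh shipped with the test suite.

    import os
    import sys

    # Hypothetical dummyssh-like wrapper: record the arguments in the same
    # format the test's dummylog expects, then run the remote command
    # locally instead of opening a real ssh connection.
    log = open('dummylog', 'ab')
    log.write('Got arguments')
    for i, arg in enumerate(sys.argv[1:]):
        log.write(' %d:%s' % (i + 1, arg))
    log.write('\n')
    log.close()
    os.system(sys.argv[2])      # e.g. 'hg -R remote serve --stdio'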
--- a/tests/test-static-http.t Sat Feb 12 16:08:41 2011 +0800 +++ b/tests/test-static-http.t Wed Feb 16 14:13:22 2011 -0600 @@ -1,5 +1,4 @@ - $ cp "$TESTDIR"/printenv.py . $ hg clone http://localhost:$HGPORT/ copy abort: error: Connection refused [255] @@ -10,7 +9,7 @@ one pull $ cat > dumb.py <<EOF - > import BaseHTTPServer, SimpleHTTPServer, os, signal + > import BaseHTTPServer, SimpleHTTPServer, os, signal, sys > > def run(server_class=BaseHTTPServer.HTTPServer, > handler_class=SimpleHTTPServer.SimpleHTTPRequestHandler): @@ -18,7 +17,7 @@ > httpd = server_class(server_address, handler_class) > httpd.serve_forever() > - > signal.signal(signal.SIGTERM, lambda x: sys.exit(0)) + > signal.signal(signal.SIGTERM, lambda x, y: sys.exit(0)) > run() > EOF $ python dumb.py 2>/dev/null & @@ -27,10 +26,13 @@ $ cd remote $ hg init $ echo foo > bar - $ hg add bar + $ echo c2 > '.dotfile with spaces' + $ hg add + adding .dotfile with spaces + adding bar $ hg commit -m"test" $ hg tip - changeset: 0:61c9426e69fe + changeset: 0:02770d679fb8 tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 @@ -42,16 +44,16 @@ adding changesets adding manifests adding file changes - added 1 changesets with 1 changes to 1 files + added 1 changesets with 2 changes to 2 files updating to branch default - 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd local $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files - 1 files, 1 changesets, 1 total revisions + 2 files, 1 changesets, 2 total revisions $ cat bar foo $ cd ../remote @@ -61,12 +63,12 @@ check for HTTP opener failures when cachefile does not exist - $ rm .hg/*.cache + $ rm .hg/cache/* $ cd ../local $ echo '[hooks]' >> .hg/hgrc - $ echo 'changegroup = python ../printenv.py changegroup' >> .hg/hgrc + $ echo 'changegroup = python "$TESTDIR"/printenv.py changegroup' >> .hg/hgrc $ hg pull - changegroup hook: HG_NODE=822d6e31f08b9d6e3b898ce5e52efc0a4bf4905a HG_SOURCE=pull HG_URL=http://localhost:$HGPORT/remote + changegroup hook: HG_NODE=4ac2e3648604439c580c69b09ec9d93a88d93432 HG_SOURCE=pull HG_URL=http://localhost:$HGPORT/remote pulling from static-http://localhost:$HGPORT/remote searching for changes adding changesets @@ -96,9 +98,9 @@ adding changesets adding manifests adding file changes - added 1 changesets with 1 changes to 1 files + added 1 changesets with 2 changes to 2 files updating to branch default - 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved test with "/" URI (issue 747)
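Two real fixes hide in this hunk: sys is now imported, and the SIGTERM handler takes the two arguments (signal number and stack frame) that Python actually passes to signal handlers, so shutting the server down no longer raises a TypeError. Put together, a standalone version of the little static server looks roughly like this (the port lookup via $HGPORT is an assumption, since the context lines above do not show the server_address line):

    import BaseHTTPServer
    import SimpleHTTPServer
    import os
    import signal
    import sys

    def run(server_class=BaseHTTPServer.HTTPServer,
            handler_class=SimpleHTTPServer.SimpleHTTPRequestHandler):
        # Serve the current directory over plain HTTP for static-http clones.
        server_address = ('localhost', int(os.environ['HGPORT']))
        httpd = server_class(server_address, handler_class)
        httpd.serve_forever()

    # Signal handlers are called as handler(signum, frame); the old
    # one-argument lambda would have blown up when SIGTERM arrived.
    signal.signal(signal.SIGTERM, lambda signum, frame: sys.exit(0))
    run()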
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-subrepo-git.t Wed Feb 16 14:13:22 2011 -0600 @@ -0,0 +1,445 @@ + $ "$TESTDIR/hghave" git || exit 80 + +make git commits repeatable + + $ GIT_AUTHOR_NAME='test'; export GIT_AUTHOR_NAME + $ GIT_AUTHOR_EMAIL='test@example.org'; export GIT_AUTHOR_EMAIL + $ GIT_AUTHOR_DATE='1234567891 +0000'; export GIT_AUTHOR_DATE + $ GIT_COMMITTER_NAME="$GIT_AUTHOR_NAME"; export GIT_COMMITTER_NAME + $ GIT_COMMITTER_EMAIL="$GIT_AUTHOR_EMAIL"; export GIT_COMMITTER_EMAIL + $ GIT_COMMITTER_DATE="$GIT_AUTHOR_DATE"; export GIT_COMMITTER_DATE + +root hg repo + + $ hg init t + $ cd t + $ echo a > a + $ hg add a + $ hg commit -m a + $ cd .. + +new external git repo + + $ mkdir gitroot + $ cd gitroot + $ git init -q + $ echo g > g + $ git add g + $ git commit -q -m g + +add subrepo clone + + $ cd ../t + $ echo 's = [git]../gitroot' > .hgsub + $ git clone -q ../gitroot s + $ hg add .hgsub + $ hg commit -m 'new git subrepo' + committing subrepository s + $ hg debugsub + path s + source ../gitroot + revision da5f5b1d8ffcf62fb8327bcd3c89a4367a6018e7 + +record a new commit from upstream from a different branch + + $ cd ../gitroot + $ git checkout -q -b testing + $ echo gg >> g + $ git commit -q -a -m gg + + $ cd ../t/s + $ git pull -q >/dev/null 2>/dev/null + $ git checkout -q -b testing origin/testing >/dev/null + + $ cd .. + $ hg status --subrepos + M s/g + $ hg commit -m 'update git subrepo' + committing subrepository s + $ hg debugsub + path s + source ../gitroot + revision 126f2a14290cd5ce061fdedc430170e8d39e1c5a + +make $GITROOT pushable, by replacing it with a clone with nothing checked out + + $ cd .. + $ git clone gitroot gitrootbare --bare -q + $ rm -rf gitroot + $ mv gitrootbare gitroot + +clone root + + $ cd t + $ hg clone . ../tc + updating to branch default + cloning subrepo s + 3 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ cd ../tc + $ hg debugsub + path s + source ../gitroot + revision 126f2a14290cd5ce061fdedc430170e8d39e1c5a + +update to previous substate + + $ hg update 1 -q + $ cat s/g + g + $ hg debugsub + path s + source ../gitroot + revision da5f5b1d8ffcf62fb8327bcd3c89a4367a6018e7 + +clone root, make local change + + $ cd ../t + $ hg clone . ../ta + updating to branch default + cloning subrepo s + 3 files updated, 0 files merged, 0 files removed, 0 files unresolved + + $ cd ../ta + $ echo ggg >> s/g + $ hg status --subrepos + M s/g + $ hg commit -m ggg + committing subrepository s + $ hg debugsub + path s + source ../gitroot + revision 79695940086840c99328513acbe35f90fcd55e57 + +clone root separately, make different local change + + $ cd ../t + $ hg clone . ../tb + updating to branch default + cloning subrepo s + 3 files updated, 0 files merged, 0 files removed, 0 files unresolved + + $ cd ../tb/s + $ echo f > f + $ git add f + $ cd .. 
+ + $ hg status --subrepos + A s/f + $ hg commit -m f + committing subrepository s + $ hg debugsub + path s + source ../gitroot + revision aa84837ccfbdfedcdcdeeedc309d73e6eb069edc + +user b push changes + + $ hg push 2>/dev/null + pushing to $TESTTMP/t + pushing branch testing of subrepo s + searching for changes + adding changesets + adding manifests + adding file changes + added 1 changesets with 1 changes to 1 files + +user a pulls, merges, commits + + $ cd ../ta + $ hg pull + pulling from $TESTTMP/t + searching for changes + adding changesets + adding manifests + adding file changes + added 1 changesets with 1 changes to 1 files (+1 heads) + (run 'hg heads' to see heads, 'hg merge' to merge) + $ hg merge 2>/dev/null + pulling subrepo s + 0 files updated, 0 files merged, 0 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ cat s/f + f + $ cat s/g + g + gg + ggg + $ hg commit -m 'merge' + committing subrepository s + $ hg status --subrepos --rev 1:5 + M .hgsubstate + M s/g + A s/f + $ hg debugsub + path s + source ../gitroot + revision f47b465e1bce645dbf37232a00574aa1546ca8d3 + $ hg push 2>/dev/null + pushing to $TESTTMP/t + pushing branch testing of subrepo s + searching for changes + adding changesets + adding manifests + adding file changes + added 2 changesets with 2 changes to 1 files + +make upstream git changes + + $ cd .. + $ git clone -q gitroot gitclone + $ cd gitclone + $ echo ff >> f + $ git commit -q -a -m ff + $ echo fff >> f + $ git commit -q -a -m fff + $ git push origin testing 2>/dev/null + +make and push changes to hg without updating the subrepo + + $ cd ../t + $ hg clone . ../td + updating to branch default + cloning subrepo s + checking out detached HEAD in subrepo s + check out a git branch if you intend to make changes + 3 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ cd ../td + $ echo aa >> a + $ hg commit -m aa + $ hg push + pushing to $TESTTMP/t + searching for changes + adding changesets + adding manifests + adding file changes + added 1 changesets with 1 changes to 1 files + +sync to upstream git, distribute changes + + $ cd ../ta + $ hg pull -u -q + $ cd s + $ git pull -q >/dev/null 2>/dev/null + $ cd .. + $ hg commit -m 'git upstream sync' + committing subrepository s + $ hg debugsub + path s + source ../gitroot + revision 32a343883b74769118bb1d3b4b1fbf9156f4dddc + $ hg push -q + + $ cd ../tb + $ hg pull -q + $ hg update 2>/dev/null + pulling subrepo s + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg debugsub + path s + source ../gitroot + revision 32a343883b74769118bb1d3b4b1fbf9156f4dddc + +update to a revision without the subrepo, keeping the local git repository + + $ cd ../t + $ hg up 0 + 0 files updated, 0 files merged, 2 files removed, 0 files unresolved + $ ls -a s + . + .. + .git + + $ hg up 2 + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ ls -a s + . + .. + .git + g + +archive subrepos + + $ cd ../tc + $ hg pull -q + $ hg archive --subrepos -r 5 ../archive 2>/dev/null + pulling subrepo s + $ cd ../archive + $ cat s/f + f + $ cat s/g + g + gg + ggg + +create nested repo + + $ cd .. 
+ $ hg init outer + $ cd outer + $ echo b>b + $ hg add b + $ hg commit -m b + + $ hg clone ../t inner + updating to branch default + cloning subrepo s + 3 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ echo inner = inner > .hgsub + $ hg add .hgsub + $ hg commit -m 'nested sub' + committing subrepository inner + +nested commit + + $ echo ffff >> inner/s/f + $ hg status --subrepos + M inner/s/f + $ hg commit -m nested + committing subrepository inner + committing subrepository inner/s + +nested archive + + $ hg archive --subrepos ../narchive + $ ls ../narchive/inner/s | grep -v pax_global_header + f + g + +Check hg update --clean + $ cd $TESTTMP/ta + $ echo > s/g + $ cd s + $ echo c1 > f1 + $ echo c1 > f2 + $ git add f1 + $ cd .. + $ hg status -S + M s/g + A s/f1 + $ ls s + f + f1 + f2 + g + $ hg update --clean + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg status -S + $ ls s + f + f1 + f2 + g + +Sticky subrepositories, no changes + $ cd $TESTTMP/ta + $ hg id -n + 7 + $ cd s + $ git rev-parse HEAD + 32a343883b74769118bb1d3b4b1fbf9156f4dddc + $ cd .. + $ hg update 1 > /dev/null 2>&1 + $ hg id -n + 1 + $ cd s + $ git rev-parse HEAD + da5f5b1d8ffcf62fb8327bcd3c89a4367a6018e7 + $ cd .. + +Sticky subrepositorys, file changes + $ touch s/f1 + $ cd s + $ git add f1 + $ cd .. + $ hg id -n + 1 + $ cd s + $ git rev-parse HEAD + da5f5b1d8ffcf62fb8327bcd3c89a4367a6018e7 + $ cd .. + $ hg update 4 + subrepository sources for s differ + use (l)ocal source (da5f5b1d8ffcf62fb8327bcd3c89a4367a6018e7) or (r)emote source (aa84837ccfbdfedcdcdeeedc309d73e6eb069edc)? + l + 0 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg id -n + 4+ + $ cd s + $ git rev-parse HEAD + da5f5b1d8ffcf62fb8327bcd3c89a4367a6018e7 + $ cd .. + $ hg update --clean tip > /dev/null 2>&1 + +Sticky subrepository, revision updates + $ hg id -n + 7 + $ cd s + $ git rev-parse HEAD + 32a343883b74769118bb1d3b4b1fbf9156f4dddc + $ cd .. + $ cd s + $ git checkout aa84837ccfbdfedcdcdeeedc309d73e6eb069edc + Previous HEAD position was 32a3438... fff + HEAD is now at aa84837... f + $ cd .. + $ hg update 1 + subrepository sources for s differ (in checked out version) + use (l)ocal source (32a343883b74769118bb1d3b4b1fbf9156f4dddc) or (r)emote source (da5f5b1d8ffcf62fb8327bcd3c89a4367a6018e7)? + l + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg id -n + 1+ + $ cd s + $ git rev-parse HEAD + aa84837ccfbdfedcdcdeeedc309d73e6eb069edc + $ cd .. + +Sticky subrepository, file changes and revision updates + $ touch s/f1 + $ cd s + $ git add f1 + $ git rev-parse HEAD + aa84837ccfbdfedcdcdeeedc309d73e6eb069edc + $ cd .. + $ hg id -n + 1+ + $ hg update 7 + subrepository sources for s differ + use (l)ocal source (32a343883b74769118bb1d3b4b1fbf9156f4dddc) or (r)emote source (32a343883b74769118bb1d3b4b1fbf9156f4dddc)? + l + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg id -n + 7 + $ cd s + $ git rev-parse HEAD + aa84837ccfbdfedcdcdeeedc309d73e6eb069edc + $ cd .. + +Sticky repository, update --clean + $ hg update --clean tip + Previous HEAD position was aa84837... f + HEAD is now at 32a3438... fff + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg id -n + 7 + $ cd s + $ git rev-parse HEAD + 32a343883b74769118bb1d3b4b1fbf9156f4dddc + $ cd .. + +Test subrepo already at intended revision: + $ cd s + $ git checkout 32a343883b74769118bb1d3b4b1fbf9156f4dddc + HEAD is now at 32a3438... fff + $ cd .. 
+ $ hg update 1 + Previous HEAD position was 32a3438... fff + HEAD is now at da5f5b1... g + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg id -n + 1 + $ cd s + $ git rev-parse HEAD + da5f5b1d8ffcf62fb8327bcd3c89a4367a6018e7 + $ cd .. +
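The .hgsub entries used throughout this test ('s = [git]../gitroot', and later 'inner = inner') map a subrepo path to its source, with the bracketed prefix selecting the subrepo kind and plain Mercurial assumed when it is absent. A rough parser for that one-line format, for illustration only (this is not Mercurial's actual .hgsub handling):

    import re

    def parse_hgsub_line(line):
        # 's = [git]../gitroot' -> ('s', 'git', '../gitroot')
        # 'inner = inner'       -> ('inner', 'hg', 'inner')
        path, source = [part.strip() for part in line.split('=', 1)]
        m = re.match(r'\[([a-z]+)\](.*)$', source)
        if m:
            return path, m.group(1), m.group(2)
        return path, 'hg', source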
--- a/tests/test-subrepo-recursion.t Sat Feb 12 16:08:41 2011 +0800 +++ b/tests/test-subrepo-recursion.t Wed Feb 16 14:13:22 2011 -0600 @@ -221,9 +221,48 @@ z1 +z2 -Test archiving to a directory tree: +Enable progress extension for archive tests: + + $ cp $HGRCPATH $HGRCPATH.no-progress + $ cat >> $HGRCPATH <<EOF + > [extensions] + > progress = + > [progress] + > assume-tty = 1 + > delay = 0 + > format = topic bar number + > refresh = 0 + > width = 60 + > EOF + +Test archiving to a directory tree (the doubled lines in the output +only show up in the test output, not in real usage): - $ hg archive --subrepos ../archive + $ hg archive --subrepos ../archive 2>&1 | $TESTDIR/filtercr.py + + archiving [ ] 0/3 + archiving [ ] 0/3 + archiving [=============> ] 1/3 + archiving [=============> ] 1/3 + archiving [===========================> ] 2/3 + archiving [===========================> ] 2/3 + archiving [==========================================>] 3/3 + archiving [==========================================>] 3/3 + + archiving (foo) [ ] 0/3 + archiving (foo) [ ] 0/3 + archiving (foo) [===========> ] 1/3 + archiving (foo) [===========> ] 1/3 + archiving (foo) [=======================> ] 2/3 + archiving (foo) [=======================> ] 2/3 + archiving (foo) [====================================>] 3/3 + archiving (foo) [====================================>] 3/3 + + archiving (foo/bar) [ ] 0/1 + archiving (foo/bar) [ ] 0/1 + archiving (foo/bar) [================================>] 1/1 + archiving (foo/bar) [================================>] 1/1 + \r (esc) $ find ../archive | sort ../archive ../archive/.hg_archival.txt @@ -239,7 +278,35 @@ Test archiving to zip file (unzip output is unstable): - $ hg archive --subrepos ../archive.zip + $ hg archive --subrepos ../archive.zip 2>&1 | $TESTDIR/filtercr.py + + archiving [ ] 0/3 + archiving [ ] 0/3 + archiving [=============> ] 1/3 + archiving [=============> ] 1/3 + archiving [===========================> ] 2/3 + archiving [===========================> ] 2/3 + archiving [==========================================>] 3/3 + archiving [==========================================>] 3/3 + + archiving (foo) [ ] 0/3 + archiving (foo) [ ] 0/3 + archiving (foo) [===========> ] 1/3 + archiving (foo) [===========> ] 1/3 + archiving (foo) [=======================> ] 2/3 + archiving (foo) [=======================> ] 2/3 + archiving (foo) [====================================>] 3/3 + archiving (foo) [====================================>] 3/3 + + archiving (foo/bar) [ ] 0/1 + archiving (foo/bar) [ ] 0/1 + archiving (foo/bar) [================================>] 1/1 + archiving (foo/bar) [================================>] 1/1 + \r (esc) + +Disable progress extension and cleanup: + + $ mv $HGRCPATH.no-progress $HGRCPATH Clone and test outgoing:
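With the progress extension enabled, the archive commands redraw their bars with carriage returns, which is why the output is piped through $TESTDIR/filtercr.py and why each bar line appears twice in the captured output. The filter only has to turn those carriage returns into newlines; a minimal sketch of that idea (the real helper may differ in detail):

    import re
    import sys

    # Expand progress-bar carriage returns into newlines so each redraw of
    # the bar becomes a separate, diffable line of test output.
    sys.stdout.write(re.sub(r'\r+', '\n', sys.stdin.read()))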
--- a/tests/test-subrepo-svn.t Sat Feb 12 16:08:41 2011 +0800 +++ b/tests/test-subrepo-svn.t Wed Feb 16 14:13:22 2011 -0600 @@ -126,7 +126,7 @@ add an unrelated revision in svn and update the subrepo to without bringing any changes. - $ svn mkdir --parents "$SVNREPO/unrelated" -m 'create unrelated' + $ svn mkdir "$SVNREPO/unrelated" -m 'create unrelated' Committed revision 4. $ svn up s @@ -273,11 +273,11 @@ $ echo c1 > f2 $ svn add f1 -q $ svn status - ? a - X externals - ? f2 - M alpha - A f1 + ? * a (glob) + X * externals (glob) + ? * f2 (glob) + M * alpha (glob) + A * f1 (glob) Performing status on external item at 'externals' $ cd ../.. @@ -290,9 +290,151 @@ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd t/s $ svn status - ? a - X externals - ? f1 - ? f2 + ? * a (glob) + X * externals (glob) + ? * f1 (glob) + ? * f2 (glob) Performing status on external item at 'externals' + +Sticky subrepositories, no changes + $ cd $TESTTMP/sub/t + $ hg id -n + 2 + $ cd s + $ svnversion + 3 + $ cd .. + $ hg update 1 + U $TESTTMP/sub/t/s/alpha + + Fetching external item into '$TESTTMP/sub/t/s/externals' + Checked out external at revision 1. + + Checked out revision 2. + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg id -n + 1 + $ cd s + $ svnversion + 2 + $ cd .. + +Sticky subrepositorys, file changes + $ touch s/f1 + $ cd s + $ svn add f1 + A f1 + $ cd .. + $ hg id -n + 1 + $ cd s + $ svnversion + 2M + $ cd .. + $ hg update tip + subrepository sources for s differ + use (l)ocal source (2) or (r)emote source (3)? + l + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg id -n + 2+ + $ cd s + $ svnversion + 2M + $ cd .. + $ hg update --clean tip + U $TESTTMP/sub/t/s/alpha + + Fetching external item into '$TESTTMP/sub/t/s/externals' + Checked out external at revision 1. + + Checked out revision 3. + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + +Sticky subrepository, revision updates + $ hg id -n + 2 + $ cd s + $ svnversion + 3 + $ cd .. + $ cd s + $ svn update -r 1 + U alpha + U . + + Fetching external item into 'externals' + Updated external to revision 1. + + Updated to revision 1. + $ cd .. + $ hg update 1 + subrepository sources for s differ (in checked out version) + use (l)ocal source (1) or (r)emote source (2)? + l + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg id -n + 1+ + $ cd s + $ svnversion + 1 + $ cd .. + +Sticky subrepository, file changes and revision updates + $ touch s/f1 + $ cd s + $ svn add f1 + A f1 + $ svnversion + 1M + $ cd .. + $ hg id -n + 1+ + $ hg update tip + subrepository sources for s differ + use (l)ocal source (1) or (r)emote source (3)? + l + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg id -n + 2 + $ cd s + $ svnversion + 1M + $ cd .. + +Sticky repository, update --clean + $ hg update --clean tip + U $TESTTMP/sub/t/s/alpha + U $TESTTMP/sub/t/s + + Fetching external item into '$TESTTMP/sub/t/s/externals' + Checked out external at revision 1. + + Checked out revision 3. + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg id -n + 2 + $ cd s + $ svnversion + 3 + $ cd .. + +Test subrepo already at intended revision: + $ cd s + $ svn update -r 2 + U alpha + + Fetching external item into 'externals' + Updated external to revision 1. + + Updated to revision 2. + $ cd .. 
+ $ hg update 1 + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg id -n + 1+ + $ cd s + $ svnversion + 2 + $ cd ..
--- a/tests/test-subrepo.t Sat Feb 12 16:08:41 2011 +0800 +++ b/tests/test-subrepo.t Wed Feb 16 14:13:22 2011 -0600 @@ -75,16 +75,19 @@ commit: (clean) update: (current) -bump sub rev +bump sub rev (and check it is ignored by ui.commitsubrepos) $ echo b > s/a $ hg -R s ci -ms1 - $ hg ci -m3 + $ hg --config ui.commitsubrepos=no ci -m3 committing subrepository s -leave sub dirty +leave sub dirty (and check ui.commitsubrepos=no aborts the commit) $ echo c > s/a + $ hg --config ui.commitsubrepos=no ci -m4 + abort: uncommitted changes in subrepo s + [255] $ hg ci -m4 committing subrepository s $ hg tip -R s @@ -703,3 +706,125 @@ $ hg status -S ? s/b ? s/c + +Sticky subrepositories, no changes + $ cd $TESTTMP/sub/t + $ hg id + 925c17564ef8 tip + $ hg -R s id + 12a213df6fa9 tip + $ hg -R t id + 52c0adc0515a tip + $ hg update 11 + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg id + 365661e5936a + $ hg -R s id + fc627a69481f + $ hg -R t id + e95bcfa18a35 + +Sticky subrepositorys, file changes + $ touch s/f1 + $ touch t/f1 + $ hg add -S s/f1 + $ hg add -S t/f1 + $ hg id + 365661e5936a + $ hg -R s id + fc627a69481f+ + $ hg -R t id + e95bcfa18a35+ + $ hg update tip + subrepository sources for s differ + use (l)ocal source (fc627a69481f) or (r)emote source (12a213df6fa9)? + l + subrepository sources for t differ + use (l)ocal source (e95bcfa18a35) or (r)emote source (52c0adc0515a)? + l + 0 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg id + 925c17564ef8+ tip + $ hg -R s id + fc627a69481f+ + $ hg -R t id + e95bcfa18a35+ + $ hg update --clean tip + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + +Sticky subrepository, revision updates + $ hg id + 925c17564ef8 tip + $ hg -R s id + 12a213df6fa9 tip + $ hg -R t id + 52c0adc0515a tip + $ cd s + $ hg update -r -2 + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ cd ../t + $ hg update -r 2 + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ cd .. + $ hg update 10 + subrepository sources for t differ (in checked out version) + use (l)ocal source (7af322bc1198) or (r)emote source (20a0db6fbf6c)? + l + 0 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg id + e45c8b14af55+ + $ hg -R s id + 1c833a7a9e3a + $ hg -R t id + 7af322bc1198 + +Sticky subrepository, file changes and revision updates + $ touch s/f1 + $ touch t/f1 + $ hg add -S s/f1 + $ hg add -S t/f1 + $ hg id + e45c8b14af55+ + $ hg -R s id + 1c833a7a9e3a+ + $ hg -R t id + 7af322bc1198+ + $ hg update tip + subrepository sources for s differ + use (l)ocal source (1c833a7a9e3a) or (r)emote source (12a213df6fa9)? + l + subrepository sources for t differ + use (l)ocal source (7af322bc1198) or (r)emote source (52c0adc0515a)? + l + 0 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg id + 925c17564ef8 tip + $ hg -R s id + 1c833a7a9e3a+ + $ hg -R t id + 7af322bc1198+ + +Sticky repository, update --clean + $ hg update --clean tip + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg id + 925c17564ef8 tip + $ hg -R s id + 12a213df6fa9 tip + $ hg -R t id + 52c0adc0515a tip + +Test subrepo already at intended revision: + $ cd s + $ hg update fc627a69481f + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ cd .. 
+ $ hg update 11 + 0 files updated, 0 files merged, 0 files removed, 0 files unresolved + 0 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg id -n + 11+ + $ hg -R s id + fc627a69481f + $ hg -R t id + e95bcfa18a35
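The sticky-subrepository checks added here repeatedly compare hg id for the parent repository against the s/ and t/ subrepos before and after updates. A small helper that gathers those identifiers in one go (purely illustrative; the test does it inline with separate commands):

    import subprocess

    def identify(repo, subpaths=('s', 't')):
        # Collect 'hg id -i' for the parent repo and each subrepo path,
        # mirroring the manual checks in the sticky-subrepo tests above.
        result = {}
        for path in ('.',) + tuple(subpaths):
            cmd = ['hg', '-R', path, 'id', '-i']
            proc = subprocess.Popen(cmd, cwd=repo, stdout=subprocess.PIPE)
            result[path] = proc.communicate()[0].strip()
        return result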
--- a/tests/test-tag.t Sat Feb 12 16:08:41 2011 +0800 +++ b/tests/test-tag.t Wed Feb 16 14:13:22 2011 -0600 @@ -63,6 +63,18 @@ $ hg tag -f gack $ hg tag --remove gack gorp + $ hg tag "bleah " + abort: tag 'bleah' already exists (use -f to force) + [255] + $ hg tag " bleah" + abort: tag 'bleah' already exists (use -f to force) + [255] + $ hg tag " bleah" + abort: tag 'bleah' already exists (use -f to force) + [255] + $ hg tag -r 0 " bleahbleah " + $ hg tag -r 0 " bleah bleah " + $ cat .hgtags acb14030fe0a21b60322c440ad2d20cf7685a376 bleah acb14030fe0a21b60322c440ad2d20cf7685a376 bleah0 @@ -75,6 +87,9 @@ 0000000000000000000000000000000000000000 gack 336fccc858a4eb69609a291105009e484a6b6b8d gorp 0000000000000000000000000000000000000000 gorp + acb14030fe0a21b60322c440ad2d20cf7685a376 bleahbleah + acb14030fe0a21b60322c440ad2d20cf7685a376 bleah bleah + $ cat .hg/localtags d4f0d2909abc9290e2773c08837d70c1794e3f5a bleah1 @@ -107,7 +122,9 @@ $ hg -R test log -r0:5 changeset: 0:acb14030fe0a tag: bleah + tag: bleah bleah tag: bleah0 + tag: bleahbleah tag: foobar tag: localblah user: test @@ -210,7 +227,7 @@ $ hg tag hgtags-modified $ hg rollback - rolling back to revision 11 (undo commit) + rolling back to revision 13 (undo commit) $ hg st M .hgtags ? .hgtags.orig @@ -227,7 +244,7 @@ 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -m 'merge named branch' - $ hg up 11 + $ hg up 13 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg tag new-topo-head @@ -252,27 +269,29 @@ $ echo c1 > f1 $ hg ci -Am0 adding f1 + $ echo c2 > f2 + $ hg ci -Am1 + adding f2 + $ hg co -q 0 $ hg branch b1 marked working directory as branch b1 - $ echo c2 >> f1 - $ hg ci -m1 + $ hg ci -m2 $ hg up default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge b1 - 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + 0 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg tag t1 abort: uncommitted merge [255] $ hg status - M f1 $ hg tag --rev 1 t2 abort: uncommitted merge [255] $ hg tag --rev 1 --local t3 $ hg tags -v - tip 1:9466ada9ee90 - t3 1:9466ada9ee90 local + tip 2:8a8f787d0d5c + t3 1:c3adabd1a5f4 local $ cd ..
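The new cases pin down how tag names are normalized: surrounding whitespace is stripped before the "already exists" check (so " bleah" collides with the existing bleah tag), while internal whitespace survives, which is why "bleah bleah" lands in .hgtags as a distinct tag. In effect the name handling behaves like the trivial sketch below (illustrative, not the actual tagging code):

    def normalized(name):
        # ' bleah '       -> 'bleah'        (duplicate of the existing tag)
        # ' bleah bleah ' -> 'bleah bleah'  (internal space kept, so new)
        return name.strip()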
--- a/tests/test-tags.t Sat Feb 12 16:08:41 2011 +0800 +++ b/tests/test-tags.t Wed Feb 16 14:13:22 2011 -0600 @@ -1,7 +1,7 @@ Helper functions: $ cacheexists() { - > [ -f .hg/tags.cache ] && echo "tag cache exists" || echo "no tag cache" + > [ -f .hg/cache/tags ] && echo "tag cache exists" || echo "no tag cache" > } $ dumptags() { @@ -36,9 +36,9 @@ Try corrupting the cache - $ printf 'a b' > .hg/tags.cache + $ printf 'a b' > .hg/cache/tags $ hg identify - .hg/tags.cache is corrupt, rebuilding it + .hg/cache/tags is corrupt, rebuilding it acb14030fe0a tip $ cacheexists tag cache exists @@ -69,13 +69,13 @@ Repeat with cold tag cache: - $ rm -f .hg/tags.cache + $ rm -f .hg/cache/tags $ hg identify b9154636be93 tip And again, but now unable to write tag cache: - $ rm -f .hg/tags.cache + $ rm -f .hg/cache/tags $ chmod 555 .hg $ hg identify b9154636be93 tip @@ -216,7 +216,7 @@ Dump cache: - $ cat .hg/tags.cache + $ cat .hg/cache/tags 4 0c192d7d5e6b78a714de54a2e9627952a877e25a 0c04f2a8af31de17fab7422878ee5a2dadbc943d 3 6fa450212aeb2a21ed616a54aea39a4a27894cd7 7d3b718c964ef37b89e550ebdafd5789e76ce1b0 2 7a94127795a33c10a370c93f731fd9fea0b79af6 0c04f2a8af31de17fab7422878ee5a2dadbc943d @@ -325,7 +325,7 @@ $ hg tags # partly stale tip 4:735c3ca72986 bar 0:bbd179dfa0a7 - $ rm -f .hg/tags.cache + $ rm -f .hg/cache/tags $ hg tags # cold cache tip 4:735c3ca72986 bar 0:bbd179dfa0a7
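All that changes in this test is the cache location: the tag cache now lives at .hg/cache/tags instead of the old .hg/tags.cache. The shell helper at the top of the test reduces to the following check (same logic, written in Python purely for illustration):

    import os

    def cacheexists(repo='.'):
        # The tag cache moved from .hg/tags.cache to .hg/cache/tags,
        # which is the path every assertion in this test now uses.
        return os.path.exists(os.path.join(repo, '.hg', 'cache', 'tags'))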
--- a/tests/test-win32text.t Sat Feb 12 16:08:41 2011 +0800 +++ b/tests/test-win32text.t Wed Feb 16 14:13:22 2011 -0600 @@ -9,10 +9,6 @@ > data = data.replace('\n', '\r\n') > file(path, 'wb').write(data) > EOF - $ cat > print.py <<EOF - > import sys - > print(sys.stdin.read().replace('\n', '<LF>').replace('\r', '<CR>').replace('\0', '<NUL>')) - > EOF $ echo '[hooks]' >> .hg/hgrc $ echo 'pretxncommit.crlf = python:hgext.win32text.forbidcrlf' >> .hg/hgrc $ echo 'pretxnchangegroup.crlf = python:hgext.win32text.forbidcrlf' >> .hg/hgrc @@ -369,12 +365,13 @@ $ python -c 'file("f4.bat", "wb").write("rem empty\x0D\x0A")' $ hg add f3 f4.bat $ hg ci -m 6 - $ python print.py < bin - hello<NUL><CR><LF> - $ python print.py < f3 - some<LF>text<LF> - $ python print.py < f4.bat - rem empty<CR><LF> + $ cat bin + hello\x00\r (esc) + $ cat f3 + some + text + $ cat f4.bat + rem empty\r (esc) $ echo $ echo '[extensions]' >> .hg/hgrc @@ -405,38 +402,39 @@ tip $ rm f3 f4.bat bin - $ hg co -C 2>&1 | python -c 'import sys, os; sys.stdout.write(sys.stdin.read().replace(os.getcwd(), "...."))' + $ hg co -C WARNING: f4.bat already has CRLF line endings and does not need EOL conversion by the win32text plugin. Before your next commit, please reconsider your encode/decode settings in - Mercurial.ini or ..../.hg/hgrc. + Mercurial.ini or $TESTTMP/t/.hg/hgrc. 3 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ python print.py < bin - hello<NUL><CR><LF> - $ python print.py < f3 - some<CR><LF>text<CR><LF> - $ python print.py < f4.bat - rem empty<CR><LF> + $ cat bin + hello\x00\r (esc) + $ cat f3 + some\r (esc) + text\r (esc) + $ cat f4.bat + rem empty\r (esc) $ echo $ python -c 'file("f5.sh", "wb").write("# empty\x0D\x0A")' $ hg add f5.sh $ hg ci -m 7 - $ python print.py < f5.sh - # empty<CR><LF> - $ hg cat f5.sh | python print.py - # empty<LF> + $ cat f5.sh + # empty\r (esc) + $ hg cat f5.sh + # empty $ echo '% just linefeed' > linefeed $ hg ci -qAm 8 linefeed - $ python print.py < linefeed - % just linefeed<LF> - $ hg cat linefeed | python print.py - % just linefeed<LF> + $ cat linefeed + % just linefeed + $ hg cat linefeed + % just linefeed $ hg st -q $ hg revert -a linefeed no changes needed to linefeed - $ python print.py < linefeed - % just linefeed<LF> + $ cat linefeed + % just linefeed $ hg st -q $ echo modified >> linefeed $ hg st -q @@ -444,5 +442,5 @@ $ hg revert -a reverting linefeed $ hg st -q - $ python print.py < linefeed - % just linefeed<CR><LF> + $ cat linefeed + % just linefeed\r (esc)
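The private print.py helper is dropped because cat plus the test runner's \r (esc) markers can show CR, LF and NUL bytes directly. For eyeballing line endings by hand, the removed helper amounted to the few lines below (reconstructed from the deleted lines at the top of this hunk):

    import sys

    # Make byte-level line endings visible: '\n' -> <LF>, '\r' -> <CR>,
    # '\0' -> <NUL>, as the old print.py helper did.
    data = sys.stdin.read()
    print(data.replace('\n', '<LF>').replace('\r', '<CR>').replace('\0', '<NUL>'))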