--- a/Makefile Wed Feb 19 22:19:45 2014 +0900
+++ b/Makefile Wed Feb 19 16:46:47 2014 -0600
@@ -102,7 +102,7 @@
update-pot: i18n/hg.pot
-i18n/hg.pot: $(PYFILES) $(DOCFILES)
+i18n/hg.pot: $(PYFILES) $(DOCFILES) i18n/posplit i18n/hggettext
$(PYTHON) i18n/hggettext mercurial/commands.py \
hgext/*.py hgext/*/__init__.py \
mercurial/fileset.py mercurial/revset.py \
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/Makefile.python Wed Feb 19 16:46:47 2014 -0600
@@ -0,0 +1,79 @@
+PYTHONVER=2.7.6
+PYTHONNAME=python-
+PREFIX=$(HOME)/bin/prefix-$(PYTHONNAME)$(PYTHONVER)
+SYMLINKDIR=$(HOME)/bin
+
+help:
+ @echo
+ @echo 'Make a custom installation of a Python version'
+ @echo
+ @echo 'Common make parameters:'
+ @echo ' PYTHONVER=... [$(PYTHONVER)]'
+ @echo ' PREFIX=... [$(PREFIX)]'
+ @echo ' SYMLINKDIR=... [$(SYMLINKDIR) creating $(PYTHONNAME)$(PYTHONVER)]'
+ @echo
+ @echo 'Common make targets:'
+ @echo ' python - install Python $$PYTHONVER in $$PREFIX'
+ @echo ' symlink - create a $$SYMLINKDIR/$(PYTHONNAME)$$PYTHONVER symlink'
+ @echo
+ @echo 'Example: create a temporary Python installation:'
+ @echo ' $$ make -f Makefile.python python PYTHONVER=2.4 PREFIX=/tmp/p24'
+ @echo ' $$ /tmp/p24/bin/python -V'
+ @echo ' Python 2.4'
+ @echo
+ @echo 'Some external libraries are required for building Python: zlib bzip2 openssl.'
+ @echo 'Make sure their development packages are installed systemwide.'
+# fedora: yum install zlib-devel bzip2-devel openssl-devel
+# debian: apt-get install zlib1g-dev libbz2-dev libssl-dev
+ @echo
+ @echo 'To build a nice collection of interesting Python versions:'
+ @echo ' $$ for v in 2.{4{,.2,.3},5{,.6},6{,.1,.2,.9},7{,.6}}; do'
+ @echo ' make -f Makefile.python symlink PYTHONVER=$$v || break; done'
+ @echo 'To run a Mercurial test on all these Python versions:'
+ @echo ' $$ for py in `cd ~/bin && ls $(PYTHONNAME)2.*`; do'
+ @echo ' echo $$py; $$py run-tests.py test-http.t; echo; done'
+ @echo
+
+export LANGUAGE=C
+export LC_ALL=C
+
+python: $(PREFIX)/bin/python docutils
+ printf 'import sys, zlib, bz2, docutils\nif sys.version_info >= (2,6):\n import ssl' | $(PREFIX)/bin/python
+
+PYTHON_SRCDIR=Python-$(PYTHONVER)
+PYTHON_SRCFILE=$(PYTHON_SRCDIR).tgz
+
+$(PREFIX)/bin/python:
+ [ -f $(PYTHON_SRCFILE) ] || wget http://www.python.org/ftp/python/$(PYTHONVER)/$(PYTHON_SRCFILE) || [ -f $(PYTHON_SRCFILE) ]
+ rm -rf $(PYTHON_SRCDIR)
+ tar xf $(PYTHON_SRCFILE)
+ # Ubuntu disables SSLv2 the hard way, disable it on old Pythons too
+ -sed -i 's,self.*SSLv2_method(),0;//\0,g' $(PYTHON_SRCDIR)/Modules/_ssl.c
+ # Find multiarch system libraries on Ubuntu with Python 2.4.x
+ # http://lipyrary.blogspot.dk/2011/05/how-to-compile-python-on-ubuntu-1104.html
+ -sed -i "s|lib_dirs = .* \[|\0'/usr/lib/`dpkg-architecture -qDEB_HOST_MULTIARCH`',|g" $(PYTHON_SRCDIR)/setup.py
+ # Find multiarch system libraries on Ubuntu and disable fortify error when setting argv
+ LDFLAGS="-L/usr/lib/`dpkg-architecture -qDEB_HOST_MULTIARCH`"; \
+ BASECFLAGS=-U_FORTIFY_SOURCE; \
+ export LDFLAGS BASECFLAGS; \
+ cd $(PYTHON_SRCDIR) && ./configure --prefix=$(PREFIX) && make all SVNVERSION=pwd && make install
+ printf 'import sys, zlib, bz2\nif sys.version_info >= (2,6):\n import ssl' | $(PREFIX)/bin/python
+ rm -rf $(PYTHON_SRCDIR)
+
+DOCUTILSVER=0.11
+DOCUTILS_SRCDIR=docutils-$(DOCUTILSVER)
+DOCUTILS_SRCFILE=$(DOCUTILS_SRCDIR).tar.gz
+
+docutils: $(PREFIX)/bin/python
+ @$(PREFIX)/bin/python -c 'import docutils' || ( set -ex; \
+ [ -f $(DOCUTILS_SRCFILE) ] || wget http://downloads.sourceforge.net/project/docutils/docutils/$(DOCUTILSVER)/$(DOCUTILS_SRCFILE) || [ -f $(DOCUTILS_SRCFILE) ]; \
+ rm -rf $(DOCUTILS_SRCDIR); \
+ tar xf $(DOCUTILS_SRCFILE); \
+ cd $(DOCUTILS_SRCDIR) && $(PREFIX)/bin/python setup.py install --prefix=$(PREFIX); \
+ $(PREFIX)/bin/python -c 'import docutils'; \
+ rm -rf $(DOCUTILS_SRCDIR); )
+
+symlink: python $(SYMLINKDIR)
+ ln -sf $(PREFIX)/bin/python $(SYMLINKDIR)/$(PYTHONNAME)$(PYTHONVER)
+
+.PHONY: help python docutils symlink
--- a/contrib/check-code.py Wed Feb 19 22:19:45 2014 +0900
+++ b/contrib/check-code.py Wed Feb 19 16:46:47 2014 -0600
@@ -150,6 +150,9 @@
"explicit exit code checks unnecessary"),
(uprefix + r'set -e', "don't use set -e"),
(uprefix + r'(\s|fi\b|done\b)', "use > for continued lines"),
+ (uprefix + r'.*:\.\S*/', "x:.y in a path does not work on msys, rewrite "
+ "as x://.y, or see `hg log -k msys` for alternatives", r'-\S+:\.|' #-Rxxx
+ 'hg pull -q file:../test'), # in test-pull.t which is skipped on windows
(r'^ saved backup bundle to \$TESTTMP.*\.hg$', winglobmsg),
(r'^ changeset .* references (corrupted|missing) \$TESTTMP/.*[^)]$',
winglobmsg),
@@ -162,6 +165,8 @@
(r'^ moving \S+/.*[^)]$', winglobmsg),
(r'^ no changes made to subrepo since.*/.*[^)]$', winglobmsg),
(r'^ .*: largefile \S+ not available from file:.*/.*[^)]$', winglobmsg),
+ (r'^ .*file://\$TESTTMP',
+ 'write "file:/*/$TESTTMP" + (glob) to match on windows too'),
],
# warnings
[
@@ -306,6 +311,7 @@
txtpats = [
[
('\s$', 'trailing whitespace'),
+ ('.. note::[ \n][^\n]', 'add two newlines after note::')
],
[]
]
--- a/contrib/hgfixes/fix_bytesmod.py Wed Feb 19 22:19:45 2014 +0900
+++ b/contrib/hgfixes/fix_bytesmod.py Wed Feb 19 16:46:47 2014 -0600
@@ -33,9 +33,10 @@
'''
def transform(self, node, results):
- if self.filename in blacklist:
- return
- elif self.filename == 'mercurial/util.py':
+ for bfn in blacklist:
+ if self.filename.endswith(bfn):
+ return
+ if not self.filename.endswith('mercurial/py3kcompat.py'):
touch_import('.', 'py3kcompat', node=node)
formatstr = results['formatstr'].clone()
@@ -60,4 +61,3 @@
call = Call(Name('bytesformatter', prefix=' '), args)
return call
-
--- a/contrib/import-checker.py Wed Feb 19 22:19:45 2014 +0900
+++ b/contrib/import-checker.py Wed Feb 19 16:46:47 2014 -0600
@@ -11,12 +11,15 @@
def dotted_name_of_path(path):
"""Given a relative path to a source file, return its dotted module name.
-
>>> dotted_name_of_path('mercurial/error.py')
'mercurial.error'
+ >>> dotted_name_of_path('zlibmodule.so')
+ 'zlib'
"""
parts = path.split('/')
- parts[-1] = parts[-1][:-3] # remove .py
+ parts[-1] = parts[-1].split('.', 1)[0] # remove .py and .so and .ARCH.so
+ if parts[-1].endswith('module'):
+ parts[-1] = parts[-1][:-6]
return '.'.join(parts)
@@ -136,7 +139,7 @@
http://bugs.python.org/issue19510.
>>> list(verify_stdlib_on_own_line('import sys, foo'))
- ['mixed stdlib and relative imports:\\n foo, sys']
+ ['mixed imports\\n stdlib: sys\\n relative: foo']
>>> list(verify_stdlib_on_own_line('import sys, os'))
[]
>>> list(verify_stdlib_on_own_line('import foo, bar'))
@@ -144,13 +147,13 @@
"""
for node in ast.walk(ast.parse(source)):
if isinstance(node, ast.Import):
- from_stdlib = {}
+ from_stdlib = {False: [], True: []}
for n in node.names:
- from_stdlib[n.name] = n.name in stdlib_modules
- num_std = len([x for x in from_stdlib.values() if x])
- if num_std not in (len(from_stdlib.values()), 0):
- yield ('mixed stdlib and relative imports:\n %s' %
- ', '.join(sorted(from_stdlib.iterkeys())))
+ from_stdlib[n.name in stdlib_modules].append(n.name)
+ if from_stdlib[True] and from_stdlib[False]:
+ yield ('mixed imports\n stdlib: %s\n relative: %s' %
+ (', '.join(sorted(from_stdlib[True])),
+ ', '.join(sorted(from_stdlib[False]))))
class CircularImport(Exception):
pass
--- a/hgext/color.py Wed Feb 19 22:19:45 2014 +0900
+++ b/hgext/color.py Wed Feb 19 16:46:47 2014 -0600
@@ -426,6 +426,16 @@
_("when to colorize (boolean, always, auto, or never)"),
_('TYPE')))
+def debugcolor(ui, repo, **opts):
+ global _styles
+ _styles = {}
+ for effect in _effects.keys():
+ _styles[effect] = effect
+ ui.write(('colormode: %s\n') % ui._colormode)
+ ui.write(_('available colors:\n'))
+ for label, colors in _styles.items():
+ ui.write(('%s\n') % colors, label=label)
+
if os.name != 'nt':
w32effects = None
else:
@@ -555,3 +565,8 @@
finally:
# Explicitly reset original attributes
_kernel32.SetConsoleTextAttribute(stdout, origattr)
+
+cmdtable = {
+ 'debugcolor':
+ (debugcolor, [], ('hg debugcolor'))
+}
--- a/hgext/convert/__init__.py Wed Feb 19 22:19:45 2014 +0900
+++ b/hgext/convert/__init__.py Wed Feb 19 16:46:47 2014 -0600
@@ -141,6 +141,14 @@
branch names. This can be used to (for instance) move code in one
repository from "default" to a named branch.
+ The closemap is a file that allows closing of a branch. This is useful if
+ you want to close a branch. Each line contains one or more revision
+ identifiers or hashes, separated by white space or commas.
+
+ The tagmap is a file that is exactly analogous to the branchmap. This will
+ rename tags on the fly and prevent the 'update tags' commit usually found
+ at the end of a convert process.
+
Mercurial Source
################
@@ -319,6 +327,10 @@
_('splice synthesized history into place'), _('FILE')),
('', 'branchmap', '',
_('change branch names while converting'), _('FILE')),
+ ('', 'closemap', '',
+ _('closes given revs'), _('FILE')),
+ ('', 'tagmap', '',
+ _('change tag names while converting'), _('FILE')),
('', 'branchsort', None, _('try to sort changesets by branches')),
('', 'datesort', None, _('try to sort changesets by date')),
('', 'sourcesort', None, _('preserve source changesets order')),
--- a/hgext/convert/common.py Wed Feb 19 22:19:45 2014 +0900
+++ b/hgext/convert/common.py Wed Feb 19 16:46:47 2014 -0600
@@ -63,13 +63,13 @@
self.encoding = 'utf-8'
- def checkhexformat(self, revstr):
+ def checkhexformat(self, revstr, mapname='splicemap'):
""" fails if revstr is not a 40 byte hex. mercurial and git both uses
such format for their revision numbering
"""
if not re.match(r'[0-9a-fA-F]{40,40}$', revstr):
- raise util.Abort(_('splicemap entry %s is not a valid revision'
- ' identifier') % revstr)
+ raise util.Abort(_('%s entry %s is not a valid revision'
+ ' identifier') % (mapname, revstr))
def before(self):
pass
@@ -172,7 +172,7 @@
"""
return {}
- def checkrevformat(self, revstr):
+ def checkrevformat(self, revstr, mapname='splicemap'):
"""revstr is a string that describes a revision in the given
source control system. Return true if revstr has correct
format.
@@ -192,10 +192,6 @@
self.path = path
self.created = []
- def getheads(self):
- """Return a list of this repository's heads"""
- raise NotImplementedError
-
def revmapfile(self):
"""Path to a file that will contain lines
source_rev_id sink_rev_id
@@ -208,7 +204,8 @@
mapping equivalent authors identifiers for each system."""
return None
- def putcommit(self, files, copies, parents, commit, source, revmap):
+ def putcommit(self, files, copies, parents, commit, source,
+ revmap, tagmap):
"""Create a revision with all changed files listed in 'files'
and having listed parents. 'commit' is a commit object
containing at a minimum the author, date, and message for this
--- a/hgext/convert/convcmd.py Wed Feb 19 22:19:45 2014 +0900
+++ b/hgext/convert/convcmd.py Wed Feb 19 16:46:47 2014 -0600
@@ -120,6 +120,43 @@
self.splicemap = self.parsesplicemap(opts.get('splicemap'))
self.branchmap = mapfile(ui, opts.get('branchmap'))
+ self.closemap = self.parseclosemap(opts.get('closemap'))
+ self.tagmap = mapfile(ui, opts.get('tagmap'))
+
+ def parseclosemap(self, path):
+ """ check and validate the closemap format and
+ return a list of revs to close.
+ Format checking has two parts.
+ 1. generic format which is same across all source types
+ 2. specific format checking which may be different for
+ different source type. This logic is implemented in
+ checkrevformat function in source files like
+ hg.py, subversion.py etc.
+ """
+
+ if not path:
+ return []
+ m = []
+ try:
+ fp = open(path, 'r')
+ for i, line in enumerate(fp):
+ line = line.splitlines()[0].rstrip()
+ if not line:
+ # Ignore blank lines
+ continue
+ # split line
+ lex = shlex.shlex(line, posix=True)
+ lex.whitespace_split = True
+ lex.whitespace += ','
+ line = list(lex)
+ for part in line:
+ self.source.checkrevformat(part, 'closemap')
+ m.extend(line)
+ # if file does not exist or error reading, exit
+ except IOError:
+ raise util.Abort(_('closemap file not found or error reading %s:')
+ % path)
+ return m
def parsesplicemap(self, path):
""" check and validate the splicemap format and
@@ -408,8 +445,11 @@
except KeyError:
parents = [b[0] for b in pbranches]
source = progresssource(self.ui, self.source, len(files))
+ if self.closemap and rev in self.closemap:
+ commit.extra['close'] = 1
+
newnode = self.dest.putcommit(files, copies, parents, commit,
- source, self.map)
+ source, self.map, self.tagmap)
source.close()
self.source.converted(rev, newnode)
self.map[rev] = newnode
@@ -445,6 +485,9 @@
self.ui.progress(_('converting'), None)
tags = self.source.gettags()
+ tags = dict((self.tagmap.get(k, k), v)
+ for k, v in tags.iteritems())
+
ctags = {}
for k in tags:
v = tags[k]
--- a/hgext/convert/git.py Wed Feb 19 22:19:45 2014 +0900
+++ b/hgext/convert/git.py Wed Feb 19 16:46:47 2014 -0600
@@ -297,7 +297,7 @@
return bookmarks
- def checkrevformat(self, revstr):
+ def checkrevformat(self, revstr, mapname='splicemap'):
""" git revision string is a 40 byte hex """
- self.checkhexformat(revstr)
+ self.checkhexformat(revstr, mapname)
--- a/hgext/convert/hg.py Wed Feb 19 22:19:45 2014 +0900
+++ b/hgext/convert/hg.py Wed Feb 19 16:46:47 2014 -0600
@@ -25,6 +25,9 @@
from common import NoRepo, commit, converter_source, converter_sink
+import re
+sha1re = re.compile(r'\b[0-9a-f]{6,40}\b')
+
class mercurial_sink(converter_sink):
def __init__(self, ui, path):
converter_sink.__init__(self, ui, path)
@@ -75,10 +78,6 @@
def authorfile(self):
return self.repo.join("authormap")
- def getheads(self):
- h = self.repo.changelog.heads()
- return [hex(x) for x in h]
-
def setbranch(self, branch, pbranches):
if not self.clonebranches:
return
@@ -117,7 +116,7 @@
self.repo.pull(prepo, [prepo.lookup(h) for h in heads])
self.before()
- def _rewritetags(self, source, revmap, data):
+ def _rewritetags(self, source, revmap, tagmap, data):
fp = cStringIO.StringIO()
for line in data.splitlines():
s = line.split(' ', 1)
@@ -126,17 +125,18 @@
revid = revmap.get(source.lookuprev(s[0]))
if not revid:
continue
- fp.write('%s %s\n' % (revid, s[1]))
+ fp.write('%s %s\n' % (revid, tagmap.get(s[1], s[1])))
return fp.getvalue()
- def putcommit(self, files, copies, parents, commit, source, revmap):
+ def putcommit(self, files, copies, parents, commit, source,
+ revmap, tagmap):
files = dict(files)
def getfilectx(repo, memctx, f):
v = files[f]
data, mode = source.getfile(f, v)
if f == '.hgtags':
- data = self._rewritetags(source, revmap, data)
+ data = self._rewritetags(source, revmap, tagmap, data)
return context.memfilectx(f, data, 'l' in mode, 'x' in mode,
copies.get(f))
@@ -157,6 +157,14 @@
p2 = parents.pop(0)
text = commit.desc
+
+ sha1s = re.findall(sha1re, text)
+ for sha1 in sha1s:
+ oldrev = source.lookuprev(sha1)
+ newrev = revmap.get(oldrev)
+ if newrev is not None:
+ text = text.replace(sha1, newrev[:len(sha1)])
+
extra = commit.extra.copy()
if self.branchnames and commit.branch:
extra['branch'] = commit.branch
@@ -190,14 +198,36 @@
parentctx = None
tagparent = nullid
- try:
- oldlines = sorted(parentctx['.hgtags'].data().splitlines(True))
- except Exception:
- oldlines = []
+ oldlines = set()
+ for branch, heads in self.repo.branchmap().iteritems():
+ for h in heads:
+ if '.hgtags' in self.repo[h]:
+ oldlines.update(
+ set(self.repo[h]['.hgtags'].data().splitlines(True)))
+ oldlines = sorted(list(oldlines))
newlines = sorted([("%s %s\n" % (tags[tag], tag)) for tag in tags])
if newlines == oldlines:
return None, None
+
+ # if the old and new tags match, then there is nothing to update
+ oldtags = set()
+ newtags = set()
+ for line in oldlines:
+ s = line.strip().split(' ', 1)
+ if len(s) != 2:
+ continue
+ oldtags.add(s[1])
+ for line in newlines:
+ s = line.strip().split(' ', 1)
+ if len(s) != 2:
+ continue
+ if s[1] not in oldtags:
+ newtags.add(s[1].strip())
+
+ if not newtags:
+ return None, None
+
data = "".join(newlines)
def getfilectx(repo, memctx, f):
return context.memfilectx(f, data, False, False, None)
@@ -412,6 +442,6 @@
def getbookmarks(self):
return bookmarks.listbookmarks(self.repo)
- def checkrevformat(self, revstr):
+ def checkrevformat(self, revstr, mapname='splicemap'):
""" Mercurial, revision string is a 40 byte hex """
- self.checkhexformat(revstr)
+ self.checkhexformat(revstr, mapname)
--- a/hgext/convert/subversion.py Wed Feb 19 22:19:45 2014 +0900
+++ b/hgext/convert/subversion.py Wed Feb 19 16:46:47 2014 -0600
@@ -41,13 +41,30 @@
pass
def revsplit(rev):
- """Parse a revision string and return (uuid, path, revnum)."""
- url, revnum = rev.rsplit('@', 1)
- parts = url.split('/', 1)
+ """Parse a revision string and return (uuid, path, revnum).
+ >>> revsplit('svn:a2147622-4a9f-4db4-a8d3-13562ff547b2'
+ ... '/proj%20B/mytrunk/mytrunk@1')
+ ('a2147622-4a9f-4db4-a8d3-13562ff547b2', '/proj%20B/mytrunk/mytrunk', 1)
+ >>> revsplit('svn:8af66a51-67f5-4354-b62c-98d67cc7be1d@1')
+ ('', '', 1)
+ >>> revsplit('@7')
+ ('', '', 7)
+ >>> revsplit('7')
+ ('', '', 0)
+ >>> revsplit('bad')
+ ('', '', 0)
+ """
+ parts = rev.rsplit('@', 1)
+ revnum = 0
+ if len(parts) > 1:
+ revnum = int(parts[1])
+ parts = parts[0].split('/', 1)
+ uuid = ''
mod = ''
- if len(parts) > 1:
+ if len(parts) > 1 and parts[0].startswith('svn:'):
+ uuid = parts[0][4:]
mod = '/' + parts[1]
- return parts[0][4:], mod, int(revnum)
+ return uuid, mod, revnum
def quote(s):
# As of svn 1.7, many svn calls expect "canonical" paths. In
@@ -157,6 +174,30 @@
self._stdout.close()
self._stdout = None
+class directlogstream(list):
+ """Direct revision log iterator.
+ This can be used for debugging and development but it will probably leak
+ memory and is not suitable for real conversions."""
+ def __init__(self, url, paths, start, end, limit=0,
+ discover_changed_paths=True, strict_node_history=False):
+
+ def receiver(orig_paths, revnum, author, date, message, pool):
+ paths = {}
+ if orig_paths is not None:
+ for k, v in orig_paths.iteritems():
+ paths[k] = changedpath(v)
+ self.append((paths, revnum, author, date, message))
+
+ # Use an ra of our own so that our parent can consume
+ # our results without confusing the server.
+ t = transport.SvnRaTransport(url=url)
+ svn.ra.get_log(t.ra, paths, start, end, limit,
+ discover_changed_paths,
+ strict_node_history,
+ receiver)
+
+ def close(self):
+ pass
# Check to see if the given path is a local Subversion repo. Verify this by
# looking for several svn-specific files and directories in the given
@@ -454,13 +495,13 @@
del self.commits[rev]
return commit
- def checkrevformat(self, revstr):
+ def checkrevformat(self, revstr, mapname='splicemap'):
""" fails if revision format does not match the correct format"""
if not re.match(r'svn:[0-9a-f]{8,8}-[0-9a-f]{4,4}-'
'[0-9a-f]{4,4}-[0-9a-f]{4,4}-[0-9a-f]'
'{12,12}(.*)\@[0-9]+$',revstr):
- raise util.Abort(_('splicemap entry %s is not a valid revision'
- ' identifier') % revstr)
+ raise util.Abort(_('%s entry %s is not a valid revision'
+ ' identifier') % (mapname, revstr))
def gettags(self):
tags = {}
@@ -975,6 +1016,9 @@
relpaths.append(p.strip('/'))
args = [self.baseurl, relpaths, start, end, limit,
discover_changed_paths, strict_node_history]
+ # undocumented feature: debugsvnlog can be disabled
+ if not self.ui.configbool('convert', 'svn.debugsvnlog', True):
+ return directlogstream(*args)
arg = encodeargs(args)
hgexe = util.hgexecutable()
cmd = '%s debugsvnlog' % util.shellquote(hgexe)
@@ -1183,7 +1227,8 @@
def revid(self, rev):
return u"svn:%s@%s" % (self.uuid, rev)
- def putcommit(self, files, copies, parents, commit, source, revmap):
+ def putcommit(self, files, copies, parents, commit, source,
+ revmap, tagmap):
for parent in parents:
try:
return self.revid(self.childmap[parent])
--- a/hgext/histedit.py Wed Feb 19 22:19:45 2014 +0900
+++ b/hgext/histedit.py Wed Feb 19 16:46:47 2014 -0600
@@ -30,10 +30,12 @@
# Edit history between c561b4e977df and 7c2fd3b9020c
#
+ # Commits are listed from least to most recent
+ #
# Commands:
# p, pick = use commit
# e, edit = use commit, but stop for amending
- # f, fold = use commit, but fold into previous commit (combines N and N-1)
+ # f, fold = use commit, but combine it with the one above
# d, drop = remove commit from history
# m, mess = edit message without changing commit content
#
@@ -49,10 +51,12 @@
# Edit history between c561b4e977df and 7c2fd3b9020c
#
+ # Commits are listed from least to most recent
+ #
# Commands:
# p, pick = use commit
# e, edit = use commit, but stop for amending
- # f, fold = use commit, but fold into previous commit (combines N and N-1)
+ # f, fold = use commit, but combine it with the one above
# d, drop = remove commit from history
# m, mess = edit message without changing commit content
#
@@ -170,10 +174,12 @@
# i18n: command names and abbreviations must remain untranslated
editcomment = _("""# Edit history between %s and %s
#
+# Commits are listed from least to most recent
+#
# Commands:
# p, pick = use commit
# e, edit = use commit, but stop for amending
-# f, fold = use commit, but fold into previous commit (combines N and N-1)
+# f, fold = use commit, but combine it with the one above
# d, drop = remove commit from history
# m, mess = edit message without changing commit content
#
--- a/hgext/largefiles/overrides.py Wed Feb 19 22:19:45 2014 +0900
+++ b/hgext/largefiles/overrides.py Wed Feb 19 16:46:47 2014 -0600
@@ -12,7 +12,7 @@
import copy
from mercurial import hg, commands, util, cmdutil, scmutil, match as match_, \
- node, archival, error, merge, discovery, pathutil
+ node, archival, error, merge, discovery, pathutil, revset
from mercurial.i18n import _
from mercurial.node import hex
from hgext import rebase
@@ -752,7 +752,7 @@
firstpulled = repo.firstpulled
except AttributeError:
raise util.Abort(_("pulled() only available in --lfrev"))
- return [r for r in subset if r >= firstpulled]
+ return revset.baseset([r for r in subset if r >= firstpulled])
def overrideclone(orig, ui, source, dest=None, **opts):
d = dest
--- a/hgext/mq.py Wed Feb 19 22:19:45 2014 +0900
+++ b/hgext/mq.py Wed Feb 19 16:46:47 2014 -0600
@@ -1986,9 +1986,11 @@
raise util.Abort(_('-e is incompatible with import from -'))
filename = normname(filename)
self.checkreservedname(filename)
- originpath = self.join(filename)
- if not os.path.isfile(originpath):
- raise util.Abort(_("patch %s does not exist") % filename)
+ if util.url(filename).islocal():
+ originpath = self.join(filename)
+ if not os.path.isfile(originpath):
+ raise util.Abort(
+ _("patch %s does not exist") % filename)
if patchname:
self.checkpatchname(patchname, force)
@@ -3407,7 +3409,7 @@
"""
revset.getargs(x, 0, 0, _("mq takes no arguments"))
applied = set([repo[r.node].rev() for r in repo.mq.applied])
- return [r for r in subset if r in applied]
+ return revset.baseset([r for r in subset if r in applied])
# tell hggettext to extract docstrings from these functions:
i18nfunctions = [revsetmq]
--- a/hgext/shelve.py Wed Feb 19 22:19:45 2014 +0900
+++ b/hgext/shelve.py Wed Feb 19 16:46:47 2014 -0600
@@ -22,8 +22,8 @@
"""
from mercurial.i18n import _
-from mercurial.node import nullid, bin, hex
-from mercurial import changegroup, cmdutil, scmutil, phases
+from mercurial.node import nullid, nullrev, bin, hex
+from mercurial import changegroup, cmdutil, scmutil, phases, commands
from mercurial import error, hg, mdiff, merge, patch, repair, util
from mercurial import templatefilters
from mercurial import lock as lockmod
@@ -122,22 +122,21 @@
"""subcommand that creates a new shelve"""
def publicancestors(ctx):
- """Compute the heads of the public ancestors of a commit.
+ """Compute the public ancestors of a commit.
- Much faster than the revset heads(ancestors(ctx) - draft())"""
- seen = set()
+ Much faster than the revset ancestors(ctx) & draft()"""
+ seen = set([nullrev])
visit = util.deque()
visit.append(ctx)
while visit:
ctx = visit.popleft()
+ yield ctx.node()
for parent in ctx.parents():
rev = parent.rev()
if rev not in seen:
seen.add(rev)
if parent.mutable():
visit.append(parent)
- else:
- yield parent.node()
wctx = repo[None]
parents = wctx.parents()
@@ -173,9 +172,9 @@
repo.mq.checkapplied = saved
if parent.node() != nullid:
- desc = parent.description().split('\n', 1)[0]
+ desc = "changes to '%s'" % parent.description().split('\n', 1)[0]
else:
- desc = '(empty repository)'
+ desc = '(changes in empty repository)'
if not opts['message']:
opts['message'] = desc
@@ -518,6 +517,7 @@
if not shelvedfile(repo, basename, 'files').exists():
raise util.Abort(_("shelved change '%s' not found") % basename)
+ oldquiet = ui.quiet
wlock = lock = tr = None
try:
lock = repo.lock()
@@ -537,6 +537,8 @@
# Store pending changes in a commit
m, a, r, d = repo.status()[:4]
if m or a or r or d:
+ ui.status(_("temporarily committing pending changes "
+ "(restore with 'hg unshelve --abort')\n"))
def commitfunc(ui, repo, message, match, opts):
hasmq = util.safehasattr(repo, 'mq')
if hasmq:
@@ -551,15 +553,12 @@
tempopts = {}
tempopts['message'] = "pending changes temporary commit"
- oldquiet = ui.quiet
- try:
- ui.quiet = True
- node = cmdutil.commit(ui, repo, commitfunc, [], tempopts)
- finally:
- ui.quiet = oldquiet
+ ui.quiet = True
+ node = cmdutil.commit(ui, repo, commitfunc, [], tempopts)
tmpwctx = repo[node]
try:
+ ui.quiet = True
fp = shelvedfile(repo, basename, 'hg').opener()
gen = changegroup.readbundle(fp, fp.name)
repo.addchangegroup(gen, 'unshelve', 'bundle:' + fp.name)
@@ -568,11 +567,14 @@
finally:
fp.close()
+ ui.quiet = oldquiet
+
shelvectx = repo['tip']
# If the shelve is not immediately on top of the commit
# we'll be merging with, rebase it to be on top.
if tmpwctx.node() != shelvectx.parents()[0].node():
+ ui.status(_('rebasing shelved changes\n'))
try:
rebase.rebase(ui, repo, **{
'rev' : [shelvectx.rev()],
@@ -610,6 +612,7 @@
unshelvecleanup(ui, repo, basename, opts)
finally:
+ ui.quiet = oldquiet
if tr:
tr.release()
lockmod.release(lock, wlock)
@@ -632,8 +635,8 @@
('p', 'patch', None,
_('show patch')),
('', 'stat', None,
- _('output diffstat-style summary of changes'))],
- _('hg shelve'))
+ _('output diffstat-style summary of changes'))] + commands.walkopts,
+ _('hg shelve [OPTION]... [FILE]...'))
def shelvecmd(ui, repo, *pats, **opts):
'''save and set aside changes from the working directory
--- a/hgext/transplant.py Wed Feb 19 22:19:45 2014 +0900
+++ b/hgext/transplant.py Wed Feb 19 16:46:47 2014 -0600
@@ -670,7 +670,8 @@
s = revset.getset(repo, subset, x)
else:
s = subset
- return [r for r in s if repo[r].extra().get('transplant_source')]
+ return revset.baseset([r for r in s if
+ repo[r].extra().get('transplant_source')])
def kwtransplanted(repo, ctx, **args):
""":transplanted: String. The node identifier of the transplanted
--- a/i18n/check-translation.py Wed Feb 19 22:19:45 2014 +0900
+++ b/i18n/check-translation.py Wed Feb 19 16:46:47 2014 -0600
@@ -66,6 +66,46 @@
def warningchecker(msgidpat=None):
return checker('warning', msgidpat)
+@warningchecker()
+def taildoublecolons(pe):
+ """Check equality of tail '::'-ness between msgid and msgstr
+
+ >>> pe = polib.POEntry(
+ ... msgid ='ends with ::',
+ ... msgstr='ends with ::')
+ >>> for e in taildoublecolons(pe): print e
+ >>> pe = polib.POEntry(
+ ... msgid ='ends with ::',
+ ... msgstr='ends without double-colons')
+ >>> for e in taildoublecolons(pe): print e
+ tail '::'-ness differs between msgid and msgstr
+ >>> pe = polib.POEntry(
+ ... msgid ='ends without double-colons',
+ ... msgstr='ends with ::')
+ >>> for e in taildoublecolons(pe): print e
+ tail '::'-ness differs between msgid and msgstr
+ """
+ if pe.msgid.endswith('::') != pe.msgstr.endswith('::'):
+ yield "tail '::'-ness differs between msgid and msgstr"
+
+@warningchecker()
+def indentation(pe):
+ """Check equality of initial indentation between msgid and msgstr
+
+ This may report unexpected warnings, because it is not aware of
+ the rst document syntax or the context of msgstr.
+
+ >>> pe = polib.POEntry(
+ ... msgid =' indented text',
+ ... msgstr=' narrowed indentation')
+ >>> for e in indentation(pe): print e
+ initial indentation width differs between msgid and msgstr
+ """
+ idindent = len(pe.msgid) - len(pe.msgid.lstrip())
+ strindent = len(pe.msgstr) - len(pe.msgstr.lstrip())
+ if idindent != strindent:
+ yield "initial indentation width differs between msgid and msgstr"
+
####################
def check(pofile, fatal=True, warning=False):
--- a/i18n/de.po Wed Feb 19 22:19:45 2014 +0900
+++ b/i18n/de.po Wed Feb 19 16:46:47 2014 -0600
@@ -20,7 +20,7 @@
msgstr ""
"Project-Id-Version: Mercurial\n"
"Report-Msgid-Bugs-To: <mercurial-devel@selenic.com>\n"
-"POT-Creation-Date: 2014-01-25 17:51+0100\n"
+"POT-Creation-Date: 2014-01-29 16:47+0100\n"
"PO-Revision-Date: 2013-09-30 20:52+0100\n"
"Last-Translator: Simon Heimberg <simohe@besonet.ch>\n"
"Language-Team: \n"
@@ -2928,6 +2928,7 @@
" [repository]\n"
" native = LF"
+#. do not translate: .. note::
msgid ".. note::"
msgstr ""
@@ -5029,6 +5030,7 @@
" Siehe Hilfe zu 'paths' zu Pfad-Kurznamen und 'urls' für erlaubte\n"
" Formate für die Quellangabe."
+#. do not translate: .. container::
msgid " .. container:: verbose"
msgstr ""
@@ -6548,6 +6550,7 @@
" Ohne Argumente werden die aktuell aktiven Wächter ausgegeben.\n"
" Mit einem Argument wird der aktuelle Wächter gesetzt."
+#. do not translate: .. note::
msgid " .. note::"
msgstr ""
@@ -15694,6 +15697,7 @@
" order until one or more configuration files are detected."
msgstr ""
+#. do not translate: .. note::
msgid ""
".. note:: The registry key ``HKEY_LOCAL_MACHINE\\SOFTWARE\\Wow6432Node"
"\\Mercurial``\n"
@@ -15873,6 +15877,7 @@
msgid " stable5 = latest -b stable"
msgstr ""
+#. do not translate: .. note::
msgid ""
".. note:: It is possible to create aliases with the same names as\n"
" existing commands, which will then override the original\n"
@@ -15918,6 +15923,7 @@
"echo foo`` call above, ``$HG_ARGS`` would expand to ``echo foo``."
msgstr ""
+#. do not translate: .. note::
msgid ""
".. note:: Some global configuration options such as ``-R`` are\n"
" processed before shell aliases and will thus not be passed to\n"
@@ -16101,6 +16107,7 @@
"the command."
msgstr ""
+#. do not translate: .. note::
msgid ""
".. note:: The tempfile mechanism is recommended for Windows systems,\n"
" where the standard shell I/O redirection operators often have\n"
@@ -16572,6 +16579,7 @@
" update failed (e.g. because conflicts not resolved), ``$HG_ERROR=1``."
msgstr ""
+#. do not translate: .. note::
msgid ""
".. note:: It is generally better to use standard hooks rather than the\n"
" generic pre- and post- command hooks as they are guaranteed to be\n"
@@ -16580,6 +16588,7 @@
" generate a commit (e.g. tag) and not just the commit command."
msgstr ""
+#. do not translate: .. note::
msgid ""
".. note:: Environment variables with empty values may not be passed to\n"
" hooks on platforms such as Windows. As an example, ``$HG_PARENT2``\n"
@@ -18967,6 +18976,7 @@
":Manual group: Mercurial Manual"
msgstr ""
+#. do not translate: .. contents::
msgid ""
".. contents::\n"
" :backlinks: top\n"
@@ -19017,6 +19027,7 @@
" repository."
msgstr ""
+#. do not translate: .. include::
msgid ".. include:: hg.1.gendoc.txt"
msgstr ""
@@ -19121,6 +19132,7 @@
"Public License version 2 or any later version."
msgstr ""
+#. do not translate: .. include::
msgid ".. include:: common.txt\n"
msgstr ""
@@ -19143,6 +19155,7 @@
":Manual group: Mercurial Manual"
msgstr ""
+#. do not translate: .. include::
msgid ".. include:: hgignore.5.gendoc.txt"
msgstr ""
@@ -19170,6 +19183,7 @@
"Public License version 2 or any later version."
msgstr ""
+#. do not translate: .. include::
msgid ".. include:: common.txt"
msgstr ""
@@ -19281,6 +19295,7 @@
"regexp pattern, start it with ``^``."
msgstr ""
+#. do not translate: .. note::
msgid ""
".. note::\n"
" Patterns specified in other than ``.hgignore`` are always rooted.\n"
@@ -19333,6 +19348,7 @@
":Manual group: Mercurial Manual"
msgstr ""
+#. do not translate: .. contents::
msgid ""
".. contents::\n"
" :backlinks: top\n"
@@ -19348,6 +19364,7 @@
"Beschreibung\n"
"============"
+#. do not translate: .. include::
msgid ".. include:: hgrc.5.gendoc.txt"
msgstr ""
@@ -19564,6 +19581,7 @@
msgid "8. The merge of the file fails and must be resolved before commit."
msgstr ""
+#. do not translate: .. note::
msgid ""
".. note::\n"
" After selecting a merge program, Mercurial will by default attempt\n"
@@ -19633,6 +19651,7 @@
msgid "Alternate pattern notations must be specified explicitly."
msgstr "Andere Schreibweisen von Mustern müssen explizit angegeben werden."
+#. do not translate: .. note::
msgid ""
".. note::\n"
" Patterns specified in ``.hgignore`` are not rooted.\n"
@@ -19804,6 +19823,7 @@
msgid " - secret changesets are neither pushed, pulled, or cloned"
msgstr ""
+#. do not translate: .. note::
msgid ""
".. note::\n"
" Pulling a draft changeset from a publishing server does not mark it\n"
@@ -19823,12 +19843,14 @@
" [phases]\n"
" publish = False"
+#. do not translate: .. note::
msgid ""
".. note::\n"
" Servers running older versions of Mercurial are treated as\n"
" publishing."
msgstr ""
+#. do not translate: .. note::
msgid ""
".. note::\n"
" Changesets in secret phase are not exchanged with the server. This\n"
@@ -20216,6 +20238,7 @@
" repositories states when committing in the parent repository."
msgstr ""
+#. do not translate: .. note::
msgid ""
" .. note::\n"
" The ``.hgsubstate`` file should not be edited manually."
--- a/i18n/posplit Wed Feb 19 22:19:45 2014 +0900
+++ b/i18n/posplit Wed Feb 19 16:46:47 2014 -0600
@@ -5,6 +5,7 @@
# license: MIT/X11/Expat
#
+import re
import sys
import polib
@@ -30,6 +31,7 @@
cache = {}
entries = po[:]
po[:] = []
+ findd = re.compile(r' *\.\. (\w+)::') # for finding directives
for entry in entries:
msgids = entry.msgid.split(u'\n\n')
if entry.msgstr:
@@ -49,8 +51,27 @@
delta = 0
for msgid, msgstr in zip(msgids, msgstrs):
- if msgid:
+ if msgid and msgid != '::':
newentry = mkentry(entry, delta, msgid, msgstr)
+ mdirective = findd.match(msgid)
+ if mdirective:
+ if not msgid[mdirective.end():].rstrip():
+ # only directive, nothing to translate here
+ continue
+ directive = mdirective.group(1)
+ if directive in ('container', 'include'):
+ if msgid.rstrip('\n').count('\n') == 0:
+ # only rst syntax, nothing to translate
+ continue
+ else:
+ # lines following directly, unexpected
+ print 'Warning: text follows line with directive' \
+ ' %s' % directive
+ comment = 'do not translate: .. %s::' % directive
+ if not newentry.comment:
+ newentry.comment = comment
+ elif comment not in newentry.comment:
+ newentry.comment += '\n' + comment
addentry(po, newentry, cache)
delta += 2 + msgid.count('\n')
po.save()
--- a/mercurial/bookmarks.py Wed Feb 19 22:19:45 2014 +0900
+++ b/mercurial/bookmarks.py Wed Feb 19 16:46:47 2014 -0600
@@ -363,22 +363,6 @@
writer(msg)
localmarks.write()
-def updateremote(ui, repo, remote, revs):
- ui.debug("checking for updated bookmarks\n")
- revnums = map(repo.changelog.rev, revs or [])
- ancestors = [a for a in repo.changelog.ancestors(revnums, inclusive=True)]
- (addsrc, adddst, advsrc, advdst, diverge, differ, invalid
- ) = compare(repo, repo._bookmarks, remote.listkeys('bookmarks'),
- srchex=hex)
-
- for b, scid, dcid in advsrc:
- if ancestors and repo[scid].rev() not in ancestors:
- continue
- if remote.pushkey('bookmarks', b, dcid, scid):
- ui.status(_("updating bookmark %s\n") % b)
- else:
- ui.warn(_('updating bookmark %s failed!\n') % b)
-
def pushtoremote(ui, repo, remote, targets):
(addsrc, adddst, advsrc, advdst, diverge, differ, invalid
) = compare(repo, repo._bookmarks, remote.listkeys('bookmarks'),
--- a/mercurial/cmdutil.py Wed Feb 19 22:19:45 2014 +0900
+++ b/mercurial/cmdutil.py Wed Feb 19 16:46:47 2014 -0600
@@ -10,7 +10,7 @@
import os, sys, errno, re, tempfile
import util, scmutil, templater, patch, error, templatekw, revlog, copies
import match as matchmod
-import subrepo, context, repair, graphmod, revset, phases, obsolete, pathutil
+import context, repair, graphmod, revset, phases, obsolete, pathutil
import changelog
import bookmarks
import lock as lockmod
@@ -542,6 +542,131 @@
if runfn:
return runfn()
+def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
+ """Utility function used by commands.import to import a single patch
+
+ This function is explicitly defined here to help the evolve extension to
+ wrap this part of the import logic.
+
+ The API is currently a bit ugly because it a simple code translation from
+ the import command. Feel free to make it better.
+
+ :hunk: a patch (as a binary string)
+ :parents: nodes that will be parent of the created commit
+ :opts: the full dict of option passed to the import command
+ :msgs: list to save commit message to.
+ (used in case we need to save it when failing)
+ :updatefunc: a function that update a repo to a given node
+ updatefunc(<repo>, <node>)
+ """
+ tmpname, message, user, date, branch, nodeid, p1, p2 = \
+ patch.extract(ui, hunk)
+
+ editor = commiteditor
+ if opts.get('edit'):
+ editor = commitforceeditor
+ update = not opts.get('bypass')
+ strip = opts["strip"]
+ sim = float(opts.get('similarity') or 0)
+ if not tmpname:
+ return (None, None)
+ msg = _('applied to working directory')
+
+ try:
+ cmdline_message = logmessage(ui, opts)
+ if cmdline_message:
+ # pickup the cmdline msg
+ message = cmdline_message
+ elif message:
+ # pickup the patch msg
+ message = message.strip()
+ else:
+ # launch the editor
+ message = None
+ ui.debug('message:\n%s\n' % message)
+
+ if len(parents) == 1:
+ parents.append(repo[nullid])
+ if opts.get('exact'):
+ if not nodeid or not p1:
+ raise util.Abort(_('not a Mercurial patch'))
+ p1 = repo[p1]
+ p2 = repo[p2 or nullid]
+ elif p2:
+ try:
+ p1 = repo[p1]
+ p2 = repo[p2]
+ # Without any options, consider p2 only if the
+ # patch is being applied on top of the recorded
+ # first parent.
+ if p1 != parents[0]:
+ p1 = parents[0]
+ p2 = repo[nullid]
+ except error.RepoError:
+ p1, p2 = parents
+ else:
+ p1, p2 = parents
+
+ n = None
+ if update:
+ if p1 != parents[0]:
+ updatefunc(repo, p1.node())
+ if p2 != parents[1]:
+ repo.setparents(p1.node(), p2.node())
+
+ if opts.get('exact') or opts.get('import_branch'):
+ repo.dirstate.setbranch(branch or 'default')
+
+ files = set()
+ patch.patch(ui, repo, tmpname, strip=strip, files=files,
+ eolmode=None, similarity=sim / 100.0)
+ files = list(files)
+ if opts.get('no_commit'):
+ if message:
+ msgs.append(message)
+ else:
+ if opts.get('exact') or p2:
+ # If you got here, you either use --force and know what
+ # you are doing or used --exact or a merge patch while
+ # being updated to its first parent.
+ m = None
+ else:
+ m = scmutil.matchfiles(repo, files or [])
+ n = repo.commit(message, opts.get('user') or user,
+ opts.get('date') or date, match=m,
+ editor=editor)
+ else:
+ if opts.get('exact') or opts.get('import_branch'):
+ branch = branch or 'default'
+ else:
+ branch = p1.branch()
+ store = patch.filestore()
+ try:
+ files = set()
+ try:
+ patch.patchrepo(ui, repo, p1, store, tmpname, strip,
+ files, eolmode=None)
+ except patch.PatchError, e:
+ raise util.Abort(str(e))
+ memctx = context.makememctx(repo, (p1.node(), p2.node()),
+ message,
+ opts.get('user') or user,
+ opts.get('date') or date,
+ branch, files, store,
+ editor=commiteditor)
+ repo.savecommitmessage(memctx.description())
+ n = memctx.commit()
+ finally:
+ store.close()
+ if opts.get('exact') and hex(n) != nodeid:
+ raise util.Abort(_('patch is damaged or loses information'))
+ if n:
+ # i18n: refers to a short changeset id
+ msg = _('created %s') % short(n)
+ return (msg, n)
+ finally:
+ os.unlink(tmpname)
+
def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
opts=None):
'''export changesets as hg patches.'''
@@ -629,7 +754,7 @@
if listsubrepos:
ctx1 = repo[node1]
ctx2 = repo[node2]
- for subpath, sub in subrepo.itersubrepos(ctx1, ctx2):
+ for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
tempnode2 = node2
try:
if node2 is not None:
@@ -974,6 +1099,19 @@
t.use_template(tmpl)
return t
+def showmarker(ui, marker):
+ """utility function to display obsolescence marker in a readable way
+
+ To be used by debug function."""
+ ui.write(hex(marker.precnode()))
+ for repl in marker.succnodes():
+ ui.write(' ')
+ ui.write(hex(repl))
+ ui.write(' %X ' % marker._data[2])
+ ui.write('{%s}' % (', '.join('%r: %r' % t for t in
+ sorted(marker.metadata().items()))))
+ ui.write('\n')
+
def finddate(ui, repo, date):
"""Find the tipmost changeset that matches the given date spec"""
@@ -1132,7 +1270,7 @@
elif follow:
revs = repo.revs('reverse(:.)')
else:
- revs = list(repo)
+ revs = revset.baseset(repo)
revs.reverse()
if not revs:
return []
@@ -1140,6 +1278,7 @@
slowpath = match.anypats() or (match.files() and opts.get('removed'))
fncache = {}
change = repo.changectx
+ revs = revset.baseset(revs)
# First step is to fill wanted, the set of revisions that we want to yield.
# When it does not induce extra cost, we also fill fncache for revisions in
@@ -1471,10 +1610,11 @@
if follow and len(repo) > 0:
revs = repo.revs('reverse(:.)')
else:
- revs = list(repo.changelog)
+ revs = revset.baseset(repo.changelog)
revs.reverse()
if not revs:
return [], None, None
+ revs = revset.baseset(revs)
expr, filematcher = _makegraphlogrevset(repo, pats, opts, revs)
if possiblyunsorted:
revs.sort(reverse=True)
--- a/mercurial/commands.py Wed Feb 19 22:19:45 2014 +0900
+++ b/mercurial/commands.py Wed Feb 19 16:46:47 2014 -0600
@@ -1943,7 +1943,7 @@
tree = fileset.parse(expr)[0]
ui.note(tree, "\n")
- for f in fileset.getfileset(ctx, expr):
+ for f in ctx.getfileset(expr):
ui.write("%s\n" % f)
@command('debugfsinfo', [], _('[PATH]'))
@@ -2107,10 +2107,21 @@
import templater
p = templater.templatepath()
ui.status(_("checking templates (%s)...\n") % ' '.join(p))
- try:
- templater.templater(templater.templatepath("map-cmdline.default"))
- except Exception, inst:
- ui.write(" %s\n" % inst)
+ if p:
+ m = templater.templatepath("map-cmdline.default")
+ if m:
+ # template found, check if it is working
+ try:
+ templater.templater(m)
+ except Exception, inst:
+ ui.write(" %s\n" % inst)
+ p = None
+ else:
+ ui.write(_(" template 'default' not found\n"))
+ p = None
+ else:
+ ui.write(_(" no template directories found\n"))
+ if not p:
ui.write(_(" (templates seem to have been installed incorrectly)\n"))
problems += 1
@@ -2216,14 +2227,7 @@
l.release()
else:
for m in obsolete.allmarkers(repo):
- ui.write(hex(m.precnode()))
- for repl in m.succnodes():
- ui.write(' ')
- ui.write(hex(repl))
- ui.write(' %X ' % m._data[2])
- ui.write('{%s}' % (', '.join('%r: %r' % t for t in
- sorted(m.metadata().items()))))
- ui.write('\n')
+ cmdutil.showmarker(ui, m)
@command('debugpathcomplete',
[('f', 'full', None, _('complete an entire path')),
@@ -2544,8 +2548,10 @@
ui.write(('deltas against other : ') + fmt % pcfmt(numother,
numdeltas))
-@command('debugrevspec', [], ('REVSPEC'))
-def debugrevspec(ui, repo, expr):
+@command('debugrevspec',
+ [('', 'optimize', None, _('print parsed tree after optimizing'))],
+ ('REVSPEC'))
+def debugrevspec(ui, repo, expr, **opts):
"""parse and apply a revision specification
Use --verbose to print the parsed tree before and after aliases
@@ -2557,8 +2563,11 @@
newtree = revset.findaliases(ui, tree)
if newtree != tree:
ui.note(revset.prettyformat(newtree), "\n")
+ if opts["optimize"]:
+ weight, optimizedtree = revset.optimize(newtree, True)
+ ui.note("* optimized:\n", revset.prettyformat(optimizedtree), "\n")
func = revset.match(ui, expr)
- for c in func(repo, range(len(repo))):
+ for c in func(repo, revset.spanset(repo)):
ui.write("%s\n" % c)
@command('debugsetparents', [], _('REV1 [REV2]'))
@@ -3669,10 +3678,6 @@
if date:
opts['date'] = util.parsedate(date)
- editor = cmdutil.commiteditor
- if opts.get('edit'):
- editor = cmdutil.commitforceeditor
-
update = not opts.get('bypass')
if not update and opts.get('no_commit'):
raise util.Abort(_('cannot use --no-commit with --bypass'))
@@ -3691,112 +3696,9 @@
cmdutil.bailifchanged(repo)
base = opts["base"]
- strip = opts["strip"]
wlock = lock = tr = None
msgs = []
- def tryone(ui, hunk, parents):
- tmpname, message, user, date, branch, nodeid, p1, p2 = \
- patch.extract(ui, hunk)
-
- if not tmpname:
- return (None, None)
- msg = _('applied to working directory')
-
- try:
- cmdline_message = cmdutil.logmessage(ui, opts)
- if cmdline_message:
- # pickup the cmdline msg
- message = cmdline_message
- elif message:
- # pickup the patch msg
- message = message.strip()
- else:
- # launch the editor
- message = None
- ui.debug('message:\n%s\n' % message)
-
- if len(parents) == 1:
- parents.append(repo[nullid])
- if opts.get('exact'):
- if not nodeid or not p1:
- raise util.Abort(_('not a Mercurial patch'))
- p1 = repo[p1]
- p2 = repo[p2 or nullid]
- elif p2:
- try:
- p1 = repo[p1]
- p2 = repo[p2]
- # Without any options, consider p2 only if the
- # patch is being applied on top of the recorded
- # first parent.
- if p1 != parents[0]:
- p1 = parents[0]
- p2 = repo[nullid]
- except error.RepoError:
- p1, p2 = parents
- else:
- p1, p2 = parents
-
- n = None
- if update:
- if p1 != parents[0]:
- hg.clean(repo, p1.node())
- if p2 != parents[1]:
- repo.setparents(p1.node(), p2.node())
-
- if opts.get('exact') or opts.get('import_branch'):
- repo.dirstate.setbranch(branch or 'default')
-
- files = set()
- patch.patch(ui, repo, tmpname, strip=strip, files=files,
- eolmode=None, similarity=sim / 100.0)
- files = list(files)
- if opts.get('no_commit'):
- if message:
- msgs.append(message)
- else:
- if opts.get('exact') or p2:
- # If you got here, you either use --force and know what
- # you are doing or used --exact or a merge patch while
- # being updated to its first parent.
- m = None
- else:
- m = scmutil.matchfiles(repo, files or [])
- n = repo.commit(message, opts.get('user') or user,
- opts.get('date') or date, match=m,
- editor=editor)
- else:
- if opts.get('exact') or opts.get('import_branch'):
- branch = branch or 'default'
- else:
- branch = p1.branch()
- store = patch.filestore()
- try:
- files = set()
- try:
- patch.patchrepo(ui, repo, p1, store, tmpname, strip,
- files, eolmode=None)
- except patch.PatchError, e:
- raise util.Abort(str(e))
- memctx = context.makememctx(repo, (p1.node(), p2.node()),
- message,
- opts.get('user') or user,
- opts.get('date') or date,
- branch, files, store,
- editor=cmdutil.commiteditor)
- repo.savecommitmessage(memctx.description())
- n = memctx.commit()
- finally:
- store.close()
- if opts.get('exact') and hex(n) != nodeid:
- raise util.Abort(_('patch is damaged or loses information'))
- if n:
- # i18n: refers to a short changeset id
- msg = _('created %s') % short(n)
- return (msg, n)
- finally:
- os.unlink(tmpname)
try:
try:
@@ -3817,7 +3719,8 @@
haspatch = False
for hunk in patch.split(patchfile):
- (msg, node) = tryone(ui, hunk, parents)
+ (msg, node) = cmdutil.tryimportone(ui, repo, hunk, parents,
+ opts, msgs, hg.clean)
if msg:
haspatch = True
ui.note(msg + '\n')
--- a/mercurial/context.py Wed Feb 19 22:19:45 2014 +0900
+++ b/mercurial/context.py Wed Feb 19 16:46:47 2014 -0600
@@ -12,6 +12,7 @@
import os, errno, stat
import obsolete as obsmod
import repoview
+import fileset
propertycache = util.propertycache
@@ -79,6 +80,9 @@
def mutable(self):
return self.phase() > phases.public
+ def getfileset(self, expr):
+ return fileset.getfileset(self, expr)
+
def obsolete(self):
"""True if the changeset is obsolete"""
return self.rev() in obsmod.getrevs(self._repo, 'obsolete')
--- a/mercurial/discovery.py Wed Feb 19 22:19:45 2014 +0900
+++ b/mercurial/discovery.py Wed Feb 19 16:46:47 2014 -0600
@@ -154,7 +154,7 @@
- branch: the branch name
- remoteheads: the list of remote heads known locally
- None is the branch is new
+ None if the branch is new
- newheads: the new remote heads (known locally) with outgoing pushed
- unsyncedheads: the list of remote heads unknown locally.
"""
@@ -250,8 +250,7 @@
hint=_("use 'hg push --new-branch' to create"
" new remote branches"))
- # 2 compute newly pushed bookmarks. We
- # we don't warned about bookmarked heads.
+ # 2. Compute newly pushed bookmarks. We don't warn about bookmarked heads.
localbookmarks = repo._bookmarks
remotebookmarks = remote.listkeys('bookmarks')
bookmarkedheads = set()
@@ -269,23 +268,23 @@
# If there are more heads after the push than before, a suitable
# error message, depending on unsynced status, is displayed.
error = None
- unsynced = False
allmissing = set(outgoing.missing)
allfuturecommon = set(c.node() for c in repo.set('%ld', outgoing.common))
allfuturecommon.update(allmissing)
for branch, heads in sorted(headssum.iteritems()):
- candidate_newhs = set(heads[1])
+ remoteheads, newheads, unsyncedheads = heads
+ candidate_newhs = set(newheads)
# add unsynced data
- if heads[0] is None:
+ if remoteheads is None:
oldhs = set()
else:
- oldhs = set(heads[0])
- oldhs.update(heads[2])
- candidate_newhs.update(heads[2])
- dhs = None
+ oldhs = set(remoteheads)
+ oldhs.update(unsyncedheads)
+ candidate_newhs.update(unsyncedheads)
+ dhs = None # delta heads, the new heads on branch
discardedheads = set()
if repo.obsstore:
- # remove future heads which are actually obsolete by another
+ # remove future heads which are actually obsoleted by another
# pushed element:
#
# XXX as above, There are several cases this case does not handle
@@ -297,8 +296,8 @@
# (2) if the new heads have ancestors which are not obsolete and
# not ancestors of any other heads we will have a new head too.
#
- # This two case will be easy to handle for know changeset but much
- # more tricky for unsynced changes.
+ # These two cases will be easy to handle for known changeset but
+ # much more tricky for unsynced changes.
newhs = set()
for nh in candidate_newhs:
if nh in repo and repo[nh].phase() <= phases.public:
@@ -312,10 +311,17 @@
newhs.add(nh)
else:
newhs = candidate_newhs
- if [h for h in heads[2] if h not in discardedheads]:
- unsynced = True
- if heads[0] is None:
- if 1 < len(newhs):
+ unsynced = sorted(h for h in unsyncedheads if h not in discardedheads)
+ if unsynced:
+ heads = ' '.join(short(h) for h in unsynced)
+ if branch is None:
+ repo.ui.status(_("remote has heads that are "
+ "not known locally: %s\n") % heads)
+ else:
+ repo.ui.status(_("remote has heads on branch '%s' that are "
+ "not known locally: %s\n") % (branch, heads))
+ if remoteheads is None:
+ if len(newhs) > 1:
dhs = list(newhs)
if error is None:
error = (_("push creates new branch '%s' "
@@ -324,7 +330,7 @@
" see \"hg help push\" for details about"
" pushing new heads")
elif len(newhs) > len(oldhs):
- # strip updates to existing remote heads from the new heads list
+ # remove bookmarked or existing remote heads from the new heads list
dhs = sorted(newhs - bookmarkedheads - oldhs)
if dhs:
if error is None:
@@ -334,7 +340,7 @@
else:
error = _("push creates new remote head %s!"
) % short(dhs[0])
- if heads[2]: # unsynced
+ if unsyncedheads:
hint = _("pull and merge or"
" see \"hg help push\" for details about"
" pushing new heads")
@@ -350,7 +356,3 @@
repo.ui.note((" %s\n") % short(h))
if error:
raise util.Abort(error, hint=hint)
-
- # 6. Check for unsynced changes on involved branches.
- if unsynced:
- repo.ui.warn(_("note: unsynced remote changes!\n"))
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/exchange.py Wed Feb 19 16:46:47 2014 -0600
@@ -0,0 +1,530 @@
+# exchange.py - utility to exchange data between repositories.
+#
+# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+from i18n import _
+from node import hex, nullid
+import errno
+import util, scmutil, changegroup, base85
+import discovery, phases, obsolete, bookmarks
+
+
+class pushoperation(object):
+ """An object that represents a single push operation
+
+ Its purpose is to carry push-related state and very common operations.
+
+ A new one should be created at the beginning of each push and
+ discarded afterward.
+ """
+
+ def __init__(self, repo, remote, force=False, revs=None, newbranch=False):
+ # repo we push from
+ self.repo = repo
+ self.ui = repo.ui
+ # repo we push to
+ self.remote = remote
+ # force option provided
+ self.force = force
+ # revs to be pushed (None is "all")
+ self.revs = revs
+ # allow push of new branch
+ self.newbranch = newbranch
+ # did a local lock get acquired?
+ self.locallocked = None
+ # Integer version of the push result
+ # - None means nothing to push
+ # - 0 means HTTP error
+ # - 1 means we pushed and remote head count is unchanged *or*
+ # we have outgoing changesets but refused to push
+ # - other values as described by addchangegroup()
+ self.ret = None
+ # discovery.outgoing object (contains common and outgoing data)
+ self.outgoing = None
+ # all remote heads before the push
+ self.remoteheads = None
+ # testable as a boolean indicating if any nodes are missing locally.
+ self.incoming = None
+ # set of all heads common after changeset bundle push
+ self.commonheads = None
+
+def push(repo, remote, force=False, revs=None, newbranch=False):
+ '''Push outgoing changesets (limited by revs) from a local
+ repository to remote. Return an integer:
+ - None means nothing to push
+ - 0 means HTTP error
+ - 1 means we pushed and remote head count is unchanged *or*
+ we have outgoing changesets but refused to push
+ - other values as described by addchangegroup()
+ '''
+ pushop = pushoperation(repo, remote, force, revs, newbranch)
+ if pushop.remote.local():
+ missing = (set(pushop.repo.requirements)
+ - pushop.remote.local().supported)
+ if missing:
+ msg = _("required features are not"
+ " supported in the destination:"
+ " %s") % (', '.join(sorted(missing)))
+ raise util.Abort(msg)
+
+ # there are two ways to push to remote repo:
+ #
+ # addchangegroup assumes local user can lock remote
+ # repo (local filesystem, old ssh servers).
+ #
+ # unbundle assumes local user cannot lock remote repo (new ssh
+ # servers, http servers).
+
+ if not pushop.remote.canpush():
+ raise util.Abort(_("destination does not support push"))
+ # get local lock as we might write phase data
+ locallock = None
+ try:
+ locallock = pushop.repo.lock()
+ pushop.locallocked = True
+ except IOError, err:
+ pushop.locallocked = False
+ if err.errno != errno.EACCES:
+ raise
+ # source repo cannot be locked.
+ # We do not abort the push, but just disable the local phase
+ # synchronisation.
+ msg = 'cannot lock source repository: %s\n' % err
+ pushop.ui.debug(msg)
+ try:
+ pushop.repo.checkpush(pushop.force, pushop.revs)
+ lock = None
+ unbundle = pushop.remote.capable('unbundle')
+ if not unbundle:
+ lock = pushop.remote.lock()
+ try:
+ _pushdiscovery(pushop)
+ if _pushcheckoutgoing(pushop):
+ _pushchangeset(pushop)
+ _pushcomputecommonheads(pushop)
+ _pushsyncphase(pushop)
+ _pushobsolete(pushop)
+ finally:
+ if lock is not None:
+ lock.release()
+ finally:
+ if locallock is not None:
+ locallock.release()
+
+ _pushbookmark(pushop)
+ return pushop.ret
+
+def _pushdiscovery(pushop):
+ # discovery
+ unfi = pushop.repo.unfiltered()
+ fci = discovery.findcommonincoming
+ commoninc = fci(unfi, pushop.remote, force=pushop.force)
+ common, inc, remoteheads = commoninc
+ fco = discovery.findcommonoutgoing
+ outgoing = fco(unfi, pushop.remote, onlyheads=pushop.revs,
+ commoninc=commoninc, force=pushop.force)
+ pushop.outgoing = outgoing
+ pushop.remoteheads = remoteheads
+ pushop.incoming = inc
+
+def _pushcheckoutgoing(pushop):
+ outgoing = pushop.outgoing
+ unfi = pushop.repo.unfiltered()
+ if not outgoing.missing:
+ # nothing to push
+ scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
+ return False
+ # something to push
+ if not pushop.force:
+ # if repo.obsstore == False --> no obsolete
+ # then, save the iteration
+ if unfi.obsstore:
+ # these messages are here for the 80-char limit reason
+ mso = _("push includes obsolete changeset: %s!")
+ mst = "push includes %s changeset: %s!"
+ # plain versions for i18n tool to detect them
+ _("push includes unstable changeset: %s!")
+ _("push includes bumped changeset: %s!")
+ _("push includes divergent changeset: %s!")
+ # If we are to push if there is at least one
+ # obsolete or unstable changeset in missing, at
+ # least one of the missinghead will be obsolete or
+ # unstable. So checking heads only is ok
+ for node in outgoing.missingheads:
+ ctx = unfi[node]
+ if ctx.obsolete():
+ raise util.Abort(mso % ctx)
+ elif ctx.troubled():
+ raise util.Abort(_(mst)
+ % (ctx.troubles()[0],
+ ctx))
+ newbm = pushop.ui.configlist('bookmarks', 'pushing')
+ discovery.checkheads(unfi, pushop.remote, outgoing,
+ pushop.remoteheads,
+ pushop.newbranch,
+ bool(pushop.incoming),
+ newbm)
+ return True
+
+def _pushchangeset(pushop):
+ """Make the actual push of changeset bundle to remote repo"""
+ outgoing = pushop.outgoing
+ unbundle = pushop.remote.capable('unbundle')
+ # TODO: get bundlecaps from remote
+ bundlecaps = None
+ # create a changegroup from local
+ if pushop.revs is None and not (outgoing.excluded
+ or pushop.repo.changelog.filteredrevs):
+ # push everything,
+ # use the fast path, no race possible on push
+ bundler = changegroup.bundle10(pushop.repo, bundlecaps)
+ cg = pushop.repo._changegroupsubset(outgoing,
+ bundler,
+ 'push',
+ fastpath=True)
+ else:
+ cg = pushop.repo.getlocalbundle('push', outgoing, bundlecaps)
+
+ # apply changegroup to remote
+ if unbundle:
+ # local repo finds heads on server, finds out what
+ # revs it must push. once revs transferred, if server
+ # finds it has different heads (someone else won
+ # commit/push race), server aborts.
+ if pushop.force:
+ remoteheads = ['force']
+ else:
+ remoteheads = pushop.remoteheads
+ # ssh: return remote's addchangegroup()
+ # http: return remote's addchangegroup() or 0 for error
+ pushop.ret = pushop.remote.unbundle(cg, remoteheads,
+ 'push')
+ else:
+ # we return an integer indicating remote head count
+ # change
+ pushop.ret = pushop.remote.addchangegroup(cg, 'push',
+ pushop.repo.url())
+
+def _pushcomputecommonheads(pushop):
+ unfi = pushop.repo.unfiltered()
+ if pushop.ret:
+ # push succeed, synchronize target of the push
+ cheads = pushop.outgoing.missingheads
+ elif pushop.revs is None:
+ # All-out push failed. synchronize all common
+ cheads = pushop.outgoing.commonheads
+ else:
+ # I want cheads = heads(::missingheads and ::commonheads)
+ # (missingheads is revs with secret changeset filtered out)
+ #
+ # This can be expressed as:
+ # cheads = ( (missingheads and ::commonheads)
+ # + (commonheads and ::missingheads))"
+ # )
+ #
+ # while trying to push we already computed the following:
+ # common = (::commonheads)
+ # missing = ((commonheads::missingheads) - commonheads)
+ #
+ # We can pick:
+ # * missingheads part of common (::commonheads)
+ common = set(pushop.outgoing.common)
+ nm = pushop.repo.changelog.nodemap
+ cheads = [node for node in pushop.revs if nm[node] in common]
+ # and
+ # * commonheads parents on missing
+ revset = unfi.set('%ln and parents(roots(%ln))',
+ pushop.outgoing.commonheads,
+ pushop.outgoing.missing)
+ cheads.extend(c.node() for c in revset)
+ pushop.commonheads = cheads
+
+def _pushsyncphase(pushop):
+ """synchronise phase information locally and remotely"""
+ unfi = pushop.repo.unfiltered()
+ cheads = pushop.commonheads
+ if pushop.ret:
+ # push succeed, synchronize target of the push
+ cheads = pushop.outgoing.missingheads
+ elif pushop.revs is None:
+ # All-out push failed. synchronize all common
+ cheads = pushop.outgoing.commonheads
+ else:
+ # I want cheads = heads(::missingheads and ::commonheads)
+ # (missingheads is revs with secret changeset filtered out)
+ #
+ # This can be expressed as:
+ # cheads = ( (missingheads and ::commonheads)
+ # + (commonheads and ::missingheads))"
+ # )
+ #
+ # while trying to push we already computed the following:
+ # common = (::commonheads)
+ # missing = ((commonheads::missingheads) - commonheads)
+ #
+ # We can pick:
+ # * missingheads part of common (::commonheads)
+ common = set(pushop.outgoing.common)
+ nm = pushop.repo.changelog.nodemap
+ cheads = [node for node in pushop.revs if nm[node] in common]
+ # and
+ # * commonheads parents on missing
+ revset = unfi.set('%ln and parents(roots(%ln))',
+ pushop.outgoing.commonheads,
+ pushop.outgoing.missing)
+ cheads.extend(c.node() for c in revset)
+ pushop.commonheads = cheads
+ # even when we don't push, exchanging phase data is useful
+ remotephases = pushop.remote.listkeys('phases')
+ if (pushop.ui.configbool('ui', '_usedassubrepo', False)
+ and remotephases # server supports phases
+ and pushop.ret is None # nothing was pushed
+ and remotephases.get('publishing', False)):
+ # When:
+ # - this is a subrepo push
+ # - and remote support phase
+ # - and no changeset was pushed
+ # - and remote is publishing
+ # We may be in issue 3871 case!
+ # We drop the possible phase synchronisation done by
+ # courtesy to publish changesets possibly locally draft
+ # on the remote.
+ remotephases = {'publishing': 'True'}
+ if not remotephases: # old server or public only repo
+ _localphasemove(pushop, cheads)
+ # don't push any phase data as there is nothing to push
+ else:
+ ana = phases.analyzeremotephases(pushop.repo, cheads,
+ remotephases)
+ pheads, droots = ana
+ ### Apply remote phase on local
+ if remotephases.get('publishing', False):
+ _localphasemove(pushop, cheads)
+ else: # publish = False
+ _localphasemove(pushop, pheads)
+ _localphasemove(pushop, cheads, phases.draft)
+ ### Apply local phase on remote
+
+ # Get the list of all revs draft on remote by public here.
+ # XXX Beware that revset break if droots is not strictly
+ # XXX root we may want to ensure it is but it is costly
+ outdated = unfi.set('heads((%ln::%ln) and public())',
+ droots, cheads)
+ for newremotehead in outdated:
+ r = pushop.remote.pushkey('phases',
+ newremotehead.hex(),
+ str(phases.draft),
+ str(phases.public))
+ if not r:
+ pushop.ui.warn(_('updating %s to public failed!\n')
+ % newremotehead)
+
+def _localphasemove(pushop, nodes, phase=phases.public):
+ """move <nodes> to <phase> in the local source repo"""
+ if pushop.locallocked:
+ phases.advanceboundary(pushop.repo, phase, nodes)
+ else:
+ # repo is not locked, do not change any phases!
+ # Informs the user that phases should have been moved when
+ # applicable.
+ actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
+ phasestr = phases.phasenames[phase]
+ if actualmoves:
+ pushop.ui.status(_('cannot lock source repo, skipping '
+ 'local %s phase update\n') % phasestr)
+
+def _pushobsolete(pushop):
+ """utility function to push obsolete markers to a remote"""
+ pushop.ui.debug('try to push obsolete markers to remote\n')
+ repo = pushop.repo
+ remote = pushop.remote
+ if (obsolete._enabled and repo.obsstore and
+ 'obsolete' in remote.listkeys('namespaces')):
+ rslts = []
+ remotedata = repo.listkeys('obsolete')
+ for key in sorted(remotedata, reverse=True):
+ # reverse sort to ensure we end with dump0
+ data = remotedata[key]
+ rslts.append(remote.pushkey('obsolete', key, '', data))
+ if [r for r in rslts if not r]:
+ msg = _('failed to push some obsolete markers!\n')
+ repo.ui.warn(msg)
+
+def _pushbookmark(pushop):
+ """Update bookmark position on remote"""
+ ui = pushop.ui
+ repo = pushop.repo.unfiltered()
+ remote = pushop.remote
+ ui.debug("checking for updated bookmarks\n")
+ revnums = map(repo.changelog.rev, pushop.revs or [])
+ ancestors = [a for a in repo.changelog.ancestors(revnums, inclusive=True)]
+ (addsrc, adddst, advsrc, advdst, diverge, differ, invalid
+ ) = bookmarks.compare(repo, repo._bookmarks, remote.listkeys('bookmarks'),
+ srchex=hex)
+
+ for b, scid, dcid in advsrc:
+ if ancestors and repo[scid].rev() not in ancestors:
+ continue
+ if remote.pushkey('bookmarks', b, dcid, scid):
+ ui.status(_("updating bookmark %s\n") % b)
+ else:
+ ui.warn(_('updating bookmark %s failed!\n') % b)
+
+class pulloperation(object):
+ """An object that represents a single pull operation
+
+ Its purpose is to carry pull-related state and very common operations.
+
+ A new one should be created at the beginning of each pull and
+ discarded afterward.
+ """
+
+ def __init__(self, repo, remote, heads=None, force=False):
+ # repo we pull from
+ self.repo = repo
+ # repo we pull to
+ self.remote = remote
+ # revision we try to pull (None is "all")
+ self.heads = heads
+ # do we force pull?
+ self.force = force
+ # the name the pull transaction
+ self._trname = 'pull\n' + util.hidepassword(remote.url())
+ # hold the transaction once created
+ self._tr = None
+ # set of common changeset between local and remote before pull
+ self.common = None
+ # set of pulled head
+ self.rheads = None
+ # list of missing changeset to fetch remotly
+ self.fetch = None
+
+ @util.propertycache
+ def pulledsubset(self):
+ """heads of the set of changeset target by the pull"""
+ # compute target subset
+ if self.heads is None:
+ # We pulled every thing possible
+ # sync on everything common
+ return self.common + self.rheads
+ else:
+ # We pulled a specific subset
+ # sync on this subset
+ return self.heads
+
+ def gettransaction(self):
+ """get appropriate pull transaction, creating it if needed"""
+ if self._tr is None:
+ self._tr = self.repo.transaction(self._trname)
+ return self._tr
+
+ def closetransaction(self):
+ """close transaction if created"""
+ if self._tr is not None:
+ self._tr.close()
+
+ def releasetransaction(self):
+ """release transaction if created"""
+ if self._tr is not None:
+ self._tr.release()
+
+def pull(repo, remote, heads=None, force=False):
+ pullop = pulloperation(repo, remote, heads, force)
+ if pullop.remote.local():
+ missing = set(pullop.remote.requirements) - pullop.repo.supported
+ if missing:
+ msg = _("required features are not"
+ " supported in the destination:"
+ " %s") % (', '.join(sorted(missing)))
+ raise util.Abort(msg)
+
+ lock = pullop.repo.lock()
+ try:
+ tmp = discovery.findcommonincoming(pullop.repo.unfiltered(),
+ pullop.remote,
+ heads=pullop.heads,
+ force=force)
+ pullop.common, pullop.fetch, pullop.rheads = tmp
+ if not pullop.fetch:
+ pullop.repo.ui.status(_("no changes found\n"))
+ result = 0
+ else:
+ result = _pullchangeset(pullop)
+
+ _pullphase(pullop)
+ _pullobsolete(pullop)
+ pullop.closetransaction()
+ finally:
+ pullop.releasetransaction()
+ lock.release()
+
+ return result
+
+def _pullchangeset(pullop):
+ """pull changeset from unbundle into the local repo"""
+ # We delay the open of the transaction as late as possible so we
+ # don't open transaction for nothing or you break future useful
+ # rollback call
+ pullop.gettransaction()
+ if pullop.heads is None and list(pullop.common) == [nullid]:
+ pullop.repo.ui.status(_("requesting all changes\n"))
+ elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
+ # issue1320, avoid a race if remote changed after discovery
+ pullop.heads = pullop.rheads
+
+ if pullop.remote.capable('getbundle'):
+ # TODO: get bundlecaps from remote
+ cg = pullop.remote.getbundle('pull', common=pullop.common,
+ heads=pullop.heads or pullop.rheads)
+ elif pullop.heads is None:
+ cg = pullop.remote.changegroup(pullop.fetch, 'pull')
+ elif not pullop.remote.capable('changegroupsubset'):
+ raise util.Abort(_("partial pull cannot be done because "
+ "other repository doesn't support "
+ "changegroupsubset."))
+ else:
+ cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
+ return pullop.repo.addchangegroup(cg, 'pull', pullop.remote.url())
+
+def _pullphase(pullop):
+ # Get remote phases data from remote
+ remotephases = pullop.remote.listkeys('phases')
+ publishing = bool(remotephases.get('publishing', False))
+ if remotephases and not publishing:
+ # remote is new and unpublishing
+ pheads, _dr = phases.analyzeremotephases(pullop.repo,
+ pullop.pulledsubset,
+ remotephases)
+ phases.advanceboundary(pullop.repo, phases.public, pheads)
+ phases.advanceboundary(pullop.repo, phases.draft,
+ pullop.pulledsubset)
+ else:
+ # Remote is old or publishing all common changesets
+ # should be seen as public
+ phases.advanceboundary(pullop.repo, phases.public,
+ pullop.pulledsubset)
+
+def _pullobsolete(pullop):
+ """utility function to pull obsolete markers from a remote
+
+ The `gettransaction` is function that return the pull transaction, creating
+ one if necessary. We return the transaction to inform the calling code that
+ a new transaction have been created (when applicable).
+
+ Exists mostly to allow overriding for experimentation purpose"""
+ tr = None
+ if obsolete._enabled:
+ pullop.repo.ui.debug('fetching remote obsolete markers\n')
+ remoteobs = pullop.remote.listkeys('obsolete')
+ if 'dump0' in remoteobs:
+ tr = pullop.gettransaction()
+ for key in sorted(remoteobs, reverse=True):
+ if key.startswith('dump'):
+ data = base85.b85decode(remoteobs[key])
+ pullop.repo.obsstore.mergemarkers(tr, data)
+ pullop.repo.invalidatevolatilesets()
+ return tr
+
--- a/mercurial/help/config.txt Wed Feb 19 22:19:45 2014 +0900
+++ b/mercurial/help/config.txt Wed Feb 19 16:46:47 2014 -0600
@@ -85,7 +85,9 @@
be read. Mercurial checks each of these locations in the specified
order until one or more configuration files are detected.
-.. note:: The registry key ``HKEY_LOCAL_MACHINE\SOFTWARE\Wow6432Node\Mercurial``
+.. note::
+
+ The registry key ``HKEY_LOCAL_MACHINE\SOFTWARE\Wow6432Node\Mercurial``
is used when running 32-bit Python on 64-bit Windows.
Syntax
@@ -204,7 +206,9 @@
stable5 = latest -b stable
-.. note:: It is possible to create aliases with the same names as
+.. note::
+
+ It is possible to create aliases with the same names as
existing commands, which will then override the original
definitions. This is almost always a bad idea!
@@ -235,7 +239,9 @@
``$HG_ARGS`` expands to the arguments given to Mercurial. In the ``hg
echo foo`` call above, ``$HG_ARGS`` would expand to ``echo foo``.
-.. note:: Some global configuration options such as ``-R`` are
+.. note::
+
+ Some global configuration options such as ``-R`` are
processed before shell aliases and will thus not be passed to
aliases.
@@ -362,7 +368,9 @@
of an empty temporary file, where the filtered data must be written by
the command.
-.. note:: The tempfile mechanism is recommended for Windows systems,
+.. note::
+
+ The tempfile mechanism is recommended for Windows systems,
where the standard shell I/O redirection operators often have
strange effects and may corrupt the contents of your files.
@@ -708,13 +716,17 @@
in ``$HG_PARENT2``. If the update succeeded, ``$HG_ERROR=0``. If the
update failed (e.g. because conflicts not resolved), ``$HG_ERROR=1``.
-.. note:: It is generally better to use standard hooks rather than the
+.. note::
+
+ It is generally better to use standard hooks rather than the
generic pre- and post- command hooks as they are guaranteed to be
called in the appropriate contexts for influencing transactions.
Also, hooks like "commit" will be called in all contexts that
generate a commit (e.g. tag) and not just the commit command.
-.. note:: Environment variables with empty values may not be passed to
+.. note::
+
+ Environment variables with empty values may not be passed to
hooks on platforms such as Windows. As an example, ``$HG_PARENT2``
will have an empty value under Unix-like platforms for non-merge
changesets, while it will not be available at all under Windows.
--- a/mercurial/help/hgignore.txt Wed Feb 19 22:19:45 2014 +0900
+++ b/mercurial/help/hgignore.txt Wed Feb 19 16:46:47 2014 -0600
@@ -69,6 +69,7 @@
regexp pattern, start it with ``^``.
.. note::
+
Patterns specified in other than ``.hgignore`` are always rooted.
Please see :hg:`help patterns` for details.
--- a/mercurial/help/merge-tools.txt Wed Feb 19 22:19:45 2014 +0900
+++ b/mercurial/help/merge-tools.txt Wed Feb 19 16:46:47 2014 -0600
@@ -73,6 +73,7 @@
8. The merge of the file fails and must be resolved before commit.
.. note::
+
After selecting a merge program, Mercurial will by default attempt
to merge the files using a simple merge algorithm first. Only if it doesn't
succeed because of conflicting changes Mercurial will actually execute the
--- a/mercurial/help/patterns.txt Wed Feb 19 22:19:45 2014 +0900
+++ b/mercurial/help/patterns.txt Wed Feb 19 16:46:47 2014 -0600
@@ -7,6 +7,7 @@
Alternate pattern notations must be specified explicitly.
.. note::
+
Patterns specified in ``.hgignore`` are not rooted.
Please see :hg:`help hgignore` for details.
--- a/mercurial/help/phases.txt Wed Feb 19 22:19:45 2014 +0900
+++ b/mercurial/help/phases.txt Wed Feb 19 16:46:47 2014 -0600
@@ -42,6 +42,7 @@
- secret changesets are neither pushed, pulled, or cloned
.. note::
+
Pulling a draft changeset from a publishing server does not mark it
as public on the server side due to the read-only nature of pull.
@@ -55,10 +56,12 @@
See :hg:`help config` for more information on configuration files.
.. note::
+
Servers running older versions of Mercurial are treated as
publishing.
.. note::
+
Changesets in secret phase are not exchanged with the server. This
applies to their content: file names, file contents, and changeset
metadata. For technical reasons, the identifier (e.g. d825e4025e39)
--- a/mercurial/help/subrepos.txt Wed Feb 19 22:19:45 2014 +0900
+++ b/mercurial/help/subrepos.txt Wed Feb 19 16:46:47 2014 -0600
@@ -39,6 +39,7 @@
repositories states when committing in the parent repository.
.. note::
+
The ``.hgsubstate`` file should not be edited manually.
--- a/mercurial/help/templates.txt Wed Feb 19 22:19:45 2014 +0900
+++ b/mercurial/help/templates.txt Wed Feb 19 16:46:47 2014 -0600
@@ -52,14 +52,20 @@
- if(expr, then[, else])
+- ifcontains(expr, expr, then[, else])
+
- ifeq(expr, expr, then[, else])
- join(list, sep)
- label(label, expr)
+- revset(query[, formatargs])
+
- rstdoc(text, style)
+- shortest(node)
+
- strip(text[, chars])
- sub(pat, repl, expr)
@@ -106,3 +112,11 @@
- Display the contents of the 'extra' field, one per line::
$ hg log -r 0 --template "{join(extras, '\n')}\n"
+
+- Mark the current bookmark with '*'::
+
+ $ hg log --template "{bookmarks % '{bookmark}{ifeq(bookmark, current, \"*\")} '}\n"
+
+- Mark the working copy parent with '@'::
+
+ $ hg log --template "{ifcontains(rev, revset('.'), '@')}\n"
--- a/mercurial/hgweb/webcommands.py Wed Feb 19 22:19:45 2014 +0900
+++ b/mercurial/hgweb/webcommands.py Wed Feb 19 16:46:47 2014 -0600
@@ -187,7 +187,7 @@
mfunc = revset.match(web.repo.ui, revdef)
try:
- revs = mfunc(web.repo, list(web.repo))
+ revs = mfunc(web.repo, revset.baseset(web.repo))
return MODE_REVSET, revs
# ParseError: wrongly placed tokens, wrongs arguments, etc
# RepoLookupError: no such revision, e.g. in 'revision:'
--- a/mercurial/localrepo.py Wed Feb 19 22:19:45 2014 +0900
+++ b/mercurial/localrepo.py Wed Feb 19 16:46:47 2014 -0600
@@ -9,7 +9,7 @@
import peer, changegroup, subrepo, discovery, pushkey, obsolete, repoview
import changelog, dirstate, filelog, manifest, context, bookmarks, phases
import lock as lockmod
-import transaction, store, encoding
+import transaction, store, encoding, exchange
import scmutil, util, extensions, hook, error, revset
import match as matchmod
import merge as mergemod
@@ -428,7 +428,7 @@
'''Return a list of revisions matching the given revset'''
expr = revset.formatspec(expr, *args)
m = revset.match(None, expr)
- return [r for r in m(self, list(self))]
+ return revset.baseset([r for r in m(self, revset.baseset(self))])
def set(self, expr, *args):
'''
@@ -1005,6 +1005,7 @@
l = lockmod.lock(vfs, lockname,
int(self.ui.config("ui", "timeout", "600")),
releasefn, desc=desc)
+ self.ui.warn(_("got lock after %s seconds\n") % l.delay)
if acquirefn:
acquirefn()
return l
@@ -1578,7 +1579,7 @@
r = modified, added, removed, deleted, unknown, ignored, clean
if listsubrepos:
- for subpath, sub in subrepo.itersubrepos(ctx1, ctx2):
+ for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
if working:
rev2 = None
else:
@@ -1658,89 +1659,7 @@
return r
def pull(self, remote, heads=None, force=False):
- if remote.local():
- missing = set(remote.requirements) - self.supported
- if missing:
- msg = _("required features are not"
- " supported in the destination:"
- " %s") % (', '.join(sorted(missing)))
- raise util.Abort(msg)
-
- # don't open transaction for nothing or you break future useful
- # rollback call
- tr = None
- trname = 'pull\n' + util.hidepassword(remote.url())
- lock = self.lock()
- try:
- tmp = discovery.findcommonincoming(self.unfiltered(), remote,
- heads=heads, force=force)
- common, fetch, rheads = tmp
- if not fetch:
- self.ui.status(_("no changes found\n"))
- result = 0
- else:
- tr = self.transaction(trname)
- if heads is None and list(common) == [nullid]:
- self.ui.status(_("requesting all changes\n"))
- elif heads is None and remote.capable('changegroupsubset'):
- # issue1320, avoid a race if remote changed after discovery
- heads = rheads
-
- if remote.capable('getbundle'):
- # TODO: get bundlecaps from remote
- cg = remote.getbundle('pull', common=common,
- heads=heads or rheads)
- elif heads is None:
- cg = remote.changegroup(fetch, 'pull')
- elif not remote.capable('changegroupsubset'):
- raise util.Abort(_("partial pull cannot be done because "
- "other repository doesn't support "
- "changegroupsubset."))
- else:
- cg = remote.changegroupsubset(fetch, heads, 'pull')
- result = self.addchangegroup(cg, 'pull', remote.url())
-
- # compute target subset
- if heads is None:
- # We pulled every thing possible
- # sync on everything common
- subset = common + rheads
- else:
- # We pulled a specific subset
- # sync on this subset
- subset = heads
-
- # Get remote phases data from remote
- remotephases = remote.listkeys('phases')
- publishing = bool(remotephases.get('publishing', False))
- if remotephases and not publishing:
- # remote is new and unpublishing
- pheads, _dr = phases.analyzeremotephases(self, subset,
- remotephases)
- phases.advanceboundary(self, phases.public, pheads)
- phases.advanceboundary(self, phases.draft, subset)
- else:
- # Remote is old or publishing all common changesets
- # should be seen as public
- phases.advanceboundary(self, phases.public, subset)
-
- def gettransaction():
- if tr is None:
- return self.transaction(trname)
- return tr
-
- obstr = obsolete.syncpull(self, remote, gettransaction)
- if obstr is not None:
- tr = obstr
-
- if tr is not None:
- tr.close()
- finally:
- if tr is not None:
- tr.release()
- lock.release()
-
- return result
+ return exchange.pull (self, remote, heads, force)
def checkpush(self, force, revs):
"""Extensions can override this function if additional checks have
@@ -1750,223 +1669,7 @@
pass
def push(self, remote, force=False, revs=None, newbranch=False):
- '''Push outgoing changesets (limited by revs) from the current
- repository to remote. Return an integer:
- - None means nothing to push
- - 0 means HTTP error
- - 1 means we pushed and remote head count is unchanged *or*
- we have outgoing changesets but refused to push
- - other values as described by addchangegroup()
- '''
- if remote.local():
- missing = set(self.requirements) - remote.local().supported
- if missing:
- msg = _("required features are not"
- " supported in the destination:"
- " %s") % (', '.join(sorted(missing)))
- raise util.Abort(msg)
-
- # there are two ways to push to remote repo:
- #
- # addchangegroup assumes local user can lock remote
- # repo (local filesystem, old ssh servers).
- #
- # unbundle assumes local user cannot lock remote repo (new ssh
- # servers, http servers).
-
- if not remote.canpush():
- raise util.Abort(_("destination does not support push"))
- unfi = self.unfiltered()
- def localphasemove(nodes, phase=phases.public):
- """move <nodes> to <phase> in the local source repo"""
- if locallock is not None:
- phases.advanceboundary(self, phase, nodes)
- else:
- # repo is not locked, do not change any phases!
- # Informs the user that phases should have been moved when
- # applicable.
- actualmoves = [n for n in nodes if phase < self[n].phase()]
- phasestr = phases.phasenames[phase]
- if actualmoves:
- self.ui.status(_('cannot lock source repo, skipping local'
- ' %s phase update\n') % phasestr)
- # get local lock as we might write phase data
- locallock = None
- try:
- locallock = self.lock()
- except IOError, err:
- if err.errno != errno.EACCES:
- raise
- # source repo cannot be locked.
- # We do not abort the push, but just disable the local phase
- # synchronisation.
- msg = 'cannot lock source repository: %s\n' % err
- self.ui.debug(msg)
- try:
- self.checkpush(force, revs)
- lock = None
- unbundle = remote.capable('unbundle')
- if not unbundle:
- lock = remote.lock()
- try:
- # discovery
- fci = discovery.findcommonincoming
- commoninc = fci(unfi, remote, force=force)
- common, inc, remoteheads = commoninc
- fco = discovery.findcommonoutgoing
- outgoing = fco(unfi, remote, onlyheads=revs,
- commoninc=commoninc, force=force)
-
-
- if not outgoing.missing:
- # nothing to push
- scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
- ret = None
- else:
- # something to push
- if not force:
- # if self.obsstore == False --> no obsolete
- # then, save the iteration
- if unfi.obsstore:
- # this message are here for 80 char limit reason
- mso = _("push includes obsolete changeset: %s!")
- mst = "push includes %s changeset: %s!"
- # plain versions for i18n tool to detect them
- _("push includes unstable changeset: %s!")
- _("push includes bumped changeset: %s!")
- _("push includes divergent changeset: %s!")
- # If we are to push if there is at least one
- # obsolete or unstable changeset in missing, at
- # least one of the missinghead will be obsolete or
- # unstable. So checking heads only is ok
- for node in outgoing.missingheads:
- ctx = unfi[node]
- if ctx.obsolete():
- raise util.Abort(mso % ctx)
- elif ctx.troubled():
- raise util.Abort(_(mst)
- % (ctx.troubles()[0],
- ctx))
- newbm = self.ui.configlist('bookmarks', 'pushing')
- discovery.checkheads(unfi, remote, outgoing,
- remoteheads, newbranch,
- bool(inc), newbm)
-
- # TODO: get bundlecaps from remote
- bundlecaps = None
- # create a changegroup from local
- if revs is None and not (outgoing.excluded
- or self.changelog.filteredrevs):
- # push everything,
- # use the fast path, no race possible on push
- bundler = changegroup.bundle10(self, bundlecaps)
- cg = self._changegroupsubset(outgoing,
- bundler,
- 'push',
- fastpath=True)
- else:
- cg = self.getlocalbundle('push', outgoing, bundlecaps)
-
- # apply changegroup to remote
- if unbundle:
- # local repo finds heads on server, finds out what
- # revs it must push. once revs transferred, if server
- # finds it has different heads (someone else won
- # commit/push race), server aborts.
- if force:
- remoteheads = ['force']
- # ssh: return remote's addchangegroup()
- # http: return remote's addchangegroup() or 0 for error
- ret = remote.unbundle(cg, remoteheads, 'push')
- else:
- # we return an integer indicating remote head count
- # change
- ret = remote.addchangegroup(cg, 'push', self.url())
-
- if ret:
- # push succeed, synchronize target of the push
- cheads = outgoing.missingheads
- elif revs is None:
- # All out push fails. synchronize all common
- cheads = outgoing.commonheads
- else:
- # I want cheads = heads(::missingheads and ::commonheads)
- # (missingheads is revs with secret changeset filtered out)
- #
- # This can be expressed as:
- # cheads = ( (missingheads and ::commonheads)
- # + (commonheads and ::missingheads))"
- # )
- #
- # while trying to push we already computed the following:
- # common = (::commonheads)
- # missing = ((commonheads::missingheads) - commonheads)
- #
- # We can pick:
- # * missingheads part of common (::commonheads)
- common = set(outgoing.common)
- nm = self.changelog.nodemap
- cheads = [node for node in revs if nm[node] in common]
- # and
- # * commonheads parents on missing
- revset = unfi.set('%ln and parents(roots(%ln))',
- outgoing.commonheads,
- outgoing.missing)
- cheads.extend(c.node() for c in revset)
- # even when we don't push, exchanging phase data is useful
- remotephases = remote.listkeys('phases')
- if (self.ui.configbool('ui', '_usedassubrepo', False)
- and remotephases # server supports phases
- and ret is None # nothing was pushed
- and remotephases.get('publishing', False)):
- # When:
- # - this is a subrepo push
- # - and remote support phase
- # - and no changeset was pushed
- # - and remote is publishing
- # We may be in issue 3871 case!
- # We drop the possible phase synchronisation done by
- # courtesy to publish changesets possibly locally draft
- # on the remote.
- remotephases = {'publishing': 'True'}
- if not remotephases: # old server or public only repo
- localphasemove(cheads)
- # don't push any phase data as there is nothing to push
- else:
- ana = phases.analyzeremotephases(self, cheads, remotephases)
- pheads, droots = ana
- ### Apply remote phase on local
- if remotephases.get('publishing', False):
- localphasemove(cheads)
- else: # publish = False
- localphasemove(pheads)
- localphasemove(cheads, phases.draft)
- ### Apply local phase on remote
-
- # Get the list of all revs draft on remote by public here.
- # XXX Beware that revset break if droots is not strictly
- # XXX root we may want to ensure it is but it is costly
- outdated = unfi.set('heads((%ln::%ln) and public())',
- droots, cheads)
- for newremotehead in outdated:
- r = remote.pushkey('phases',
- newremotehead.hex(),
- str(phases.draft),
- str(phases.public))
- if not r:
- self.ui.warn(_('updating %s to public failed!\n')
- % newremotehead)
- self.ui.debug('try to push obsolete markers to remote\n')
- obsolete.syncpush(self, remote)
- finally:
- if lock is not None:
- lock.release()
- finally:
- if locallock is not None:
- locallock.release()
-
- bookmarks.updateremote(self.ui, unfi, remote, revs)
- return ret
+ return exchange.push(self, remote, force, revs, newbranch)
def changegroupinfo(self, nodes, source):
if self.ui.verbose or source == 'bundle':
@@ -1976,9 +1679,9 @@
for node in nodes:
self.ui.debug("%s\n" % hex(node))
- def changegroupsubset(self, bases, heads, source):
+ def changegroupsubset(self, roots, heads, source):
"""Compute a changegroup consisting of all the nodes that are
- descendants of any of the bases and ancestors of any of the heads.
+ descendants of any of the roots and ancestors of any of the heads.
Return a chunkbuffer object whose read() method will return
successive changegroup chunks.
@@ -1990,12 +1693,12 @@
the changegroup a particular filenode or manifestnode belongs to.
"""
cl = self.changelog
- if not bases:
- bases = [nullid]
+ if not roots:
+ roots = [nullid]
# TODO: remove call to nodesbetween.
- csets, bases, heads = cl.nodesbetween(bases, heads)
+ csets, roots, heads = cl.nodesbetween(roots, heads)
discbases = []
- for n in bases:
+ for n in roots:
discbases.extend([p for p in cl.parents(n) if p != nullid])
outgoing = discovery.outgoing(cl, discbases, heads)
bundler = changegroup.bundle10(self)
--- a/mercurial/lock.py Wed Feb 19 22:19:45 2014 +0900
+++ b/mercurial/lock.py Wed Feb 19 16:46:47 2014 -0600
@@ -38,7 +38,7 @@
self.desc = desc
self.postrelease = []
self.pid = os.getpid()
- self.lock()
+ self.delay = self.lock()
def __del__(self):
if self.held:
@@ -57,7 +57,7 @@
while True:
try:
self.trylock()
- return 1
+ return self.timeout - timeout
except error.LockHeld, inst:
if timeout != 0:
time.sleep(1)
--- a/mercurial/match.py Wed Feb 19 22:19:45 2014 +0900
+++ b/mercurial/match.py Wed Feb 19 16:46:47 2014 -0600
@@ -6,7 +6,7 @@
# GNU General Public License version 2 or any later version.
import re
-import util, fileset, pathutil
+import util, pathutil
from i18n import _
def _rematcher(pat):
@@ -26,7 +26,7 @@
if kind == 'set':
if not ctx:
raise util.Abort("fileset expression with no context")
- s = fileset.getfileset(ctx, expr)
+ s = ctx.getfileset(expr)
fset.update(s)
continue
other.append((kind, expr))
--- a/mercurial/obsolete.py Wed Feb 19 22:19:45 2014 +0900
+++ b/mercurial/obsolete.py Wed Feb 19 16:46:47 2014 -0600
@@ -256,6 +256,9 @@
* ensuring it is hashable
* check mandatory metadata
* encode metadata
+
+ If you are a human writing code creating marker you want to use the
+ `createmarkers` function in this module instead.
"""
if metadata is None:
metadata = {}
@@ -384,43 +387,6 @@
finally:
lock.release()
-def syncpush(repo, remote):
- """utility function to push obsolete markers to a remote
-
- Exist mostly to allow overriding for experimentation purpose"""
- if (_enabled and repo.obsstore and
- 'obsolete' in remote.listkeys('namespaces')):
- rslts = []
- remotedata = repo.listkeys('obsolete')
- for key in sorted(remotedata, reverse=True):
- # reverse sort to ensure we end with dump0
- data = remotedata[key]
- rslts.append(remote.pushkey('obsolete', key, '', data))
- if [r for r in rslts if not r]:
- msg = _('failed to push some obsolete markers!\n')
- repo.ui.warn(msg)
-
-def syncpull(repo, remote, gettransaction):
- """utility function to pull obsolete markers from a remote
-
- The `gettransaction` is function that return the pull transaction, creating
- one if necessary. We return the transaction to inform the calling code that
- a new transaction have been created (when applicable).
-
- Exists mostly to allow overriding for experimentation purpose"""
- tr = None
- if _enabled:
- repo.ui.debug('fetching remote obsolete markers\n')
- remoteobs = remote.listkeys('obsolete')
- if 'dump0' in remoteobs:
- tr = gettransaction()
- for key in sorted(remoteobs, reverse=True):
- if key.startswith('dump'):
- data = base85.b85decode(remoteobs[key])
- repo.obsstore.mergemarkers(tr, data)
- repo.invalidatevolatilesets()
- return tr
-
def allmarkers(repo):
"""all obsolete markers known in a repository"""
for markerdata in repo.obsstore:
@@ -845,8 +811,10 @@
def createmarkers(repo, relations, flag=0, metadata=None):
"""Add obsolete markers between changesets in a repo
- <relations> must be an iterable of (<old>, (<new>, ...)) tuple.
- `old` and `news` are changectx.
+ <relations> must be an iterable of (<old>, (<new>, ...)[,{metadata}])
+ tuple. `old` and `news` are changectx. metadata is an optional dictionnary
+ containing metadata for this marker only. It is merged with the global
+ metadata specified through the `metadata` argument of this function,
Trying to obsolete a public changeset will raise an exception.
@@ -865,7 +833,13 @@
metadata['user'] = repo.ui.username()
tr = repo.transaction('add-obsolescence-marker')
try:
- for prec, sucs in relations:
+ for rel in relations:
+ prec = rel[0]
+ sucs = rel[1]
+ localmetadata = metadata.copy()
+ if 2 < len(rel):
+ localmetadata.update(rel[2])
+
if not prec.mutable():
raise util.Abort("cannot obsolete immutable changeset: %s"
% prec)
@@ -873,7 +847,7 @@
nsucs = tuple(s.node() for s in sucs)
if nprec in nsucs:
raise util.Abort("changeset %s cannot obsolete itself" % prec)
- repo.obsstore.create(tr, nprec, nsucs, flag, metadata)
+ repo.obsstore.create(tr, nprec, nsucs, flag, localmetadata)
repo.filteredrevcache.clear()
tr.close()
finally:
--- a/mercurial/repoview.py Wed Feb 19 22:19:45 2014 +0900
+++ b/mercurial/repoview.py Wed Feb 19 16:46:47 2014 -0600
@@ -9,7 +9,7 @@
import copy
import phases
import util
-import obsolete, revset
+import obsolete
def hideablerevs(repo):
@@ -28,8 +28,9 @@
cl = repo.changelog
firsthideable = min(hideable)
revs = cl.revs(start=firsthideable)
- blockers = [r for r in revset._children(repo, revs, hideable)
- if r not in hideable]
+ tofilter = repo.revs(
+ '(%ld) and children(%ld)', list(revs), list(hideable))
+ blockers = [r for r in tofilter if r not in hideable]
for par in repo[None].parents():
blockers.append(par.rev())
for bm in repo._bookmarks.values():
--- a/mercurial/revset.py Wed Feb 19 22:19:45 2014 +0900
+++ b/mercurial/revset.py Wed Feb 19 16:46:47 2014 -0600
@@ -53,9 +53,9 @@
"""Return all paths between roots and heads, inclusive of both endpoint
sets."""
if not roots:
- return []
+ return baseset([])
parentrevs = repo.changelog.parentrevs
- visit = heads[:]
+ visit = baseset(heads)
reachable = set()
seen = {}
minroot = min(roots)
@@ -72,12 +72,12 @@
if parent >= minroot and parent not in seen:
visit.append(parent)
if not reachable:
- return []
+ return baseset([])
for rev in sorted(seen):
for parent in seen[rev]:
if parent in reachable:
reachable.add(rev)
- return sorted(reachable)
+ return baseset(sorted(reachable))
elements = {
"(": (20, ("group", 1, ")"), ("func", 1, ")")),
@@ -195,7 +195,10 @@
def getset(repo, subset, x):
if not x:
raise error.ParseError(_("missing argument"))
- return methods[x[0]](repo, subset, *x[1:])
+ s = methods[x[0]](repo, subset, *x[1:])
+ if util.safehasattr(s, 'set'):
+ return s
+ return baseset(s)
def _getrevsource(repo, r):
extra = repo[r].extra()
@@ -212,10 +215,10 @@
def stringset(repo, subset, x):
x = repo[x].rev()
if x == -1 and len(subset) == len(repo):
- return [-1]
+ return baseset([-1])
if len(subset) == len(repo) or x in subset:
- return [x]
- return []
+ return baseset([x])
+ return baseset([])
def symbolset(repo, subset, x):
if x in symbols:
@@ -223,39 +226,36 @@
return stringset(repo, subset, x)
def rangeset(repo, subset, x, y):
- cl = repo.changelog
+ cl = baseset(repo.changelog)
m = getset(repo, cl, x)
n = getset(repo, cl, y)
if not m or not n:
- return []
+ return baseset([])
m, n = m[0], n[-1]
if m < n:
- r = range(m, n + 1)
+ r = spanset(repo, m, n + 1)
else:
- r = range(m, n - 1, -1)
- s = set(subset)
- return [x for x in r if x in s]
+ r = spanset(repo, m, n - 1)
+ return r & subset
def dagrange(repo, subset, x, y):
- r = list(repo)
+ r = spanset(repo)
xs = _revsbetween(repo, getset(repo, r, x), getset(repo, r, y))
- s = set(subset)
- return [r for r in xs if r in s]
+ s = subset.set()
+ return baseset([r for r in xs if r in s])
def andset(repo, subset, x, y):
return getset(repo, getset(repo, subset, x), y)
def orset(repo, subset, x, y):
xl = getset(repo, subset, x)
- s = set(xl)
- yl = getset(repo, [r for r in subset if r not in s], y)
+ yl = getset(repo, subset - xl, y)
return xl + yl
def notset(repo, subset, x):
- s = set(getset(repo, subset, x))
- return [r for r in subset if r not in s]
+ return subset - getset(repo, subset, x)
def listset(repo, subset, a, b):
raise error.ParseError(_("can't use a list in this context"))
@@ -289,7 +289,7 @@
"""
# i18n: "ancestor" is a keyword
l = getlist(x)
- rl = list(repo)
+ rl = spanset(repo)
anc = None
# (getset(repo, rl, i) for i in l) generates a list of lists
@@ -304,15 +304,15 @@
anc = rev(ancestor(node(anc), node(r)))
if anc is not None and anc in subset:
- return [anc]
- return []
+ return baseset([anc])
+ return baseset([])
def _ancestors(repo, subset, x, followfirst=False):
- args = getset(repo, list(repo), x)
+ args = getset(repo, spanset(repo), x)
if not args:
- return []
+ return baseset([])
s = set(_revancestors(repo, args, followfirst)) | set(args)
- return [r for r in subset if r in s]
+ return baseset([r for r in subset if r in s])
def ancestors(repo, subset, x):
"""``ancestors(set)``
@@ -336,11 +336,11 @@
raise error.ParseError(_("~ expects a number"))
ps = set()
cl = repo.changelog
- for r in getset(repo, cl, x):
+ for r in getset(repo, baseset(cl), x):
for i in range(n):
r = cl.parentrevs(r)[0]
ps.add(r)
- return [r for r in subset if r in ps]
+ return baseset([r for r in subset if r in ps])
def author(repo, subset, x):
"""``author(string)``
@@ -349,7 +349,7 @@
# i18n: "author" is a keyword
n = encoding.lower(getstring(x, _("author requires a string")))
kind, pattern, matcher = _substringmatcher(n)
- return [r for r in subset if matcher(encoding.lower(repo[r].user()))]
+ return lazyset(subset, lambda x: matcher(encoding.lower(repo[x].user())))
def bisect(repo, subset, x):
"""``bisect(string)``
@@ -366,7 +366,7 @@
# i18n: "bisect" is a keyword
status = getstring(x, _("bisect requires a string")).lower()
state = set(hbisect.get(repo, status))
- return [r for r in subset if r in state]
+ return baseset([r for r in subset if r in state])
# Backward-compatibility
# - no help entry so that we do not advertise it any more
@@ -393,7 +393,7 @@
if not bmrev:
raise util.Abort(_("bookmark '%s' does not exist") % bm)
bmrev = repo[bmrev].rev()
- return [r for r in subset if r == bmrev]
+ return lazyset(subset, lambda r: r == bmrev)
else:
matchrevs = set()
for name, bmrev in repo._bookmarks.iteritems():
@@ -405,11 +405,11 @@
bmrevs = set()
for bmrev in matchrevs:
bmrevs.add(repo[bmrev].rev())
- return [r for r in subset if r in bmrevs]
+ return subset & bmrevs
bms = set([repo[r].rev()
for r in repo._bookmarks.values()])
- return [r for r in subset if r in bms]
+ return lazyset(subset, lambda r: r in bms)
def branch(repo, subset, x):
"""``branch(string or set)``
@@ -431,16 +431,16 @@
# note: falls through to the revspec case if no branch with
# this name exists
if pattern in repo.branchmap():
- return [r for r in subset if matcher(repo[r].branch())]
+ return lazyset(subset, lambda r: matcher(repo[r].branch()))
else:
- return [r for r in subset if matcher(repo[r].branch())]
+ return lazyset(subset, lambda r: matcher(repo[r].branch()))
- s = getset(repo, list(repo), x)
+ s = getset(repo, spanset(repo), x)
b = set()
for r in s:
b.add(repo[r].branch())
- s = set(s)
- return [r for r in subset if r in s or repo[r].branch() in b]
+ s = s.set()
+ return lazyset(subset, lambda r: r in s or repo[r].branch() in b)
def bumped(repo, subset, x):
"""``bumped()``
@@ -451,7 +451,7 @@
# i18n: "bumped" is a keyword
getargs(x, 0, 0, _("bumped takes no arguments"))
bumped = obsmod.getrevs(repo, 'bumped')
- return [r for r in subset if r in bumped]
+ return subset & bumped
def bundle(repo, subset, x):
"""``bundle()``
@@ -463,43 +463,43 @@
bundlerevs = repo.changelog.bundlerevs
except AttributeError:
raise util.Abort(_("no bundle provided - specify with -R"))
- return [r for r in subset if r in bundlerevs]
+ return subset & bundlerevs
def checkstatus(repo, subset, pat, field):
- m = None
- s = []
hasset = matchmod.patkind(pat) == 'set'
- fname = None
- for r in subset:
- c = repo[r]
+
+ def matches(x):
+ m = None
+ fname = None
+ c = repo[x]
if not m or hasset:
m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=c)
if not m.anypats() and len(m.files()) == 1:
fname = m.files()[0]
if fname is not None:
if fname not in c.files():
- continue
+ return False
else:
for f in c.files():
if m(f):
break
else:
- continue
+ return False
files = repo.status(c.p1().node(), c.node())[field]
if fname is not None:
if fname in files:
- s.append(r)
+ return True
else:
for f in files:
if m(f):
- s.append(r)
- break
- return s
+ return True
+
+ return lazyset(subset, matches)
def _children(repo, narrow, parentset):
cs = set()
if not parentset:
- return cs
+ return baseset(cs)
pr = repo.changelog.parentrevs
minrev = min(parentset)
for r in narrow:
@@ -508,15 +508,15 @@
for p in pr(r):
if p in parentset:
cs.add(r)
- return cs
+ return baseset(cs)
def children(repo, subset, x):
"""``children(set)``
Child changesets of changesets in set.
"""
- s = set(getset(repo, list(repo), x))
+ s = getset(repo, baseset(repo), x).set()
cs = _children(repo, subset, s)
- return [r for r in subset if r in cs]
+ return subset & cs
def closed(repo, subset, x):
"""``closed()``
@@ -524,7 +524,7 @@
"""
# i18n: "closed" is a keyword
getargs(x, 0, 0, _("closed takes no arguments"))
- return [r for r in subset if repo[r].closesbranch()]
+ return lazyset(subset, lambda r: repo[r].closesbranch())
def contains(repo, subset, x):
"""``contains(pattern)``
@@ -537,23 +537,21 @@
"""
# i18n: "contains" is a keyword
pat = getstring(x, _("contains requires a pattern"))
- s = []
- if not matchmod.patkind(pat):
- pat = pathutil.canonpath(repo.root, repo.getcwd(), pat)
- for r in subset:
- if pat in repo[r]:
- s.append(r)
- else:
- m = None
- for r in subset:
- c = repo[r]
- if not m or matchmod.patkind(pat) == 'set':
- m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=c)
+
+ def matches(x):
+ if not matchmod.patkind(pat):
+ pats = pathutil.canonpath(repo.root, repo.getcwd(), pat)
+ if pats in repo[x]:
+ return True
+ else:
+ c = repo[x]
+ m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=c)
for f in c.manifest():
if m(f):
- s.append(r)
- break
- return s
+ return True
+ return False
+
+ return lazyset(subset, matches)
def converted(repo, subset, x):
"""``converted([id])``
@@ -575,7 +573,7 @@
source = repo[r].extra().get('convert_revision', None)
return source is not None and (rev is None or source.startswith(rev))
- return [r for r in subset if _matchvalue(r)]
+ return lazyset(subset, lambda r: _matchvalue(r))
def date(repo, subset, x):
"""``date(interval)``
@@ -584,7 +582,7 @@
# i18n: "date" is a keyword
ds = getstring(x, _("date requires a string"))
dm = util.matchdate(ds)
- return [r for r in subset if dm(repo[r].date()[0])]
+ return lazyset(subset, lambda x: dm(repo[x].date()[0]))
def desc(repo, subset, x):
"""``desc(string)``
@@ -592,19 +590,19 @@
"""
# i18n: "desc" is a keyword
ds = encoding.lower(getstring(x, _("desc requires a string")))
- l = []
- for r in subset:
- c = repo[r]
- if ds in encoding.lower(c.description()):
- l.append(r)
- return l
+
+ def matches(x):
+ c = repo[x]
+ return ds in encoding.lower(c.description())
+
+ return lazyset(subset, matches)
def _descendants(repo, subset, x, followfirst=False):
- args = getset(repo, list(repo), x)
+ args = getset(repo, spanset(repo), x)
if not args:
- return []
+ return baseset([])
s = set(_revdescendants(repo, args, followfirst)) | set(args)
- return [r for r in subset if r in s]
+ return subset & s
def descendants(repo, subset, x):
"""``descendants(set)``
@@ -624,9 +622,9 @@
is the same as passing all().
"""
if x is not None:
- args = set(getset(repo, list(repo), x))
+ args = getset(repo, spanset(repo), x).set()
else:
- args = set(getall(repo, list(repo), x))
+ args = getall(repo, spanset(repo), x).set()
dests = set()
@@ -659,7 +657,7 @@
r = src
src = _getrevsource(repo, r)
- return [r for r in subset if r in dests]
+ return baseset([r for r in subset if r in dests])
def divergent(repo, subset, x):
"""``divergent()``
@@ -668,7 +666,7 @@
# i18n: "divergent" is a keyword
getargs(x, 0, 0, _("divergent takes no arguments"))
divergent = obsmod.getrevs(repo, 'divergent')
- return [r for r in subset if r in divergent]
+ return baseset([r for r in subset if r in divergent])
def draft(repo, subset, x):
"""``draft()``
@@ -676,7 +674,7 @@
# i18n: "draft" is a keyword
getargs(x, 0, 0, _("draft takes no arguments"))
pc = repo._phasecache
- return [r for r in subset if pc.phase(repo, r) == phases.draft]
+ return lazyset(subset, lambda r: pc.phase(repo, r) == phases.draft)
def extinct(repo, subset, x):
"""``extinct()``
@@ -685,7 +683,7 @@
# i18n: "extinct" is a keyword
getargs(x, 0, 0, _("extinct takes no arguments"))
extincts = obsmod.getrevs(repo, 'extinct')
- return [r for r in subset if r in extincts]
+ return subset & extincts
def extra(repo, subset, x):
"""``extra(label, [value])``
@@ -712,7 +710,7 @@
extra = repo[r].extra()
return label in extra and (value is None or matcher(extra[label]))
- return [r for r in subset if _matchvalue(r)]
+ return lazyset(subset, lambda r: _matchvalue(r))
def filelog(repo, subset, x):
"""``filelog(pattern)``
@@ -744,7 +742,7 @@
for fr in fl:
s.add(fl.linkrev(fr))
- return [r for r in subset if r in s]
+ return baseset([r for r in subset if r in s])
def first(repo, subset, x):
"""``first(set, [n])``
@@ -763,11 +761,11 @@
# include the revision responsible for the most recent version
s.add(cx.linkrev())
else:
- return []
+ return baseset([])
else:
s = set(_revancestors(repo, [c.rev()], followfirst)) | set([c.rev()])
- return [r for r in subset if r in s]
+ return baseset([r for r in subset if r in s])
def follow(repo, subset, x):
"""``follow([file])``
@@ -802,14 +800,15 @@
gr = re.compile(getstring(x, _("grep requires a string")))
except re.error, e:
raise error.ParseError(_('invalid match pattern: %s') % e)
- l = []
- for r in subset:
- c = repo[r]
+
+ def matches(x):
+ c = repo[x]
for e in c.files() + [c.user(), c.description()]:
if gr.search(e):
- l.append(r)
- break
- return l
+ return True
+ return False
+
+ return lazyset(subset, matches)
def _matchfiles(repo, subset, x):
# _matchfiles takes a revset list of prefixed arguments:
@@ -858,10 +857,10 @@
hasset = True
if not default:
default = 'glob'
- m = None
- s = []
- for r in subset:
- c = repo[r]
+
+ def matches(x):
+ m = None
+ c = repo[x]
if not m or (hasset and rev is None):
ctx = c
if rev is not None:
@@ -870,9 +869,10 @@
exclude=exc, ctx=ctx, default=default)
for f in c.files():
if m(f):
- s.append(r)
- break
- return s
+ return True
+ return False
+
+ return lazyset(subset, matches)
def hasfile(repo, subset, x):
"""``file(pattern)``
@@ -896,15 +896,15 @@
hs = set()
for b, ls in repo.branchmap().iteritems():
hs.update(repo[h].rev() for h in ls)
- return [r for r in subset if r in hs]
+ return baseset([r for r in subset if r in hs])
def heads(repo, subset, x):
"""``heads(set)``
Members of set with no children in set.
"""
s = getset(repo, subset, x)
- ps = set(parents(repo, subset, x))
- return [r for r in s if r not in ps]
+ ps = parents(repo, subset, x)
+ return s - ps
def hidden(repo, subset, x):
"""``hidden()``
@@ -913,7 +913,7 @@
# i18n: "hidden" is a keyword
getargs(x, 0, 0, _("hidden takes no arguments"))
hiddenrevs = repoview.filterrevs(repo, 'visible')
- return [r for r in subset if r in hiddenrevs]
+ return subset & hiddenrevs
def keyword(repo, subset, x):
"""``keyword(string)``
@@ -922,13 +922,13 @@
"""
# i18n: "keyword" is a keyword
kw = encoding.lower(getstring(x, _("keyword requires a string")))
- l = []
- for r in subset:
+
+ def matches(r):
c = repo[r]
- if util.any(kw in encoding.lower(t)
- for t in c.files() + [c.user(), c.description()]):
- l.append(r)
- return l
+ return util.any(kw in encoding.lower(t) for t in c.files() + [c.user(),
+ c.description()])
+
+ return lazyset(subset, matches)
def limit(repo, subset, x):
"""``limit(set, [n])``
@@ -944,9 +944,18 @@
except (TypeError, ValueError):
# i18n: "limit" is a keyword
raise error.ParseError(_("limit expects a number"))
- ss = set(subset)
- os = getset(repo, list(repo), l[0])[:lim]
- return [r for r in os if r in ss]
+ ss = subset.set()
+ os = getset(repo, spanset(repo), l[0])
+ bs = baseset([])
+ it = iter(os)
+ for x in xrange(lim):
+ try:
+ y = it.next()
+ if y in ss:
+ bs.append(y)
+ except (StopIteration):
+ break
+ return bs
def last(repo, subset, x):
"""``last(set, [n])``
@@ -962,20 +971,20 @@
except (TypeError, ValueError):
# i18n: "last" is a keyword
raise error.ParseError(_("last expects a number"))
- ss = set(subset)
- os = getset(repo, list(repo), l[0])[-lim:]
- return [r for r in os if r in ss]
+ ss = subset.set()
+ os = getset(repo, spanset(repo), l[0])[-lim:]
+ return baseset([r for r in os if r in ss])
def maxrev(repo, subset, x):
"""``max(set)``
Changeset with highest revision number in set.
"""
- os = getset(repo, list(repo), x)
+ os = getset(repo, spanset(repo), x)
if os:
m = max(os)
if m in subset:
- return [m]
- return []
+ return baseset([m])
+ return baseset([])
def merge(repo, subset, x):
"""``merge()``
@@ -984,7 +993,7 @@
# i18n: "merge" is a keyword
getargs(x, 0, 0, _("merge takes no arguments"))
cl = repo.changelog
- return [r for r in subset if cl.parentrevs(r)[1] != -1]
+ return lazyset(subset, lambda r: cl.parentrevs(r)[1] != -1)
def branchpoint(repo, subset, x):
"""``branchpoint()``
@@ -994,25 +1003,35 @@
getargs(x, 0, 0, _("branchpoint takes no arguments"))
cl = repo.changelog
if not subset:
- return []
+ return baseset([])
baserev = min(subset)
parentscount = [0]*(len(repo) - baserev)
for r in cl.revs(start=baserev + 1):
for p in cl.parentrevs(r):
if p >= baserev:
parentscount[p - baserev] += 1
- return [r for r in subset if (parentscount[r - baserev] > 1)]
+ return baseset([r for r in subset if (parentscount[r - baserev] > 1)])
def minrev(repo, subset, x):
"""``min(set)``
Changeset with lowest revision number in set.
"""
- os = getset(repo, list(repo), x)
+ os = getset(repo, spanset(repo), x)
if os:
m = min(os)
if m in subset:
- return [m]
- return []
+ return baseset([m])
+ return baseset([])
+
+def _missingancestors(repo, subset, x):
+ # i18n: "_missingancestors" is a keyword
+ revs, bases = getargs(x, 2, 2,
+ _("_missingancestors requires two arguments"))
+ rs = baseset(repo)
+ revs = getset(repo, rs, revs)
+ bases = getset(repo, rs, bases)
+ missing = set(repo.changelog.findmissingrevs(bases, revs))
+ return baseset([r for r in subset if r in missing])
def modifies(repo, subset, x):
"""``modifies(pattern)``
@@ -1042,7 +1061,7 @@
if pm is not None:
rn = repo.changelog.rev(pm)
- return [r for r in subset if r == rn]
+ return baseset([r for r in subset if r == rn])
def obsolete(repo, subset, x):
"""``obsolete()``
@@ -1050,7 +1069,7 @@
# i18n: "obsolete" is a keyword
getargs(x, 0, 0, _("obsolete takes no arguments"))
obsoletes = obsmod.getrevs(repo, 'obsolete')
- return [r for r in subset if r in obsoletes]
+ return subset & obsoletes
def origin(repo, subset, x):
"""``origin([set])``
@@ -1061,9 +1080,9 @@
for the first operation is selected.
"""
if x is not None:
- args = set(getset(repo, list(repo), x))
+ args = getset(repo, spanset(repo), x).set()
else:
- args = set(getall(repo, list(repo), x))
+ args = getall(repo, spanset(repo), x).set()
def _firstsrc(rev):
src = _getrevsource(repo, rev)
@@ -1078,7 +1097,7 @@
src = prev
o = set([_firstsrc(r) for r in args])
- return [r for r in subset if r in o]
+ return baseset([r for r in subset if r in o])
def outgoing(repo, subset, x):
"""``outgoing([path])``
@@ -1101,7 +1120,7 @@
repo.ui.popbuffer()
cl = repo.changelog
o = set([cl.rev(r) for r in outgoing.missing])
- return [r for r in subset if r in o]
+ return baseset([r for r in subset if r in o])
def p1(repo, subset, x):
"""``p1([set])``
@@ -1109,13 +1128,13 @@
"""
if x is None:
p = repo[x].p1().rev()
- return [r for r in subset if r == p]
+ return baseset([r for r in subset if r == p])
ps = set()
cl = repo.changelog
- for r in getset(repo, list(repo), x):
+ for r in getset(repo, spanset(repo), x):
ps.add(cl.parentrevs(r)[0])
- return [r for r in subset if r in ps]
+ return subset & ps
def p2(repo, subset, x):
"""``p2([set])``
@@ -1125,15 +1144,15 @@
ps = repo[x].parents()
try:
p = ps[1].rev()
- return [r for r in subset if r == p]
+ return baseset([r for r in subset if r == p])
except IndexError:
- return []
+ return baseset([])
ps = set()
cl = repo.changelog
- for r in getset(repo, list(repo), x):
+ for r in getset(repo, spanset(repo), x):
ps.add(cl.parentrevs(r)[1])
- return [r for r in subset if r in ps]
+ return subset & ps
def parents(repo, subset, x):
"""``parents([set])``
@@ -1141,13 +1160,13 @@
"""
if x is None:
ps = tuple(p.rev() for p in repo[x].parents())
- return [r for r in subset if r in ps]
+ return subset & ps
ps = set()
cl = repo.changelog
- for r in getset(repo, list(repo), x):
+ for r in getset(repo, spanset(repo), x):
ps.update(cl.parentrevs(r))
- return [r for r in subset if r in ps]
+ return subset & ps
def parentspec(repo, subset, x, n):
"""``set^0``
@@ -1163,7 +1182,7 @@
raise error.ParseError(_("^ expects a number 0, 1, or 2"))
ps = set()
cl = repo.changelog
- for r in getset(repo, cl, x):
+ for r in getset(repo, baseset(cl), x):
if n == 0:
ps.add(r)
elif n == 1:
@@ -1172,7 +1191,7 @@
parents = cl.parentrevs(r)
if len(parents) > 1:
ps.add(parents[1])
- return [r for r in subset if r in ps]
+ return subset & ps
def present(repo, subset, x):
"""``present(set)``
@@ -1186,7 +1205,7 @@
try:
return getset(repo, subset, x)
except error.RepoLookupError:
- return []
+ return baseset([])
def public(repo, subset, x):
"""``public()``
@@ -1194,7 +1213,7 @@
# i18n: "public" is a keyword
getargs(x, 0, 0, _("public takes no arguments"))
pc = repo._phasecache
- return [r for r in subset if pc.phase(repo, r) == phases.public]
+ return lazyset(subset, lambda r: pc.phase(repo, r) == phases.public)
def remote(repo, subset, x):
"""``remote([id [,path]])``
@@ -1228,8 +1247,8 @@
if n in repo:
r = repo[n].rev()
if r in subset:
- return [r]
- return []
+ return baseset([r])
+ return baseset([])
def removes(repo, subset, x):
"""``removes(pattern)``
@@ -1255,7 +1274,7 @@
except (TypeError, ValueError):
# i18n: "rev" is a keyword
raise error.ParseError(_("rev expects a number"))
- return [r for r in subset if r == l]
+ return baseset([r for r in subset if r == l])
def matching(repo, subset, x):
"""``matching(revision [, field])``
@@ -1285,7 +1304,7 @@
# i18n: "matching" is a keyword
l = getargs(x, 1, 2, _("matching takes 1 or 2 arguments"))
- revs = getset(repo, repo.changelog, l[0])
+ revs = getset(repo, baseset(repo.changelog), l[0])
fieldlist = ['metadata']
if len(l) > 1:
@@ -1356,26 +1375,24 @@
# is only one field to match)
getinfo = lambda r: [f(r) for f in getfieldfuncs]
- matches = set()
- for rev in revs:
- target = getinfo(rev)
- for r in subset:
+ def matches(x):
+ for rev in revs:
+ target = getinfo(rev)
match = True
for n, f in enumerate(getfieldfuncs):
- if target[n] != f(r):
+ if target[n] != f(x):
match = False
- break
if match:
- matches.add(r)
- return [r for r in subset if r in matches]
+ return True
+ return False
+
+ return lazyset(subset, matches)
def reverse(repo, subset, x):
"""``reverse(set)``
Reverse order of set.
"""
l = getset(repo, subset, x)
- if not isinstance(l, list):
- l = list(l)
l.reverse()
return l
@@ -1383,10 +1400,10 @@
"""``roots(set)``
Changesets in set with no parent changeset in set.
"""
- s = set(getset(repo, repo.changelog, x))
- subset = [r for r in subset if r in s]
+ s = getset(repo, baseset(repo.changelog), x).set()
+ subset = baseset([r for r in subset if r in s])
cs = _children(repo, subset, s)
- return [r for r in subset if r not in cs]
+ return subset - cs
def secret(repo, subset, x):
"""``secret()``
@@ -1394,7 +1411,7 @@
# i18n: "secret" is a keyword
getargs(x, 0, 0, _("secret takes no arguments"))
pc = repo._phasecache
- return [r for r in subset if pc.phase(repo, r) == phases.secret]
+ return lazyset(subset, lambda x: pc.phase(repo, x) == phases.secret)
def sort(repo, subset, x):
"""``sort(set[, [-]key...])``
@@ -1450,7 +1467,7 @@
e.append(r)
l.append(e)
l.sort()
- return [e[-1] for e in l]
+ return baseset([e[-1] for e in l])
def _stringmatcher(pattern):
"""
@@ -1519,7 +1536,7 @@
s = set([cl.rev(n) for t, n in repo.tagslist() if matcher(t)])
else:
s = set([cl.rev(n) for t, n in repo.tagslist() if t != 'tip'])
- return [r for r in subset if r in s]
+ return subset & s
def tagged(repo, subset, x):
return tag(repo, subset, x)
@@ -1531,7 +1548,7 @@
# i18n: "unstable" is a keyword
getargs(x, 0, 0, _("unstable takes no arguments"))
unstables = obsmod.getrevs(repo, 'unstable')
- return [r for r in subset if r in unstables]
+ return subset & unstables
def user(repo, subset, x):
@@ -1548,11 +1565,10 @@
def _list(repo, subset, x):
s = getstring(x, "internal error")
if not s:
- return []
- if not isinstance(subset, set):
- subset = set(subset)
+ return baseset([])
ls = [repo[r].rev() for r in s.split('\0')]
- return [r for r in ls if r in subset]
+ s = subset.set()
+ return baseset([r for r in ls if r in s])
symbols = {
"adds": adds,
@@ -1598,6 +1614,7 @@
"max": maxrev,
"merge": merge,
"min": minrev,
+ "_missingancestors": _missingancestors,
"modifies": modifies,
"obsolete": obsolete,
"origin": origin,
@@ -1667,6 +1684,7 @@
"max",
"merge",
"min",
+ "_missingancestors",
"modifies",
"obsolete",
"origin",
@@ -1733,7 +1751,24 @@
elif op == 'and':
wa, ta = optimize(x[1], True)
wb, tb = optimize(x[2], True)
+
+ # (::x and not ::y)/(not ::y and ::x) have a fast path
+ def ismissingancestors(revs, bases):
+ return (
+ revs[0] == 'func'
+ and getstring(revs[1], _('not a symbol')) == 'ancestors'
+ and bases[0] == 'not'
+ and bases[1][0] == 'func'
+ and getstring(bases[1][1], _('not a symbol')) == 'ancestors')
+
w = min(wa, wb)
+ if ismissingancestors(ta, tb):
+ return w, ('func', ('symbol', '_missingancestors'),
+ ('list', ta[2], tb[1][2]))
+ if ismissingancestors(tb, ta):
+ return w, ('func', ('symbol', '_missingancestors'),
+ ('list', tb[2], ta[1][2]))
+
if wa > wb:
return w, (op, tb, ta)
return w, (op, ta, tb)
@@ -1917,7 +1952,9 @@
tree = findaliases(ui, tree)
weight, tree = optimize(tree, True)
def mfunc(repo, subset):
- return getset(repo, subset, tree)
+ if util.safehasattr(subset, 'set'):
+ return getset(repo, subset, tree)
+ return getset(repo, baseset(subset), tree)
return mfunc
def formatspec(expr, *args):
@@ -2046,5 +2083,162 @@
funcs.add(tree[1][1])
return funcs
+class baseset(list):
+ """Basic data structure that represents a revset and contains the basic
+ operation that it should be able to perform.
+ """
+ def __init__(self, data):
+ super(baseset, self).__init__(data)
+ self._set = None
+
+ def set(self):
+ if not self._set:
+ self._set = set(self)
+ return self._set
+
+ def __sub__(self, x):
+ if isinstance(x, baseset):
+ s = x.set()
+ else:
+ s = set(x)
+ return baseset(self.set() - s)
+
+ def __and__(self, x):
+ if isinstance(x, baseset):
+ x = x.set()
+ return baseset([y for y in self if y in x])
+
+ def __add__(self, x):
+ s = self.set()
+ l = [r for r in x if r not in s]
+ return baseset(list(self) + l)
+
+class lazyset(object):
+ """Duck type for baseset class which iterates lazily over the revisions in
+ the subset and contains a function which tests for membership in the
+ revset
+ """
+ def __init__(self, subset, condition):
+ self._subset = subset
+ self._condition = condition
+ self._cache = {}
+
+ def __contains__(self, x):
+ c = self._cache
+ if x not in c:
+ c[x] = x in self._subset and self._condition(x)
+ return c[x]
+
+ def __iter__(self):
+ cond = self._condition
+ for x in self._subset:
+ if cond(x):
+ yield x
+
+ def __and__(self, x):
+ return lazyset(self, lambda r: r in x)
+
+ def __sub__(self, x):
+ return lazyset(self, lambda r: r not in x)
+
+ def __add__(self, x):
+ l = baseset([r for r in self])
+ return l + baseset(x)
+
+ def __len__(self):
+ # Basic implementation to be changed in future patches.
+ l = baseset([r for r in self])
+ return len(l)
+
+ def __getitem__(self, x):
+ # Basic implementation to be changed in future patches.
+ l = baseset([r for r in self])
+ return l[x]
+
+ def sort(self, reverse=False):
+ # Basic implementation to be changed in future patches.
+ self._subset = baseset(self._subset)
+ self._subset.sort(reverse=reverse)
+
+ def reverse(self):
+ self._subset.reverse()
+
+ def set(self):
+ return set([r for r in self])
+
+class spanset(object):
+ """Duck type for baseset class which represents a range of revisions and
+ can work lazily and without having all the range in memory
+ """
+ def __init__(self, repo, start=0, end=None):
+ self._start = start
+ if end is not None:
+ self._end = end
+ else:
+ self._end = len(repo)
+ self._hiddenrevs = repo.changelog.filteredrevs
+
+ def _contained(self, rev):
+ return (rev <= self._start and rev > self._end) or (rev >= self._start
+ and rev < self._end)
+
+ def __iter__(self):
+ if self._start <= self._end:
+ iterrange = xrange(self._start, self._end)
+ else:
+ iterrange = xrange(self._start, self._end, -1)
+
+ if self._hiddenrevs:
+ s = self._hiddenrevs
+ for r in iterrange:
+ if r not in s:
+ yield r
+ else:
+ for r in iterrange:
+ yield r
+
+ def __contains__(self, x):
+ return self._contained(x) and not (self._hiddenrevs and rev in
+ self._hiddenrevs)
+
+ def __and__(self, x):
+ return lazyset(self, lambda r: r in x)
+
+ def __sub__(self, x):
+ return lazyset(self, lambda r: r not in x)
+
+ def __add__(self, x):
+ l = baseset(self)
+ return l + baseset(x)
+
+ def __len__(self):
+ if not self._hiddenrevs:
+ return abs(self._end - self._start)
+ else:
+ count = 0
+ for rev in self._hiddenrevs:
+ if self._contained(rev):
+ count += 1
+ return abs(self._end - self._start) - count
+
+ def __getitem__(self, x):
+ # Basic implementation to be changed in future patches.
+ l = baseset([r for r in self])
+ return l[x]
+
+ def sort(self, reverse=False):
+ # Basic implementation to be changed in future patches.
+ if reverse:
+ self.reverse()
+
+ def reverse(self):
+ if self._start <= self._end:
+ self._start, self._end = self._end - 1, self._start - 1
+ else:
+ self._start, self._end = self._end + 1, self._start + 1
+
+ def set(self):
+ return self
+
# tell hggettext to extract docstrings from these functions:
i18nfunctions = symbols.values()
--- a/mercurial/scmutil.py Wed Feb 19 22:19:45 2014 +0900
+++ b/mercurial/scmutil.py Wed Feb 19 16:46:47 2014 -0600
@@ -20,6 +20,16 @@
systemrcpath = scmplatform.systemrcpath
userrcpath = scmplatform.userrcpath
+def itersubrepos(ctx1, ctx2):
+ """find subrepos in ctx1 or ctx2"""
+ # Create a (subpath, ctx) mapping where we prefer subpaths from
+ # ctx1. The subpaths from ctx2 are important when the .hgsub file
+ # has been modified (in ctx2) but not yet committed (in ctx1).
+ subpaths = dict.fromkeys(ctx2.substate, ctx2)
+ subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
+ for subpath, ctx in sorted(subpaths.iteritems()):
+ yield subpath, ctx.sub(subpath)
+
def nochangesfound(ui, repo, excluded=None):
'''Report no changes for push/pull, excluded is None or a list of
nodes excluded from the push/pull.
@@ -524,11 +534,11 @@
# fall through to new-style queries if old-style fails
m = revset.match(repo.ui, spec)
- dl = [r for r in m(repo, list(repo)) if r not in seen]
+ dl = [r for r in m(repo, revset.spanset(repo)) if r not in seen]
l.extend(dl)
seen.update(dl)
- return l
+ return revset.baseset(l)
def expandpats(pats):
if not util.expandglobs:
--- a/mercurial/subrepo.py Wed Feb 19 22:19:45 2014 +0900
+++ b/mercurial/subrepo.py Wed Feb 19 16:46:47 2014 -0600
@@ -326,16 +326,6 @@
os.unlink(os.path.join(dirname, f))
os.walk(path, v, None)
-def itersubrepos(ctx1, ctx2):
- """find subrepos in ctx1 or ctx2"""
- # Create a (subpath, ctx) mapping where we prefer subpaths from
- # ctx1. The subpaths from ctx2 are important when the .hgsub file
- # has been modified (in ctx2) but not yet committed (in ctx1).
- subpaths = dict.fromkeys(ctx2.substate, ctx2)
- subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
- for subpath, ctx in sorted(subpaths.iteritems()):
- yield subpath, ctx.sub(subpath)
-
def subrepo(ctx, path):
"""return instance of the right subrepo class for subrepo in path"""
# subrepo inherently violates our import layering rules
--- a/mercurial/templatekw.py Wed Feb 19 22:19:45 2014 +0900
+++ b/mercurial/templatekw.py Wed Feb 19 16:46:47 2014 -0600
@@ -195,8 +195,12 @@
""":bookmarks: List of strings. Any bookmarks associated with the
changeset.
"""
+ repo = args['ctx']._repo
bookmarks = args['ctx'].bookmarks()
- return showlist('bookmark', bookmarks, **args)
+ hybrid = showlist('bookmark', bookmarks, **args)
+ for value in hybrid.values:
+ value['current'] = repo._bookmarkcurrent
+ return hybrid
def showchildren(**args):
""":children: List of strings. The children of the changeset."""
--- a/mercurial/templater.py Wed Feb 19 22:19:45 2014 +0900
+++ b/mercurial/templater.py Wed Feb 19 16:46:47 2014 -0600
@@ -7,7 +7,7 @@
from i18n import _
import sys, os, re
-import util, config, templatefilters, parser, error
+import util, config, templatefilters, templatekw, parser, error
import types
import minirst
@@ -245,6 +245,31 @@
return templatefilters.fill(text, width, initindent, hangindent)
+def pad(context, mapping, args):
+ """usage: pad(text, width, fillchar=' ', right=False)
+ """
+ if not (2 <= len(args) <= 4):
+ raise error.ParseError(_("pad() expects two to four arguments"))
+
+ width = int(args[1][1])
+
+ text = stringify(args[0][0](context, mapping, args[0][1]))
+ if args[0][0] == runstring:
+ text = stringify(runtemplate(context, mapping,
+ compiletemplate(text, context)))
+
+ right = False
+ fillchar = ' '
+ if len(args) > 2:
+ fillchar = stringify(args[2][0](context, mapping, args[2][1]))
+ if len(args) > 3:
+ right = util.parsebool(args[3][1])
+
+ if right:
+ return text.rjust(width, fillchar)
+ else:
+ return text.ljust(width, fillchar)
+
def get(context, mapping, args):
if len(args) != 2:
# i18n: "get" is a keyword
@@ -276,6 +301,19 @@
elif len(args) == 3:
yield _evalifliteral(args[2], context, mapping)
+def ifcontains(context, mapping, args):
+ if not (3 <= len(args) <= 4):
+ # i18n: "ifcontains" is a keyword
+ raise error.ParseError(_("ifcontains expects three or four arguments"))
+
+ item = stringify(args[0][0](context, mapping, args[0][1]))
+ items = args[1][0](context, mapping, args[1][1])
+
+ if item in items:
+ yield _evalifliteral(args[2], context, mapping)
+ elif len(args) == 4:
+ yield _evalifliteral(args[3], context, mapping)
+
def ifeq(context, mapping, args):
if not (3 <= len(args) <= 4):
# i18n: "ifeq" is a keyword
@@ -318,6 +356,32 @@
# ignore args[0] (the label string) since this is supposed to be a a no-op
yield _evalifliteral(args[1], context, mapping)
+def revset(context, mapping, args):
+ """usage: revset(query[, formatargs...])
+ """
+ if not len(args) > 0:
+ # i18n: "revset" is a keyword
+ raise error.ParseError(_("revset expects one or more arguments"))
+
+ raw = args[0][1]
+ ctx = mapping['ctx']
+ repo = ctx._repo
+
+ if len(args) > 1:
+ formatargs = list([a[0](context, mapping, a[1]) for a in args[1:]])
+ revs = repo.revs(raw, *formatargs)
+ revs = list([str(r) for r in revs])
+ else:
+ revsetcache = mapping['cache'].setdefault("revsetcache", {})
+ if raw in revsetcache:
+ revs = revsetcache[raw]
+ else:
+ revs = repo.revs(raw)
+ revs = list([str(r) for r in revs])
+ revsetcache[raw] = revs
+
+ return templatekw.showlist("revision", revs, **mapping)
+
def rstdoc(context, mapping, args):
if len(args) != 2:
# i18n: "rstdoc" is a keyword
@@ -328,6 +392,52 @@
return minirst.format(text, style=style, keep=['verbose'])
+def shortest(context, mapping, args):
+ """usage: shortest(node, minlength=4)
+ """
+ if not (1 <= len(args) <= 2):
+ raise error.ParseError(_("shortest() expects one or two arguments"))
+
+ node = stringify(args[0][0](context, mapping, args[0][1]))
+
+ minlength = 4
+ if len(args) > 1:
+ minlength = int(args[1][1])
+
+ cl = mapping['ctx']._repo.changelog
+ def isvalid(test):
+ try:
+ try:
+ cl.index.partialmatch(test)
+ except AttributeError:
+ # Pure mercurial doesn't support partialmatch on the index.
+ # Fallback to the slow way.
+ if cl._partialmatch(test) is None:
+ return False
+
+ try:
+ int(test)
+ return False
+ except ValueError:
+ return True
+ except error.RevlogError:
+ return False
+
+ shortest = node
+ startlength = max(6, minlength)
+ length = startlength
+ while True:
+ test = node[:length]
+ if isvalid(test):
+ shortest = test
+ if length == minlength or length > startlength:
+ return shortest
+ length -= 1
+ else:
+ length += 1
+ if len(shortest) <= length:
+ return shortest
+
def strip(context, mapping, args):
if not (1 <= len(args) <= 2):
raise error.ParseError(_("strip expects one or two arguments"))
@@ -365,10 +475,14 @@
"fill": fill,
"get": get,
"if": if_,
+ "ifcontains": ifcontains,
"ifeq": ifeq,
"join": join,
"label": label,
+ "pad": pad,
+ "revset": revset,
"rstdoc": rstdoc,
+ "shortest": shortest,
"strip": strip,
"sub": sub,
}
--- a/mercurial/transaction.py Wed Feb 19 22:19:45 2014 +0900
+++ b/mercurial/transaction.py Wed Feb 19 16:46:47 2014 -0600
@@ -174,7 +174,10 @@
lines = fp.readlines()
fp.close()
for l in lines:
- f, o = l.split('\0')
- entries.append((f, int(o), None))
+ try:
+ f, o = l.split('\0')
+ entries.append((f, int(o), None))
+ except ValueError:
+ report(_("couldn't read journal entry %r!\n") % l)
_playback(file, report, opener, entries)
--- a/mercurial/win32.py Wed Feb 19 22:19:45 2014 +0900
+++ b/mercurial/win32.py Wed Feb 19 16:46:47 2014 -0600
@@ -24,6 +24,7 @@
# GetLastError
_ERROR_SUCCESS = 0
+_ERROR_SHARING_VIOLATION = 32
_ERROR_INVALID_PARAMETER = 87
_ERROR_INSUFFICIENT_BUFFER = 122
@@ -59,7 +60,9 @@
_OPEN_EXISTING = 3
+_FILE_FLAG_OPEN_REPARSE_POINT = 0x00200000
_FILE_FLAG_BACKUP_SEMANTICS = 0x02000000
+_FILE_FLAG_DELETE_ON_CLOSE = 0x04000000
# SetFileAttributes
_FILE_ATTRIBUTE_NORMAL = 0x80
@@ -119,6 +122,27 @@
_STD_ERROR_HANDLE = _DWORD(-12).value
+# CreateToolhelp32Snapshot, Process32First, Process32Next
+_TH32CS_SNAPPROCESS = 0x00000002
+_MAX_PATH = 260
+
+class _tagPROCESSENTRY32(ctypes.Structure):
+ _fields_ = [('dwsize', _DWORD),
+ ('cntUsage', _DWORD),
+ ('th32ProcessID', _DWORD),
+ ('th32DefaultHeapID', ctypes.c_void_p),
+ ('th32ModuleID', _DWORD),
+ ('cntThreads', _DWORD),
+ ('th32ParentProcessID', _DWORD),
+ ('pcPriClassBase', _LONG),
+ ('dwFlags', _DWORD),
+ ('szExeFile', ctypes.c_char * _MAX_PATH)]
+
+ def __init__(self):
+ super(_tagPROCESSENTRY32, self).__init__()
+ self.dwsize = ctypes.sizeof(self)
+
+
# types of parameters of C functions used (required by pypy)
_kernel32.CreateFileA.argtypes = [_LPCSTR, _DWORD, _DWORD, ctypes.c_void_p,
@@ -186,6 +210,15 @@
_user32.EnumWindows.argtypes = [_WNDENUMPROC, _LPARAM]
_user32.EnumWindows.restype = _BOOL
+_kernel32.CreateToolhelp32Snapshot.argtypes = [_DWORD, _DWORD]
+_kernel32.CreateToolhelp32Snapshot.restype = _HANDLE
+
+_kernel32.Process32First.argtypes = [_HANDLE, ctypes.c_void_p]
+_kernel32.Process32First.restype = _BOOL
+
+_kernel32.Process32Next.argtypes = [_HANDLE, ctypes.c_void_p]
+_kernel32.Process32Next.restype = _BOOL
+
def _raiseoserror(name):
err = ctypes.WinError()
raise OSError(err.errno, '%s: %s' % (name, err.strerror))
@@ -309,6 +342,51 @@
width = csbi.srWindow.Right - csbi.srWindow.Left
return width
+def _1stchild(pid):
+ '''return the 1st found child of the given pid
+
+ None is returned when no child is found'''
+ pe = _tagPROCESSENTRY32()
+
+ # create handle to list all processes
+ ph = _kernel32.CreateToolhelp32Snapshot(_TH32CS_SNAPPROCESS, 0)
+ if ph == _INVALID_HANDLE_VALUE:
+ raise ctypes.WinError()
+ try:
+ r = _kernel32.Process32First(ph, ctypes.byref(pe))
+ # loop over all processes
+ while r:
+ if pe.th32ParentProcessID == pid:
+ # return first child found
+ return pe.th32ProcessID
+ r = _kernel32.Process32Next(ph, ctypes.byref(pe))
+ finally:
+ _kernel32.CloseHandle(ph)
+ if _kernel32.GetLastError() != _ERROR_NO_MORE_FILES:
+ raise ctypes.WinError()
+ return None # no child found
+
+class _tochildpid(int): # pid is _DWORD, which always matches in an int
+ '''helper for spawndetached, returns the child pid on conversion to string
+
+ Does not resolve the child pid immediately because the child may not yet be
+ started.
+ '''
+ def childpid(self):
+ '''returns the child pid of the first found child of the process
+ with this pid'''
+ return _1stchild(self)
+ def __str__(self):
+ # run when the pid is written to the file
+ ppid = self.childpid()
+ if ppid is None:
+ # race, child has exited since check
+ # fall back to this pid. Its process will also have disappeared,
+ # raising the same error type later as when the child pid would
+ # be returned.
+ return " %d" % self
+ return str(ppid)
+
def spawndetached(args):
# No standard library function really spawns a fully detached
# process under win32 because they allocate pipes or other objects
@@ -339,16 +417,24 @@
if not res:
raise ctypes.WinError
- return pi.dwProcessId
+ # _tochildpid because the process is the child of COMSPEC
+ return _tochildpid(pi.dwProcessId)
def unlink(f):
'''try to implement POSIX' unlink semantics on Windows'''
- if os.path.isdir(f):
- # use EPERM because it is POSIX prescribed value, even though
- # unlink(2) on directories returns EISDIR on Linux
- raise IOError(errno.EPERM,
- "Unlinking directory not permitted: '%s'" % f)
+ # If we can open f exclusively, no other processes must have open handles
+ # for it and we can expect its name will be deleted immediately when we
+ # close the handle unless we have another in the same process. We also
+ # expect we shall simply fail to open f if it is a directory.
+ fh = _kernel32.CreateFileA(f, 0, 0, None, _OPEN_EXISTING,
+ _FILE_FLAG_OPEN_REPARSE_POINT | _FILE_FLAG_DELETE_ON_CLOSE, None)
+ if fh != _INVALID_HANDLE_VALUE:
+ _kernel32.CloseHandle(fh)
+ return
+ error = _kernel32.GetLastError()
+ if error != _ERROR_SHARING_VIOLATION:
+ raise ctypes.WinError(error)
# POSIX allows to unlink and rename open files. Windows has serious
# problems with doing that:
--- a/tests/hghave.py Wed Feb 19 22:19:45 2014 +0900
+++ b/tests/hghave.py Wed Feb 19 16:46:47 2014 -0600
@@ -248,6 +248,9 @@
except ImportError:
return False
+def has_python243():
+ return sys.version_info >= (2, 4, 3)
+
def has_outer_repo():
# failing for other reasons than 'no repo' imply that there is a repo
return not matchoutput('hg root 2>&1',
@@ -320,6 +323,7 @@
"p4": (has_p4, "Perforce server and client"),
"pyflakes": (has_pyflakes, "Pyflakes python linter"),
"pygments": (has_pygments, "Pygments source highlighting library"),
+ "python243": (has_python243, "python >= 2.4.3"),
"root": (has_root, "root permissions"),
"serve": (has_serve, "platform and python can manage 'hg serve -d'"),
"ssl": (has_ssl, "python >= 2.6 ssl module and python OpenSSL"),
--- a/tests/killdaemons.py Wed Feb 19 22:19:45 2014 +0900
+++ b/tests/killdaemons.py Wed Feb 19 16:46:47 2014 -0600
@@ -4,13 +4,51 @@
if os.name =='nt':
import ctypes
+
+ def _check(ret, expectederr=None):
+ if ret == 0:
+ winerrno = ctypes.GetLastError()
+ if winerrno == expectederr:
+ return True
+ raise ctypes.WinError(winerrno)
+
def kill(pid, logfn, tryhard=True):
logfn('# Killing daemon process %d' % pid)
PROCESS_TERMINATE = 1
+ PROCESS_QUERY_INFORMATION = 0x400
+ SYNCHRONIZE = 0x00100000L
+ WAIT_OBJECT_0 = 0
+ WAIT_TIMEOUT = 258
handle = ctypes.windll.kernel32.OpenProcess(
- PROCESS_TERMINATE, False, pid)
- ctypes.windll.kernel32.TerminateProcess(handle, -1)
- ctypes.windll.kernel32.CloseHandle(handle)
+ PROCESS_TERMINATE|SYNCHRONIZE|PROCESS_QUERY_INFORMATION,
+ False, pid)
+ if handle == 0:
+ _check(0, 87) # err 87 when process not found
+ return # process not found, already finished
+ try:
+ r = ctypes.windll.kernel32.WaitForSingleObject(handle, 100)
+ if r == WAIT_OBJECT_0:
+ pass # terminated, but process handle still available
+ elif r == WAIT_TIMEOUT:
+ _check(ctypes.windll.kernel32.TerminateProcess(handle, -1))
+ else:
+ _check(r)
+
+ # TODO?: forcefully kill when timeout
+ # and ?shorter waiting time? when tryhard==True
+ r = ctypes.windll.kernel32.WaitForSingleObject(handle, 100)
+ # timeout = 100 ms
+ if r == WAIT_OBJECT_0:
+ pass # process is terminated
+ elif r == WAIT_TIMEOUT:
+ logfn('# Daemon process %d is stuck' % pid)
+ else:
+ _check(r) # any error
+ except: #re-raises
+ ctypes.windll.kernel32.CloseHandle(handle) # no _check, keep error
+ raise
+ _check(ctypes.windll.kernel32.CloseHandle(handle))
+
else:
def kill(pid, logfn, tryhard=True):
try:
--- a/tests/run-tests.py Wed Feb 19 22:19:45 2014 +0900
+++ b/tests/run-tests.py Wed Feb 19 16:46:47 2014 -0600
@@ -776,9 +776,9 @@
# Merge the script output back into a unified test
+ warnonly = True
pos = -1
postout = []
- ret = 0
for l in output:
lout, lcmd = l, None
if salt in l:
@@ -797,11 +797,10 @@
if isinstance(r, str):
if r == '+glob':
lout = el[:-1] + ' (glob)\n'
- r = False
+ r = 0 # warn only
elif r == '-glob':
- log('\ninfo, unnecessary glob in %s (after line %d):'
- ' %s (glob)\n' % (test, pos, el[-1]))
- r = True # pass on unnecessary glob
+ lout = ''.join(el.rsplit(' (glob)', 1))
+ r = 0 # warn only
else:
log('\ninfo, unknown linematch result: %r\n' % r)
r = False
@@ -811,6 +810,8 @@
if needescape(lout):
lout = stringescape(lout.rstrip('\n')) + " (esc)\n"
postout.append(" " + lout) # let diff deal with it
+ if r != 0: # != warn only
+ warnonly = False
if lcmd:
# add on last return code
@@ -825,6 +826,8 @@
if pos in after:
postout += after.pop(pos)
+ if warnonly and exitcode == 0:
+ exitcode = False
return exitcode, postout
wifexited = getattr(os, "WIFEXITED", lambda x: False)
@@ -882,8 +885,9 @@
return 's', test, msg
def fail(msg, ret):
+ warned = ret is False
if not options.nodiff:
- log("\nERROR: %s %s" % (testpath, msg))
+ log("\n%s: %s %s" % (warned and 'Warning' or 'ERROR', test, msg))
if (not ret and options.interactive
and os.path.exists(testpath + ".err")):
iolock.acquire()
@@ -896,7 +900,7 @@
else:
rename(testpath + ".err", testpath + ".out")
return '.', test, ''
- return '!', test, msg
+ return warned and '~' or '!', test, msg
def success():
return '.', test, ''
@@ -1075,7 +1079,7 @@
' (expected %s)\n'
% (verb, actualhg, expecthg))
-results = {'.':[], '!':[], 's':[], 'i':[]}
+results = {'.':[], '!':[], '~': [], 's':[], 'i':[]}
times = []
iolock = threading.Lock()
abort = False
@@ -1139,7 +1143,8 @@
scheduletests(options, tests)
failed = len(results['!'])
- tested = len(results['.']) + failed
+ warned = len(results['~'])
+ tested = len(results['.']) + failed + warned
skipped = len(results['s'])
ignored = len(results['i'])
@@ -1147,11 +1152,13 @@
if not options.noskips:
for s in results['s']:
print "Skipped %s: %s" % s
+ for s in results['~']:
+ print "Warned %s: %s" % s
for s in results['!']:
print "Failed %s: %s" % s
_checkhglib("Tested")
- print "# Ran %d tests, %d skipped, %d failed." % (
- tested, skipped + ignored, failed)
+ print "# Ran %d tests, %d skipped, %d warned, %d failed." % (
+ tested, skipped + ignored, warned, failed)
if results['!']:
print 'python hash seed:', os.environ['PYTHONHASHSEED']
if options.time:
@@ -1164,7 +1171,9 @@
print "\ninterrupted!"
if failed:
- sys.exit(1)
+ return 1
+ if warned:
+ return 80
testtypes = [('.py', pytest, '.out'),
('.t', tsttest, '')]
@@ -1255,8 +1264,9 @@
# Include TESTDIR in PYTHONPATH so that out-of-tree extensions
# can run .../tests/run-tests.py test-foo where test-foo
- # adds an extension to HGRC
- pypath = [PYTHONDIR, TESTDIR]
+ # adds an extension to HGRC. Also include run-test.py directory to import
+ # modules like heredoctest.
+ pypath = [PYTHONDIR, TESTDIR, os.path.abspath(os.path.dirname(__file__))]
# We have to augment PYTHONPATH, rather than simply replacing
# it, in case external libraries are only available via current
# PYTHONPATH. (In particular, the Subversion bindings on OS X
@@ -1274,7 +1284,7 @@
vlog("# Using", IMPL_PATH, os.environ[IMPL_PATH])
try:
- runtests(options, tests)
+ sys.exit(runtests(options, tests) or 0)
finally:
time.sleep(.1)
cleanup(options)
--- a/tests/test-command-template.t Wed Feb 19 22:19:45 2014 +0900
+++ b/tests/test-command-template.t Wed Feb 19 16:46:47 2014 -0600
@@ -1626,3 +1626,60 @@
$ hg log -r 0 --template '{if(branches, "yes", "no")}\n'
no
+
+Test shortest(node) function:
+
+ $ echo b > b
+ $ hg ci -qAm b
+ $ hg log --template '{shortest(node)}\n'
+ d97c
+ f776
+ $ hg log --template '{shortest(node, 10)}\n'
+ d97c383ae3
+ f7769ec2ab
+
+Test pad function
+
+ $ hg log --template '{pad(rev, 20)} {author|user}\n'
+ 1 test
+ 0 test
+
+ $ hg log --template '{pad(rev, 20, " ", True)} {author|user}\n'
+ 1 test
+ 0 test
+
+ $ hg log --template '{pad(rev, 20, "-", False)} {author|user}\n'
+ 1------------------- test
+ 0------------------- test
+
+Test ifcontains function
+
+ $ hg log --template '{rev} {ifcontains("a", file_adds, "added a", "did not add a")}\n'
+ 1 did not add a
+ 0 added a
+
+Test revset function
+
+ $ hg log --template '{rev} {ifcontains(rev, revset("."), "current rev", "not current rev")}\n'
+ 1 current rev
+ 0 not current rev
+
+ $ hg log --template '{rev} Parents: {revset("parents(%s)", rev)}\n'
+ 1 Parents: 0
+ 0 Parents:
+
+ $ hg log --template 'Rev: {rev}\n{revset("::%s", rev) % "Ancestor: {revision}\n"}\n'
+ Rev: 1
+ Ancestor: 0
+ Ancestor: 1
+
+ Rev: 0
+ Ancestor: 0
+
+Test current bookmark templating
+
+ $ hg book foo
+ $ hg book bar
+ $ hg log --template "{rev} {bookmarks % '{bookmark}{ifeq(bookmark, current, \"*\")} '}\n"
+ 1 bar* foo
+ 0
--- a/tests/test-commandserver.py Wed Feb 19 22:19:45 2014 +0900
+++ b/tests/test-commandserver.py Wed Feb 19 16:46:47 2014 -0600
@@ -267,7 +267,10 @@
runcommand(server, ['up', 'null'])
runcommand(server, ['phase', '-df', 'tip'])
- os.system('hg debugobsolete `hg log -r tip --template {node}`')
+ cmd = 'hg debugobsolete `hg log -r tip --template {node}`'
+ if os.name == 'nt':
+ cmd = 'sh -c "%s"' % cmd # run in sh, not cmd.exe
+ os.system(cmd)
runcommand(server, ['log', '--hidden'])
runcommand(server, ['log'])
--- a/tests/test-completion.t Wed Feb 19 22:19:45 2014 +0900
+++ b/tests/test-completion.t Wed Feb 19 16:46:47 2014 -0600
@@ -250,7 +250,7 @@
debugrebuilddirstate: rev
debugrename: rev
debugrevlog: changelog, manifest, dump
- debugrevspec:
+ debugrevspec: optimize
debugsetparents:
debugsub: rev
debugsuccessorssets:
--- a/tests/test-convert-hg-sink.t Wed Feb 19 22:19:45 2014 +0900
+++ b/tests/test-convert-hg-sink.t Wed Feb 19 16:46:47 2014 -0600
@@ -119,7 +119,7 @@
0 add baz
$ cd new-filemap
$ hg tags
- tip 2:6f4fd1df87fb
+ tip 2:3c74706b1ff8
some-tag 0:ba8636729451
$ cd ..
--- a/tests/test-convert-hg-svn.t Wed Feb 19 22:19:45 2014 +0900
+++ b/tests/test-convert-hg-svn.t Wed Feb 19 16:46:47 2014 -0600
@@ -103,3 +103,14 @@
scanning source...
sorting...
converting...
+
+verify which shamap format we are storing and must be able to handle
+
+ $ cat svn-repo-hg/.hg/shamap
+ svn:????????-????-????-????-????????????@1 ???????????????????????????????????????? (glob)
+ svn:????????-????-????-????-????????????@2 ???????????????????????????????????????? (glob)
+ svn:????????-????-????-????-????????????@2 ???????????????????????????????????????? (glob)
+ $ cat svn-repo-wc/.svn/hg-shamap
+ ???????????????????????????????????????? 1 (glob)
+ ???????????????????????????????????????? svn:????????-????-????-????-????????????@2 (glob)
+ ???????????????????????????????????????? svn:????????-????-????-????-????????????@2 (glob)
--- a/tests/test-convert-svn-source.t Wed Feb 19 22:19:45 2014 +0900
+++ b/tests/test-convert-svn-source.t Wed Feb 19 16:46:47 2014 -0600
@@ -198,11 +198,12 @@
extra: convert_revision=svn:........-....-....-....-............/proj B/mytrunk@1 (re)
$ cd ..
-Test converting empty heads (issue3347)
+Test converting empty heads (issue3347).
+Also tests getting logs directly without debugsvnlog.
$ svnadmin create svn-empty
$ svnadmin load -q svn-empty < "$TESTDIR/svn/empty.svndump"
- $ hg --config convert.svn.trunk= convert svn-empty
+ $ hg --config convert.svn.trunk= --config convert.svn.debugsvnlog=0 convert svn-empty
assuming destination svn-empty-hg
initializing destination svn-empty-hg repository
scanning source...
--- a/tests/test-convert.t Wed Feb 19 22:19:45 2014 +0900
+++ b/tests/test-convert.t Wed Feb 19 16:46:47 2014 -0600
@@ -121,6 +121,14 @@
can be used to (for instance) move code in one repository from "default"
to a named branch.
+ The closemap is a file that allows closing of a branch. This is useful if
+ you want to close a branch. Each entry contains a revision or hash
+ separated by white space.
+
+ The tagpmap is a file that exactly analogous to the branchmap. This will
+ rename tags on the fly and prevent the 'update tags' commit usually found
+ at the end of a convert process.
+
Mercurial Source
################
@@ -266,6 +274,8 @@
--filemap FILE remap file names using contents of file
--splicemap FILE splice synthesized history into place
--branchmap FILE change branch names while converting
+ --closemap FILE closes given revs
+ --tagmap FILE change tag names while converting
--branchsort try to sort changesets by branches
--datesort try to sort changesets by date
--sourcesort preserve source changesets order
--- a/tests/test-debugcommands.t Wed Feb 19 22:19:45 2014 +0900
+++ b/tests/test-debugcommands.t Wed Feb 19 16:46:47 2014 -0600
@@ -33,15 +33,16 @@
> dst('hello world')
> def g():
> f()
+ > sys.stderr.flush()
> debugstacktrace(skip=-5, f=sys.stdout)
> g()
> EOF
$ python debugstacktrace.py
hello world at:
- debugstacktrace.py:7 in * (glob)
+ debugstacktrace.py:8 in * (glob)
debugstacktrace.py:5 in g
debugstacktrace.py:3 in f
stacktrace at:
- debugstacktrace.py:7 *in * (glob)
- debugstacktrace.py:6 *in g (glob)
+ debugstacktrace.py:8 *in * (glob)
+ debugstacktrace.py:7 *in g (glob)
*/util.py:* in debugstacktrace (glob)
--- a/tests/test-doctest.py Wed Feb 19 22:19:45 2014 +0900
+++ b/tests/test-doctest.py Wed Feb 19 16:46:47 2014 -0600
@@ -27,3 +27,4 @@
testmod('mercurial.util', testtarget='platform')
testmod('hgext.convert.cvsps')
testmod('hgext.convert.filemap')
+testmod('hgext.convert.subversion')
--- a/tests/test-gendoc.t Wed Feb 19 22:19:45 2014 +0900
+++ b/tests/test-gendoc.t Wed Feb 19 16:46:47 2014 -0600
@@ -3,7 +3,7 @@
$ "$TESTDIR/hghave" docutils || exit 80
$ HGENCODING=UTF-8
$ export HGENCODING
- $ { echo C; find "$TESTDIR/../i18n" -name "*.po" | sort; } | while read PO; do
+ $ { echo C; ls "$TESTDIR/../i18n"/*.po | sort; } | while read PO; do
> LOCALE=`basename "$PO" .po`
> echo
> echo "% extracting documentation from $LOCALE"
--- a/tests/test-histedit-arguments.t Wed Feb 19 22:19:45 2014 +0900
+++ b/tests/test-histedit-arguments.t Wed Feb 19 16:46:47 2014 -0600
@@ -51,10 +51,12 @@
# Edit history between eb57da33312f and 08d98a8350f3
#
+ # Commits are listed from least to most recent
+ #
# Commands:
# p, pick = use commit
# e, edit = use commit, but stop for amending
- # f, fold = use commit, but fold into previous commit (combines N and N-1)
+ # f, fold = use commit, but combine it with the one above
# d, drop = remove commit from history
# m, mess = edit message without changing commit content
#
--- a/tests/test-histedit-bookmark-motion.t Wed Feb 19 22:19:45 2014 +0900
+++ b/tests/test-histedit-bookmark-motion.t Wed Feb 19 16:46:47 2014 -0600
@@ -67,10 +67,12 @@
# Edit history between d2ae7f538514 and 652413bf663e
#
+ # Commits are listed from least to most recent
+ #
# Commands:
# p, pick = use commit
# e, edit = use commit, but stop for amending
- # f, fold = use commit, but fold into previous commit (combines N and N-1)
+ # f, fold = use commit, but combine it with the one above
# d, drop = remove commit from history
# m, mess = edit message without changing commit content
#
@@ -125,10 +127,12 @@
# Edit history between b346ab9a313d and cacdfd884a93
#
+ # Commits are listed from least to most recent
+ #
# Commands:
# p, pick = use commit
# e, edit = use commit, but stop for amending
- # f, fold = use commit, but fold into previous commit (combines N and N-1)
+ # f, fold = use commit, but combine it with the one above
# d, drop = remove commit from history
# m, mess = edit message without changing commit content
#
--- a/tests/test-histedit-commute.t Wed Feb 19 22:19:45 2014 +0900
+++ b/tests/test-histedit-commute.t Wed Feb 19 16:46:47 2014 -0600
@@ -61,10 +61,12 @@
# Edit history between 177f92b77385 and 652413bf663e
#
+ # Commits are listed from least to most recent
+ #
# Commands:
# p, pick = use commit
# e, edit = use commit, but stop for amending
- # f, fold = use commit, but fold into previous commit (combines N and N-1)
+ # f, fold = use commit, but combine it with the one above
# d, drop = remove commit from history
# m, mess = edit message without changing commit content
#
--- a/tests/test-histedit-obsolete.t Wed Feb 19 22:19:45 2014 +0900
+++ b/tests/test-histedit-obsolete.t Wed Feb 19 16:46:47 2014 -0600
@@ -51,10 +51,12 @@
# Edit history between d2ae7f538514 and 652413bf663e
#
+ # Commits are listed from least to most recent
+ #
# Commands:
# p, pick = use commit
# e, edit = use commit, but stop for amending
- # f, fold = use commit, but fold into previous commit (combines N and N-1)
+ # f, fold = use commit, but combine it with the one above
# d, drop = remove commit from history
# m, mess = edit message without changing commit content
#
--- a/tests/test-histedit-outgoing.t Wed Feb 19 22:19:45 2014 +0900
+++ b/tests/test-histedit-outgoing.t Wed Feb 19 16:46:47 2014 -0600
@@ -43,10 +43,12 @@
# Edit history between 055a42cdd887 and 652413bf663e
#
+ # Commits are listed from least to most recent
+ #
# Commands:
# p, pick = use commit
# e, edit = use commit, but stop for amending
- # f, fold = use commit, but fold into previous commit (combines N and N-1)
+ # f, fold = use commit, but combine it with the one above
# d, drop = remove commit from history
# m, mess = edit message without changing commit content
#
@@ -72,10 +74,12 @@
# Edit history between 2a4042b45417 and 51281e65ba79
#
+ # Commits are listed from least to most recent
+ #
# Commands:
# p, pick = use commit
# e, edit = use commit, but stop for amending
- # f, fold = use commit, but fold into previous commit (combines N and N-1)
+ # f, fold = use commit, but combine it with the one above
# d, drop = remove commit from history
# m, mess = edit message without changing commit content
#
@@ -93,10 +97,12 @@
# Edit history between f26599ee3441 and f26599ee3441
#
+ # Commits are listed from least to most recent
+ #
# Commands:
# p, pick = use commit
# e, edit = use commit, but stop for amending
- # f, fold = use commit, but fold into previous commit (combines N and N-1)
+ # f, fold = use commit, but combine it with the one above
# d, drop = remove commit from history
# m, mess = edit message without changing commit content
#
--- a/tests/test-http.t Wed Feb 19 22:19:45 2014 +0900
+++ b/tests/test-http.t Wed Feb 19 16:46:47 2014 -0600
@@ -153,7 +153,8 @@
> common.permhooks.insert(0, perform_authentication)
> EOT
$ hg --config extensions.x=userpass.py serve -p $HGPORT2 -d --pid-file=pid \
- > --config server.preferuncompressed=True
+ > --config server.preferuncompressed=True \
+ > --config web.push_ssl=False --config web.allow_push=* -A ../access.log
$ cat pid >> $DAEMON_PIDS
$ cat << EOF > get_pass.py
@@ -163,6 +164,7 @@
> getpass.getpass = newgetpass
> EOF
+#if python243
$ hg id http://localhost:$HGPORT2/
abort: http authorization required for http://localhost:$HGPORT2/
[255]
@@ -176,6 +178,7 @@
password: 5fed3813f7f5
$ hg id http://user:pass@localhost:$HGPORT2/
5fed3813f7f5
+#endif
$ echo '[auth]' >> .hg/hgrc
$ echo 'l.schemes=http' >> .hg/hgrc
$ echo 'l.prefix=lo' >> .hg/hgrc
@@ -187,6 +190,7 @@
5fed3813f7f5
$ hg id http://user@localhost:$HGPORT2/
5fed3813f7f5
+#if python243
$ hg clone http://user:pass@localhost:$HGPORT2/ dest 2>&1
streaming all changes
7 files to transfer, 916 bytes of data
@@ -201,6 +205,82 @@
abort: HTTP Error 403: no
[255]
+ $ hg -R dest tag -r tip top
+ $ hg -R dest push http://user:pass@localhost:$HGPORT2/
+ pushing to http://user:***@localhost:$HGPORT2/
+ searching for changes
+ remote: adding changesets
+ remote: adding manifests
+ remote: adding file changes
+ remote: added 1 changesets with 1 changes to 1 files
+ $ hg rollback -q
+
+ $ cut -c38- ../access.log
+ "GET /?cmd=capabilities HTTP/1.1" 200 -
+ "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
+ "GET /?cmd=capabilities HTTP/1.1" 200 -
+ "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
+ "GET /?cmd=capabilities HTTP/1.1" 200 -
+ "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=bookmarks
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks
+ "GET /?cmd=capabilities HTTP/1.1" 200 -
+ "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=bookmarks
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks
+ "GET /?cmd=capabilities HTTP/1.1" 200 -
+ "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=bookmarks
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks
+ "GET /?cmd=capabilities HTTP/1.1" 200 -
+ "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=bookmarks
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks
+ "GET /?cmd=capabilities HTTP/1.1" 200 -
+ "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=bookmarks
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks
+ "GET /?cmd=capabilities HTTP/1.1" 200 -
+ "GET /?cmd=branchmap HTTP/1.1" 200 -
+ "GET /?cmd=stream_out HTTP/1.1" 401 -
+ "GET /?cmd=stream_out HTTP/1.1" 200 -
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=bookmarks
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks
+ "GET /?cmd=capabilities HTTP/1.1" 200 -
+ "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
+ "GET /?cmd=capabilities HTTP/1.1" 200 -
+ "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
+ "GET /?cmd=listkeys HTTP/1.1" 403 - x-hgarg-1:namespace=namespaces
+ "GET /?cmd=capabilities HTTP/1.1" 200 -
+ "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D7f4e523d01f2cc3765ac8934da3d14db775ff872
+ "GET /?cmd=branchmap HTTP/1.1" 200 -
+ "GET /?cmd=branchmap HTTP/1.1" 200 -
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=bookmarks
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks
+ "POST /?cmd=unbundle HTTP/1.1" 401 - x-hgarg-1:heads=686173686564+5eb5abfefeea63c80dd7553bcc3783f37e0c5524
+ "POST /?cmd=unbundle HTTP/1.1" 200 - x-hgarg-1:heads=686173686564+5eb5abfefeea63c80dd7553bcc3783f37e0c5524
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=phases
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases
+ "POST /?cmd=pushkey HTTP/1.1" 401 - x-hgarg-1:key=7f4e523d01f2cc3765ac8934da3d14db775ff872&namespace=phases&new=0&old=1
+ "POST /?cmd=pushkey HTTP/1.1" 200 - x-hgarg-1:key=7f4e523d01f2cc3765ac8934da3d14db775ff872&namespace=phases&new=0&old=1
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=bookmarks
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks
+
+#endif
$ cd ..
clone of serve with repo in root and unserved subrepo (issue2970)
--- a/tests/test-largefiles-cache.t Wed Feb 19 22:19:45 2014 +0900
+++ b/tests/test-largefiles-cache.t Wed Feb 19 16:46:47 2014 -0600
@@ -47,7 +47,7 @@
$ hg update -r0
getting changed largefiles
- large: largefile 7f7097b041ccf68cc5561e9600da4655d21c6d18 not available from file://$TESTTMP/mirror (glob)
+ large: largefile 7f7097b041ccf68cc5561e9600da4655d21c6d18 not available from file:/*/$TESTTMP/mirror (glob)
0 largefiles updated, 0 removed
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg status
@@ -64,7 +64,7 @@
$ hg update -r0
getting changed largefiles
- large: largefile 7f7097b041ccf68cc5561e9600da4655d21c6d18 not available from file://$TESTTMP/mirror (glob)
+ large: largefile 7f7097b041ccf68cc5561e9600da4655d21c6d18 not available from file:/*/$TESTTMP/mirror (glob)
0 largefiles updated, 0 removed
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg status
--- a/tests/test-largefiles.t Wed Feb 19 22:19:45 2014 +0900
+++ b/tests/test-largefiles.t Wed Feb 19 16:46:47 2014 -0600
@@ -1280,7 +1280,7 @@
$ rm ${USERCACHE}/7838695e10da2bb75ac1156565f40a2595fa2fa0
$ hg up -r 6
getting changed largefiles
- large3: largefile 7838695e10da2bb75ac1156565f40a2595fa2fa0 not available from file://$TESTTMP/d (glob)
+ large3: largefile 7838695e10da2bb75ac1156565f40a2595fa2fa0 not available from file:/*/$TESTTMP/d (glob)
1 largefiles updated, 2 removed
4 files updated, 0 files merged, 2 files removed, 0 files unresolved
$ rm normal3
@@ -1301,7 +1301,7 @@
! normal3
$ hg up -Cr.
getting changed largefiles
- large3: largefile 7838695e10da2bb75ac1156565f40a2595fa2fa0 not available from file://$TESTTMP/d (glob)
+ large3: largefile 7838695e10da2bb75ac1156565f40a2595fa2fa0 not available from file:/*/$TESTTMP/d (glob)
0 largefiles updated, 0 removed
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg st
@@ -1323,7 +1323,7 @@
4 files updated, 0 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)
getting changed largefiles
- large3: largefile 7838695e10da2bb75ac1156565f40a2595fa2fa0 not available from file://$TESTTMP/d (glob)
+ large3: largefile 7838695e10da2bb75ac1156565f40a2595fa2fa0 not available from file:/*/$TESTTMP/d (glob)
1 largefiles updated, 0 removed
$ hg rollback -q
--- a/tests/test-lfconvert.t Wed Feb 19 22:19:45 2014 +0900
+++ b/tests/test-lfconvert.t Wed Feb 19 16:46:47 2014 -0600
@@ -342,7 +342,7 @@
$ rm largefiles-repo/.hg/largefiles/*
$ hg lfconvert --to-normal issue3519 normalized3519
initializing destination normalized3519
- large: largefile 2e000fa7e85759c7f4c254d4d9c33ef481e459a7 not available from file://$TESTTMP/largefiles-repo (glob)
+ large: largefile 2e000fa7e85759c7f4c254d4d9c33ef481e459a7 not available from file:/*/$TESTTMP/largefiles-repo (glob)
abort: missing largefile 'large' from revision d4892ec57ce212905215fad1d9018f56b99202ad
[255]
--- a/tests/test-lock-badness.t Wed Feb 19 22:19:45 2014 +0900
+++ b/tests/test-lock-badness.t Wed Feb 19 16:46:47 2014 -0600
@@ -1,4 +1,7 @@
-#if unix-permissions no-root
+#if unix-permissions no-root no-windows
+
+Prepare
+
$ hg init a
$ echo a > a/a
$ hg -R a ci -A -m a
@@ -8,10 +11,25 @@
updating to branch default
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+One process waiting for another
+
+ $ cat > hooks.py << EOF
+ > import time
+ > def sleepone(**x): time.sleep(1)
+ > def sleephalf(**x): time.sleep(0.5)
+ > EOF
$ echo b > b/b
- $ hg -R b ci -A -m b
+ $ hg -R b ci -A -m b --config hooks.precommit="python:`pwd`/hooks.py:sleepone" > stdout &
+ $ hg -R b up -q --config hooks.pre-update="python:`pwd`/hooks.py:sleephalf"
+ waiting for lock on working directory of b held by '*:*' (glob)
+ got lock after 1 seconds
+ warning: ignoring unknown working parent d2ae7f538514!
+ $ wait
+ $ cat stdout
adding b
+Pushing to a local read-only repo that can't be locked
+
$ chmod 100 a/.hg/store
$ hg -R b push a
--- a/tests/test-module-imports.t Wed Feb 19 22:19:45 2014 +0900
+++ b/tests/test-module-imports.t Wed Feb 19 16:46:47 2014 -0600
@@ -22,18 +22,20 @@
hidden by deduplication algorithm in the cycle detector, so fixing
these may expose other cycles.
- $ hg locate 'mercurial/**.py' | xargs python "$import_checker"
- mercurial/dispatch.py mixed stdlib and relative imports:
- commands, error, extensions, fancyopts, hg, hook, util
- mercurial/fileset.py mixed stdlib and relative imports:
- error, merge, parser, util
- mercurial/revset.py mixed stdlib and relative imports:
- discovery, error, hbisect, parser, phases, util
- mercurial/templater.py mixed stdlib and relative imports:
- config, error, parser, templatefilters, util
- mercurial/ui.py mixed stdlib and relative imports:
- config, error, formatter, scmutil, util
- Import cycle: mercurial.cmdutil -> mercurial.subrepo -> mercurial.cmdutil
- Import cycle: mercurial.repoview -> mercurial.revset -> mercurial.repoview
- Import cycle: mercurial.fileset -> mercurial.merge -> mercurial.subrepo -> mercurial.match -> mercurial.fileset
- Import cycle: mercurial.filemerge -> mercurial.match -> mercurial.fileset -> mercurial.merge -> mercurial.filemerge
+ $ hg locate 'mercurial/**.py' | sed 's-\\-/-g' | xargs python "$import_checker"
+ mercurial/dispatch.py mixed imports
+ stdlib: commands
+ relative: error, extensions, fancyopts, hg, hook, util
+ mercurial/fileset.py mixed imports
+ stdlib: parser
+ relative: error, merge, util
+ mercurial/revset.py mixed imports
+ stdlib: parser
+ relative: discovery, error, hbisect, phases, util
+ mercurial/templater.py mixed imports
+ stdlib: parser
+ relative: config, error, templatefilters, templatekw, util
+ mercurial/ui.py mixed imports
+ stdlib: formatter
+ relative: config, error, scmutil, util
+ Import cycle: mercurial.cmdutil -> mercurial.context -> mercurial.subrepo -> mercurial.cmdutil -> mercurial.cmdutil
--- a/tests/test-push-warn.t Wed Feb 19 22:19:45 2014 +0900
+++ b/tests/test-push-warn.t Wed Feb 19 16:46:47 2014 -0600
@@ -22,6 +22,7 @@
$ hg push ../a
pushing to ../a
searching for changes
+ remote has heads on branch 'default' that are not known locally: 1c9246a22a0a
abort: push creates new remote head 1e108cc5548c!
(pull and merge or see "hg help push" for details about pushing new heads)
[255]
@@ -35,6 +36,7 @@
query 2; still undecided: 1, sample size is: 1
2 total queries
listing keys for "bookmarks"
+ remote has heads on branch 'default' that are not known locally: 1c9246a22a0a
new remote heads on branch 'default':
1e108cc5548c
abort: push creates new remote head 1e108cc5548c!
@@ -405,6 +407,7 @@
$ hg -R i push h
pushing to h
searching for changes
+ remote has heads on branch 'default' that are not known locally: ce4212fc8847
abort: push creates new remote head 97bd0c84d346!
(pull and merge or see "hg help push" for details about pushing new heads)
[255]
--- a/tests/test-revset.t Wed Feb 19 22:19:45 2014 +0900
+++ b/tests/test-revset.t Wed Feb 19 16:46:47 2014 -0600
@@ -414,6 +414,16 @@
2
1
0
+ $ log '1:: and reverse(all())'
+ 9
+ 8
+ 7
+ 6
+ 5
+ 4
+ 3
+ 2
+ 1
$ log 'rev(5)'
5
$ log 'sort(limit(reverse(all()), 3))'
@@ -434,6 +444,70 @@
$ log 'tag(tip)'
9
+check that conversion to _missingancestors works
+ $ try --optimize '::3 - ::1'
+ (minus
+ (dagrangepre
+ ('symbol', '3'))
+ (dagrangepre
+ ('symbol', '1')))
+ * optimized:
+ (func
+ ('symbol', '_missingancestors')
+ (list
+ ('symbol', '3')
+ ('symbol', '1')))
+ 3
+ $ try --optimize 'ancestors(1) - ancestors(3)'
+ (minus
+ (func
+ ('symbol', 'ancestors')
+ ('symbol', '1'))
+ (func
+ ('symbol', 'ancestors')
+ ('symbol', '3')))
+ * optimized:
+ (func
+ ('symbol', '_missingancestors')
+ (list
+ ('symbol', '1')
+ ('symbol', '3')))
+ $ try --optimize 'not ::2 and ::6'
+ (and
+ (not
+ (dagrangepre
+ ('symbol', '2')))
+ (dagrangepre
+ ('symbol', '6')))
+ * optimized:
+ (func
+ ('symbol', '_missingancestors')
+ (list
+ ('symbol', '6')
+ ('symbol', '2')))
+ 3
+ 4
+ 5
+ 6
+ $ try --optimize 'ancestors(6) and not ancestors(4)'
+ (and
+ (func
+ ('symbol', 'ancestors')
+ ('symbol', '6'))
+ (not
+ (func
+ ('symbol', 'ancestors')
+ ('symbol', '4'))))
+ * optimized:
+ (func
+ ('symbol', '_missingancestors')
+ (list
+ ('symbol', '6')
+ ('symbol', '4')))
+ 3
+ 5
+ 6
+
we can use patterns when searching for tags
$ log 'tag("1..*")'
--- a/tests/test-rollback.t Wed Feb 19 22:19:45 2014 +0900
+++ b/tests/test-rollback.t Wed Feb 19 16:46:47 2014 -0600
@@ -184,4 +184,14 @@
$ cat a
a
- $ cd ..
+corrupt journal test
+ $ echo "foo" > .hg/store/journal
+ $ hg recover
+ rolling back interrupted transaction
+ couldn't read journal entry 'foo\n'!
+ checking changesets
+ checking manifests
+ crosschecking files in changesets and manifests
+ checking files
+ 1 files, 2 changesets, 2 total revisions
+
--- a/tests/test-shelve.t Wed Feb 19 22:19:45 2014 +0900
+++ b/tests/test-shelve.t Wed Feb 19 16:46:47 2014 -0600
@@ -23,10 +23,6 @@
$ hg unshelve
unshelving change 'default'
- adding changesets
- adding manifests
- adding file changes
- added 1 changesets with 5 changes to 5 files
$ hg commit -q -m 'initial commit'
@@ -81,11 +77,11 @@
ensure that our shelved changes exist
$ hg shelve -l
- default-01 (*) [mq]: second.patch (glob)
- default (*) [mq]: second.patch (glob)
+ default-01 (*) changes to '[mq]: second.patch' (glob)
+ default (*) changes to '[mq]: second.patch' (glob)
$ hg shelve -l -p default
- default (*) [mq]: second.patch (glob)
+ default (*) changes to '[mq]: second.patch' (glob)
diff --git a/a/a b/a/a
--- a/a/a
@@ -104,10 +100,8 @@
$ printf "z\na\n" > a/a
$ hg unshelve --keep
unshelving change 'default-01'
- adding changesets
- adding manifests
- adding file changes
- added 1 changesets with 3 changes to 8 files (+1 heads)
+ temporarily committing pending changes (restore with 'hg unshelve --abort')
+ rebasing shelved changes
merging a/a
$ hg revert --all -q
@@ -117,10 +111,6 @@
$ hg unshelve
unshelving change 'default-01'
- adding changesets
- adding manifests
- adding file changes
- added 1 changesets with 3 changes to 8 files
$ hg status -C
M a/a
A b.rename/b
@@ -192,10 +182,8 @@
$ hg unshelve
unshelving change 'default'
- adding changesets
- adding manifests
- adding file changes
- added 1 changesets with 3 changes to 8 files (+1 heads)
+ temporarily committing pending changes (restore with 'hg unshelve --abort')
+ rebasing shelved changes
merging a/a
warning: conflicts during merge.
merging a/a incomplete! (edit conflicts, then use 'hg resolve --mark')
@@ -379,10 +367,8 @@
$ HGMERGE=true hg unshelve
unshelving change 'default'
- adding changesets
- adding manifests
- adding file changes
- added 1 changesets with 1 changes to 6 files (+1 heads)
+ temporarily committing pending changes (restore with 'hg unshelve --abort')
+ rebasing shelved changes
merging a/a
$ hg parents -q
4:33f7f61e6c5e
@@ -400,15 +386,11 @@
shelved as default
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ hg shelve --list
- default (*) create conflict (glob)
+ default (*) changes to 'create conflict' (glob)
$ hg unshelve --keep
unshelving change 'default'
- adding changesets
- adding manifests
- adding file changes
- added 1 changesets with 1 changes to 7 files
$ hg shelve --list
- default (*) create conflict (glob)
+ default (*) changes to 'create conflict' (glob)
$ hg shelve --cleanup
$ hg shelve --list
@@ -424,10 +406,6 @@
* test 4:33f7f61e6c5e
$ hg unshelve
unshelving change 'test'
- adding changesets
- adding manifests
- adding file changes
- added 1 changesets with 1 changes to 7 files
$ hg bookmark
* test 4:33f7f61e6c5e
@@ -437,13 +415,9 @@
shelved as test
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ hg --config extensions.mq=! shelve --list
- test (1s ago) create conflict
+ test (*) changes to 'create conflict' (glob)
$ hg --config extensions.mq=! unshelve
unshelving change 'test'
- adding changesets
- adding manifests
- adding file changes
- added 1 changesets with 1 changes to 7 files
shelve should leave dirstate clean (issue 4055)
@@ -468,10 +442,7 @@
saved backup bundle to $TESTTMP/shelverebase/.hg/strip-backup/323bfa07f744-backup.hg (glob)
$ hg unshelve
unshelving change 'default'
- adding changesets
- adding manifests
- adding file changes
- added 2 changesets with 2 changes to 2 files (+1 heads)
+ rebasing shelved changes
$ hg status
M z
@@ -497,10 +468,7 @@
$ hg up -q 1
$ hg unshelve
unshelving change 'default'
- adding changesets
- adding manifests
- adding file changes
- added 1 changesets with 1 changes to 3 files
+ rebasing shelved changes
$ hg status
A d
@@ -513,10 +481,7 @@
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ hg unshelve
unshelving change 'default'
- adding changesets
- adding manifests
- adding file changes
- added 1 changesets with 1 changes to 3 files
+ rebasing shelved changes
$ hg status
A d
@@ -534,10 +499,6 @@
$ hg debugobsolete `hg --debug id -i -r 1`
$ hg unshelve
unshelving change 'default'
- adding changesets
- adding manifests
- adding file changes
- added 1 changesets with 1 changes to 2 files (+1 heads)
unshelve should leave unknown files alone (issue4113)
@@ -549,10 +510,6 @@
? e
$ hg unshelve
unshelving change 'default'
- adding changesets
- adding manifests
- adding file changes
- added 1 changesets with 1 changes to 2 files (+1 heads)
$ hg status
A d
? e
@@ -568,13 +525,138 @@
$ echo z > e
$ hg unshelve
unshelving change 'default'
- adding changesets
- adding manifests
- adding file changes
- added 1 changesets with 2 changes to 3 files (+1 heads)
$ cat e
e
$ cat e.orig
z
+
+unshelve and conflicts with untracked files
+
+ preparing:
+
+ $ rm *.orig
+ $ hg ci -qm 'commit stuff'
+ $ hg phase -p null:
+
+ no other changes - no merge:
+
+ $ echo f > f
+ $ hg add f
+ $ hg shelve
+ shelved as default
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ echo gold > f
+ $ hg unshelve
+ unshelving change 'default'
+ $ hg st
+ A f
+ ? f.orig
+ $ cat f
+ f
+ $ cat f.orig
+ gold
+
+ other uncommitted changes - merge:
+
+ $ hg st
+ A f
+ ? f.orig
+ $ hg shelve
+ shelved as default
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ hg log -G --template '{rev} {desc|firstline} {author}' -R bundle://.hg/shelved/default.hg -r 'bundle()'
+ o 4 changes to 'commit stuff' shelve@localhost
+ |
+ $ hg log -G --template '{rev} {desc|firstline} {author}'
+ @ 3 commit stuff test
+ |
+ | o 2 c test
+ |/
+ o 0 a test
+
+ $ mv f.orig f
+ $ echo other change >> a
+ $ hg unshelve
+ unshelving change 'default'
+ temporarily committing pending changes (restore with 'hg unshelve --abort')
+ rebasing shelved changes
+ merging f
+ warning: conflicts during merge.
+ merging f incomplete! (edit conflicts, then use 'hg resolve --mark')
+ unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue')
+ [1]
+ $ hg log -G --template '{rev} {desc|firstline} {author}'
+ @ 5 changes to 'commit stuff' shelve@localhost
+ |
+ | @ 4 pending changes temporary commit shelve@localhost
+ |/
+ o 3 commit stuff test
+ |
+ | o 2 c test
+ |/
+ o 0 a test
+
+ $ hg st
+ M f
+ ? f.orig
+ $ cat f
+ <<<<<<< local
+ gold
+ =======
+ f
+ >>>>>>> other
+ $ cat f.orig
+ gold
+ $ hg unshelve --abort
+ rebase aborted
+ unshelve of 'default' aborted
+ $ hg st
+ M a
+ ? f.orig
+ $ cat f.orig
+ gold
+ $ hg unshelve
+ unshelving change 'default'
+ temporarily committing pending changes (restore with 'hg unshelve --abort')
+ rebasing shelved changes
+ $ hg st
+ M a
+ A f
+ ? f.orig
+
+ other committed changes - merge:
+
+ $ hg shelve f
+ shelved as default
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ hg ci a -m 'intermediate other change'
+ $ mv f.orig f
+ $ hg unshelve
+ unshelving change 'default'
+ rebasing shelved changes
+ merging f
+ warning: conflicts during merge.
+ merging f incomplete! (edit conflicts, then use 'hg resolve --mark')
+ unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue')
+ [1]
+ $ hg st
+ M f
+ ? f.orig
+ $ cat f
+ <<<<<<< local
+ gold
+ =======
+ f
+ >>>>>>> other
+ $ cat f.orig
+ gold
+ $ hg unshelve --abort
+ rebase aborted
+ no changes needed to a
+ no changes needed to d
+ no changes needed to e
+ unshelve of 'default' aborted
+ $ hg shelve --delete default
+
$ cd ..
--- a/tests/test-ssh.t Wed Feb 19 22:19:45 2014 +0900
+++ b/tests/test-ssh.t Wed Feb 19 16:46:47 2014 -0600
@@ -223,7 +223,7 @@
$ hg push
pushing to ssh://user@dummy/remote
searching for changes
- note: unsynced remote changes!
+ remote has heads on branch 'default' that are not known locally: 6c0482d977a3
remote: adding changesets
remote: adding manifests
remote: adding file changes
--- a/tests/test-status-color.t Wed Feb 19 22:19:45 2014 +0900
+++ b/tests/test-status-color.t Wed Feb 19 16:46:47 2014 -0600
@@ -1,5 +1,3 @@
- $ "$TESTDIR/hghave" tic || exit 80
-
$ echo "[extensions]" >> $HGRCPATH
$ echo "color=" >> $HGRCPATH
$ echo "[color]" >> $HGRCPATH
@@ -186,8 +184,11 @@
\x1b[0;0mC \x1b[0m\x1b[0;0m.hgignore\x1b[0m (esc)
\x1b[0;0mC \x1b[0m\x1b[0;0mmodified\x1b[0m (esc)
+
hg status -A (with terminfo color):
+#if tic
+
$ mkdir "$TESTTMP/terminfo"
$ TERMINFO="$TESTTMP/terminfo" tic "$TESTDIR/hgterm.ti"
$ TERM=hgterm TERMINFO="$TESTTMP/terminfo" hg status --config color.mode=terminfo --color=always -A
@@ -201,6 +202,8 @@
\x1b[30m\x1b[30mC \x1b[30m\x1b[30m\x1b[30m.hgignore\x1b[30m (esc)
\x1b[30m\x1b[30mC \x1b[30m\x1b[30m\x1b[30mmodified\x1b[30m (esc)
+#endif
+
$ echo "^ignoreddir$" > .hgignore
$ mkdir ignoreddir