--- a/Makefile Mon Feb 10 14:51:06 2014 -0800
+++ b/Makefile Mon Feb 10 17:31:26 2014 -0600
@@ -102,7 +102,7 @@
update-pot: i18n/hg.pot
-i18n/hg.pot: $(PYFILES) $(DOCFILES)
+i18n/hg.pot: $(PYFILES) $(DOCFILES) i18n/posplit i18n/hggettext
$(PYTHON) i18n/hggettext mercurial/commands.py \
hgext/*.py hgext/*/__init__.py \
mercurial/fileset.py mercurial/revset.py \
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/Makefile.python Mon Feb 10 17:31:26 2014 -0600
@@ -0,0 +1,79 @@
+PYTHONVER=2.7.6
+PYTHONNAME=python-
+PREFIX=$(HOME)/bin/prefix-$(PYTHONNAME)$(PYTHONVER)
+SYMLINKDIR=$(HOME)/bin
+
+help:
+ @echo
+ @echo 'Make a custom installation of a Python version'
+ @echo
+ @echo 'Common make parameters:'
+ @echo ' PYTHONVER=... [$(PYTHONVER)]'
+ @echo ' PREFIX=... [$(PREFIX)]'
+ @echo ' SYMLINKDIR=... [$(SYMLINKDIR) creating $(PYTHONNAME)$(PYTHONVER)]'
+ @echo
+ @echo 'Common make targets:'
+ @echo ' python - install Python $$PYTHONVER in $$PREFIX'
+ @echo ' symlink - create a $$SYMLINKDIR/$(PYTHONNAME)$$PYTHONVER symlink'
+ @echo
+ @echo 'Example: create a temporary Python installation:'
+ @echo ' $$ make -f Makefile.python python PYTHONVER=2.4 PREFIX=/tmp/p24'
+ @echo ' $$ /tmp/p24/bin/python -V'
+ @echo ' Python 2.4'
+ @echo
+ @echo 'Some external libraries are required for building Python: zlib bzip2 openssl.'
+ @echo 'Make sure their development packages are installed systemwide.'
+# fedora: yum install zlib-devel bzip2-devel openssl-devel
+# debian: apt-get install zlib1g-dev libbz2-dev libssl-dev
+ @echo
+ @echo 'To build a nice collection of interesting Python versions:'
+ @echo ' $$ for v in 2.{4{,.2,.3},5{,.6},6{,.1,.2,.9},7{,.6}}; do'
+ @echo ' make -f Makefile.python symlink PYTHONVER=$$v || break; done'
+ @echo 'To run a Mercurial test on all these Python versions:'
+ @echo ' $$ for py in `cd ~/bin && ls $(PYTHONNAME)2.*`; do'
+ @echo ' echo $$py; $$py run-tests.py test-http.t; echo; done'
+ @echo
+
+export LANGUAGE=C
+export LC_ALL=C
+
+python: $(PREFIX)/bin/python docutils
+ printf 'import sys, zlib, bz2, docutils\nif sys.version_info >= (2,6):\n import ssl' | $(PREFIX)/bin/python
+
+PYTHON_SRCDIR=Python-$(PYTHONVER)
+PYTHON_SRCFILE=$(PYTHON_SRCDIR).tgz
+
+$(PREFIX)/bin/python:
+ [ -f $(PYTHON_SRCFILE) ] || wget http://www.python.org/ftp/python/$(PYTHONVER)/$(PYTHON_SRCFILE) || [ -f $(PYTHON_SRCFILE) ]
+ rm -rf $(PYTHON_SRCDIR)
+ tar xf $(PYTHON_SRCFILE)
+ # Ubuntu disables SSLv2 the hard way, disable it on old Pythons too
+ -sed -i 's,self.*SSLv2_method(),0;//\0,g' $(PYTHON_SRCDIR)/Modules/_ssl.c
+ # Find multiarch system libraries on Ubuntu with Python 2.4.x
+ # http://lipyrary.blogspot.dk/2011/05/how-to-compile-python-on-ubuntu-1104.html
+ -sed -i "s|lib_dirs = .* \[|\0'/usr/lib/`dpkg-architecture -qDEB_HOST_MULTIARCH`',|g" $(PYTHON_SRCDIR)/setup.py
+ # Find multiarch system libraries on Ubuntu and disable fortify error when setting argv
+ LDFLAGS="-L/usr/lib/`dpkg-architecture -qDEB_HOST_MULTIARCH`"; \
+ BASECFLAGS=-U_FORTIFY_SOURCE; \
+ export LDFLAGS BASECFLAGS; \
+ cd $(PYTHON_SRCDIR) && ./configure --prefix=$(PREFIX) && make all SVNVERSION=pwd && make install
+ printf 'import sys, zlib, bz2\nif sys.version_info >= (2,6):\n import ssl' | $(PREFIX)/bin/python
+ rm -rf $(PYTHON_SRCDIR)
+
+DOCUTILSVER=0.11
+DOCUTILS_SRCDIR=docutils-$(DOCUTILSVER)
+DOCUTILS_SRCFILE=$(DOCUTILS_SRCDIR).tar.gz
+
+docutils: $(PREFIX)/bin/python
+ @$(PREFIX)/bin/python -c 'import docutils' || ( set -ex; \
+ [ -f $(DOCUTILS_SRCFILE) ] || wget http://downloads.sourceforge.net/project/docutils/docutils/$(DOCUTILSVER)/$(DOCUTILS_SRCFILE) || [ -f $(DOCUTILS_SRCFILE) ]; \
+ rm -rf $(DOCUTILS_SRCDIR); \
+ tar xf $(DOCUTILS_SRCFILE); \
+ cd $(DOCUTILS_SRCDIR) && $(PREFIX)/bin/python setup.py install --prefix=$(PREFIX); \
+ $(PREFIX)/bin/python -c 'import docutils'; \
+ rm -rf $(DOCUTILS_SRCDIR); )
+
+symlink: python $(SYMLINKDIR)
+ ln -sf $(PREFIX)/bin/python $(SYMLINKDIR)/$(PYTHONNAME)$(PYTHONVER)
+
+.PHONY: help python docutils symlink
--- a/contrib/check-code.py Mon Feb 10 14:51:06 2014 -0800
+++ b/contrib/check-code.py Mon Feb 10 17:31:26 2014 -0600
@@ -150,6 +150,9 @@
"explicit exit code checks unnecessary"),
(uprefix + r'set -e', "don't use set -e"),
(uprefix + r'(\s|fi\b|done\b)', "use > for continued lines"),
+ (uprefix + r'.*:\.\S*/', "x:.y in a path does not work on msys, rewrite "
+ "as x://.y, or see `hg log -k msys` for alternatives", r'-\S+:\.|' #-Rxxx
+ 'hg pull -q file:../test'), # in test-pull.t which is skipped on windows
(r'^ saved backup bundle to \$TESTTMP.*\.hg$', winglobmsg),
(r'^ changeset .* references (corrupted|missing) \$TESTTMP/.*[^)]$',
winglobmsg),
--- a/contrib/hgfixes/fix_bytesmod.py Mon Feb 10 14:51:06 2014 -0800
+++ b/contrib/hgfixes/fix_bytesmod.py Mon Feb 10 17:31:26 2014 -0600
@@ -33,9 +33,10 @@
'''
def transform(self, node, results):
- if self.filename in blacklist:
- return
- elif self.filename == 'mercurial/util.py':
+ for bfn in blacklist:
+ if self.filename.endswith(bfn):
+ return
+ if not self.filename.endswith('mercurial/py3kcompat.py'):
touch_import('.', 'py3kcompat', node=node)
formatstr = results['formatstr'].clone()
@@ -60,4 +61,3 @@
call = Call(Name('bytesformatter', prefix=' '), args)
return call
-
--- a/contrib/import-checker.py Mon Feb 10 14:51:06 2014 -0800
+++ b/contrib/import-checker.py Mon Feb 10 17:31:26 2014 -0600
@@ -11,12 +11,15 @@
def dotted_name_of_path(path):
"""Given a relative path to a source file, return its dotted module name.
-
>>> dotted_name_of_path('mercurial/error.py')
'mercurial.error'
+ >>> dotted_name_of_path('zlibmodule.so')
+ 'zlib'
"""
parts = path.split('/')
- parts[-1] = parts[-1][:-3] # remove .py
+ parts[-1] = parts[-1].split('.', 1)[0] # remove .py and .so and .ARCH.so
+ if parts[-1].endswith('module'):
+ parts[-1] = parts[-1][:-6]
return '.'.join(parts)
@@ -136,7 +139,7 @@
http://bugs.python.org/issue19510.
>>> list(verify_stdlib_on_own_line('import sys, foo'))
- ['mixed stdlib and relative imports:\\n foo, sys']
+ ['mixed imports\\n stdlib: sys\\n relative: foo']
>>> list(verify_stdlib_on_own_line('import sys, os'))
[]
>>> list(verify_stdlib_on_own_line('import foo, bar'))
@@ -144,13 +147,13 @@
"""
for node in ast.walk(ast.parse(source)):
if isinstance(node, ast.Import):
- from_stdlib = {}
+ from_stdlib = {False: [], True: []}
for n in node.names:
- from_stdlib[n.name] = n.name in stdlib_modules
- num_std = len([x for x in from_stdlib.values() if x])
- if num_std not in (len(from_stdlib.values()), 0):
- yield ('mixed stdlib and relative imports:\n %s' %
- ', '.join(sorted(from_stdlib.iterkeys())))
+ from_stdlib[n.name in stdlib_modules].append(n.name)
+ if from_stdlib[True] and from_stdlib[False]:
+ yield ('mixed imports\n stdlib: %s\n relative: %s' %
+ (', '.join(sorted(from_stdlib[True])),
+ ', '.join(sorted(from_stdlib[False]))))
class CircularImport(Exception):
pass
--- a/hgext/convert/__init__.py Mon Feb 10 14:51:06 2014 -0800
+++ b/hgext/convert/__init__.py Mon Feb 10 17:31:26 2014 -0600
@@ -141,6 +141,14 @@
branch names. This can be used to (for instance) move code in one
repository from "default" to a named branch.
+ The closemap is a file that allows closing of a branch. This is useful if
+ you want to close a branch in the converted repository. Each entry
+ contains a revision or hash separated by white space.
+
+ The tagmap is a file that is exactly analogous to the branchmap. This will
+ rename tags on the fly and prevent the 'update tags' commit usually found
+ at the end of a convert process.
+
Mercurial Source
################
@@ -319,6 +327,10 @@
_('splice synthesized history into place'), _('FILE')),
('', 'branchmap', '',
_('change branch names while converting'), _('FILE')),
+ ('', 'closemap', '',
+ _('closes given revs'), _('FILE')),
+ ('', 'tagmap', '',
+ _('change tag names while converting'), _('FILE')),
('', 'branchsort', None, _('try to sort changesets by branches')),
('', 'datesort', None, _('try to sort changesets by date')),
('', 'sourcesort', None, _('preserve source changesets order')),
--- a/hgext/convert/common.py Mon Feb 10 14:51:06 2014 -0800
+++ b/hgext/convert/common.py Mon Feb 10 17:31:26 2014 -0600
@@ -63,13 +63,13 @@
self.encoding = 'utf-8'
- def checkhexformat(self, revstr):
+ def checkhexformat(self, revstr, mapname='splicemap'):
""" fails if revstr is not a 40 byte hex. mercurial and git both uses
such format for their revision numbering
"""
if not re.match(r'[0-9a-fA-F]{40,40}$', revstr):
- raise util.Abort(_('splicemap entry %s is not a valid revision'
- ' identifier') % revstr)
+ raise util.Abort(_('%s entry %s is not a valid revision'
+ ' identifier') % (mapname, revstr))
def before(self):
pass
@@ -172,7 +172,7 @@
"""
return {}
- def checkrevformat(self, revstr):
+ def checkrevformat(self, revstr, mapname='splicemap'):
"""revstr is a string that describes a revision in the given
source control system. Return true if revstr has correct
format.
@@ -192,10 +192,6 @@
self.path = path
self.created = []
- def getheads(self):
- """Return a list of this repository's heads"""
- raise NotImplementedError
-
def revmapfile(self):
"""Path to a file that will contain lines
source_rev_id sink_rev_id
@@ -208,7 +204,8 @@
mapping equivalent authors identifiers for each system."""
return None
- def putcommit(self, files, copies, parents, commit, source, revmap):
+ def putcommit(self, files, copies, parents, commit, source,
+ revmap, tagmap):
"""Create a revision with all changed files listed in 'files'
and having listed parents. 'commit' is a commit object
containing at a minimum the author, date, and message for this
--- a/hgext/convert/convcmd.py Mon Feb 10 14:51:06 2014 -0800
+++ b/hgext/convert/convcmd.py Mon Feb 10 17:31:26 2014 -0600
@@ -120,6 +120,43 @@
self.splicemap = self.parsesplicemap(opts.get('splicemap'))
self.branchmap = mapfile(ui, opts.get('branchmap'))
+ self.closemap = self.parseclosemap(opts.get('closemap'))
+ self.tagmap = mapfile(ui, opts.get('tagmap'))
+
+ def parseclosemap(self, path):
+ """ check and validate the closemap format and
+ return a list of revs to close.
+ Format checking has two parts.
+ 1. generic format which is same across all source types
+ 2. specific format checking which may be different for
+ different source type. This logic is implemented in
+ checkrevformat function in source files like
+ hg.py, subversion.py etc.
+ """
+
+ if not path:
+ return []
+ m = []
+ try:
+ fp = open(path, 'r')
+ for i, line in enumerate(fp):
+ line = line.splitlines()[0].rstrip()
+ if not line:
+ # Ignore blank lines
+ continue
+ # split line
+ lex = shlex.shlex(line, posix=True)
+ lex.whitespace_split = True
+ lex.whitespace += ','
+ line = list(lex)
+ for part in line:
+ self.source.checkrevformat(part, 'closemap')
+ m.extend(line)
+ # if file does not exist or error reading, exit
+ except IOError:
+ raise util.Abort(_('closemap file not found or error reading %s:')
+ % path)
+ return m
def parsesplicemap(self, path):
""" check and validate the splicemap format and
@@ -408,8 +445,11 @@
except KeyError:
parents = [b[0] for b in pbranches]
source = progresssource(self.ui, self.source, len(files))
+ if self.closemap and rev in self.closemap:
+ commit.extra['close'] = 1
+
newnode = self.dest.putcommit(files, copies, parents, commit,
- source, self.map)
+ source, self.map, self.tagmap)
source.close()
self.source.converted(rev, newnode)
self.map[rev] = newnode
@@ -445,6 +485,9 @@
self.ui.progress(_('converting'), None)
tags = self.source.gettags()
+ tags = dict((self.tagmap.get(k, k), v)
+ for k, v in tags.iteritems())
+
ctags = {}
for k in tags:
v = tags[k]
--- a/hgext/convert/git.py Mon Feb 10 14:51:06 2014 -0800
+++ b/hgext/convert/git.py Mon Feb 10 17:31:26 2014 -0600
@@ -297,7 +297,7 @@
return bookmarks
- def checkrevformat(self, revstr):
+ def checkrevformat(self, revstr, mapname='splicemap'):
""" git revision string is a 40 byte hex """
- self.checkhexformat(revstr)
+ self.checkhexformat(revstr, mapname)
--- a/hgext/convert/hg.py Mon Feb 10 14:51:06 2014 -0800
+++ b/hgext/convert/hg.py Mon Feb 10 17:31:26 2014 -0600
@@ -25,6 +25,9 @@
from common import NoRepo, commit, converter_source, converter_sink
+import re
+sha1re = re.compile(r'\b[0-9a-f]{6,40}\b')
+
class mercurial_sink(converter_sink):
def __init__(self, ui, path):
converter_sink.__init__(self, ui, path)
@@ -75,10 +78,6 @@
def authorfile(self):
return self.repo.join("authormap")
- def getheads(self):
- h = self.repo.changelog.heads()
- return [hex(x) for x in h]
-
def setbranch(self, branch, pbranches):
if not self.clonebranches:
return
@@ -117,7 +116,7 @@
self.repo.pull(prepo, [prepo.lookup(h) for h in heads])
self.before()
- def _rewritetags(self, source, revmap, data):
+ def _rewritetags(self, source, revmap, tagmap, data):
fp = cStringIO.StringIO()
for line in data.splitlines():
s = line.split(' ', 1)
@@ -126,17 +125,18 @@
revid = revmap.get(source.lookuprev(s[0]))
if not revid:
continue
- fp.write('%s %s\n' % (revid, s[1]))
+ fp.write('%s %s\n' % (revid, tagmap.get(s[1], s[1])))
return fp.getvalue()
- def putcommit(self, files, copies, parents, commit, source, revmap):
+ def putcommit(self, files, copies, parents, commit, source,
+ revmap, tagmap):
files = dict(files)
def getfilectx(repo, memctx, f):
v = files[f]
data, mode = source.getfile(f, v)
if f == '.hgtags':
- data = self._rewritetags(source, revmap, data)
+ data = self._rewritetags(source, revmap, tagmap, data)
return context.memfilectx(f, data, 'l' in mode, 'x' in mode,
copies.get(f))
@@ -157,6 +157,14 @@
p2 = parents.pop(0)
text = commit.desc
+
+ sha1s = re.findall(sha1re, text)
+ for sha1 in sha1s:
+ oldrev = source.lookuprev(sha1)
+ newrev = revmap.get(oldrev)
+ if newrev is not None:
+ text = text.replace(sha1, newrev[:len(sha1)])
+
extra = commit.extra.copy()
if self.branchnames and commit.branch:
extra['branch'] = commit.branch
@@ -190,14 +198,36 @@
parentctx = None
tagparent = nullid
- try:
- oldlines = sorted(parentctx['.hgtags'].data().splitlines(True))
- except Exception:
- oldlines = []
+ oldlines = set()
+ for branch, heads in self.repo.branchmap().iteritems():
+ for h in heads:
+ if '.hgtags' in self.repo[h]:
+ oldlines.update(
+ set(self.repo[h]['.hgtags'].data().splitlines(True)))
+ oldlines = sorted(list(oldlines))
newlines = sorted([("%s %s\n" % (tags[tag], tag)) for tag in tags])
if newlines == oldlines:
return None, None
+
+ # if the old and new tags match, then there is nothing to update
+ oldtags = set()
+ newtags = set()
+ for line in oldlines:
+ s = line.strip().split(' ', 1)
+ if len(s) != 2:
+ continue
+ oldtags.add(s[1])
+ for line in newlines:
+ s = line.strip().split(' ', 1)
+ if len(s) != 2:
+ continue
+ if s[1] not in oldtags:
+ newtags.add(s[1].strip())
+
+ if not newtags:
+ return None, None
+
data = "".join(newlines)
def getfilectx(repo, memctx, f):
return context.memfilectx(f, data, False, False, None)
@@ -412,6 +442,6 @@
def getbookmarks(self):
return bookmarks.listbookmarks(self.repo)
- def checkrevformat(self, revstr):
+ def checkrevformat(self, revstr, mapname='splicemap'):
""" Mercurial, revision string is a 40 byte hex """
- self.checkhexformat(revstr)
+ self.checkhexformat(revstr, mapname)
--- a/hgext/convert/subversion.py Mon Feb 10 14:51:06 2014 -0800
+++ b/hgext/convert/subversion.py Mon Feb 10 17:31:26 2014 -0600
@@ -41,13 +41,30 @@
pass
def revsplit(rev):
- """Parse a revision string and return (uuid, path, revnum)."""
- url, revnum = rev.rsplit('@', 1)
- parts = url.split('/', 1)
+ """Parse a revision string and return (uuid, path, revnum).
+ >>> revsplit('svn:a2147622-4a9f-4db4-a8d3-13562ff547b2'
+ ... '/proj%20B/mytrunk/mytrunk@1')
+ ('a2147622-4a9f-4db4-a8d3-13562ff547b2', '/proj%20B/mytrunk/mytrunk', 1)
+ >>> revsplit('svn:8af66a51-67f5-4354-b62c-98d67cc7be1d@1')
+ ('', '', 1)
+ >>> revsplit('@7')
+ ('', '', 7)
+ >>> revsplit('7')
+ ('', '', 0)
+ >>> revsplit('bad')
+ ('', '', 0)
+ """
+ parts = rev.rsplit('@', 1)
+ revnum = 0
+ if len(parts) > 1:
+ revnum = int(parts[1])
+ parts = parts[0].split('/', 1)
+ uuid = ''
mod = ''
- if len(parts) > 1:
+ if len(parts) > 1 and parts[0].startswith('svn:'):
+ uuid = parts[0][4:]
mod = '/' + parts[1]
- return parts[0][4:], mod, int(revnum)
+ return uuid, mod, revnum
def quote(s):
# As of svn 1.7, many svn calls expect "canonical" paths. In
@@ -157,6 +174,30 @@
self._stdout.close()
self._stdout = None
+class directlogstream(list):
+ """Direct revision log iterator.
+ This can be used for debugging and development but it will probably leak
+ memory and is not suitable for real conversions."""
+ def __init__(self, url, paths, start, end, limit=0,
+ discover_changed_paths=True, strict_node_history=False):
+
+ def receiver(orig_paths, revnum, author, date, message, pool):
+ paths = {}
+ if orig_paths is not None:
+ for k, v in orig_paths.iteritems():
+ paths[k] = changedpath(v)
+ self.append((paths, revnum, author, date, message))
+
+ # Use an ra of our own so that our parent can consume
+ # our results without confusing the server.
+ t = transport.SvnRaTransport(url=url)
+ svn.ra.get_log(t.ra, paths, start, end, limit,
+ discover_changed_paths,
+ strict_node_history,
+ receiver)
+
+ def close(self):
+ pass
# Check to see if the given path is a local Subversion repo. Verify this by
# looking for several svn-specific files and directories in the given
@@ -454,13 +495,13 @@
del self.commits[rev]
return commit
- def checkrevformat(self, revstr):
+ def checkrevformat(self, revstr, mapname='splicemap'):
""" fails if revision format does not match the correct format"""
if not re.match(r'svn:[0-9a-f]{8,8}-[0-9a-f]{4,4}-'
'[0-9a-f]{4,4}-[0-9a-f]{4,4}-[0-9a-f]'
'{12,12}(.*)\@[0-9]+$',revstr):
- raise util.Abort(_('splicemap entry %s is not a valid revision'
- ' identifier') % revstr)
+ raise util.Abort(_('%s entry %s is not a valid revision'
+ ' identifier') % (mapname, revstr))
def gettags(self):
tags = {}
@@ -975,6 +1016,9 @@
relpaths.append(p.strip('/'))
args = [self.baseurl, relpaths, start, end, limit,
discover_changed_paths, strict_node_history]
+ # undocumented feature: debugsvnlog can be disabled
+ if not self.ui.configbool('convert', 'svn.debugsvnlog', True):
+ return directlogstream(*args)
arg = encodeargs(args)
hgexe = util.hgexecutable()
cmd = '%s debugsvnlog' % util.shellquote(hgexe)
@@ -1183,7 +1227,8 @@
def revid(self, rev):
return u"svn:%s@%s" % (self.uuid, rev)
- def putcommit(self, files, copies, parents, commit, source, revmap):
+ def putcommit(self, files, copies, parents, commit, source,
+ revmap, tagmap):
for parent in parents:
try:
return self.revid(self.childmap[parent])
--- a/hgext/mq.py Mon Feb 10 14:51:06 2014 -0800
+++ b/hgext/mq.py Mon Feb 10 17:31:26 2014 -0600
@@ -1986,9 +1986,11 @@
raise util.Abort(_('-e is incompatible with import from -'))
filename = normname(filename)
self.checkreservedname(filename)
- originpath = self.join(filename)
- if not os.path.isfile(originpath):
- raise util.Abort(_("patch %s does not exist") % filename)
+ if util.url(filename).islocal():
+ originpath = self.join(filename)
+ if not os.path.isfile(originpath):
+ raise util.Abort(
+ _("patch %s does not exist") % filename)
if patchname:
self.checkpatchname(patchname, force)
--- a/hgext/shelve.py Mon Feb 10 14:51:06 2014 -0800
+++ b/hgext/shelve.py Mon Feb 10 17:31:26 2014 -0600
@@ -22,8 +22,8 @@
"""
from mercurial.i18n import _
-from mercurial.node import nullid, bin, hex
-from mercurial import changegroup, cmdutil, scmutil, phases
+from mercurial.node import nullid, nullrev, bin, hex
+from mercurial import changegroup, cmdutil, scmutil, phases, commands
from mercurial import error, hg, mdiff, merge, patch, repair, util
from mercurial import templatefilters
from mercurial import lock as lockmod
@@ -122,22 +122,21 @@
"""subcommand that creates a new shelve"""
def publicancestors(ctx):
- """Compute the heads of the public ancestors of a commit.
+ """Compute the public ancestors of a commit.
- Much faster than the revset heads(ancestors(ctx) - draft())"""
- seen = set()
+ Much faster than the revset ancestors(ctx) & draft()"""
+ seen = set([nullrev])
visit = util.deque()
visit.append(ctx)
while visit:
ctx = visit.popleft()
+ yield ctx.node()
for parent in ctx.parents():
rev = parent.rev()
if rev not in seen:
seen.add(rev)
if parent.mutable():
visit.append(parent)
- else:
- yield parent.node()
wctx = repo[None]
parents = wctx.parents()
@@ -173,9 +172,9 @@
repo.mq.checkapplied = saved
if parent.node() != nullid:
- desc = parent.description().split('\n', 1)[0]
+ desc = "changes to '%s'" % parent.description().split('\n', 1)[0]
else:
- desc = '(empty repository)'
+ desc = '(changes in empty repository)'
if not opts['message']:
opts['message'] = desc
@@ -518,6 +517,7 @@
if not shelvedfile(repo, basename, 'files').exists():
raise util.Abort(_("shelved change '%s' not found") % basename)
+ oldquiet = ui.quiet
wlock = lock = tr = None
try:
lock = repo.lock()
@@ -537,6 +537,8 @@
# Store pending changes in a commit
m, a, r, d = repo.status()[:4]
if m or a or r or d:
+ ui.status(_("temporarily committing pending changes "
+ "(restore with 'hg unshelve --abort')\n"))
def commitfunc(ui, repo, message, match, opts):
hasmq = util.safehasattr(repo, 'mq')
if hasmq:
@@ -551,15 +553,12 @@
tempopts = {}
tempopts['message'] = "pending changes temporary commit"
- oldquiet = ui.quiet
- try:
- ui.quiet = True
- node = cmdutil.commit(ui, repo, commitfunc, [], tempopts)
- finally:
- ui.quiet = oldquiet
+ ui.quiet = True
+ node = cmdutil.commit(ui, repo, commitfunc, [], tempopts)
tmpwctx = repo[node]
try:
+ ui.quiet = True
fp = shelvedfile(repo, basename, 'hg').opener()
gen = changegroup.readbundle(fp, fp.name)
repo.addchangegroup(gen, 'unshelve', 'bundle:' + fp.name)
@@ -568,11 +567,14 @@
finally:
fp.close()
+ ui.quiet = oldquiet
+
shelvectx = repo['tip']
# If the shelve is not immediately on top of the commit
# we'll be merging with, rebase it to be on top.
if tmpwctx.node() != shelvectx.parents()[0].node():
+ ui.status(_('rebasing shelved changes\n'))
try:
rebase.rebase(ui, repo, **{
'rev' : [shelvectx.rev()],
@@ -610,6 +612,7 @@
unshelvecleanup(ui, repo, basename, opts)
finally:
+ ui.quiet = oldquiet
if tr:
tr.release()
lockmod.release(lock, wlock)
@@ -632,8 +635,8 @@
('p', 'patch', None,
_('show patch')),
('', 'stat', None,
- _('output diffstat-style summary of changes'))],
- _('hg shelve'))
+ _('output diffstat-style summary of changes'))] + commands.walkopts,
+ _('hg shelve [OPTION]... [FILE]...'))
def shelvecmd(ui, repo, *pats, **opts):
'''save and set aside changes from the working directory
--- a/i18n/de.po Mon Feb 10 14:51:06 2014 -0800
+++ b/i18n/de.po Mon Feb 10 17:31:26 2014 -0600
@@ -20,7 +20,7 @@
msgstr ""
"Project-Id-Version: Mercurial\n"
"Report-Msgid-Bugs-To: <mercurial-devel@selenic.com>\n"
-"POT-Creation-Date: 2014-01-25 17:51+0100\n"
+"POT-Creation-Date: 2014-01-29 16:47+0100\n"
"PO-Revision-Date: 2013-09-30 20:52+0100\n"
"Last-Translator: Simon Heimberg <simohe@besonet.ch>\n"
"Language-Team: \n"
@@ -2928,6 +2928,7 @@
" [repository]\n"
" native = LF"
+#. do not translate: .. note::
msgid ".. note::"
msgstr ""
@@ -5029,6 +5030,7 @@
" Siehe Hilfe zu 'paths' zu Pfad-Kurznamen und 'urls' für erlaubte\n"
" Formate für die Quellangabe."
+#. do not translate: .. container::
msgid " .. container:: verbose"
msgstr ""
@@ -6548,6 +6550,7 @@
" Ohne Argumente werden die aktuell aktiven Wächter ausgegeben.\n"
" Mit einem Argument wird der aktuelle Wächter gesetzt."
+#. do not translate: .. note::
msgid " .. note::"
msgstr ""
@@ -15694,6 +15697,7 @@
" order until one or more configuration files are detected."
msgstr ""
+#. do not translate: .. note::
msgid ""
".. note:: The registry key ``HKEY_LOCAL_MACHINE\\SOFTWARE\\Wow6432Node"
"\\Mercurial``\n"
@@ -15873,6 +15877,7 @@
msgid " stable5 = latest -b stable"
msgstr ""
+#. do not translate: .. note::
msgid ""
".. note:: It is possible to create aliases with the same names as\n"
" existing commands, which will then override the original\n"
@@ -15918,6 +15923,7 @@
"echo foo`` call above, ``$HG_ARGS`` would expand to ``echo foo``."
msgstr ""
+#. do not translate: .. note::
msgid ""
".. note:: Some global configuration options such as ``-R`` are\n"
" processed before shell aliases and will thus not be passed to\n"
@@ -16101,6 +16107,7 @@
"the command."
msgstr ""
+#. do not translate: .. note::
msgid ""
".. note:: The tempfile mechanism is recommended for Windows systems,\n"
" where the standard shell I/O redirection operators often have\n"
@@ -16572,6 +16579,7 @@
" update failed (e.g. because conflicts not resolved), ``$HG_ERROR=1``."
msgstr ""
+#. do not translate: .. note::
msgid ""
".. note:: It is generally better to use standard hooks rather than the\n"
" generic pre- and post- command hooks as they are guaranteed to be\n"
@@ -16580,6 +16588,7 @@
" generate a commit (e.g. tag) and not just the commit command."
msgstr ""
+#. do not translate: .. note::
msgid ""
".. note:: Environment variables with empty values may not be passed to\n"
" hooks on platforms such as Windows. As an example, ``$HG_PARENT2``\n"
@@ -18967,6 +18976,7 @@
":Manual group: Mercurial Manual"
msgstr ""
+#. do not translate: .. contents::
msgid ""
".. contents::\n"
" :backlinks: top\n"
@@ -19017,6 +19027,7 @@
" repository."
msgstr ""
+#. do not translate: .. include::
msgid ".. include:: hg.1.gendoc.txt"
msgstr ""
@@ -19121,6 +19132,7 @@
"Public License version 2 or any later version."
msgstr ""
+#. do not translate: .. include::
msgid ".. include:: common.txt\n"
msgstr ""
@@ -19143,6 +19155,7 @@
":Manual group: Mercurial Manual"
msgstr ""
+#. do not translate: .. include::
msgid ".. include:: hgignore.5.gendoc.txt"
msgstr ""
@@ -19170,6 +19183,7 @@
"Public License version 2 or any later version."
msgstr ""
+#. do not translate: .. include::
msgid ".. include:: common.txt"
msgstr ""
@@ -19281,6 +19295,7 @@
"regexp pattern, start it with ``^``."
msgstr ""
+#. do not translate: .. note::
msgid ""
".. note::\n"
" Patterns specified in other than ``.hgignore`` are always rooted.\n"
@@ -19333,6 +19348,7 @@
":Manual group: Mercurial Manual"
msgstr ""
+#. do not translate: .. contents::
msgid ""
".. contents::\n"
" :backlinks: top\n"
@@ -19348,6 +19364,7 @@
"Beschreibung\n"
"============"
+#. do not translate: .. include::
msgid ".. include:: hgrc.5.gendoc.txt"
msgstr ""
@@ -19564,6 +19581,7 @@
msgid "8. The merge of the file fails and must be resolved before commit."
msgstr ""
+#. do not translate: .. note::
msgid ""
".. note::\n"
" After selecting a merge program, Mercurial will by default attempt\n"
@@ -19633,6 +19651,7 @@
msgid "Alternate pattern notations must be specified explicitly."
msgstr "Andere Schreibweisen von Mustern müssen explizit angegeben werden."
+#. do not translate: .. note::
msgid ""
".. note::\n"
" Patterns specified in ``.hgignore`` are not rooted.\n"
@@ -19804,6 +19823,7 @@
msgid " - secret changesets are neither pushed, pulled, or cloned"
msgstr ""
+#. do not translate: .. note::
msgid ""
".. note::\n"
" Pulling a draft changeset from a publishing server does not mark it\n"
@@ -19823,12 +19843,14 @@
" [phases]\n"
" publish = False"
+#. do not translate: .. note::
msgid ""
".. note::\n"
" Servers running older versions of Mercurial are treated as\n"
" publishing."
msgstr ""
+#. do not translate: .. note::
msgid ""
".. note::\n"
" Changesets in secret phase are not exchanged with the server. This\n"
@@ -20216,6 +20238,7 @@
" repositories states when committing in the parent repository."
msgstr ""
+#. do not translate: .. note::
msgid ""
" .. note::\n"
" The ``.hgsubstate`` file should not be edited manually."
--- a/i18n/posplit Mon Feb 10 14:51:06 2014 -0800
+++ b/i18n/posplit Mon Feb 10 17:31:26 2014 -0600
@@ -5,6 +5,7 @@
# license: MIT/X11/Expat
#
+import re
import sys
import polib
@@ -30,6 +31,7 @@
cache = {}
entries = po[:]
po[:] = []
+ findd = re.compile(r' *\.\. (\w+)::') # for finding directives
for entry in entries:
msgids = entry.msgid.split(u'\n\n')
if entry.msgstr:
@@ -49,8 +51,27 @@
delta = 0
for msgid, msgstr in zip(msgids, msgstrs):
- if msgid:
+ if msgid and msgid != '::':
newentry = mkentry(entry, delta, msgid, msgstr)
+ mdirective = findd.match(msgid)
+ if mdirective:
+ if not msgid[mdirective.end():].rstrip():
+ # only directive, nothing to translate here
+ continue
+ directive = mdirective.group(1)
+ if directive in ('container', 'include'):
+ if msgid.rstrip('\n').count('\n') == 0:
+ # only rst syntax, nothing to translate
+ continue
+ else:
+ # lines following directly, unexpected
+ print 'Warning: text follows line with directive' \
+ ' %s' % directive
+ comment = 'do not translate: .. %s::' % directive
+ if not newentry.comment:
+ newentry.comment = comment
+ elif comment not in newentry.comment:
+ newentry.comment += '\n' + comment
addentry(po, newentry, cache)
delta += 2 + msgid.count('\n')
po.save()
--- a/mercurial/bookmarks.py Mon Feb 10 14:51:06 2014 -0800
+++ b/mercurial/bookmarks.py Mon Feb 10 17:31:26 2014 -0600
@@ -363,22 +363,6 @@
writer(msg)
localmarks.write()
-def updateremote(ui, repo, remote, revs):
- ui.debug("checking for updated bookmarks\n")
- revnums = map(repo.changelog.rev, revs or [])
- ancestors = [a for a in repo.changelog.ancestors(revnums, inclusive=True)]
- (addsrc, adddst, advsrc, advdst, diverge, differ, invalid
- ) = compare(repo, repo._bookmarks, remote.listkeys('bookmarks'),
- srchex=hex)
-
- for b, scid, dcid in advsrc:
- if ancestors and repo[scid].rev() not in ancestors:
- continue
- if remote.pushkey('bookmarks', b, dcid, scid):
- ui.status(_("updating bookmark %s\n") % b)
- else:
- ui.warn(_('updating bookmark %s failed!\n') % b)
-
def pushtoremote(ui, repo, remote, targets):
(addsrc, adddst, advsrc, advdst, diverge, differ, invalid
) = compare(repo, repo._bookmarks, remote.listkeys('bookmarks'),
--- a/mercurial/cmdutil.py Mon Feb 10 14:51:06 2014 -0800
+++ b/mercurial/cmdutil.py Mon Feb 10 17:31:26 2014 -0600
@@ -10,7 +10,7 @@
import os, sys, errno, re, tempfile
import util, scmutil, templater, patch, error, templatekw, revlog, copies
import match as matchmod
-import subrepo, context, repair, graphmod, revset, phases, obsolete, pathutil
+import context, repair, graphmod, revset, phases, obsolete, pathutil
import changelog
import bookmarks
import lock as lockmod
@@ -629,7 +629,7 @@
if listsubrepos:
ctx1 = repo[node1]
ctx2 = repo[node2]
- for subpath, sub in subrepo.itersubrepos(ctx1, ctx2):
+ for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
tempnode2 = node2
try:
if node2 is not None:
@@ -1132,7 +1132,7 @@
elif follow:
revs = repo.revs('reverse(:.)')
else:
- revs = list(repo)
+ revs = revset.baseset(repo)
revs.reverse()
if not revs:
return []
@@ -1140,6 +1140,7 @@
slowpath = match.anypats() or (match.files() and opts.get('removed'))
fncache = {}
change = repo.changectx
+ revs = revset.baseset(revs)
# First step is to fill wanted, the set of revisions that we want to yield.
# When it does not induce extra cost, we also fill fncache for revisions in
@@ -1471,10 +1472,11 @@
if follow and len(repo) > 0:
revs = repo.revs('reverse(:.)')
else:
- revs = list(repo.changelog)
+ revs = revset.baseset(repo.changelog)
revs.reverse()
if not revs:
return [], None, None
+ revs = revset.baseset(revs)
expr, filematcher = _makegraphlogrevset(repo, pats, opts, revs)
if possiblyunsorted:
revs.sort(reverse=True)
--- a/mercurial/commands.py Mon Feb 10 14:51:06 2014 -0800
+++ b/mercurial/commands.py Mon Feb 10 17:31:26 2014 -0600
@@ -1943,7 +1943,7 @@
tree = fileset.parse(expr)[0]
ui.note(tree, "\n")
- for f in fileset.getfileset(ctx, expr):
+ for f in ctx.getfileset(expr):
ui.write("%s\n" % f)
@command('debugfsinfo', [], _('[PATH]'))
@@ -2107,10 +2107,21 @@
import templater
p = templater.templatepath()
ui.status(_("checking templates (%s)...\n") % ' '.join(p))
- try:
- templater.templater(templater.templatepath("map-cmdline.default"))
- except Exception, inst:
- ui.write(" %s\n" % inst)
+ if p:
+ m = templater.templatepath("map-cmdline.default")
+ if m:
+ # template found, check if it is working
+ try:
+ templater.templater(m)
+ except Exception, inst:
+ ui.write(" %s\n" % inst)
+ p = None
+ else:
+ ui.write(_(" template 'default' not found\n"))
+ p = None
+ else:
+ ui.write(_(" no template directories found\n"))
+ if not p:
ui.write(_(" (templates seem to have been installed incorrectly)\n"))
problems += 1
@@ -2558,7 +2569,7 @@
if newtree != tree:
ui.note(revset.prettyformat(newtree), "\n")
func = revset.match(ui, expr)
- for c in func(repo, range(len(repo))):
+ for c in func(repo, revset.baseset(range(len(repo)))):
ui.write("%s\n" % c)
@command('debugsetparents', [], _('REV1 [REV2]'))
--- a/mercurial/context.py Mon Feb 10 14:51:06 2014 -0800
+++ b/mercurial/context.py Mon Feb 10 17:31:26 2014 -0600
@@ -12,6 +12,7 @@
import os, errno, stat
import obsolete as obsmod
import repoview
+import fileset
propertycache = util.propertycache
@@ -79,6 +80,9 @@
def mutable(self):
return self.phase() > phases.public
+ def getfileset(self, expr):
+ return fileset.getfileset(self, expr)
+
def obsolete(self):
"""True if the changeset is obsolete"""
return self.rev() in obsmod.getrevs(self._repo, 'obsolete')
--- a/mercurial/discovery.py Mon Feb 10 14:51:06 2014 -0800
+++ b/mercurial/discovery.py Mon Feb 10 17:31:26 2014 -0600
@@ -154,7 +154,7 @@
- branch: the branch name
- remoteheads: the list of remote heads known locally
- None is the branch is new
+ None if the branch is new
- newheads: the new remote heads (known locally) with outgoing pushed
- unsyncedheads: the list of remote heads unknown locally.
"""
@@ -250,8 +250,7 @@
hint=_("use 'hg push --new-branch' to create"
" new remote branches"))
- # 2 compute newly pushed bookmarks. We
- # we don't warned about bookmarked heads.
+ # 2. Compute newly pushed bookmarks. We don't warn about bookmarked heads.
localbookmarks = repo._bookmarks
remotebookmarks = remote.listkeys('bookmarks')
bookmarkedheads = set()
@@ -274,18 +273,19 @@
allfuturecommon = set(c.node() for c in repo.set('%ld', outgoing.common))
allfuturecommon.update(allmissing)
for branch, heads in sorted(headssum.iteritems()):
- candidate_newhs = set(heads[1])
+ remoteheads, newheads, unsyncedheads = heads
+ candidate_newhs = set(newheads)
# add unsynced data
- if heads[0] is None:
+ if remoteheads is None:
oldhs = set()
else:
- oldhs = set(heads[0])
- oldhs.update(heads[2])
- candidate_newhs.update(heads[2])
- dhs = None
+ oldhs = set(remoteheads)
+ oldhs.update(unsyncedheads)
+ candidate_newhs.update(unsyncedheads)
+ dhs = None # delta heads, the new heads on branch
discardedheads = set()
if repo.obsstore:
- # remove future heads which are actually obsolete by another
+ # remove future heads which are actually obsoleted by another
# pushed element:
#
# XXX as above, There are several cases this case does not handle
@@ -297,8 +297,8 @@
# (2) if the new heads have ancestors which are not obsolete and
# not ancestors of any other heads we will have a new head too.
#
- # This two case will be easy to handle for know changeset but much
- # more tricky for unsynced changes.
+ # These two cases will be easy to handle for known changeset but
+ # much more tricky for unsynced changes.
newhs = set()
for nh in candidate_newhs:
if nh in repo and repo[nh].phase() <= phases.public:
@@ -312,10 +312,10 @@
newhs.add(nh)
else:
newhs = candidate_newhs
- if [h for h in heads[2] if h not in discardedheads]:
+ if [h for h in unsyncedheads if h not in discardedheads]:
unsynced = True
- if heads[0] is None:
- if 1 < len(newhs):
+ if remoteheads is None:
+ if len(newhs) > 1:
dhs = list(newhs)
if error is None:
error = (_("push creates new branch '%s' "
@@ -324,7 +324,7 @@
" see \"hg help push\" for details about"
" pushing new heads")
elif len(newhs) > len(oldhs):
- # strip updates to existing remote heads from the new heads list
+ # remove bookmarked or existing remote heads from the new heads list
dhs = sorted(newhs - bookmarkedheads - oldhs)
if dhs:
if error is None:
@@ -334,7 +334,7 @@
else:
error = _("push creates new remote head %s!"
) % short(dhs[0])
- if heads[2]: # unsynced
+ if unsyncedheads:
hint = _("pull and merge or"
" see \"hg help push\" for details about"
" pushing new heads")
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/exchange.py Mon Feb 10 17:31:26 2014 -0600
@@ -0,0 +1,276 @@
+# exchange.py - utility to exchange data between repositories.
+#
+# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+from i18n import _
+from node import hex
+import errno
+import util, scmutil, changegroup
+import discovery, phases, obsolete, bookmarks
+
+
+class pushoperation(object):
+ """An object that represents a single push operation.
+
+ Its purpose is to carry push-related state and very common operations.
+
+ A new one should be created at the beginning of each push and discarded
+ afterward.
+ """
+
+ def __init__(self, repo, remote, force=False, revs=None, newbranch=False):
+ # repo we push from
+ self.repo = repo
+ self.ui = repo.ui
+ # repo we push to
+ self.remote = remote
+ # force option provided
+ self.force = force
+ # revs to be pushed (None is "all")
+ self.revs = revs
+ # allow push of new branch
+ self.newbranch = newbranch
+
+def push(repo, remote, force=False, revs=None, newbranch=False):
+ '''Push outgoing changesets (limited by revs) from a local
+ repository to remote. Return an integer:
+ - None means nothing to push
+ - 0 means HTTP error
+ - 1 means we pushed and remote head count is unchanged *or*
+ we have outgoing changesets but refused to push
+ - other values as described by addchangegroup()
+ '''
+ pushop = pushoperation(repo, remote, force, revs, newbranch)
+ if pushop.remote.local():
+ missing = (set(pushop.repo.requirements)
+ - pushop.remote.local().supported)
+ if missing:
+ msg = _("required features are not"
+ " supported in the destination:"
+ " %s") % (', '.join(sorted(missing)))
+ raise util.Abort(msg)
+
+ # there are two ways to push to remote repo:
+ #
+ # addchangegroup assumes local user can lock remote
+ # repo (local filesystem, old ssh servers).
+ #
+ # unbundle assumes local user cannot lock remote repo (new ssh
+ # servers, http servers).
+
+ if not pushop.remote.canpush():
+ raise util.Abort(_("destination does not support push"))
+ unfi = pushop.repo.unfiltered()
+ def localphasemove(nodes, phase=phases.public):
+ """move <nodes> to <phase> in the local source repo"""
+ if locallock is not None:
+ phases.advanceboundary(pushop.repo, phase, nodes)
+ else:
+ # repo is not locked, do not change any phases!
+ # Informs the user that phases should have been moved when
+ # applicable.
+ actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
+ phasestr = phases.phasenames[phase]
+ if actualmoves:
+ pushop.ui.status(_('cannot lock source repo, skipping '
+ 'local %s phase update\n') % phasestr)
+ # get local lock as we might write phase data
+ locallock = None
+ try:
+ locallock = pushop.repo.lock()
+ except IOError, err:
+ if err.errno != errno.EACCES:
+ raise
+ # source repo cannot be locked.
+ # We do not abort the push, but just disable the local phase
+ # synchronisation.
+ msg = 'cannot lock source repository: %s\n' % err
+ pushop.ui.debug(msg)
+ try:
+ pushop.repo.checkpush(pushop.force, pushop.revs)
+ lock = None
+ unbundle = pushop.remote.capable('unbundle')
+ if not unbundle:
+ lock = pushop.remote.lock()
+ try:
+ # discovery
+ fci = discovery.findcommonincoming
+ commoninc = fci(unfi, pushop.remote, force=pushop.force)
+ common, inc, remoteheads = commoninc
+ fco = discovery.findcommonoutgoing
+ outgoing = fco(unfi, pushop.remote, onlyheads=pushop.revs,
+ commoninc=commoninc, force=pushop.force)
+
+
+ if not outgoing.missing:
+ # nothing to push
+ scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
+ ret = None
+ else:
+ # something to push
+ if not pushop.force:
+ # if repo.obsstore == False --> no obsolete
+ # then, save the iteration
+ if unfi.obsstore:
+ # these messages are here for 80 char limit reason
+ mso = _("push includes obsolete changeset: %s!")
+ mst = "push includes %s changeset: %s!"
+ # plain versions for i18n tool to detect them
+ _("push includes unstable changeset: %s!")
+ _("push includes bumped changeset: %s!")
+ _("push includes divergent changeset: %s!")
+ # If we are going to push and there is at least
+ # one obsolete or unstable changeset in missing, at
+ # least one of the missing heads will be obsolete or
+ # unstable. So checking heads only is ok
+ for node in outgoing.missingheads:
+ ctx = unfi[node]
+ if ctx.obsolete():
+ raise util.Abort(mso % ctx)
+ elif ctx.troubled():
+ raise util.Abort(_(mst)
+ % (ctx.troubles()[0],
+ ctx))
+ newbm = pushop.ui.configlist('bookmarks', 'pushing')
+ discovery.checkheads(unfi, pushop.remote, outgoing,
+ remoteheads, pushop.newbranch,
+ bool(inc), newbm)
+
+ # TODO: get bundlecaps from remote
+ bundlecaps = None
+ # create a changegroup from local
+ if pushop.revs is None and not (outgoing.excluded
+ or pushop.repo.changelog.filteredrevs):
+ # push everything,
+ # use the fast path, no race possible on push
+ bundler = changegroup.bundle10(pushop.repo, bundlecaps)
+ cg = pushop.repo._changegroupsubset(outgoing,
+ bundler,
+ 'push',
+ fastpath=True)
+ else:
+ cg = pushop.repo.getlocalbundle('push', outgoing,
+ bundlecaps)
+
+ # apply changegroup to remote
+ if unbundle:
+ # local repo finds heads on server, finds out what
+ # revs it must push. once revs transferred, if server
+ # finds it has different heads (someone else won
+ # commit/push race), server aborts.
+ if pushop.force:
+ remoteheads = ['force']
+ # ssh: return remote's addchangegroup()
+ # http: return remote's addchangegroup() or 0 for error
+ ret = pushop.remote.unbundle(cg, remoteheads, 'push')
+ else:
+ # we return an integer indicating remote head count
+ # change
+ ret = pushop.remote.addchangegroup(cg, 'push',
+ pushop.repo.url())
+
+ if ret:
+ # push succeeded, synchronize target of the push
+ cheads = outgoing.missingheads
+ elif pushop.revs is None:
+ # All-out push failed. Synchronize all common
+ cheads = outgoing.commonheads
+ else:
+ # I want cheads = heads(::missingheads and ::commonheads)
+ # (missingheads is revs with secret changeset filtered out)
+ #
+ # This can be expressed as:
+ # cheads = ( (missingheads and ::commonheads)
+ # + (commonheads and ::missingheads))"
+ # )
+ #
+ # while trying to push we already computed the following:
+ # common = (::commonheads)
+ # missing = ((commonheads::missingheads) - commonheads)
+ #
+ # We can pick:
+ # * missingheads part of common (::commonheads)
+ common = set(outgoing.common)
+ nm = pushop.repo.changelog.nodemap
+ cheads = [node for node in pushop.revs if nm[node] in common]
+ # and
+ # * commonheads parents on missing
+ revset = unfi.set('%ln and parents(roots(%ln))',
+ outgoing.commonheads,
+ outgoing.missing)
+ cheads.extend(c.node() for c in revset)
+ # even when we don't push, exchanging phase data is useful
+ remotephases = pushop.remote.listkeys('phases')
+ if (pushop.ui.configbool('ui', '_usedassubrepo', False)
+ and remotephases # server supports phases
+ and ret is None # nothing was pushed
+ and remotephases.get('publishing', False)):
+ # When:
+ # - this is a subrepo push
+ # - and remote support phase
+ # - and no changeset was pushed
+ # - and remote is publishing
+ # We may be in issue 3871 case!
+ # We drop the possible phase synchronisation done by
+ # courtesy to publish changesets possibly locally draft
+ # on the remote.
+ remotephases = {'publishing': 'True'}
+ if not remotephases: # old server or public only repo
+ localphasemove(cheads)
+ # don't push any phase data as there is nothing to push
+ else:
+ ana = phases.analyzeremotephases(pushop.repo, cheads,
+ remotephases)
+ pheads, droots = ana
+ ### Apply remote phase on local
+ if remotephases.get('publishing', False):
+ localphasemove(cheads)
+ else: # publish = False
+ localphasemove(pheads)
+ localphasemove(cheads, phases.draft)
+ ### Apply local phase on remote
+
+ # Get the list of all revs draft on remote but public here.
+ # XXX Beware that revset breaks if droots is not strictly
+ # XXX roots; we may want to ensure it is, but it is costly
+ outdated = unfi.set('heads((%ln::%ln) and public())',
+ droots, cheads)
+ for newremotehead in outdated:
+ r = pushop.remote.pushkey('phases',
+ newremotehead.hex(),
+ str(phases.draft),
+ str(phases.public))
+ if not r:
+ pushop.ui.warn(_('updating %s to public failed!\n')
+ % newremotehead)
+ pushop.ui.debug('try to push obsolete markers to remote\n')
+ obsolete.syncpush(pushop.repo, pushop.remote)
+ finally:
+ if lock is not None:
+ lock.release()
+ finally:
+ if locallock is not None:
+ locallock.release()
+
+ _pushbookmark(pushop.ui, unfi, pushop.remote, pushop.revs)
+ return ret
+
+def _pushbookmark(ui, repo, remote, revs):
+ """Update bookmark position on remote"""
+ ui.debug("checking for updated bookmarks\n")
+ revnums = map(repo.changelog.rev, revs or [])
+ ancestors = [a for a in repo.changelog.ancestors(revnums, inclusive=True)]
+ (addsrc, adddst, advsrc, advdst, diverge, differ, invalid
+ ) = bookmarks.compare(repo, repo._bookmarks, remote.listkeys('bookmarks'),
+ srchex=hex)
+
+ for b, scid, dcid in advsrc:
+ if ancestors and repo[scid].rev() not in ancestors:
+ continue
+ if remote.pushkey('bookmarks', b, dcid, scid):
+ ui.status(_("updating bookmark %s\n") % b)
+ else:
+ ui.warn(_('updating bookmark %s failed!\n') % b)
--- a/mercurial/hgweb/webcommands.py Mon Feb 10 14:51:06 2014 -0800
+++ b/mercurial/hgweb/webcommands.py Mon Feb 10 17:31:26 2014 -0600
@@ -187,7 +187,7 @@
mfunc = revset.match(web.repo.ui, revdef)
try:
- revs = mfunc(web.repo, list(web.repo))
+ revs = mfunc(web.repo, revset.baseset(web.repo))
return MODE_REVSET, revs
# ParseError: wrongly placed tokens, wrongs arguments, etc
# RepoLookupError: no such revision, e.g. in 'revision:'
--- a/mercurial/localrepo.py Mon Feb 10 14:51:06 2014 -0800
+++ b/mercurial/localrepo.py Mon Feb 10 17:31:26 2014 -0600
@@ -9,7 +9,7 @@
import peer, changegroup, subrepo, discovery, pushkey, obsolete, repoview
import changelog, dirstate, filelog, manifest, context, bookmarks, phases
import lock as lockmod
-import transaction, store, encoding
+import transaction, store, encoding, exchange
import scmutil, util, extensions, hook, error, revset
import match as matchmod
import merge as mergemod
@@ -428,7 +428,7 @@
'''Return a list of revisions matching the given revset'''
expr = revset.formatspec(expr, *args)
m = revset.match(None, expr)
- return [r for r in m(self, list(self))]
+ return revset.baseset([r for r in m(self, revset.baseset(self))])
def set(self, expr, *args):
'''
@@ -1005,6 +1005,7 @@
l = lockmod.lock(vfs, lockname,
int(self.ui.config("ui", "timeout", "600")),
releasefn, desc=desc)
+ self.ui.warn(_("got lock after %s seconds\n") % l.delay)
if acquirefn:
acquirefn()
return l
@@ -1578,7 +1579,7 @@
r = modified, added, removed, deleted, unknown, ignored, clean
if listsubrepos:
- for subpath, sub in subrepo.itersubrepos(ctx1, ctx2):
+ for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
if working:
rev2 = None
else:
@@ -1750,223 +1751,7 @@
pass
def push(self, remote, force=False, revs=None, newbranch=False):
- '''Push outgoing changesets (limited by revs) from the current
- repository to remote. Return an integer:
- - None means nothing to push
- - 0 means HTTP error
- - 1 means we pushed and remote head count is unchanged *or*
- we have outgoing changesets but refused to push
- - other values as described by addchangegroup()
- '''
- if remote.local():
- missing = set(self.requirements) - remote.local().supported
- if missing:
- msg = _("required features are not"
- " supported in the destination:"
- " %s") % (', '.join(sorted(missing)))
- raise util.Abort(msg)
-
- # there are two ways to push to remote repo:
- #
- # addchangegroup assumes local user can lock remote
- # repo (local filesystem, old ssh servers).
- #
- # unbundle assumes local user cannot lock remote repo (new ssh
- # servers, http servers).
-
- if not remote.canpush():
- raise util.Abort(_("destination does not support push"))
- unfi = self.unfiltered()
- def localphasemove(nodes, phase=phases.public):
- """move <nodes> to <phase> in the local source repo"""
- if locallock is not None:
- phases.advanceboundary(self, phase, nodes)
- else:
- # repo is not locked, do not change any phases!
- # Informs the user that phases should have been moved when
- # applicable.
- actualmoves = [n for n in nodes if phase < self[n].phase()]
- phasestr = phases.phasenames[phase]
- if actualmoves:
- self.ui.status(_('cannot lock source repo, skipping local'
- ' %s phase update\n') % phasestr)
- # get local lock as we might write phase data
- locallock = None
- try:
- locallock = self.lock()
- except IOError, err:
- if err.errno != errno.EACCES:
- raise
- # source repo cannot be locked.
- # We do not abort the push, but just disable the local phase
- # synchronisation.
- msg = 'cannot lock source repository: %s\n' % err
- self.ui.debug(msg)
- try:
- self.checkpush(force, revs)
- lock = None
- unbundle = remote.capable('unbundle')
- if not unbundle:
- lock = remote.lock()
- try:
- # discovery
- fci = discovery.findcommonincoming
- commoninc = fci(unfi, remote, force=force)
- common, inc, remoteheads = commoninc
- fco = discovery.findcommonoutgoing
- outgoing = fco(unfi, remote, onlyheads=revs,
- commoninc=commoninc, force=force)
-
-
- if not outgoing.missing:
- # nothing to push
- scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
- ret = None
- else:
- # something to push
- if not force:
- # if self.obsstore == False --> no obsolete
- # then, save the iteration
- if unfi.obsstore:
- # this message are here for 80 char limit reason
- mso = _("push includes obsolete changeset: %s!")
- mst = "push includes %s changeset: %s!"
- # plain versions for i18n tool to detect them
- _("push includes unstable changeset: %s!")
- _("push includes bumped changeset: %s!")
- _("push includes divergent changeset: %s!")
- # If we are to push if there is at least one
- # obsolete or unstable changeset in missing, at
- # least one of the missinghead will be obsolete or
- # unstable. So checking heads only is ok
- for node in outgoing.missingheads:
- ctx = unfi[node]
- if ctx.obsolete():
- raise util.Abort(mso % ctx)
- elif ctx.troubled():
- raise util.Abort(_(mst)
- % (ctx.troubles()[0],
- ctx))
- newbm = self.ui.configlist('bookmarks', 'pushing')
- discovery.checkheads(unfi, remote, outgoing,
- remoteheads, newbranch,
- bool(inc), newbm)
-
- # TODO: get bundlecaps from remote
- bundlecaps = None
- # create a changegroup from local
- if revs is None and not (outgoing.excluded
- or self.changelog.filteredrevs):
- # push everything,
- # use the fast path, no race possible on push
- bundler = changegroup.bundle10(self, bundlecaps)
- cg = self._changegroupsubset(outgoing,
- bundler,
- 'push',
- fastpath=True)
- else:
- cg = self.getlocalbundle('push', outgoing, bundlecaps)
-
- # apply changegroup to remote
- if unbundle:
- # local repo finds heads on server, finds out what
- # revs it must push. once revs transferred, if server
- # finds it has different heads (someone else won
- # commit/push race), server aborts.
- if force:
- remoteheads = ['force']
- # ssh: return remote's addchangegroup()
- # http: return remote's addchangegroup() or 0 for error
- ret = remote.unbundle(cg, remoteheads, 'push')
- else:
- # we return an integer indicating remote head count
- # change
- ret = remote.addchangegroup(cg, 'push', self.url())
-
- if ret:
- # push succeed, synchronize target of the push
- cheads = outgoing.missingheads
- elif revs is None:
- # All out push fails. synchronize all common
- cheads = outgoing.commonheads
- else:
- # I want cheads = heads(::missingheads and ::commonheads)
- # (missingheads is revs with secret changeset filtered out)
- #
- # This can be expressed as:
- # cheads = ( (missingheads and ::commonheads)
- # + (commonheads and ::missingheads))"
- # )
- #
- # while trying to push we already computed the following:
- # common = (::commonheads)
- # missing = ((commonheads::missingheads) - commonheads)
- #
- # We can pick:
- # * missingheads part of common (::commonheads)
- common = set(outgoing.common)
- nm = self.changelog.nodemap
- cheads = [node for node in revs if nm[node] in common]
- # and
- # * commonheads parents on missing
- revset = unfi.set('%ln and parents(roots(%ln))',
- outgoing.commonheads,
- outgoing.missing)
- cheads.extend(c.node() for c in revset)
- # even when we don't push, exchanging phase data is useful
- remotephases = remote.listkeys('phases')
- if (self.ui.configbool('ui', '_usedassubrepo', False)
- and remotephases # server supports phases
- and ret is None # nothing was pushed
- and remotephases.get('publishing', False)):
- # When:
- # - this is a subrepo push
- # - and remote support phase
- # - and no changeset was pushed
- # - and remote is publishing
- # We may be in issue 3871 case!
- # We drop the possible phase synchronisation done by
- # courtesy to publish changesets possibly locally draft
- # on the remote.
- remotephases = {'publishing': 'True'}
- if not remotephases: # old server or public only repo
- localphasemove(cheads)
- # don't push any phase data as there is nothing to push
- else:
- ana = phases.analyzeremotephases(self, cheads, remotephases)
- pheads, droots = ana
- ### Apply remote phase on local
- if remotephases.get('publishing', False):
- localphasemove(cheads)
- else: # publish = False
- localphasemove(pheads)
- localphasemove(cheads, phases.draft)
- ### Apply local phase on remote
-
- # Get the list of all revs draft on remote by public here.
- # XXX Beware that revset break if droots is not strictly
- # XXX root we may want to ensure it is but it is costly
- outdated = unfi.set('heads((%ln::%ln) and public())',
- droots, cheads)
- for newremotehead in outdated:
- r = remote.pushkey('phases',
- newremotehead.hex(),
- str(phases.draft),
- str(phases.public))
- if not r:
- self.ui.warn(_('updating %s to public failed!\n')
- % newremotehead)
- self.ui.debug('try to push obsolete markers to remote\n')
- obsolete.syncpush(self, remote)
- finally:
- if lock is not None:
- lock.release()
- finally:
- if locallock is not None:
- locallock.release()
-
- bookmarks.updateremote(self.ui, unfi, remote, revs)
- return ret
+ return exchange.push(self, remote, force, revs, newbranch)
def changegroupinfo(self, nodes, source):
if self.ui.verbose or source == 'bundle':
@@ -1976,9 +1761,9 @@
for node in nodes:
self.ui.debug("%s\n" % hex(node))
- def changegroupsubset(self, bases, heads, source):
+ def changegroupsubset(self, roots, heads, source):
"""Compute a changegroup consisting of all the nodes that are
- descendants of any of the bases and ancestors of any of the heads.
+ descendants of any of the roots and ancestors of any of the heads.
Return a chunkbuffer object whose read() method will return
successive changegroup chunks.
@@ -1990,12 +1775,12 @@
the changegroup a particular filenode or manifestnode belongs to.
"""
cl = self.changelog
- if not bases:
- bases = [nullid]
+ if not roots:
+ roots = [nullid]
# TODO: remove call to nodesbetween.
- csets, bases, heads = cl.nodesbetween(bases, heads)
+ csets, roots, heads = cl.nodesbetween(roots, heads)
discbases = []
- for n in bases:
+ for n in roots:
discbases.extend([p for p in cl.parents(n) if p != nullid])
outgoing = discovery.outgoing(cl, discbases, heads)
bundler = changegroup.bundle10(self)
--- a/mercurial/lock.py Mon Feb 10 14:51:06 2014 -0800
+++ b/mercurial/lock.py Mon Feb 10 17:31:26 2014 -0600
@@ -38,7 +38,7 @@
self.desc = desc
self.postrelease = []
self.pid = os.getpid()
- self.lock()
+ self.delay = self.lock()
def __del__(self):
if self.held:
@@ -57,7 +57,7 @@
while True:
try:
self.trylock()
- return 1
+ return self.timeout - timeout
except error.LockHeld, inst:
if timeout != 0:
time.sleep(1)
--- a/mercurial/match.py Mon Feb 10 14:51:06 2014 -0800
+++ b/mercurial/match.py Mon Feb 10 17:31:26 2014 -0600
@@ -6,7 +6,7 @@
# GNU General Public License version 2 or any later version.
import re
-import util, fileset, pathutil
+import util, pathutil
from i18n import _
def _rematcher(pat):
@@ -26,7 +26,7 @@
if kind == 'set':
if not ctx:
raise util.Abort("fileset expression with no context")
- s = fileset.getfileset(ctx, expr)
+ s = ctx.getfileset(expr)
fset.update(s)
continue
other.append((kind, expr))
--- a/mercurial/repoview.py Mon Feb 10 14:51:06 2014 -0800
+++ b/mercurial/repoview.py Mon Feb 10 17:31:26 2014 -0600
@@ -9,7 +9,7 @@
import copy
import phases
import util
-import obsolete, revset
+import obsolete
def hideablerevs(repo):
@@ -28,8 +28,9 @@
cl = repo.changelog
firsthideable = min(hideable)
revs = cl.revs(start=firsthideable)
- blockers = [r for r in revset._children(repo, revs, hideable)
- if r not in hideable]
+ tofilter = repo.revs(
+ '(%ld) and children(%ld)', list(revs), list(hideable))
+ blockers = [r for r in tofilter if r not in hideable]
for par in repo[None].parents():
blockers.append(par.rev())
for bm in repo._bookmarks.values():
--- a/mercurial/revset.py Mon Feb 10 14:51:06 2014 -0800
+++ b/mercurial/revset.py Mon Feb 10 17:31:26 2014 -0600
@@ -53,9 +53,9 @@
"""Return all paths between roots and heads, inclusive of both endpoint
sets."""
if not roots:
- return []
+ return baseset([])
parentrevs = repo.changelog.parentrevs
- visit = heads[:]
+ visit = baseset(heads)
reachable = set()
seen = {}
minroot = min(roots)
@@ -72,12 +72,12 @@
if parent >= minroot and parent not in seen:
visit.append(parent)
if not reachable:
- return []
+ return baseset([])
for rev in sorted(seen):
for parent in seen[rev]:
if parent in reachable:
reachable.add(rev)
- return sorted(reachable)
+ return baseset(sorted(reachable))
elements = {
"(": (20, ("group", 1, ")"), ("func", 1, ")")),
@@ -195,7 +195,7 @@
def getset(repo, subset, x):
if not x:
raise error.ParseError(_("missing argument"))
- return methods[x[0]](repo, subset, *x[1:])
+ return baseset(methods[x[0]](repo, subset, *x[1:]))
def _getrevsource(repo, r):
extra = repo[r].extra()
@@ -212,10 +212,10 @@
def stringset(repo, subset, x):
x = repo[x].rev()
if x == -1 and len(subset) == len(repo):
- return [-1]
+ return baseset([-1])
if len(subset) == len(repo) or x in subset:
- return [x]
- return []
+ return baseset([x])
+ return baseset([])
def symbolset(repo, subset, x):
if x in symbols:
@@ -223,39 +223,37 @@
return stringset(repo, subset, x)
def rangeset(repo, subset, x, y):
- cl = repo.changelog
+ cl = baseset(repo.changelog)
m = getset(repo, cl, x)
n = getset(repo, cl, y)
if not m or not n:
- return []
+ return baseset([])
m, n = m[0], n[-1]
if m < n:
r = range(m, n + 1)
else:
r = range(m, n - 1, -1)
- s = set(subset)
- return [x for x in r if x in s]
+ s = subset.set()
+ return baseset([x for x in r if x in s])
def dagrange(repo, subset, x, y):
- r = list(repo)
+ r = baseset(repo)
xs = _revsbetween(repo, getset(repo, r, x), getset(repo, r, y))
- s = set(subset)
- return [r for r in xs if r in s]
+ s = subset.set()
+ return baseset([r for r in xs if r in s])
def andset(repo, subset, x, y):
return getset(repo, getset(repo, subset, x), y)
def orset(repo, subset, x, y):
xl = getset(repo, subset, x)
- s = set(xl)
- yl = getset(repo, [r for r in subset if r not in s], y)
+ yl = getset(repo, subset - xl, y)
return xl + yl
def notset(repo, subset, x):
- s = set(getset(repo, subset, x))
- return [r for r in subset if r not in s]
+ return subset - getset(repo, subset, x)
def listset(repo, subset, a, b):
raise error.ParseError(_("can't use a list in this context"))
@@ -289,7 +287,7 @@
"""
# i18n: "ancestor" is a keyword
l = getlist(x)
- rl = list(repo)
+ rl = baseset(repo)
anc = None
# (getset(repo, rl, i) for i in l) generates a list of lists
@@ -304,15 +302,15 @@
anc = rev(ancestor(node(anc), node(r)))
if anc is not None and anc in subset:
- return [anc]
- return []
+ return baseset([anc])
+ return baseset([])
def _ancestors(repo, subset, x, followfirst=False):
- args = getset(repo, list(repo), x)
+ args = getset(repo, baseset(repo), x)
if not args:
- return []
+ return baseset([])
s = set(_revancestors(repo, args, followfirst)) | set(args)
- return [r for r in subset if r in s]
+ return baseset([r for r in subset if r in s])
def ancestors(repo, subset, x):
"""``ancestors(set)``
@@ -336,11 +334,11 @@
raise error.ParseError(_("~ expects a number"))
ps = set()
cl = repo.changelog
- for r in getset(repo, cl, x):
+ for r in getset(repo, baseset(cl), x):
for i in range(n):
r = cl.parentrevs(r)[0]
ps.add(r)
- return [r for r in subset if r in ps]
+ return baseset([r for r in subset if r in ps])
def author(repo, subset, x):
"""``author(string)``
@@ -349,7 +347,8 @@
# i18n: "author" is a keyword
n = encoding.lower(getstring(x, _("author requires a string")))
kind, pattern, matcher = _substringmatcher(n)
- return [r for r in subset if matcher(encoding.lower(repo[r].user()))]
+ return baseset([r for r in subset if
+ matcher(encoding.lower(repo[r].user()))])
def bisect(repo, subset, x):
"""``bisect(string)``
@@ -366,7 +365,7 @@
# i18n: "bisect" is a keyword
status = getstring(x, _("bisect requires a string")).lower()
state = set(hbisect.get(repo, status))
- return [r for r in subset if r in state]
+ return baseset([r for r in subset if r in state])
# Backward-compatibility
# - no help entry so that we do not advertise it any more
@@ -393,7 +392,7 @@
if not bmrev:
raise util.Abort(_("bookmark '%s' does not exist") % bm)
bmrev = repo[bmrev].rev()
- return [r for r in subset if r == bmrev]
+ return baseset([r for r in subset if r == bmrev])
else:
matchrevs = set()
for name, bmrev in repo._bookmarks.iteritems():
@@ -405,11 +404,11 @@
bmrevs = set()
for bmrev in matchrevs:
bmrevs.add(repo[bmrev].rev())
- return [r for r in subset if r in bmrevs]
+ return subset & bmrevs
bms = set([repo[r].rev()
for r in repo._bookmarks.values()])
- return [r for r in subset if r in bms]
+ return baseset([r for r in subset if r in bms])
def branch(repo, subset, x):
"""``branch(string or set)``
@@ -431,16 +430,16 @@
# note: falls through to the revspec case if no branch with
# this name exists
if pattern in repo.branchmap():
- return [r for r in subset if matcher(repo[r].branch())]
+ return baseset([r for r in subset if matcher(repo[r].branch())])
else:
- return [r for r in subset if matcher(repo[r].branch())]
+ return baseset([r for r in subset if matcher(repo[r].branch())])
- s = getset(repo, list(repo), x)
+ s = getset(repo, baseset(repo), x)
b = set()
for r in s:
b.add(repo[r].branch())
- s = set(s)
- return [r for r in subset if r in s or repo[r].branch() in b]
+ s = s.set()
+ return baseset([r for r in subset if r in s or repo[r].branch() in b])
def bumped(repo, subset, x):
"""``bumped()``
@@ -451,7 +450,7 @@
# i18n: "bumped" is a keyword
getargs(x, 0, 0, _("bumped takes no arguments"))
bumped = obsmod.getrevs(repo, 'bumped')
- return [r for r in subset if r in bumped]
+ return subset & bumped
def bundle(repo, subset, x):
"""``bundle()``
@@ -463,7 +462,7 @@
bundlerevs = repo.changelog.bundlerevs
except AttributeError:
raise util.Abort(_("no bundle provided - specify with -R"))
- return [r for r in subset if r in bundlerevs]
+ return subset & bundlerevs
def checkstatus(repo, subset, pat, field):
m = None
@@ -494,12 +493,12 @@
if m(f):
s.append(r)
break
- return s
+ return baseset(s)
def _children(repo, narrow, parentset):
cs = set()
if not parentset:
- return cs
+ return baseset(cs)
pr = repo.changelog.parentrevs
minrev = min(parentset)
for r in narrow:
@@ -508,15 +507,15 @@
for p in pr(r):
if p in parentset:
cs.add(r)
- return cs
+ return baseset(cs)
def children(repo, subset, x):
"""``children(set)``
Child changesets of changesets in set.
"""
- s = set(getset(repo, list(repo), x))
+ s = getset(repo, baseset(repo), x).set()
cs = _children(repo, subset, s)
- return [r for r in subset if r in cs]
+ return subset & cs
def closed(repo, subset, x):
"""``closed()``
@@ -524,7 +523,7 @@
"""
# i18n: "closed" is a keyword
getargs(x, 0, 0, _("closed takes no arguments"))
- return [r for r in subset if repo[r].closesbranch()]
+ return baseset([r for r in subset if repo[r].closesbranch()])
def contains(repo, subset, x):
"""``contains(pattern)``
@@ -553,7 +552,7 @@
if m(f):
s.append(r)
break
- return s
+ return baseset(s)
def converted(repo, subset, x):
"""``converted([id])``
@@ -575,7 +574,7 @@
source = repo[r].extra().get('convert_revision', None)
return source is not None and (rev is None or source.startswith(rev))
- return [r for r in subset if _matchvalue(r)]
+ return baseset([r for r in subset if _matchvalue(r)])
def date(repo, subset, x):
"""``date(interval)``
@@ -584,7 +583,7 @@
# i18n: "date" is a keyword
ds = getstring(x, _("date requires a string"))
dm = util.matchdate(ds)
- return [r for r in subset if dm(repo[r].date()[0])]
+ return baseset([r for r in subset if dm(repo[r].date()[0])])
def desc(repo, subset, x):
"""``desc(string)``
@@ -597,14 +596,14 @@
c = repo[r]
if ds in encoding.lower(c.description()):
l.append(r)
- return l
+ return baseset(l)
def _descendants(repo, subset, x, followfirst=False):
- args = getset(repo, list(repo), x)
+ args = getset(repo, baseset(repo), x)
if not args:
- return []
+ return baseset([])
s = set(_revdescendants(repo, args, followfirst)) | set(args)
- return [r for r in subset if r in s]
+ return baseset([r for r in subset if r in s])
def descendants(repo, subset, x):
"""``descendants(set)``
@@ -624,9 +623,9 @@
is the same as passing all().
"""
if x is not None:
- args = set(getset(repo, list(repo), x))
+ args = getset(repo, baseset(repo), x).set()
else:
- args = set(getall(repo, list(repo), x))
+ args = getall(repo, baseset(repo), x).set()
dests = set()
@@ -659,7 +658,7 @@
r = src
src = _getrevsource(repo, r)
- return [r for r in subset if r in dests]
+ return baseset([r for r in subset if r in dests])
def divergent(repo, subset, x):
"""``divergent()``
@@ -668,7 +667,7 @@
# i18n: "divergent" is a keyword
getargs(x, 0, 0, _("divergent takes no arguments"))
divergent = obsmod.getrevs(repo, 'divergent')
- return [r for r in subset if r in divergent]
+ return baseset([r for r in subset if r in divergent])
def draft(repo, subset, x):
"""``draft()``
@@ -676,7 +675,7 @@
# i18n: "draft" is a keyword
getargs(x, 0, 0, _("draft takes no arguments"))
pc = repo._phasecache
- return [r for r in subset if pc.phase(repo, r) == phases.draft]
+ return baseset([r for r in subset if pc.phase(repo, r) == phases.draft])
def extinct(repo, subset, x):
"""``extinct()``
@@ -685,7 +684,7 @@
# i18n: "extinct" is a keyword
getargs(x, 0, 0, _("extinct takes no arguments"))
extincts = obsmod.getrevs(repo, 'extinct')
- return [r for r in subset if r in extincts]
+ return subset & extincts
def extra(repo, subset, x):
"""``extra(label, [value])``
@@ -712,7 +711,7 @@
extra = repo[r].extra()
return label in extra and (value is None or matcher(extra[label]))
- return [r for r in subset if _matchvalue(r)]
+ return baseset([r for r in subset if _matchvalue(r)])
def filelog(repo, subset, x):
"""``filelog(pattern)``
@@ -744,7 +743,7 @@
for fr in fl:
s.add(fl.linkrev(fr))
- return [r for r in subset if r in s]
+ return baseset([r for r in subset if r in s])
def first(repo, subset, x):
"""``first(set, [n])``
@@ -763,11 +762,11 @@
# include the revision responsible for the most recent version
s.add(cx.linkrev())
else:
- return []
+ return baseset([])
else:
s = set(_revancestors(repo, [c.rev()], followfirst)) | set([c.rev()])
- return [r for r in subset if r in s]
+ return baseset([r for r in subset if r in s])
def follow(repo, subset, x):
"""``follow([file])``
@@ -809,7 +808,7 @@
if gr.search(e):
l.append(r)
break
- return l
+ return baseset(l)
def _matchfiles(repo, subset, x):
# _matchfiles takes a revset list of prefixed arguments:
@@ -872,7 +871,7 @@
if m(f):
s.append(r)
break
- return s
+ return baseset(s)
def hasfile(repo, subset, x):
"""``file(pattern)``
@@ -896,15 +895,15 @@
hs = set()
for b, ls in repo.branchmap().iteritems():
hs.update(repo[h].rev() for h in ls)
- return [r for r in subset if r in hs]
+ return baseset([r for r in subset if r in hs])
def heads(repo, subset, x):
"""``heads(set)``
Members of set with no children in set.
"""
s = getset(repo, subset, x)
- ps = set(parents(repo, subset, x))
- return [r for r in s if r not in ps]
+ ps = parents(repo, subset, x)
+ return s - ps
def hidden(repo, subset, x):
"""``hidden()``
@@ -913,7 +912,7 @@
# i18n: "hidden" is a keyword
getargs(x, 0, 0, _("hidden takes no arguments"))
hiddenrevs = repoview.filterrevs(repo, 'visible')
- return [r for r in subset if r in hiddenrevs]
+ return subset & hiddenrevs
def keyword(repo, subset, x):
"""``keyword(string)``
@@ -928,7 +927,7 @@
if util.any(kw in encoding.lower(t)
for t in c.files() + [c.user(), c.description()]):
l.append(r)
- return l
+ return baseset(l)
def limit(repo, subset, x):
"""``limit(set, [n])``
@@ -944,9 +943,9 @@
except (TypeError, ValueError):
# i18n: "limit" is a keyword
raise error.ParseError(_("limit expects a number"))
- ss = set(subset)
- os = getset(repo, list(repo), l[0])[:lim]
- return [r for r in os if r in ss]
+ ss = subset.set()
+ os = getset(repo, baseset(repo), l[0])[:lim]
+ return baseset([r for r in os if r in ss])
def last(repo, subset, x):
"""``last(set, [n])``
@@ -962,20 +961,20 @@
except (TypeError, ValueError):
# i18n: "last" is a keyword
raise error.ParseError(_("last expects a number"))
- ss = set(subset)
- os = getset(repo, list(repo), l[0])[-lim:]
- return [r for r in os if r in ss]
+ ss = subset.set()
+ os = getset(repo, baseset(repo), l[0])[-lim:]
+ return baseset([r for r in os if r in ss])
def maxrev(repo, subset, x):
"""``max(set)``
Changeset with highest revision number in set.
"""
- os = getset(repo, list(repo), x)
+ os = getset(repo, baseset(repo), x)
if os:
m = max(os)
if m in subset:
- return [m]
- return []
+ return baseset([m])
+ return baseset([])
def merge(repo, subset, x):
"""``merge()``
@@ -984,7 +983,7 @@
# i18n: "merge" is a keyword
getargs(x, 0, 0, _("merge takes no arguments"))
cl = repo.changelog
- return [r for r in subset if cl.parentrevs(r)[1] != -1]
+ return baseset([r for r in subset if cl.parentrevs(r)[1] != -1])
def branchpoint(repo, subset, x):
"""``branchpoint()``
@@ -994,25 +993,25 @@
getargs(x, 0, 0, _("branchpoint takes no arguments"))
cl = repo.changelog
if not subset:
- return []
+ return baseset([])
baserev = min(subset)
parentscount = [0]*(len(repo) - baserev)
for r in cl.revs(start=baserev + 1):
for p in cl.parentrevs(r):
if p >= baserev:
parentscount[p - baserev] += 1
- return [r for r in subset if (parentscount[r - baserev] > 1)]
+ return baseset([r for r in subset if (parentscount[r - baserev] > 1)])
def minrev(repo, subset, x):
"""``min(set)``
Changeset with lowest revision number in set.
"""
- os = getset(repo, list(repo), x)
+ os = getset(repo, baseset(repo), x)
if os:
m = min(os)
if m in subset:
- return [m]
- return []
+ return baseset([m])
+ return baseset([])
def modifies(repo, subset, x):
"""``modifies(pattern)``
@@ -1042,7 +1041,7 @@
if pm is not None:
rn = repo.changelog.rev(pm)
- return [r for r in subset if r == rn]
+ return baseset([r for r in subset if r == rn])
def obsolete(repo, subset, x):
"""``obsolete()``
@@ -1050,7 +1049,7 @@
# i18n: "obsolete" is a keyword
getargs(x, 0, 0, _("obsolete takes no arguments"))
obsoletes = obsmod.getrevs(repo, 'obsolete')
- return [r for r in subset if r in obsoletes]
+ return subset & obsoletes
def origin(repo, subset, x):
"""``origin([set])``
@@ -1061,9 +1060,9 @@
for the first operation is selected.
"""
if x is not None:
- args = set(getset(repo, list(repo), x))
+ args = getset(repo, baseset(repo), x).set()
else:
- args = set(getall(repo, list(repo), x))
+ args = getall(repo, baseset(repo), x).set()
def _firstsrc(rev):
src = _getrevsource(repo, rev)
@@ -1078,7 +1077,7 @@
src = prev
o = set([_firstsrc(r) for r in args])
- return [r for r in subset if r in o]
+ return baseset([r for r in subset if r in o])
def outgoing(repo, subset, x):
"""``outgoing([path])``
@@ -1101,7 +1100,7 @@
repo.ui.popbuffer()
cl = repo.changelog
o = set([cl.rev(r) for r in outgoing.missing])
- return [r for r in subset if r in o]
+ return baseset([r for r in subset if r in o])
def p1(repo, subset, x):
"""``p1([set])``
@@ -1109,13 +1108,13 @@
"""
if x is None:
p = repo[x].p1().rev()
- return [r for r in subset if r == p]
+ return baseset([r for r in subset if r == p])
ps = set()
cl = repo.changelog
- for r in getset(repo, list(repo), x):
+ for r in getset(repo, baseset(repo), x):
ps.add(cl.parentrevs(r)[0])
- return [r for r in subset if r in ps]
+ return subset & ps
def p2(repo, subset, x):
"""``p2([set])``
@@ -1125,15 +1124,15 @@
ps = repo[x].parents()
try:
p = ps[1].rev()
- return [r for r in subset if r == p]
+ return baseset([r for r in subset if r == p])
except IndexError:
- return []
+ return baseset([])
ps = set()
cl = repo.changelog
- for r in getset(repo, list(repo), x):
+ for r in getset(repo, baseset(repo), x):
ps.add(cl.parentrevs(r)[1])
- return [r for r in subset if r in ps]
+ return subset & ps
def parents(repo, subset, x):
"""``parents([set])``
@@ -1141,13 +1140,13 @@
"""
if x is None:
ps = tuple(p.rev() for p in repo[x].parents())
- return [r for r in subset if r in ps]
+ return subset & ps
ps = set()
cl = repo.changelog
- for r in getset(repo, list(repo), x):
+ for r in getset(repo, baseset(repo), x):
ps.update(cl.parentrevs(r))
- return [r for r in subset if r in ps]
+ return subset & ps
def parentspec(repo, subset, x, n):
"""``set^0``
@@ -1163,7 +1162,7 @@
raise error.ParseError(_("^ expects a number 0, 1, or 2"))
ps = set()
cl = repo.changelog
- for r in getset(repo, cl, x):
+ for r in getset(repo, baseset(cl), x):
if n == 0:
ps.add(r)
elif n == 1:
@@ -1172,7 +1171,7 @@
parents = cl.parentrevs(r)
if len(parents) > 1:
ps.add(parents[1])
- return [r for r in subset if r in ps]
+ return subset & ps
def present(repo, subset, x):
"""``present(set)``
@@ -1186,7 +1185,7 @@
try:
return getset(repo, subset, x)
except error.RepoLookupError:
- return []
+ return baseset([])
def public(repo, subset, x):
"""``public()``
@@ -1194,7 +1193,7 @@
# i18n: "public" is a keyword
getargs(x, 0, 0, _("public takes no arguments"))
pc = repo._phasecache
- return [r for r in subset if pc.phase(repo, r) == phases.public]
+ return baseset([r for r in subset if pc.phase(repo, r) == phases.public])
def remote(repo, subset, x):
"""``remote([id [,path]])``
@@ -1228,8 +1227,8 @@
if n in repo:
r = repo[n].rev()
if r in subset:
- return [r]
- return []
+ return baseset([r])
+ return baseset([])
def removes(repo, subset, x):
"""``removes(pattern)``
@@ -1255,7 +1254,7 @@
except (TypeError, ValueError):
# i18n: "rev" is a keyword
raise error.ParseError(_("rev expects a number"))
- return [r for r in subset if r == l]
+ return baseset([r for r in subset if r == l])
def matching(repo, subset, x):
"""``matching(revision [, field])``
@@ -1285,7 +1284,7 @@
# i18n: "matching" is a keyword
l = getargs(x, 1, 2, _("matching takes 1 or 2 arguments"))
- revs = getset(repo, repo.changelog, l[0])
+ revs = getset(repo, baseset(repo.changelog), l[0])
fieldlist = ['metadata']
if len(l) > 1:
@@ -1367,15 +1366,13 @@
break
if match:
matches.add(r)
- return [r for r in subset if r in matches]
+ return baseset([r for r in subset if r in matches])
def reverse(repo, subset, x):
"""``reverse(set)``
Reverse order of set.
"""
l = getset(repo, subset, x)
- if not isinstance(l, list):
- l = list(l)
l.reverse()
return l
@@ -1383,10 +1380,10 @@
"""``roots(set)``
Changesets in set with no parent changeset in set.
"""
- s = set(getset(repo, repo.changelog, x))
- subset = [r for r in subset if r in s]
+ s = getset(repo, baseset(repo.changelog), x).set()
+ subset = baseset([r for r in subset if r in s])
cs = _children(repo, subset, s)
- return [r for r in subset if r not in cs]
+ return subset - cs
def secret(repo, subset, x):
"""``secret()``
@@ -1394,7 +1391,7 @@
# i18n: "secret" is a keyword
getargs(x, 0, 0, _("secret takes no arguments"))
pc = repo._phasecache
- return [r for r in subset if pc.phase(repo, r) == phases.secret]
+ return baseset([r for r in subset if pc.phase(repo, r) == phases.secret])
def sort(repo, subset, x):
"""``sort(set[, [-]key...])``
@@ -1450,7 +1447,7 @@
e.append(r)
l.append(e)
l.sort()
- return [e[-1] for e in l]
+ return baseset([e[-1] for e in l])
def _stringmatcher(pattern):
"""
@@ -1519,7 +1516,7 @@
s = set([cl.rev(n) for t, n in repo.tagslist() if matcher(t)])
else:
s = set([cl.rev(n) for t, n in repo.tagslist() if t != 'tip'])
- return [r for r in subset if r in s]
+ return subset & s
def tagged(repo, subset, x):
return tag(repo, subset, x)
@@ -1531,7 +1528,7 @@
# i18n: "unstable" is a keyword
getargs(x, 0, 0, _("unstable takes no arguments"))
unstables = obsmod.getrevs(repo, 'unstable')
- return [r for r in subset if r in unstables]
+ return subset & unstables
def user(repo, subset, x):
@@ -1548,11 +1545,10 @@
def _list(repo, subset, x):
s = getstring(x, "internal error")
if not s:
- return []
- if not isinstance(subset, set):
- subset = set(subset)
+ return baseset([])
ls = [repo[r].rev() for r in s.split('\0')]
- return [r for r in ls if r in subset]
+ s = subset.set()
+ return baseset([r for r in ls if r in s])
symbols = {
"adds": adds,
@@ -2046,5 +2042,84 @@
funcs.add(tree[1][1])
return funcs
+class baseset(list):
+ """Basic data structure that represents a revset and contains the basic
+ operation that it should be able to perform.
+ """
+ def __init__(self, data):
+ super(baseset, self).__init__(data)
+ self._set = None
+
+ def set(self):
+ if not self._set:
+ self._set = set(self)
+ return self._set
+
+ def __sub__(self, x):
+ if isinstance(x, baseset):
+ s = x.set()
+ else:
+ s = set(x)
+ return baseset(self.set() - s)
+
+ def __and__(self, x):
+ if isinstance(x, baseset):
+ x = x.set()
+ return baseset([y for y in self if y in x])
+
+ def __add__(self, x):
+ s = self.set()
+ l = [r for r in x if r not in s]
+ return baseset(list(self) + l)
+
+class lazyset(object):
+ """Duck type for baseset class which iterates lazily over the revisions in
+ the subset and contains a function which tests for membership in the
+ revset
+ """
+ def __init__(self, subset, condition):
+ self._subset = subset
+ self._condition = condition
+
+ def __contains__(self, x):
+ return x in self._subset and self._condition(x)
+
+ def __iter__(self):
+ cond = self._condition
+ for x in self._subset:
+ if cond(x):
+ yield x
+
+ def __and__(self, x):
+ return lazyset(self, lambda r: r in x)
+
+ def __sub__(self, x):
+ return lazyset(self, lambda r: r not in x)
+
+ def __add__(self, x):
+ l = baseset([r for r in self])
+ return l + baseset(x)
+
+ def __len__(self):
+ # Basic implementation to be changed in future patches.
+ l = baseset([r for r in self])
+ return len(l)
+
+ def __getitem__(self, x):
+ # Basic implementation to be changed in future patches.
+ l = baseset([r for r in self])
+ return l[x]
+
+ def sort(self, reverse=False):
+ # Basic implementation to be changed in future patches.
+ self._subset = baseset(self._subset)
+ self._subset.sort(reverse=reverse)
+
+ def reverse(self):
+ self._subset.reverse()
+
+ def set(self):
+ return set([r for r in self])
+
# tell hggettext to extract docstrings from these functions:
i18nfunctions = symbols.values()
--- a/mercurial/scmutil.py Mon Feb 10 14:51:06 2014 -0800
+++ b/mercurial/scmutil.py Mon Feb 10 17:31:26 2014 -0600
@@ -20,6 +20,16 @@
systemrcpath = scmplatform.systemrcpath
userrcpath = scmplatform.userrcpath
+def itersubrepos(ctx1, ctx2):
+ """find subrepos in ctx1 or ctx2"""
+ # Create a (subpath, ctx) mapping where we prefer subpaths from
+ # ctx1. The subpaths from ctx2 are important when the .hgsub file
+ # has been modified (in ctx2) but not yet committed (in ctx1).
+ subpaths = dict.fromkeys(ctx2.substate, ctx2)
+ subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
+ for subpath, ctx in sorted(subpaths.iteritems()):
+ yield subpath, ctx.sub(subpath)
+
def nochangesfound(ui, repo, excluded=None):
'''Report no changes for push/pull, excluded is None or a list of
nodes excluded from the push/pull.
@@ -524,11 +534,11 @@
# fall through to new-style queries if old-style fails
m = revset.match(repo.ui, spec)
- dl = [r for r in m(repo, list(repo)) if r not in seen]
+ dl = [r for r in m(repo, revset.baseset(repo)) if r not in seen]
l.extend(dl)
seen.update(dl)
- return l
+ return revset.baseset(l)
def expandpats(pats):
if not util.expandglobs:
--- a/mercurial/subrepo.py Mon Feb 10 14:51:06 2014 -0800
+++ b/mercurial/subrepo.py Mon Feb 10 17:31:26 2014 -0600
@@ -326,16 +326,6 @@
os.unlink(os.path.join(dirname, f))
os.walk(path, v, None)
-def itersubrepos(ctx1, ctx2):
- """find subrepos in ctx1 or ctx2"""
- # Create a (subpath, ctx) mapping where we prefer subpaths from
- # ctx1. The subpaths from ctx2 are important when the .hgsub file
- # has been modified (in ctx2) but not yet committed (in ctx1).
- subpaths = dict.fromkeys(ctx2.substate, ctx2)
- subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
- for subpath, ctx in sorted(subpaths.iteritems()):
- yield subpath, ctx.sub(subpath)
-
def subrepo(ctx, path):
"""return instance of the right subrepo class for subrepo in path"""
# subrepo inherently violates our import layering rules
--- a/mercurial/templater.py Mon Feb 10 14:51:06 2014 -0800
+++ b/mercurial/templater.py Mon Feb 10 17:31:26 2014 -0600
@@ -245,6 +245,31 @@
return templatefilters.fill(text, width, initindent, hangindent)
+def pad(context, mapping, args):
+ """usage: pad(text, width, fillchar=' ', right=False)
+ """
+ if not (2 <= len(args) <= 4):
+ raise error.ParseError(_("pad() expects two to four arguments"))
+
+ width = int(args[1][1])
+
+ text = stringify(args[0][0](context, mapping, args[0][1]))
+ if args[0][0] == runstring:
+ text = stringify(runtemplate(context, mapping,
+ compiletemplate(text, context)))
+
+ right = False
+ fillchar = ' '
+ if len(args) > 2:
+ fillchar = stringify(args[2][0](context, mapping, args[2][1]))
+ if len(args) > 3:
+ right = util.parsebool(args[3][1])
+
+ if right:
+ return text.rjust(width, fillchar)
+ else:
+ return text.ljust(width, fillchar)
+
def get(context, mapping, args):
if len(args) != 2:
# i18n: "get" is a keyword
@@ -328,6 +353,52 @@
return minirst.format(text, style=style, keep=['verbose'])
+def shortest(context, mapping, args):
+ """usage: shortest(node, minlength=4)
+ """
+ if not (1 <= len(args) <= 2):
+ raise error.ParseError(_("shortest() expects one or two arguments"))
+
+ node = stringify(args[0][0](context, mapping, args[0][1]))
+
+ minlength = 4
+ if len(args) > 1:
+ minlength = int(args[1][1])
+
+ cl = mapping['ctx']._repo.changelog
+ def isvalid(test):
+ try:
+ try:
+ cl.index.partialmatch(test)
+ except AttributeError:
+ # Pure mercurial doesn't support partialmatch on the index.
+ # Fallback to the slow way.
+ if cl._partialmatch(test) is None:
+ return False
+
+ try:
+ int(test)
+ return False
+ except ValueError:
+ return True
+ except error.RevlogError:
+ return False
+
+ shortest = node
+ startlength = max(6, minlength)
+ length = startlength
+ while True:
+ test = node[:length]
+ if isvalid(test):
+ shortest = test
+ if length == minlength or length > startlength:
+ return shortest
+ length -= 1
+ else:
+ length += 1
+ if len(shortest) <= length:
+ return shortest
+
def strip(context, mapping, args):
if not (1 <= len(args) <= 2):
raise error.ParseError(_("strip expects one or two arguments"))
@@ -368,7 +439,9 @@
"ifeq": ifeq,
"join": join,
"label": label,
+ "pad": pad,
"rstdoc": rstdoc,
+ "shortest": shortest,
"strip": strip,
"sub": sub,
}
--- a/mercurial/win32.py Mon Feb 10 14:51:06 2014 -0800
+++ b/mercurial/win32.py Mon Feb 10 17:31:26 2014 -0600
@@ -119,6 +119,27 @@
_STD_ERROR_HANDLE = _DWORD(-12).value
+# CreateToolhelp32Snapshot, Process32First, Process32Next
+_TH32CS_SNAPPROCESS = 0x00000002
+_MAX_PATH = 260
+
+class _tagPROCESSENTRY32(ctypes.Structure):
+ _fields_ = [('dwsize', _DWORD),
+ ('cntUsage', _DWORD),
+ ('th32ProcessID', _DWORD),
+ ('th32DefaultHeapID', ctypes.c_void_p),
+ ('th32ModuleID', _DWORD),
+ ('cntThreads', _DWORD),
+ ('th32ParentProcessID', _DWORD),
+ ('pcPriClassBase', _LONG),
+ ('dwFlags', _DWORD),
+ ('szExeFile', ctypes.c_char * _MAX_PATH)]
+
+ def __init__(self):
+ super(_tagPROCESSENTRY32, self).__init__()
+ self.dwsize = ctypes.sizeof(self)
+
+
# types of parameters of C functions used (required by pypy)
_kernel32.CreateFileA.argtypes = [_LPCSTR, _DWORD, _DWORD, ctypes.c_void_p,
@@ -186,6 +207,15 @@
_user32.EnumWindows.argtypes = [_WNDENUMPROC, _LPARAM]
_user32.EnumWindows.restype = _BOOL
+_kernel32.CreateToolhelp32Snapshot.argtypes = [_DWORD, _DWORD]
+_kernel32.CreateToolhelp32Snapshot.restype = _BOOL
+
+_kernel32.Process32First.argtypes = [_HANDLE, ctypes.c_void_p]
+_kernel32.Process32First.restype = _BOOL
+
+_kernel32.Process32Next.argtypes = [_HANDLE, ctypes.c_void_p]
+_kernel32.Process32Next.restype = _BOOL
+
def _raiseoserror(name):
err = ctypes.WinError()
raise OSError(err.errno, '%s: %s' % (name, err.strerror))
@@ -309,6 +339,51 @@
width = csbi.srWindow.Right - csbi.srWindow.Left
return width
+def _1stchild(pid):
+ '''return the 1st found child of the given pid
+
+ None is returned when no child is found'''
+ pe = _tagPROCESSENTRY32()
+
+ # create handle to list all processes
+ ph = _kernel32.CreateToolhelp32Snapshot(_TH32CS_SNAPPROCESS, 0)
+ if ph == _INVALID_HANDLE_VALUE:
+ raise ctypes.WinError
+ try:
+ r = _kernel32.Process32First(ph, ctypes.byref(pe))
+ # loop over all processes
+ while r:
+ if pe.th32ParentProcessID == pid:
+ # return first child found
+ return pe.th32ProcessID
+ r = _kernel32.Process32Next(ph, ctypes.byref(pe))
+ finally:
+ _kernel32.CloseHandle(ph)
+ if _kernel32.GetLastError() != _ERROR_NO_MORE_FILES:
+ raise ctypes.WinError
+ return None # no child found
+
+class _tochildpid(int): # pid is _DWORD, which always matches in an int
+ '''helper for spawndetached, returns the child pid on conversion to string
+
+ Does not resolve the child pid immediately because the child may not yet be
+ started.
+ '''
+ def childpid(self):
+ '''returns the child pid of the first found child of the process
+ with this pid'''
+ return _1stchild(self)
+ def __str__(self):
+ # run when the pid is written to the file
+ ppid = self.childpid()
+ if ppid is None:
+ # race, child has exited since check
+ # fall back to this pid. Its process will also have disappeared,
+ # raising the same error type later as when the child pid would
+ # be returned.
+ return " %d" % self
+ return str(ppid)
+
def spawndetached(args):
# No standard library function really spawns a fully detached
# process under win32 because they allocate pipes or other objects
@@ -339,7 +414,8 @@
if not res:
raise ctypes.WinError
- return pi.dwProcessId
+ # _tochildpid because the process is the child of COMSPEC
+ return _tochildpid(pi.dwProcessId)
def unlink(f):
'''try to implement POSIX' unlink semantics on Windows'''
--- a/tests/hghave.py Mon Feb 10 14:51:06 2014 -0800
+++ b/tests/hghave.py Mon Feb 10 17:31:26 2014 -0600
@@ -248,6 +248,9 @@
except ImportError:
return False
+def has_python243():
+ return sys.version_info >= (2, 4, 3)
+
def has_outer_repo():
# failing for other reasons than 'no repo' imply that there is a repo
return not matchoutput('hg root 2>&1',
@@ -320,6 +323,7 @@
"p4": (has_p4, "Perforce server and client"),
"pyflakes": (has_pyflakes, "Pyflakes python linter"),
"pygments": (has_pygments, "Pygments source highlighting library"),
+ "python243": (has_python243, "python >= 2.4.3"),
"root": (has_root, "root permissions"),
"serve": (has_serve, "platform and python can manage 'hg serve -d'"),
"ssl": (has_ssl, "python >= 2.6 ssl module and python OpenSSL"),
--- a/tests/test-command-template.t Mon Feb 10 14:51:06 2014 -0800
+++ b/tests/test-command-template.t Mon Feb 10 17:31:26 2014 -0600
@@ -1626,3 +1626,28 @@
$ hg log -r 0 --template '{if(branches, "yes", "no")}\n'
no
+
+Test shortest(node) function:
+
+ $ echo b > b
+ $ hg ci -qAm b
+ $ hg log --template '{shortest(node)}\n'
+ d97c
+ f776
+ $ hg log --template '{shortest(node, 10)}\n'
+ d97c383ae3
+ f7769ec2ab
+
+Test pad function
+
+ $ hg log --template '{pad(rev, 20)} {author|user}\n'
+ 1 test
+ 0 test
+
+ $ hg log --template '{pad(rev, 20, " ", True)} {author|user}\n'
+ 1 test
+ 0 test
+
+ $ hg log --template '{pad(rev, 20, "-", False)} {author|user}\n'
+ 1------------------- test
+ 0------------------- test
--- a/tests/test-commandserver.py Mon Feb 10 14:51:06 2014 -0800
+++ b/tests/test-commandserver.py Mon Feb 10 17:31:26 2014 -0600
@@ -267,7 +267,10 @@
runcommand(server, ['up', 'null'])
runcommand(server, ['phase', '-df', 'tip'])
- os.system('hg debugobsolete `hg log -r tip --template {node}`')
+ cmd = 'hg debugobsolete `hg log -r tip --template {node}`'
+ if os.name == 'nt':
+ cmd = 'sh -c "%s"' % cmd # run in sh, not cmd.exe
+ os.system(cmd)
runcommand(server, ['log', '--hidden'])
runcommand(server, ['log'])
--- a/tests/test-convert-hg-sink.t Mon Feb 10 14:51:06 2014 -0800
+++ b/tests/test-convert-hg-sink.t Mon Feb 10 17:31:26 2014 -0600
@@ -119,7 +119,7 @@
0 add baz
$ cd new-filemap
$ hg tags
- tip 2:6f4fd1df87fb
+ tip 2:3c74706b1ff8
some-tag 0:ba8636729451
$ cd ..
--- a/tests/test-convert-hg-svn.t Mon Feb 10 14:51:06 2014 -0800
+++ b/tests/test-convert-hg-svn.t Mon Feb 10 17:31:26 2014 -0600
@@ -103,3 +103,14 @@
scanning source...
sorting...
converting...
+
+verify which shamap format we are storing and must be able to handle
+
+ $ cat svn-repo-hg/.hg/shamap
+ svn:????????-????-????-????-????????????@1 ???????????????????????????????????????? (glob)
+ svn:????????-????-????-????-????????????@2 ???????????????????????????????????????? (glob)
+ svn:????????-????-????-????-????????????@2 ???????????????????????????????????????? (glob)
+ $ cat svn-repo-wc/.svn/hg-shamap
+ ???????????????????????????????????????? 1 (glob)
+ ???????????????????????????????????????? svn:????????-????-????-????-????????????@2 (glob)
+ ???????????????????????????????????????? svn:????????-????-????-????-????????????@2 (glob)
--- a/tests/test-convert-svn-source.t Mon Feb 10 14:51:06 2014 -0800
+++ b/tests/test-convert-svn-source.t Mon Feb 10 17:31:26 2014 -0600
@@ -198,11 +198,12 @@
extra: convert_revision=svn:........-....-....-....-............/proj B/mytrunk@1 (re)
$ cd ..
-Test converting empty heads (issue3347)
+Test converting empty heads (issue3347).
+Also tests getting logs directly without debugsvnlog.
$ svnadmin create svn-empty
$ svnadmin load -q svn-empty < "$TESTDIR/svn/empty.svndump"
- $ hg --config convert.svn.trunk= convert svn-empty
+ $ hg --config convert.svn.trunk= --config convert.svn.debugsvnlog=0 convert svn-empty
assuming destination svn-empty-hg
initializing destination svn-empty-hg repository
scanning source...
--- a/tests/test-convert.t Mon Feb 10 14:51:06 2014 -0800
+++ b/tests/test-convert.t Mon Feb 10 17:31:26 2014 -0600
@@ -121,6 +121,14 @@
can be used to (for instance) move code in one repository from "default"
to a named branch.
+ The closemap is a file that allows closing of a branch. Each entry
+ contains a revision or hash separated by white space, giving the
+ revisions to be closed.
+
+ The tagmap is a file that is exactly analogous to the branchmap. This will
+ rename tags on the fly and prevent the 'update tags' commit usually found
+ at the end of a convert process.
+
Mercurial Source
################
@@ -266,6 +274,8 @@
--filemap FILE remap file names using contents of file
--splicemap FILE splice synthesized history into place
--branchmap FILE change branch names while converting
+ --closemap FILE closes given revs
+ --tagmap FILE change tag names while converting
--branchsort try to sort changesets by branches
--datesort try to sort changesets by date
--sourcesort preserve source changesets order
--- a/tests/test-debugcommands.t Mon Feb 10 14:51:06 2014 -0800
+++ b/tests/test-debugcommands.t Mon Feb 10 17:31:26 2014 -0600
@@ -33,15 +33,16 @@
> dst('hello world')
> def g():
> f()
+ > sys.stderr.flush()
> debugstacktrace(skip=-5, f=sys.stdout)
> g()
> EOF
$ python debugstacktrace.py
hello world at:
- debugstacktrace.py:7 in * (glob)
+ debugstacktrace.py:8 in * (glob)
debugstacktrace.py:5 in g
debugstacktrace.py:3 in f
stacktrace at:
- debugstacktrace.py:7 *in * (glob)
- debugstacktrace.py:6 *in g (glob)
+ debugstacktrace.py:8 *in * (glob)
+ debugstacktrace.py:7 *in g (glob)
*/util.py:* in debugstacktrace (glob)
--- a/tests/test-doctest.py Mon Feb 10 14:51:06 2014 -0800
+++ b/tests/test-doctest.py Mon Feb 10 17:31:26 2014 -0600
@@ -27,3 +27,4 @@
testmod('mercurial.util', testtarget='platform')
testmod('hgext.convert.cvsps')
testmod('hgext.convert.filemap')
+testmod('hgext.convert.subversion')
--- a/tests/test-gendoc.t Mon Feb 10 14:51:06 2014 -0800
+++ b/tests/test-gendoc.t Mon Feb 10 17:31:26 2014 -0600
@@ -3,7 +3,7 @@
$ "$TESTDIR/hghave" docutils || exit 80
$ HGENCODING=UTF-8
$ export HGENCODING
- $ { echo C; find "$TESTDIR/../i18n" -name "*.po" | sort; } | while read PO; do
+ $ { echo C; ls "$TESTDIR/../i18n"/*.po | sort; } | while read PO; do
> LOCALE=`basename "$PO" .po`
> echo
> echo "% extracting documentation from $LOCALE"
--- a/tests/test-http.t Mon Feb 10 14:51:06 2014 -0800
+++ b/tests/test-http.t Mon Feb 10 17:31:26 2014 -0600
@@ -153,7 +153,8 @@
> common.permhooks.insert(0, perform_authentication)
> EOT
$ hg --config extensions.x=userpass.py serve -p $HGPORT2 -d --pid-file=pid \
- > --config server.preferuncompressed=True
+ > --config server.preferuncompressed=True \
+ > --config web.push_ssl=False --config web.allow_push=* -A ../access.log
$ cat pid >> $DAEMON_PIDS
$ cat << EOF > get_pass.py
@@ -163,6 +164,7 @@
> getpass.getpass = newgetpass
> EOF
+#if python243
$ hg id http://localhost:$HGPORT2/
abort: http authorization required for http://localhost:$HGPORT2/
[255]
@@ -176,6 +178,7 @@
password: 5fed3813f7f5
$ hg id http://user:pass@localhost:$HGPORT2/
5fed3813f7f5
+#endif
$ echo '[auth]' >> .hg/hgrc
$ echo 'l.schemes=http' >> .hg/hgrc
$ echo 'l.prefix=lo' >> .hg/hgrc
@@ -187,6 +190,7 @@
5fed3813f7f5
$ hg id http://user@localhost:$HGPORT2/
5fed3813f7f5
+#if python243
$ hg clone http://user:pass@localhost:$HGPORT2/ dest 2>&1
streaming all changes
7 files to transfer, 916 bytes of data
@@ -201,6 +205,82 @@
abort: HTTP Error 403: no
[255]
+ $ hg -R dest tag -r tip top
+ $ hg -R dest push http://user:pass@localhost:$HGPORT2/
+ pushing to http://user:***@localhost:$HGPORT2/
+ searching for changes
+ remote: adding changesets
+ remote: adding manifests
+ remote: adding file changes
+ remote: added 1 changesets with 1 changes to 1 files
+ $ hg rollback -q
+
+ $ cut -c38- ../access.log
+ "GET /?cmd=capabilities HTTP/1.1" 200 -
+ "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
+ "GET /?cmd=capabilities HTTP/1.1" 200 -
+ "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
+ "GET /?cmd=capabilities HTTP/1.1" 200 -
+ "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=bookmarks
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks
+ "GET /?cmd=capabilities HTTP/1.1" 200 -
+ "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=bookmarks
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks
+ "GET /?cmd=capabilities HTTP/1.1" 200 -
+ "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=bookmarks
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks
+ "GET /?cmd=capabilities HTTP/1.1" 200 -
+ "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=bookmarks
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks
+ "GET /?cmd=capabilities HTTP/1.1" 200 -
+ "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=bookmarks
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks
+ "GET /?cmd=capabilities HTTP/1.1" 200 -
+ "GET /?cmd=branchmap HTTP/1.1" 200 -
+ "GET /?cmd=stream_out HTTP/1.1" 401 -
+ "GET /?cmd=stream_out HTTP/1.1" 200 -
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=bookmarks
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks
+ "GET /?cmd=capabilities HTTP/1.1" 200 -
+ "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
+ "GET /?cmd=capabilities HTTP/1.1" 200 -
+ "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
+ "GET /?cmd=listkeys HTTP/1.1" 403 - x-hgarg-1:namespace=namespaces
+ "GET /?cmd=capabilities HTTP/1.1" 200 -
+ "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D7f4e523d01f2cc3765ac8934da3d14db775ff872
+ "GET /?cmd=branchmap HTTP/1.1" 200 -
+ "GET /?cmd=branchmap HTTP/1.1" 200 -
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=bookmarks
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks
+ "POST /?cmd=unbundle HTTP/1.1" 401 - x-hgarg-1:heads=686173686564+5eb5abfefeea63c80dd7553bcc3783f37e0c5524
+ "POST /?cmd=unbundle HTTP/1.1" 200 - x-hgarg-1:heads=686173686564+5eb5abfefeea63c80dd7553bcc3783f37e0c5524
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=phases
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases
+ "POST /?cmd=pushkey HTTP/1.1" 401 - x-hgarg-1:key=7f4e523d01f2cc3765ac8934da3d14db775ff872&namespace=phases&new=0&old=1
+ "POST /?cmd=pushkey HTTP/1.1" 200 - x-hgarg-1:key=7f4e523d01f2cc3765ac8934da3d14db775ff872&namespace=phases&new=0&old=1
+ "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=bookmarks
+ "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks
+
+#endif
$ cd ..
clone of serve with repo in root and unserved subrepo (issue2970)
--- a/tests/test-lock-badness.t Mon Feb 10 14:51:06 2014 -0800
+++ b/tests/test-lock-badness.t Mon Feb 10 17:31:26 2014 -0600
@@ -1,4 +1,7 @@
-#if unix-permissions no-root
+#if unix-permissions no-root no-windows
+
+Prepare
+
$ hg init a
$ echo a > a/a
$ hg -R a ci -A -m a
@@ -8,10 +11,25 @@
updating to branch default
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+One process waiting for another
+
+ $ cat > hooks.py << EOF
+ > import time
+ > def sleepone(**x): time.sleep(1)
+ > def sleephalf(**x): time.sleep(0.5)
+ > EOF
$ echo b > b/b
- $ hg -R b ci -A -m b
+ $ hg -R b ci -A -m b --config hooks.precommit="python:`pwd`/hooks.py:sleepone" > stdout &
+ $ hg -R b up -q --config hooks.pre-update="python:`pwd`/hooks.py:sleephalf"
+ waiting for lock on working directory of b held by '*:*' (glob)
+ got lock after 1 seconds
+ warning: ignoring unknown working parent d2ae7f538514!
+ $ wait
+ $ cat stdout
adding b
+Pushing to a local read-only repo that can't be locked
+
$ chmod 100 a/.hg/store
$ hg -R b push a
--- a/tests/test-module-imports.t Mon Feb 10 14:51:06 2014 -0800
+++ b/tests/test-module-imports.t Mon Feb 10 17:31:26 2014 -0600
@@ -22,18 +22,20 @@
hidden by deduplication algorithm in the cycle detector, so fixing
these may expose other cycles.
- $ hg locate 'mercurial/**.py' | xargs python "$import_checker"
- mercurial/dispatch.py mixed stdlib and relative imports:
- commands, error, extensions, fancyopts, hg, hook, util
- mercurial/fileset.py mixed stdlib and relative imports:
- error, merge, parser, util
- mercurial/revset.py mixed stdlib and relative imports:
- discovery, error, hbisect, parser, phases, util
- mercurial/templater.py mixed stdlib and relative imports:
- config, error, parser, templatefilters, util
- mercurial/ui.py mixed stdlib and relative imports:
- config, error, formatter, scmutil, util
- Import cycle: mercurial.cmdutil -> mercurial.subrepo -> mercurial.cmdutil
- Import cycle: mercurial.repoview -> mercurial.revset -> mercurial.repoview
- Import cycle: mercurial.fileset -> mercurial.merge -> mercurial.subrepo -> mercurial.match -> mercurial.fileset
- Import cycle: mercurial.filemerge -> mercurial.match -> mercurial.fileset -> mercurial.merge -> mercurial.filemerge
+ $ hg locate 'mercurial/**.py' | sed 's-\\-/-g' | xargs python "$import_checker"
+ mercurial/dispatch.py mixed imports
+ stdlib: commands
+ relative: error, extensions, fancyopts, hg, hook, util
+ mercurial/fileset.py mixed imports
+ stdlib: parser
+ relative: error, merge, util
+ mercurial/revset.py mixed imports
+ stdlib: parser
+ relative: discovery, error, hbisect, phases, util
+ mercurial/templater.py mixed imports
+ stdlib: parser
+ relative: config, error, templatefilters, util
+ mercurial/ui.py mixed imports
+ stdlib: formatter
+ relative: config, error, scmutil, util
+ Import cycle: mercurial.cmdutil -> mercurial.context -> mercurial.subrepo -> mercurial.cmdutil -> mercurial.cmdutil
--- a/tests/test-revset.t Mon Feb 10 14:51:06 2014 -0800
+++ b/tests/test-revset.t Mon Feb 10 17:31:26 2014 -0600
@@ -414,6 +414,16 @@
2
1
0
+ $ log '1:: and reverse(all())'
+ 9
+ 8
+ 7
+ 6
+ 5
+ 4
+ 3
+ 2
+ 1
$ log 'rev(5)'
5
$ log 'sort(limit(reverse(all()), 3))'
--- a/tests/test-shelve.t Mon Feb 10 14:51:06 2014 -0800
+++ b/tests/test-shelve.t Mon Feb 10 17:31:26 2014 -0600
@@ -23,10 +23,6 @@
$ hg unshelve
unshelving change 'default'
- adding changesets
- adding manifests
- adding file changes
- added 1 changesets with 5 changes to 5 files
$ hg commit -q -m 'initial commit'
@@ -81,11 +77,11 @@
ensure that our shelved changes exist
$ hg shelve -l
- default-01 (*) [mq]: second.patch (glob)
- default (*) [mq]: second.patch (glob)
+ default-01 (*) changes to '[mq]: second.patch' (glob)
+ default (*) changes to '[mq]: second.patch' (glob)
$ hg shelve -l -p default
- default (*) [mq]: second.patch (glob)
+ default (*) changes to '[mq]: second.patch' (glob)
diff --git a/a/a b/a/a
--- a/a/a
@@ -104,10 +100,8 @@
$ printf "z\na\n" > a/a
$ hg unshelve --keep
unshelving change 'default-01'
- adding changesets
- adding manifests
- adding file changes
- added 1 changesets with 3 changes to 8 files (+1 heads)
+ temporarily committing pending changes (restore with 'hg unshelve --abort')
+ rebasing shelved changes
merging a/a
$ hg revert --all -q
@@ -117,10 +111,6 @@
$ hg unshelve
unshelving change 'default-01'
- adding changesets
- adding manifests
- adding file changes
- added 1 changesets with 3 changes to 8 files
$ hg status -C
M a/a
A b.rename/b
@@ -192,10 +182,8 @@
$ hg unshelve
unshelving change 'default'
- adding changesets
- adding manifests
- adding file changes
- added 1 changesets with 3 changes to 8 files (+1 heads)
+ temporarily committing pending changes (restore with 'hg unshelve --abort')
+ rebasing shelved changes
merging a/a
warning: conflicts during merge.
merging a/a incomplete! (edit conflicts, then use 'hg resolve --mark')
@@ -379,10 +367,8 @@
$ HGMERGE=true hg unshelve
unshelving change 'default'
- adding changesets
- adding manifests
- adding file changes
- added 1 changesets with 1 changes to 6 files (+1 heads)
+ temporarily committing pending changes (restore with 'hg unshelve --abort')
+ rebasing shelved changes
merging a/a
$ hg parents -q
4:33f7f61e6c5e
@@ -400,15 +386,11 @@
shelved as default
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ hg shelve --list
- default (*) create conflict (glob)
+ default (*) changes to 'create conflict' (glob)
$ hg unshelve --keep
unshelving change 'default'
- adding changesets
- adding manifests
- adding file changes
- added 1 changesets with 1 changes to 7 files
$ hg shelve --list
- default (*) create conflict (glob)
+ default (*) changes to 'create conflict' (glob)
$ hg shelve --cleanup
$ hg shelve --list
@@ -424,10 +406,6 @@
* test 4:33f7f61e6c5e
$ hg unshelve
unshelving change 'test'
- adding changesets
- adding manifests
- adding file changes
- added 1 changesets with 1 changes to 7 files
$ hg bookmark
* test 4:33f7f61e6c5e
@@ -437,13 +415,9 @@
shelved as test
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ hg --config extensions.mq=! shelve --list
- test (1s ago) create conflict
+ test (*) changes to 'create conflict' (glob)
$ hg --config extensions.mq=! unshelve
unshelving change 'test'
- adding changesets
- adding manifests
- adding file changes
- added 1 changesets with 1 changes to 7 files
shelve should leave dirstate clean (issue 4055)
@@ -468,10 +442,7 @@
saved backup bundle to $TESTTMP/shelverebase/.hg/strip-backup/323bfa07f744-backup.hg (glob)
$ hg unshelve
unshelving change 'default'
- adding changesets
- adding manifests
- adding file changes
- added 2 changesets with 2 changes to 2 files (+1 heads)
+ rebasing shelved changes
$ hg status
M z
@@ -497,10 +468,7 @@
$ hg up -q 1
$ hg unshelve
unshelving change 'default'
- adding changesets
- adding manifests
- adding file changes
- added 1 changesets with 1 changes to 3 files
+ rebasing shelved changes
$ hg status
A d
@@ -513,10 +481,7 @@
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ hg unshelve
unshelving change 'default'
- adding changesets
- adding manifests
- adding file changes
- added 1 changesets with 1 changes to 3 files
+ rebasing shelved changes
$ hg status
A d
@@ -534,10 +499,6 @@
$ hg debugobsolete `hg --debug id -i -r 1`
$ hg unshelve
unshelving change 'default'
- adding changesets
- adding manifests
- adding file changes
- added 1 changesets with 1 changes to 2 files (+1 heads)
unshelve should leave unknown files alone (issue4113)
@@ -549,10 +510,6 @@
? e
$ hg unshelve
unshelving change 'default'
- adding changesets
- adding manifests
- adding file changes
- added 1 changesets with 1 changes to 2 files (+1 heads)
$ hg status
A d
? e
@@ -568,13 +525,138 @@
$ echo z > e
$ hg unshelve
unshelving change 'default'
- adding changesets
- adding manifests
- adding file changes
- added 1 changesets with 2 changes to 3 files (+1 heads)
$ cat e
e
$ cat e.orig
z
+
+unshelve and conflicts with untracked files
+
+ preparing:
+
+ $ rm *.orig
+ $ hg ci -qm 'commit stuff'
+ $ hg phase -p null:
+
+ no other changes - no merge:
+
+ $ echo f > f
+ $ hg add f
+ $ hg shelve
+ shelved as default
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ echo gold > f
+ $ hg unshelve
+ unshelving change 'default'
+ $ hg st
+ A f
+ ? f.orig
+ $ cat f
+ f
+ $ cat f.orig
+ gold
+
+ other uncommitted changes - merge:
+
+ $ hg st
+ A f
+ ? f.orig
+ $ hg shelve
+ shelved as default
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ hg log -G --template '{rev} {desc|firstline} {author}' -R bundle://.hg/shelved/default.hg -r 'bundle()'
+ o 4 changes to 'commit stuff' shelve@localhost
+ |
+ $ hg log -G --template '{rev} {desc|firstline} {author}'
+ @ 3 commit stuff test
+ |
+ | o 2 c test
+ |/
+ o 0 a test
+
+ $ mv f.orig f
+ $ echo other change >> a
+ $ hg unshelve
+ unshelving change 'default'
+ temporarily committing pending changes (restore with 'hg unshelve --abort')
+ rebasing shelved changes
+ merging f
+ warning: conflicts during merge.
+ merging f incomplete! (edit conflicts, then use 'hg resolve --mark')
+ unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue')
+ [1]
+ $ hg log -G --template '{rev} {desc|firstline} {author}'
+ @ 5 changes to 'commit stuff' shelve@localhost
+ |
+ | @ 4 pending changes temporary commit shelve@localhost
+ |/
+ o 3 commit stuff test
+ |
+ | o 2 c test
+ |/
+ o 0 a test
+
+ $ hg st
+ M f
+ ? f.orig
+ $ cat f
+ <<<<<<< local
+ gold
+ =======
+ f
+ >>>>>>> other
+ $ cat f.orig
+ gold
+ $ hg unshelve --abort
+ rebase aborted
+ unshelve of 'default' aborted
+ $ hg st
+ M a
+ ? f.orig
+ $ cat f.orig
+ gold
+ $ hg unshelve
+ unshelving change 'default'
+ temporarily committing pending changes (restore with 'hg unshelve --abort')
+ rebasing shelved changes
+ $ hg st
+ M a
+ A f
+ ? f.orig
+
+ other committed changes - merge:
+
+ $ hg shelve f
+ shelved as default
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ hg ci a -m 'intermediate other change'
+ $ mv f.orig f
+ $ hg unshelve
+ unshelving change 'default'
+ rebasing shelved changes
+ merging f
+ warning: conflicts during merge.
+ merging f incomplete! (edit conflicts, then use 'hg resolve --mark')
+ unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue')
+ [1]
+ $ hg st
+ M f
+ ? f.orig
+ $ cat f
+ <<<<<<< local
+ gold
+ =======
+ f
+ >>>>>>> other
+ $ cat f.orig
+ gold
+ $ hg unshelve --abort
+ rebase aborted
+ no changes needed to a
+ no changes needed to d
+ no changes needed to e
+ unshelve of 'default' aborted
+ $ hg shelve --delete default
+
$ cd ..
--- a/tests/test-status-color.t Mon Feb 10 14:51:06 2014 -0800
+++ b/tests/test-status-color.t Mon Feb 10 17:31:26 2014 -0600
@@ -1,5 +1,3 @@
- $ "$TESTDIR/hghave" tic || exit 80
-
$ echo "[extensions]" >> $HGRCPATH
$ echo "color=" >> $HGRCPATH
$ echo "[color]" >> $HGRCPATH
@@ -186,8 +184,11 @@
\x1b[0;0mC \x1b[0m\x1b[0;0m.hgignore\x1b[0m (esc)
\x1b[0;0mC \x1b[0m\x1b[0;0mmodified\x1b[0m (esc)
+
hg status -A (with terminfo color):
+#if tic
+
$ mkdir "$TESTTMP/terminfo"
$ TERMINFO="$TESTTMP/terminfo" tic "$TESTDIR/hgterm.ti"
$ TERM=hgterm TERMINFO="$TESTTMP/terminfo" hg status --config color.mode=terminfo --color=always -A
@@ -201,6 +202,8 @@
\x1b[30m\x1b[30mC \x1b[30m\x1b[30m\x1b[30m.hgignore\x1b[30m (esc)
\x1b[30m\x1b[30mC \x1b[30m\x1b[30m\x1b[30mmodified\x1b[30m (esc)
+#endif
+
$ echo "^ignoreddir$" > .hgignore
$ mkdir ignoreddir
--- a/tests/test-treediscovery.t Mon Feb 10 14:51:06 2014 -0800
+++ b/tests/test-treediscovery.t Mon Feb 10 17:31:26 2014 -0600
@@ -29,6 +29,15 @@
$ hg init empty1
$ hg init empty2
$ tstart empty2
+check if process of pid looks reasonable ('hg' normally, 'python' for run-tests.py -l)
+#if windows
+ps of mingw does not support -p, tasklist is on any windows machine since XP
+ $ tasklist //NH //fi "pid eq `cat hg.pid`" | grep ' '
+ (.* )?(hg|python)\.exe( .*)? (re)
+#else
+ $ ps --no-heading -p `cat hg.pid`
+ (.* )?(hg|python)( .*)? (re)
+#endif
$ hg incoming -R empty1 $remote
comparing with http://localhost:$HGPORT/
no changes found