--- a/contrib/check-code.py Sat Mar 06 10:02:45 2010 +0100
+++ b/contrib/check-code.py Tue Mar 16 11:37:14 2010 -0300
@@ -52,6 +52,7 @@
(r'/bin/', "don't use explicit paths for tools"),
(r'\$PWD', "don't use $PWD, use `pwd`"),
(r'[^\n]\Z', "no trailing newline"),
+ (r'export.*=', "don't export and assign at once"),
]
testfilters = [
@@ -82,6 +83,7 @@
(r'[\x80-\xff]', "non-ASCII character literal"),
(r'("|\')\.format\(', "str.format() not available in Python 2.4"),
(r'^\s*with\s+', "with not available in Python 2.4"),
+ (r'^\s*(any|all)\(', "any/all not available in Python 2.4"),
(r'if\s.*\selse', "if ... else form not available in Python 2.4"),
(r'([\(\[]\s\S)|(\S\s[\)\]])', "gratuitous whitespace in () or []"),
# (r'\s\s=', "gratuitous whitespace before ="),
--- a/contrib/hgdiff Sat Mar 06 10:02:45 2010 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,105 +0,0 @@
-#!/usr/bin/env python
-
-import os, sys, struct, stat
-import difflib
-import re
-from optparse import OptionParser
-from mercurial.bdiff import bdiff, blocks
-from mercurial.mdiff import bunidiff, diffopts
-
-VERSION="0.3"
-usage = "usage: %prog [options] file1 file2"
-parser = OptionParser(usage=usage)
-
-parser.add_option("-d", "--difflib", action="store_true", default=False)
-parser.add_option('-x', '--count', default=1)
-parser.add_option('-c', '--context', type="int", default=3)
-parser.add_option('-p', '--show-c-function', action="store_true", default=False)
-parser.add_option('-w', '--ignore-all-space', action="store_true",
- default=False)
-
-(options, args) = parser.parse_args()
-
-if not args:
- parser.print_help()
- sys.exit(1)
-
-# simple utility function to put all the
-# files from a directory tree into a dict
-def buildlist(names, top):
- tlen = len(top)
- for root, dirs, files in os.walk(top):
- l = root[tlen + 1:]
- for x in files:
- p = os.path.join(root, x)
- st = os.lstat(p)
- if stat.S_ISREG(st.st_mode):
- names[os.path.join(l, x)] = (st.st_dev, st.st_ino)
-
-def diff_files(file1, file2):
- if file1 is None:
- b = file(file2).read().splitlines(True)
- l1 = "--- %s\n" % (file2)
- l2 = "+++ %s\n" % (file2)
- l3 = "@@ -0,0 +1,%d @@\n" % len(b)
- l = [l1, l2, l3] + ["+" + e for e in b]
- elif file2 is None:
- a = file(file1).read().splitlines(True)
- l1 = "--- %s\n" % (file1)
- l2 = "+++ %s\n" % (file1)
- l3 = "@@ -1,%d +0,0 @@\n" % len(a)
- l = [l1, l2, l3] + ["-" + e for e in a]
- else:
- t1 = file(file1).read()
- t2 = file(file2).read()
- l1 = t1.splitlines(True)
- l2 = t2.splitlines(True)
- if options.difflib:
- l = difflib.unified_diff(l1, l2, file1, file2)
- else:
- l = bunidiff(t1, t2, l1, l2, file1, file2,
- diffopts(context=options.context,
- showfunc=options.show_c_function,
- ignorews=options.ignore_all_space))
- for x in l:
- if x[-1] != '\n':
- x += "\n\ No newline at end of file\n"
- print x,
-
-file1 = args[0]
-file2 = args[1]
-
-if os.path.isfile(file1) and os.path.isfile(file2):
- diff_files(file1, file2)
-elif os.path.isdir(file1):
- if not os.path.isdir(file2):
- sys.stderr.write("file types don't match\n")
- sys.exit(1)
-
- d1 = {}
- d2 = {}
-
- buildlist(d1, file1)
- buildlist(d2, file2)
- keys = d1.keys()
- keys.sort()
- for x in keys:
- if x not in d2:
- f2 = None
- else:
- f2 = os.path.join(file2, x)
- st1 = d1[x]
- st2 = d2[x]
- del d2[x]
- if st1[0] == st2[0] and st1[1] == st2[1]:
- sys.stderr.write("%s is a hard link\n" % x)
- continue
- x = os.path.join(file1, x)
- diff_files(x, f2)
- keys = d2.keys()
- keys.sort()
- for x in keys:
- f1 = None
- x = os.path.join(file2, x)
- diff_files(f1, x)
-
--- a/contrib/macosx/Readme.html Sat Mar 06 10:02:45 2010 +0100
+++ b/contrib/macosx/Readme.html Tue Mar 16 11:37:14 2010 -0300
@@ -17,11 +17,11 @@
<body>
<p class="p1"><b>Before you install</b></p>
<p class="p2"><br></p>
-<p class="p3">This is an OS X 10.5 version of Mercurial that depends on the default Python 2.5 installation.</p>
+<p class="p3">This is an OS X 10.6 version of Mercurial that depends on the default Python 2.6 installation.</p>
<p class="p2"><br></p>
<p class="p1"><b>After you install</b></p>
<p class="p2"><br></p>
-<p class="p3">This package installs the <span class="s2">hg</span> executable in <span class="s2">/usr/local/bin</span> and the Mercurial files in <span class="s2">/Library/Python/2.5/site-packages/mercurial.</span></p>
+<p class="p3">This package installs the <span class="s2">hg</span> executable in <span class="s2">/usr/local/bin</span> and the Mercurial files in <span class="s2">/Library/Python/2.6/site-packages/mercurial.</span></p>
<p class="p2"><br></p>
<p class="p1"><b>Documentation</b></p>
<p class="p2"><br></p>
--- a/contrib/mercurial.spec Sat Mar 06 10:02:45 2010 +0100
+++ b/contrib/mercurial.spec Tue Mar 16 11:37:14 2010 -0300
@@ -67,7 +67,7 @@
%files
%defattr(-,root,root,-)
-%doc CONTRIBUTORS COPYING doc/README doc/hg*.txt doc/hg*.html doc/ja *.cgi contrib/*.fcgi
+%doc CONTRIBUTORS COPYING doc/README doc/hg*.txt doc/hg*.html *.cgi contrib/*.fcgi
%doc %attr(644,root,root) %{_mandir}/man?/hg*
%doc %attr(644,root,root) contrib/*.svg contrib/sample.hgrc
%{_sysconfdir}/bash_completion.d/mercurial.sh
--- a/contrib/shrink-revlog.py Sat Mar 06 10:02:45 2010 +0100
+++ b/contrib/shrink-revlog.py Tue Mar 16 11:37:14 2010 -0300
@@ -24,50 +24,81 @@
from mercurial import changegroup
from mercurial.i18n import _
-def toposort(ui, rl):
+
+def postorder(start, edges):
+ result = []
+ visit = list(start)
+ finished = set()
- children = {}
- root = []
- # build children and roots
+ while visit:
+ cur = visit[-1]
+ for p in edges[cur]:
+ if p not in finished:
+ visit.append(p)
+ break
+ else:
+ result.append(cur)
+ finished.add(cur)
+ visit.pop()
+
+ return result
+
+def toposort_reversepostorder(ui, rl):
+ # postorder of the reverse directed graph
+
+ # map rev to list of parent revs (p2 first)
+ parents = {}
+ heads = set()
ui.status(_('reading revs\n'))
try:
- for i in rl:
- ui.progress(_('reading'), i, total=len(rl))
- children[i] = []
- parents = [p for p in rl.parentrevs(i) if p != node.nullrev]
- # in case of duplicate parents
- if len(parents) == 2 and parents[0] == parents[1]:
- del parents[1]
- for p in parents:
- assert p in children
- children[p].append(i)
-
- if len(parents) == 0:
- root.append(i)
+ for rev in rl:
+ ui.progress(_('reading'), rev, total=len(rl))
+ (p1, p2) = rl.parentrevs(rev)
+ if p1 == p2 == node.nullrev:
+ parents[rev] = () # root node
+ elif p1 == p2 or p2 == node.nullrev:
+ parents[rev] = (p1,) # normal node
+ else:
+ parents[rev] = (p2, p1) # merge node
+ heads.add(rev)
+ for p in parents[rev]:
+ heads.discard(p)
finally:
ui.progress(_('reading'), None, total=len(rl))
- # XXX this is a reimplementation of the 'branchsort' topo sort
- # algorithm in hgext.convert.convcmd... would be nice not to duplicate
- # the algorithm
+ heads = list(heads)
+ heads.sort(reverse=True)
+
ui.status(_('sorting revs\n'))
- visit = root
- ret = []
- while visit:
- i = visit.pop(0)
- ret.append(i)
- if i not in children:
- # This only happens if some node's p1 == p2, which can
- # happen in the manifest in certain circumstances.
- continue
- next = []
- for c in children.pop(i):
- parents_unseen = [p for p in rl.parentrevs(c)
- if p != node.nullrev and p in children]
- if len(parents_unseen) == 0:
- next.append(c)
- visit = next + visit
- return ret
+ return postorder(heads, parents)
+
+def toposort_postorderreverse(ui, rl):
+ # reverse-postorder of the reverse directed graph
+
+ children = {}
+ roots = set()
+ ui.status(_('reading revs\n'))
+ try:
+ for rev in rl:
+ ui.progress(_('reading'), rev, total=len(rl))
+ (p1, p2) = rl.parentrevs(rev)
+ if p1 == p2 == node.nullrev:
+ roots.add(rev)
+ children[rev] = []
+ if p1 != node.nullrev:
+ children[p1].append(rev)
+ if p2 != node.nullrev:
+ children[p2].append(rev)
+ finally:
+ ui.progress(_('reading'), None, total=len(rl))
+
+    roots = list(roots)
+ roots.sort()
+
+ ui.status(_('sorting revs\n'))
+ result = postorder(roots, children)
+ result.reverse()
+ return result
def writerevs(ui, r1, r2, order, tr):
@@ -118,9 +149,15 @@
% (shrink_percent, shrink_factor))
def shrink(ui, repo, **opts):
+ """shrink a revlog by reordering revisions
+
+ Rewrites all the entries in some revlog of the current repository
+ (by default, the manifest log) to save space.
+
+ Different sort algorithms have different performance
+ characteristics. Use ``--sort`` to select a sort algorithm so you
+ can determine which works best for your data.
"""
- Shrink revlog by re-ordering revisions. Will operate on manifest for
- the given repository if no other revlog is specified."""
if not repo.local():
raise util.Abort(_('not a local repository: %s') % repo.root)
@@ -139,6 +176,12 @@
raise util.Abort(_('--revlog option must specify a revlog in %s, '
'not %s') % (store, indexfn))
+ sortname = opts['sort']
+ try:
+ toposort = globals()['toposort_' + sortname]
+ except KeyError:
+ raise util.Abort(_('no such toposort algorithm: %s') % sortname)
+
if not os.path.exists(indexfn):
raise util.Abort(_('no such file: %s') % indexfn)
if '00changelog' in indexfn:
@@ -187,6 +230,15 @@
try:
try:
order = toposort(ui, r1)
+
+ suboptimal = 0
+ for i in xrange(1, len(order)):
+ parents = [p for p in r1.parentrevs(order[i])
+ if p != node.nullrev]
+ if parents and order[i - 1] not in parents:
+ suboptimal += 1
+ ui.note(_('%d suboptimal nodes\n') % suboptimal)
+
writerevs(ui, r1, r2, order, tr)
report(ui, r1, r2)
tr.close()
@@ -229,6 +281,7 @@
'shrink': (shrink,
[('', 'revlog', '', _('index (.i) file of the revlog to shrink')),
('n', 'dry-run', None, _('do not shrink, simulate only')),
+ ('', 'sort', 'reversepostorder', 'name of sort algorithm to use'),
],
_('hg shrink [--revlog PATH]'))
}
--- a/contrib/wix/mercurial.wxs Sat Mar 06 10:02:45 2010 +0100
+++ b/contrib/wix/mercurial.wxs Tue Mar 16 11:37:14 2010 -0300
@@ -37,6 +37,9 @@
<Property Id='INSTALLEDMERCURIALPRODUCTS' Secure='yes'></Property>
<Property Id='REINSTALLMODE'>amus</Property>
+ <!--Auto-accept the license page-->
+ <Property Id='LicenseAccepted'>1</Property>
+
<Directory Id='TARGETDIR' Name='SourceDir'>
<Directory Id='ProgramFilesFolder' Name='PFiles'>
<Directory Id='INSTALLDIR' Name='Mercurial'>
--- a/hgext/bookmarks.py Sat Mar 06 10:02:45 2010 +0100
+++ b/hgext/bookmarks.py Tue Mar 16 11:37:14 2010 -0300
@@ -296,6 +296,13 @@
tags.update(self._bookmarks)
return (tags, tagtypes)
+ if hasattr(repo, 'invalidate'):
+ def invalidate(self):
+ super(bookmark_repo, self).invalidate()
+ for attr in ('_bookmarks', '_bookmarkcurrent'):
+ if attr in self.__dict__:
+ delattr(repo, attr)
+
repo.__class__ = bookmark_repo
def uisetup(ui):
--- a/hgext/churn.py Sat Mar 06 10:02:45 2010 +0100
+++ b/hgext/churn.py Tue Mar 16 11:37:14 2010 -0300
@@ -48,7 +48,7 @@
tmpl.show(ctx)
return ui.popbuffer()
- state = {'count': 0, 'pct': 0}
+ state = {'count': 0}
rate = {}
df = False
if opts.get('date'):
@@ -74,20 +74,13 @@
lines = changedlines(ui, repo, ctx1, ctx, fns)
rate[key] = [r + l for r, l in zip(rate.get(key, (0, 0)), lines)]
- if opts.get('progress'):
- state['count'] += 1
- newpct = int(100.0 * state['count'] / max(len(repo), 1))
- if state['pct'] < newpct:
- state['pct'] = newpct
- ui.write("\r" + _("generating stats: %d%%") % state['pct'])
- sys.stdout.flush()
+ state['count'] += 1
+ ui.progress(_('analyzing'), state['count'], total=len(repo))
for ctx in cmdutil.walkchangerevs(repo, m, opts, prep):
continue
- if opts.get('progress'):
- ui.write("\r")
- sys.stdout.flush()
+ ui.progress(_('analyzing'), None)
return rate
@@ -188,6 +181,6 @@
('s', 'sort', False, _('sort by key (default: sort by count)')),
('', 'diffstat', False, _('display added/removed lines separately')),
('', 'aliases', '', _('file with email aliases')),
- ('', 'progress', None, _('show progress'))],
- _("hg churn [-d DATE] [-r REV] [--aliases FILE] [--progress] [FILE]")),
+ ],
+ _("hg churn [-d DATE] [-r REV] [--aliases FILE] [FILE]")),
}
--- a/hgext/convert/cvsps.py Sat Mar 06 10:02:45 2010 +0100
+++ b/hgext/convert/cvsps.py Tue Mar 16 11:37:14 2010 -0300
@@ -32,6 +32,7 @@
.branchpoints- the branches that start at the current entry
'''
def __init__(self, **entries):
+ self.synthetic = False
self.__dict__.update(entries)
def __repr__(self):
@@ -124,9 +125,9 @@
# Get the real directory in the repository
try:
prefix = open(os.path.join('CVS','Repository')).read().strip()
+ directory = prefix
if prefix == ".":
prefix = ""
- directory = prefix
except IOError:
raise logerror('Not a CVS sandbox')
@@ -184,7 +185,11 @@
p = util.normpath(getrepopath(root))
if not p.endswith('/'):
p += '/'
- prefix = p + util.normpath(prefix)
+ if prefix:
+ # looks like normpath replaces "" by "."
+ prefix = p + util.normpath(prefix)
+ else:
+ prefix = p
cmd.append(['log', 'rlog'][rlog])
if date:
# no space between option and date string
@@ -292,8 +297,7 @@
assert match, _('expected revision number')
e = logentry(rcs=scache(rcs), file=scache(filename),
revision=tuple([int(x) for x in match.group(1).split('.')]),
- branches=[], parent=None,
- synthetic=False)
+ branches=[], parent=None)
state = 6
elif state == 6:
@@ -465,6 +469,7 @@
.branchpoints- the branches that start at the current entry
'''
def __init__(self, **entries):
+ self.synthetic = False
self.__dict__.update(entries)
def __repr__(self):
@@ -538,8 +543,7 @@
# "File file4 was added on branch ..." (synthetic, 1 entry)
# "Add file3 and file4 to fix ..." (real, 2 entries)
# Hence the check for 1 entry here.
- synth = getattr(c.entries[0], 'synthetic', None)
- c.synthetic = (len(c.entries) == 1 and synth)
+ c.synthetic = len(c.entries) == 1 and c.entries[0].synthetic
# Sort files in each changeset
--- a/hgext/convert/subversion.py Sat Mar 06 10:02:45 2010 +0100
+++ b/hgext/convert/subversion.py Tue Mar 16 11:37:14 2010 -0300
@@ -812,6 +812,9 @@
break
if not paths:
self.ui.debug('revision %d has no entries\n' % revnum)
+ # If we ever leave the loop on an empty
+ # revision, do not try to get a parent branch
+ lastonbranch = lastonbranch or revnum == 0
continue
cset, lastonbranch = parselogentry(paths, revnum, author,
date, message)
--- a/hgext/hgcia.py Sat Mar 06 10:02:45 2010 +0100
+++ b/hgext/hgcia.py Tue Mar 16 11:37:14 2010 -0300
@@ -113,7 +113,7 @@
n = self.ctx.node()
pbuf = patchbuf()
- patch.export(self.cia.repo, [n], fp=pbuf)
+ cmdutil.export(self.cia.repo, [n], fp=pbuf)
return patch.diffstat(pbuf.lines) or ''
def logmsg(self):
--- a/hgext/inotify/__init__.py Sat Mar 06 10:02:45 2010 +0100
+++ b/hgext/inotify/__init__.py Tue Mar 16 11:37:14 2010 -0300
@@ -41,7 +41,7 @@
# to start an inotify server if it won't start.
_inotifyon = True
- def status(self, match, subrepos, ignored, clean, unknown=True):
+ def status(self, match, subrepos, ignored, clean, unknown):
files = match.files()
if '.' in files:
files = []
--- a/hgext/keyword.py Sat Mar 06 10:02:45 2010 +0100
+++ b/hgext/keyword.py Tue Mar 16 11:37:14 2010 -0300
@@ -1,6 +1,6 @@
# keyword.py - $Keyword$ expansion for Mercurial
#
-# Copyright 2007-2009 Christian Ebert <blacktrash@gmx.net>
+# Copyright 2007-2010 Christian Ebert <blacktrash@gmx.net>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
@@ -79,7 +79,6 @@
from mercurial import commands, cmdutil, dispatch, filelog, revlog, extensions
from mercurial import patch, localrepo, templater, templatefilters, util, match
from mercurial.hgweb import webcommands
-from mercurial.lock import release
from mercurial.node import nullid
from mercurial.i18n import _
import re, shutil, tempfile
@@ -251,10 +250,8 @@
'''Bails out if [keyword] configuration is not active.
Returns status of working directory.'''
if kwt:
- unknown = (opts.get('unknown') or opts.get('all')
- or opts.get('untracked'))
return repo.status(match=cmdutil.match(repo, pats, opts), clean=True,
- unknown=unknown)
+ unknown=opts.get('unknown') or opts.get('all'))
if ui.configitems('keyword'):
raise util.Abort(_('[keyword] patterns cannot match'))
raise util.Abort(_('no [keyword] patterns configured'))
@@ -264,17 +261,15 @@
if repo.dirstate.parents()[1] != nullid:
raise util.Abort(_('outstanding uncommitted merge'))
kwt = kwtools['templater']
- status = _status(ui, repo, kwt, *pats, **opts)
- modified, added, removed, deleted = status[:4]
- if modified or added or removed or deleted:
- raise util.Abort(_('outstanding uncommitted changes'))
- wlock = lock = None
+ wlock = repo.wlock()
try:
- wlock = repo.wlock()
- lock = repo.lock()
- kwt.overwrite(None, expand, status[6])
+ status = _status(ui, repo, kwt, *pats, **opts)
+ modified, added, removed, deleted, unknown, ignored, clean = status
+ if modified or added or removed or deleted:
+ raise util.Abort(_('outstanding uncommitted changes'))
+ kwt.overwrite(None, expand, clean)
finally:
- release(lock, wlock)
+ wlock.release()
def demo(ui, repo, *args, **opts):
'''print [keywordmaps] configuration and an expansion example
@@ -398,7 +393,7 @@
cwd = pats and repo.getcwd() or ''
modified, added, removed, deleted, unknown, ignored, clean = status
files = []
- if not (opts.get('unknown') or opts.get('untracked')) or opts.get('all'):
+ if not opts.get('unknown') or opts.get('all'):
files = sorted(modified + added + clean)
wctx = repo[None]
kwfiles = [f for f in files if kwt.iskwfile(f, wctx.flags)]
@@ -485,15 +480,10 @@
del self.commitctx
def kwcommitctx(self, ctx, error=False):
- wlock = lock = None
- try:
- wlock = self.wlock()
- lock = self.lock()
- n = super(kwrepo, self).commitctx(ctx, error)
- kwt.overwrite(n, True, None)
- return n
- finally:
- release(lock, wlock)
+ n = super(kwrepo, self).commitctx(ctx, error)
+ # no lock needed, only called from repo.commit() which already locks
+ kwt.overwrite(n, True, None)
+ return n
# monkeypatches
def kwpatchfile_init(orig, self, ui, fname, opener,
@@ -540,9 +530,6 @@
[('A', 'all', None, _('show keyword status flags of all files')),
('i', 'ignore', None, _('show files excluded from expansion')),
('u', 'unknown', None, _('only show unknown (not tracked) files')),
- ('a', 'all', None,
- _('show keyword status flags of all files (DEPRECATED)')),
- ('u', 'untracked', None, _('only show untracked files (DEPRECATED)')),
] + commands.walkopts,
_('hg kwfiles [OPTION]... [FILE]...')),
'kwshrink': (shrink, commands.walkopts,
--- a/hgext/mq.py Sat Mar 06 10:02:45 2010 +0100
+++ b/hgext/mq.py Tue Mar 16 11:37:14 2010 -0300
@@ -16,7 +16,6 @@
Common tasks (use "hg help command" for more details)::
- prepare repository to work with patches qinit
create new patch qnew
import existing patch qimport
@@ -54,18 +53,11 @@
normname = util.normpath
class statusentry(object):
- def __init__(self, rev, name=None):
- if not name:
- fields = rev.split(':', 1)
- if len(fields) == 2:
- self.rev, self.name = fields
- else:
- self.rev, self.name = None, None
- else:
- self.rev, self.name = rev, name
+ def __init__(self, node, name):
+ self.node, self.name = node, name
def __str__(self):
- return self.rev + ':' + self.name
+ return hex(self.node) + ':' + self.name
class patchheader(object):
def __init__(self, pf, plainmode=False):
@@ -80,8 +72,7 @@
break
def eatempty(lines):
while lines:
- l = lines[-1]
- if re.match('\s*$', l):
+ if not lines[-1].strip():
del lines[-1]
else:
break
@@ -268,8 +259,11 @@
@util.propertycache
def applied(self):
if os.path.exists(self.join(self.status_path)):
+ def parse(l):
+ n, name = l.split(':', 1)
+ return statusentry(bin(n), name)
lines = self.opener(self.status_path).read().splitlines()
- return [statusentry(l) for l in lines]
+ return [parse(l) for l in lines]
return []
@util.propertycache
@@ -332,16 +326,12 @@
return os.path.join(self.path, *p)
def find_series(self, patch):
- pre = re.compile("(\s*)([^#]+)")
- index = 0
- for l in self.full_series:
- m = pre.match(l)
- if m:
- s = m.group(2)
- s = s.rstrip()
- if s == patch:
- return index
- index += 1
+ def matchpatch(l):
+ l = l.split('#', 1)[0]
+ return l.strip() == patch
+ for index, l in enumerate(self.full_series):
+ if matchpatch(l):
+ return index
return None
guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
@@ -547,22 +537,16 @@
(p1, p2) = repo.dirstate.parents()
if p2 == nullid:
return p1
- if len(self.applied) == 0:
+ if not self.applied:
return None
- return bin(self.applied[-1].rev)
- pp = repo.changelog.parents(rev)
- if pp[1] != nullid:
- arevs = [x.rev for x in self.applied]
- p0 = hex(pp[0])
- p1 = hex(pp[1])
- if p0 in arevs:
- return pp[0]
- if p1 in arevs:
- return pp[1]
- return pp[0]
+ return self.applied[-1].node
+ p1, p2 = repo.changelog.parents(rev)
+ if p2 != nullid and p2 in [x.node for x in self.applied]:
+ return p2
+ return p1
def mergepatch(self, repo, mergeq, series, diffopts):
- if len(self.applied) == 0:
+ if not self.applied:
# each of the patches merged in will have two parents. This
# can confuse the qrefresh, qdiff, and strip code because it
# needs to know which parent is actually in the patch queue.
@@ -572,7 +556,7 @@
pname = ".hg.patches.merge.marker"
n = repo.commit('[mq]: merge marker', force=True)
self.removeundo(repo)
- self.applied.append(statusentry(hex(n), pname))
+ self.applied.append(statusentry(n, pname))
self.applied_dirty = 1
head = self.qparents(repo)
@@ -590,10 +574,10 @@
if not info:
self.ui.warn(_("patch %s is not applied\n") % patch)
return (1, None)
- rev = bin(info[1])
+ rev = info[1]
err, head = self.mergeone(repo, mergeq, head, patch, rev, diffopts)
if head:
- self.applied.append(statusentry(hex(head), patch))
+ self.applied.append(statusentry(head, patch))
self.applied_dirty = 1
if err:
return (err, head)
@@ -616,7 +600,7 @@
return (True, files, fuzz)
def apply(self, repo, series, list=False, update_status=True,
- strict=False, patchdir=None, merge=None, all_files={}):
+ strict=False, patchdir=None, merge=None, all_files=None):
wlock = lock = tr = None
try:
wlock = repo.wlock()
@@ -641,7 +625,7 @@
self.removeundo(repo)
def _apply(self, repo, series, list=False, update_status=True,
- strict=False, patchdir=None, merge=None, all_files={}):
+ strict=False, patchdir=None, merge=None, all_files=None):
'''returns (error, hash)
error = 1 for unable to read, 2 for patch failed, 3 for patch fuzz'''
# TODO unify with commands.py
@@ -674,7 +658,8 @@
if ph.haspatch:
(patcherr, files, fuzz) = self.patch(repo, pf)
- all_files.update(files)
+ if all_files is not None:
+ all_files.update(files)
patcherr = not patcherr
else:
self.ui.warn(_("patch %s is empty\n") % patchname)
@@ -704,7 +689,7 @@
raise util.Abort(_("repo commit failed"))
if update_status:
- self.applied.append(statusentry(hex(n), patchname))
+ self.applied.append(statusentry(n, patchname))
if patcherr:
self.ui.warn(_("patch failed, rejects left in working dir\n"))
@@ -736,7 +721,7 @@
self.series_dirty = 1
def _revpatches(self, repo, revs):
- firstrev = repo[self.applied[0].rev].rev()
+ firstrev = repo[self.applied[0].node].rev()
patches = []
for i, rev in enumerate(revs):
@@ -744,7 +729,7 @@
raise util.Abort(_('revision %d is not managed') % rev)
ctx = repo[rev]
- base = bin(self.applied[i].rev)
+ base = self.applied[i].node
if ctx.node() != base:
msg = _('cannot delete revision %d above applied patches')
raise util.Abort(msg % rev)
@@ -792,8 +777,8 @@
self._cleanup(realpatches, numrevs, opts.get('keep'))
def check_toppatch(self, repo):
- if len(self.applied) > 0:
- top = bin(self.applied[-1].rev)
+ if self.applied:
+ top = self.applied[-1].node
patch = self.applied[-1].name
pp = repo.dirstate.parents()
if top not in pp:
@@ -873,7 +858,7 @@
raise util.Abort(_("repo commit failed"))
try:
self.full_series[insert:insert] = [patchfn]
- self.applied.append(statusentry(hex(n), patchfn))
+ self.applied.append(statusentry(n, patchfn))
self.parse_series()
self.series_dirty = 1
self.applied_dirty = 1
@@ -930,7 +915,7 @@
"""returns (index, rev, patch)"""
for i, a in enumerate(self.applied):
if a.name == patch:
- return (i, a.rev, a.name)
+ return (i, a.node, a.name)
return None
# if the exact patch name does not exist, we try a few
@@ -953,7 +938,7 @@
return None
if matches:
return matches[0]
- if len(self.series) > 0 and len(self.applied) > 0:
+ if self.series and self.applied:
if s == 'qtip':
return self.series[self.series_end(True)-1]
if s == 'qbase':
@@ -1071,7 +1056,7 @@
end = self.series.index(patch, start) + 1
s = self.series[start:end]
- all_files = {}
+ all_files = set()
try:
if mergeq:
ret = self.mergepatch(repo, mergeq, s, diffopts)
@@ -1081,12 +1066,15 @@
self.ui.warn(_('cleaning up working directory...'))
node = repo.dirstate.parents()[0]
hg.revert(repo, node, None)
- unknown = repo.status(unknown=True)[4]
# only remove unknown files that we know we touched or
# created while patching
- for f in unknown:
- if f in all_files:
- util.unlink(repo.wjoin(f))
+ for f in all_files:
+ if f not in repo.dirstate:
+ try:
+ util.unlink(repo.wjoin(f))
+ except OSError, inst:
+ if inst.errno != errno.ENOENT:
+ raise
self.ui.warn(_('done\n'))
raise
@@ -1104,10 +1092,6 @@
wlock.release()
def pop(self, repo, patch=None, force=False, update=True, all=False):
- def getfile(f, rev, flags):
- t = repo.file(f).read(rev)
- repo.wwrite(f, t, flags)
-
wlock = repo.wlock()
try:
if patch:
@@ -1119,7 +1103,7 @@
if not info:
raise util.Abort(_("patch %s is not applied") % patch)
- if len(self.applied) == 0:
+ if not self.applied:
# Allow qpop -a to work repeatedly,
# but not qpop without an argument
self.ui.warn(_("no patches applied\n"))
@@ -1138,16 +1122,16 @@
if not update:
parents = repo.dirstate.parents()
- rr = [bin(x.rev) for x in self.applied]
+ rr = [x.node for x in self.applied]
for p in parents:
if p in rr:
self.ui.warn(_("qpop: forcing dirstate update\n"))
update = True
else:
- parents = [p.hex() for p in repo[None].parents()]
+ parents = [p.node() for p in repo[None].parents()]
needupdate = False
for entry in self.applied[start:]:
- if entry.rev in parents:
+ if entry.node in parents:
needupdate = True
break
update = needupdate
@@ -1157,7 +1141,7 @@
self.applied_dirty = 1
end = len(self.applied)
- rev = bin(self.applied[start].rev)
+ rev = self.applied[start].node
if update:
top = self.check_toppatch(repo)[0]
@@ -1167,7 +1151,7 @@
node = short(rev)
raise util.Abort(_('trying to pop unknown node %s') % node)
- if heads != [bin(self.applied[-1].rev)]:
+ if heads != [self.applied[-1].node]:
raise util.Abort(_("popping would remove a revision not "
"managed by this patch queue"))
@@ -1175,8 +1159,7 @@
# form of hg.update.
if update:
qp = self.qparents(repo, rev)
- changes = repo.changelog.read(qp)
- mmap = repo.manifest.read(changes[0])
+ ctx = repo[qp]
m, a, r, d = repo.status(qp, top)[:4]
if d:
raise util.Abort(_("deletions found between repo revs"))
@@ -1189,18 +1172,16 @@
try: os.removedirs(os.path.dirname(repo.wjoin(f)))
except: pass
repo.dirstate.forget(f)
- for f in m:
- getfile(f, mmap[f], mmap.flags(f))
- for f in r:
- getfile(f, mmap[f], mmap.flags(f))
for f in m + r:
+ fctx = ctx[f]
+ repo.wwrite(f, fctx.data(), fctx.flags())
repo.dirstate.normal(f)
repo.dirstate.setparents(qp, nullid)
for patch in reversed(self.applied[start:end]):
self.ui.status(_("popping %s\n") % patch.name)
del self.applied[start:end]
self.strip(repo, rev, update=False, backup='strip')
- if len(self.applied):
+ if self.applied:
self.ui.write(_("now at: %s\n") % self.applied[-1].name)
else:
self.ui.write(_("patch queue now empty\n"))
@@ -1221,7 +1202,7 @@
self.printdiff(repo, diffopts, node1, node2, files=pats, opts=opts)
def refresh(self, repo, pats=None, **opts):
- if len(self.applied) == 0:
+ if not self.applied:
self.ui.write(_("no patches applied\n"))
return 1
msg = opts.get('msg', '').rstrip()
@@ -1233,8 +1214,7 @@
try:
self.check_toppatch(repo)
- (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
- top = bin(top)
+ (top, patchfn) = (self.applied[-1].node, self.applied[-1].name)
if repo.changelog.heads(top) != [top]:
raise util.Abort(_("cannot refresh a revision with children"))
@@ -1391,7 +1371,7 @@
patchf.rename()
n = repo.commit(message, user, ph.date, match=match,
force=True)
- self.applied.append(statusentry(hex(n), patchfn))
+ self.applied.append(statusentry(n, patchfn))
except:
ctx = repo[cparents[0]]
repo.dirstate.rebuild(ctx.node(), ctx.manifest())
@@ -1490,8 +1470,7 @@
return hg.repository(self.ui, path=self.path, create=create)
def restore(self, repo, rev, delete=None, qupdate=None):
- c = repo.changelog.read(rev)
- desc = c[4].strip()
+ desc = repo[rev].description().strip()
lines = desc.splitlines()
i = 0
datastart = None
@@ -1507,12 +1486,11 @@
qpp = [bin(x) for x in l]
elif datastart != None:
l = line.rstrip()
- se = statusentry(l)
- file_ = se.name
- if se.rev:
- applied.append(se)
+ n, name = l.split(':', 1)
+ if n:
+ applied.append(statusentry(bin(n), name))
else:
- series.append(file_)
+ series.append(l)
if datastart is None:
self.ui.warn(_("No saved patch data found\n"))
return 1
@@ -1546,14 +1524,13 @@
hg.clean(r, qpp[0])
def save(self, repo, msg=None):
- if len(self.applied) == 0:
+ if not self.applied:
self.ui.warn(_("save: no patches applied, exiting\n"))
return 1
if self.issaveline(self.applied[-1]):
self.ui.warn(_("status is already saved\n"))
return 1
- ar = [':' + x for x in self.full_series]
if not msg:
msg = _("hg patches saved state")
else:
@@ -1563,18 +1540,18 @@
pp = r.dirstate.parents()
msg += "\nDirstate: %s %s" % (hex(pp[0]), hex(pp[1]))
msg += "\n\nPatch Data:\n"
- text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
- "\n".join(ar) + '\n' or "")
- n = repo.commit(text, force=True)
+ msg += ''.join('%s\n' % x for x in self.applied)
+ msg += ''.join(':%s\n' % x for x in self.full_series)
+ n = repo.commit(msg, force=True)
if not n:
self.ui.warn(_("repo commit failed\n"))
return 1
- self.applied.append(statusentry(hex(n),'.hg.patches.save.line'))
+ self.applied.append(statusentry(n, '.hg.patches.save.line'))
self.applied_dirty = 1
self.removeundo(repo)
def full_series_end(self):
- if len(self.applied) > 0:
+ if self.applied:
p = self.applied[-1].name
end = self.find_series(p)
if end is None:
@@ -1589,17 +1566,15 @@
"""
end = 0
def next(start):
- if all_patches:
+ if all_patches or start >= len(self.series):
return start
- i = start
- while i < len(self.series):
+ for i in xrange(start, len(self.series)):
p, reason = self.pushable(i)
if p:
break
self.explain_pushable(i)
- i += 1
return i
- if len(self.applied) > 0:
+ if self.applied:
p = self.applied[-1].name
try:
end = self.series.index(p)
@@ -1636,7 +1611,6 @@
if (len(files) > 1 or len(rev) > 1) and patchname:
raise util.Abort(_('option "-n" not valid when importing multiple '
'patches'))
- i = 0
added = []
if rev:
# If mq patches are applied, we can only import revisions
@@ -1647,14 +1621,14 @@
raise util.Abort(_('revision %d is the root of more than one '
'branch') % rev[-1])
if self.applied:
- base = hex(repo.changelog.node(rev[0]))
- if base in [n.rev for n in self.applied]:
+ base = repo.changelog.node(rev[0])
+ if base in [n.node for n in self.applied]:
raise util.Abort(_('revision %d is already managed')
% rev[0])
- if heads != [bin(self.applied[-1].rev)]:
+ if heads != [self.applied[-1].node]:
raise util.Abort(_('revision %d is not the parent of '
'the queue') % rev[0])
- base = repo.changelog.rev(bin(self.applied[0].rev))
+ base = repo.changelog.rev(self.applied[0].node)
lastparent = repo.changelog.parentrevs(base)[0]
else:
if heads != [repo.changelog.node(rev[0])]:
@@ -1681,10 +1655,10 @@
self.full_series.insert(0, patchname)
patchf = self.opener(patchname, "w")
- patch.export(repo, [n], fp=patchf, opts=diffopts)
+ cmdutil.export(repo, [n], fp=patchf, opts=diffopts)
patchf.close()
- se = statusentry(hex(n), patchname)
+ se = statusentry(n, patchname)
self.applied.insert(0, se)
added.append(patchname)
@@ -1692,7 +1666,7 @@
self.parse_series()
self.applied_dirty = 1
- for filename in files:
+ for i, filename in enumerate(files):
if existing:
if filename == '-':
raise util.Abort(_('-e is incompatible with import from -'))
@@ -1725,7 +1699,6 @@
self.full_series[index:index] = [patchname]
self.parse_series()
self.ui.warn(_("adding %s to series file\n") % patchname)
- i += 1
added.append(patchname)
patchname = None
self.series_dirty = 1
@@ -1880,7 +1853,7 @@
default. Use -p <url> to change.
The patch directory must be a nested Mercurial repository, as
- would be created by qinit -c.
+ would be created by init --mq.
'''
def patchdir(repo):
url = repo.url()
@@ -1898,11 +1871,11 @@
hg.repository(ui, patchespath)
except error.RepoError:
raise util.Abort(_('versioned patch repository not found'
- ' (see qinit -c)'))
+ ' (see init --mq)'))
qbase, destrev = None, None
if sr.local():
if sr.mq.applied:
- qbase = bin(sr.mq.applied[0].rev)
+ qbase = sr.mq.applied[0].node
if not hg.islocal(dest):
heads = set(sr.heads())
destrev = list(heads.difference(sr.heads(qbase)))
@@ -2544,8 +2517,8 @@
def abort_if_wdir_patched(self, errmsg, force=False):
if self.mq.applied and not force:
- parent = hex(self.dirstate.parents()[0])
- if parent in [s.rev for s in self.mq.applied]:
+ parent = self.dirstate.parents()[0]
+ if parent in [s.node for s in self.mq.applied]:
raise util.Abort(errmsg)
def commit(self, text="", user=None, date=None, match=None,
@@ -2570,7 +2543,7 @@
if not q.applied:
return result
- mqtags = [(bin(patch.rev), patch.name) for patch in q.applied]
+ mqtags = [(patch.node, patch.name) for patch in q.applied]
if mqtags[-1][0] not in self.changelog.nodemap:
self.ui.warn(_('mq status file refers to unknown node %s\n')
@@ -2596,7 +2569,7 @@
return super(mqrepo, self)._branchtags(partial, lrev)
cl = self.changelog
- qbasenode = bin(q.applied[0].rev)
+ qbasenode = q.applied[0].node
if qbasenode not in cl.nodemap:
self.ui.warn(_('mq status file refers to unknown node %s\n')
% short(qbasenode))
@@ -2634,7 +2607,16 @@
if not mq:
return orig(ui, *args, **kwargs)
- repopath = cmdutil.findrepo(os.getcwd())
+ if args:
+ repopath = args[0]
+ if not hg.islocal(repopath):
+ raise util.Abort(_('only a local queue repository '
+ 'may be initialized'))
+ else:
+ repopath = cmdutil.findrepo(os.getcwd())
+ if not repopath:
+ raise util.Abort(_('There is no Mercurial repository here '
+ '(.hg not found)'))
repo = hg.repository(ui, repopath)
return qinit(ui, repo, True)
--- a/hgext/patchbomb.py Sat Mar 06 10:02:45 2010 +0100
+++ b/hgext/patchbomb.py Tue Mar 16 11:37:14 2010 -0300
@@ -249,7 +249,7 @@
def getpatches(revs):
for r in cmdutil.revrange(repo, revs):
output = cStringIO.StringIO()
- patch.export(repo, [r], fp=output,
+ cmdutil.export(repo, [r], fp=output,
opts=patch.diffopts(ui, opts))
yield output.getvalue().split('\n')
--- a/hgext/progress.py Sat Mar 06 10:02:45 2010 +0100
+++ b/hgext/progress.py Tue Mar 16 11:37:14 2010 -0300
@@ -32,6 +32,7 @@
width = <none> # if set, the maximum width of the progress information
# (that is, min(width, term width) will be used)
clear-complete = True # clear the progress bar after it's done
+ disable = False # if true, don't show a progress bar
Valid entries for the format field are topic, bar, number, unit, and
item. item defaults to the last 20 characters of the item, but this
@@ -182,6 +183,7 @@
sharedprog = progbar(ui)
extensions.wrapfunction(ui, 'progress', sharedprog.progress)
extensions.wrapfunction(ui, 'write', sharedprog.write)
+ extensions.wrapfunction(ui, 'write_err', sharedprog.write)
def reposetup(ui, repo):
uisetup(repo.ui)
--- a/hgext/rebase.py Sat Mar 06 10:02:45 2010 +0100
+++ b/hgext/rebase.py Tue Mar 16 11:37:14 2010 -0300
@@ -14,7 +14,7 @@
http://mercurial.selenic.com/wiki/RebaseExtension
'''
-from mercurial import util, repair, merge, cmdutil, commands, error
+from mercurial import hg, util, repair, merge, cmdutil, commands, error
from mercurial import extensions, ancestor, copies, patch
from mercurial.commands import templateopts
from mercurial.node import nullrev
@@ -28,8 +28,39 @@
"""move changeset (and descendants) to a different branch
Rebase uses repeated merging to graft changesets from one part of
- history onto another. This can be useful for linearizing local
- changes relative to a master development tree.
+ history (the source) onto another (the destination). This can be
+ useful for linearizing local changes relative to a master
+ development tree.
+
+ If you don't specify a destination changeset (``-d/--dest``),
+ rebase uses the tipmost head of the current named branch as the
+ destination. (The destination changeset is not modified by
+ rebasing, but new changesets are added as its descendants.)
+
+ You can specify which changesets to rebase in two ways: as a
+ "source" changeset or as a "base" changeset. Both are shorthand
+ for a topologically related set of changesets (the "source
+ branch"). If you specify source (``-s/--source``), rebase will
+ rebase that changeset and all of its descendants onto dest. If you
+ specify base (``-b/--base``), rebase will select ancestors of base
+ back to but not including the common ancestor with dest. Thus,
+ ``-b`` is less precise but more convenient than ``-s``: you can
+ specify any changeset in the source branch, and rebase will select
+ the whole branch. If you specify neither ``-s`` nor ``-b``, rebase
+ uses the parent of the working directory as the base.
+
+ By default, rebase recreates the changesets in the source branch
+ as descendants of dest and then destroys the originals. Use
+ ``--keep`` to preserve the original source changesets. Some
+ changesets in the source branch (e.g. merges from the destination
+ branch) may be dropped if they no longer contribute any change.
+
+ One result of the rules for selecting the destination changeset
+ and source branch is that, unlike ``merge``, rebase will do
+ nothing if you are at the latest (tipmost) head of a named branch
+ with two heads. You need to explicitly specify source and/or
+ destination (or ``update`` to the other head, if it's the head of
+ the intended source branch).
If a rebase is interrupted to manually resolve a merge, it can be
continued with --continue/-c or aborted with --abort/-a.
@@ -56,6 +87,9 @@
keepf = opts.get('keep', False)
keepbranchesf = opts.get('keepbranches', False)
detachf = opts.get('detach', False)
+ # keepopen is not meant for use on the command line, but by
+ # other extensions
+ keepopen = opts.get('keepopen', False)
if contf or abortf:
if contf and abortf:
@@ -150,7 +184,7 @@
ui.note(_('rebase merging completed\n'))
- if collapsef:
+ if collapsef and not keepopen:
p1, p2 = defineparents(repo, min(state), target,
state, targetancestors)
commitmsg = 'Collapsed revision'
@@ -314,10 +348,10 @@
'Update rebased mq patches - finalize and then import them'
mqrebase = {}
for p in repo.mq.applied:
- if repo[p.rev].rev() in state:
+ if repo[p.node].rev() in state:
repo.ui.debug('revision %d is an mq patch (%s), finalize it.\n' %
- (repo[p.rev].rev(), p.name))
- mqrebase[repo[p.rev].rev()] = (p.name, isagitpatch(repo, p.name))
+ (repo[p.node].rev(), p.name))
+ mqrebase[repo[p.node].rev()] = (p.name, isagitpatch(repo, p.name))
if mqrebase:
repo.mq.finish(repo, mqrebase.keys())
@@ -409,11 +443,15 @@
branch = repo[None].branch()
dest = repo[branch].rev()
else:
- if 'qtip' in repo.tags() and (repo[dest].hex() in
- [s.rev for s in repo.mq.applied]):
- raise util.Abort(_('cannot rebase onto an applied mq patch'))
dest = repo[dest].rev()
+ # This check isn't strictly necessary, since mq detects commits over an
+ # applied patch. But it prevents messing up the working directory when
+ # a partially completed rebase is blocked by mq.
+ if 'qtip' in repo.tags() and (repo[dest].node() in
+ [s.node for s in repo.mq.applied]):
+ raise util.Abort(_('cannot rebase onto an applied mq patch'))
+
if src:
commonbase = repo[src].ancestor(repo[dest])
if commonbase == repo[src]:
@@ -467,7 +505,14 @@
cmdutil.bail_if_changed(repo)
revsprepull = len(repo)
- orig(ui, repo, *args, **opts)
+ origpostincoming = commands.postincoming
+ def _dummy(*args, **kwargs):
+ pass
+ commands.postincoming = _dummy
+ try:
+ orig(ui, repo, *args, **opts)
+ finally:
+ commands.postincoming = origpostincoming
revspostpull = len(repo)
if revspostpull > revsprepull:
rebase(ui, repo, **opts)
@@ -475,7 +520,7 @@
dest = repo[branch].rev()
if dest != repo['.'].rev():
# there was nothing to rebase we force an update
- merge.update(repo, dest, False, False, False)
+ hg.update(repo, dest)
else:
orig(ui, repo, *args, **opts)
@@ -490,9 +535,10 @@
"rebase":
(rebase,
[
- ('s', 'source', '', _('rebase from a given revision')),
- ('b', 'base', '', _('rebase from the base of a given revision')),
- ('d', 'dest', '', _('rebase onto a given revision')),
+ ('s', 'source', '', _('rebase from the specified changeset')),
+ ('b', 'base', '', _('rebase from the base of the specified changeset '
+ '(up to greatest common ancestor of base and dest)')),
+ ('d', 'dest', '', _('rebase onto the specified changeset')),
('', 'collapse', False, _('collapse the rebased changesets')),
('', 'keep', False, _('keep original changesets')),
('', 'keepbranches', False, _('keep original branch names')),
@@ -501,6 +547,6 @@
('c', 'continue', False, _('continue an interrupted rebase')),
('a', 'abort', False, _('abort an interrupted rebase'))] +
templateopts,
- _('hg rebase [-s REV | -b REV] [-d REV] [--collapse] [--detach] '
- '[--keep] [--keepbranches] | [-c] | [-a]')),
+ _('hg rebase [-s REV | -b REV] [-d REV] [options]\n'
+ 'hg rebase {-a|-c}'))
}
--- a/hgext/record.py Sat Mar 06 10:02:45 2010 +0100
+++ b/hgext/record.py Tue Mar 16 11:37:14 2010 -0300
@@ -293,6 +293,7 @@
_('&Quit, recording no changes'),
_('&?'))
r = ui.promptchoice("%s %s" % (query, resps), choices)
+ ui.write("\n")
if r == 7: # ?
doc = gettext(record.__doc__)
c = doc.find(_('y - record this change'))
--- a/hgext/relink.py Sat Mar 06 10:02:45 2010 +0100
+++ b/hgext/relink.py Tue Mar 16 11:37:14 2010 -0300
@@ -139,13 +139,13 @@
continue
try:
relinkfile(source, tgt)
- ui.progress(_('relink'), pos, f, _(' files'), total)
+ ui.progress(_('relinking'), pos, f, _(' files'), total)
relinked += 1
savedbytes += sz
except OSError, inst:
ui.warn('%s: %s\n' % (tgt, str(inst)))
- ui.progress(_('relink'), None, f, _(' files'), total)
+ ui.progress(_('relinking'), None, f, _(' files'), total)
ui.status(_('relinked %d files (%d bytes reclaimed)\n') %
(relinked, savedbytes))
--- a/i18n/pt_BR.po Sat Mar 06 10:02:45 2010 +0100
+++ b/i18n/pt_BR.po Tue Mar 16 11:37:14 2010 -0300
@@ -3011,7 +3011,6 @@
"\n"
"Common tasks (use \"hg help command\" for more details)::\n"
"\n"
-" prepare repository to work with patches qinit\n"
" create new patch qnew\n"
" import existing patch qimport\n"
"\n"
@@ -3047,7 +3046,6 @@
"\n"
"Tarefas comuns (use \"hg help comando\" para mais detalhes)::\n"
"\n"
-" prepara um repositório para trabalhar com patches qinit\n"
" cria um novo patch qnew\n"
" importa um patch existente qimport\n"
"\n"
@@ -3499,7 +3497,7 @@
" qcommit to commit changes to this queue repository.\n"
"\n"
" This command is deprecated. Without -c, it's implied by other relevant\n"
-" commands. With -c, use hg init -Q instead."
+" commands. With -c, use hg init --mq instead."
msgstr ""
"cria um novo repositório de fila (OBSOLETO)\n"
"\n"
@@ -3511,7 +3509,7 @@
" qcommit para consolidar mudanças neste repositório de fila.\n"
"\n"
" Este comando é obsoleto. Sem -c, é subentendido em outros comandos\n"
-" relevantes. E ao invés de qinit -c, use hg init -Q ."
+" relevantes. E ao invés de qinit -c, use hg init --mq ."
msgid ""
"clone main and patch repository at same time\n"
@@ -3526,7 +3524,7 @@
" default. Use -p <url> to change.\n"
"\n"
" The patch directory must be a nested Mercurial repository, as\n"
-" would be created by qinit -c.\n"
+" would be created by init --mq.\n"
" "
msgstr ""
"clona os repositórios principal e de fila ao mesmo tempo\n"
@@ -3542,11 +3540,11 @@
" <origem>/.hg/patches . Use -p <url> para mudar.\n"
"\n"
" O diretório de patches deve ser um repositório aninhado do\n"
-" Mercurial, como criado por qinit -c.\n"
-" "
-
-msgid "versioned patch repository not found (see qinit -c)"
-msgstr "repositório versionado de patches não encontrado (veja qinit -c)"
+" Mercurial, como criado por init --mq.\n"
+" "
+
+msgid "versioned patch repository not found (see init --mq)"
+msgstr "repositório versionado de patches não encontrado (veja init --mq)"
msgid "cloning main repository\n"
msgstr "clonando repositório principal\n"
@@ -3563,11 +3561,11 @@
msgid ""
"commit changes in the queue repository (DEPRECATED)\n"
"\n"
-" This command is deprecated; use hg -Q commit instead."
+" This command is deprecated; use hg --mq commit instead."
msgstr ""
"consolida mudanças no repositório da fila de patches (OBSOLETO)\n"
"\n"
-" Este comando é obsoleto; use hg -Q commit em seu lugar."
+" Este comando é obsoleto; use hg --mq commit em seu lugar."
msgid "print the entire series file"
msgstr "imprime todo o arquivo series"
@@ -4046,6 +4044,12 @@
msgid "cannot import over an applied patch"
msgstr "não se pode importar sobre um patch aplicado"
+msgid "only a local queue repository may be initialized"
+msgstr "apenas um repositório de fila local pode ser inicializado"
+
+msgid "There is no Mercurial repository here (.hg not found)"
+msgstr "Não há um repositório do Mercurial aqui (.hg não encontrado)"
+
msgid "operate on patch repository"
msgstr "opera no repositório de patches"
@@ -4952,6 +4956,7 @@
" width = <none> # if set, the maximum width of the progress information\n"
" # (that is, min(width, term width) will be used)\n"
" clear-complete = True # clear the progress bar after it's done\n"
+" disable = False # if true, don't show a progress bar\n"
"\n"
"Valid entries for the format field are topic, bar, number, unit, and\n"
"item. item defaults to the last 20 characters of the item, but this\n"
@@ -4976,6 +4981,7 @@
" # progresso (isto é, min(largura, largura do terminal)\n"
" # será usada)\n"
" clear-complete = True # limpa a barra de progresso após terminar\n"
+" disable = False # se 'true', não exibe uma barra de progresso\n"
"\n"
"Entradas válidas para o campo format são topic (tópico), bar (barra),\n"
"number (número), unit (unidade) e item (item). item por padrão são os\n"
@@ -5093,8 +5099,39 @@
"move changeset (and descendants) to a different branch\n"
"\n"
" Rebase uses repeated merging to graft changesets from one part of\n"
-" history onto another. This can be useful for linearizing local\n"
-" changes relative to a master development tree.\n"
+" history (the source) onto another (the destination). This can be\n"
+" useful for linearizing local changes relative to a master\n"
+" development tree.\n"
+"\n"
+" If you don't specify a destination changeset (``-d/--dest``),\n"
+" rebase uses the tipmost head of the current named branch as the\n"
+" destination. (The destination changeset is not modified by\n"
+" rebasing, but new changesets are added as its descendants.)\n"
+"\n"
+" You can specify which changesets to rebase in two ways: as a\n"
+" \"source\" changeset or as a \"base\" changeset. Both are shorthand\n"
+" for a topologically related set of changesets (the \"source\n"
+" branch\"). If you specify source (``-s/--source``), rebase will\n"
+" rebase that changeset and all of its descendants onto dest. If you\n"
+" specify base (``-b/--base``), rebase will select ancestors of base\n"
+" back to but not including the common ancestor with dest. Thus,\n"
+" ``-b`` is less precise but more convenient than ``-s``: you can\n"
+" specify any changeset in the source branch, and rebase will select\n"
+" the whole branch. If you specify neither ``-s`` nor ``-b``, rebase\n"
+" uses the parent of the working directory as the base.\n"
+"\n"
+" By default, rebase recreates the changesets in the source branch\n"
+" as descendants of dest and then destroys the originals. Use\n"
+" ``--keep`` to preserve the original source changesets. Some\n"
+" changesets in the source branch (e.g. merges from the destination\n"
+" branch) may be dropped if they no longer contribute any change.\n"
+"\n"
+" One result of the rules for selecting the destination changeset\n"
+" and source branch is that, unlike ``merge``, rebase will do\n"
+" nothing if you are at the latest (tipmost) head of a named branch\n"
+" with two heads. You need to explicitly specify source and/or\n"
+" destination (or ``update`` to the other head, if it's the head of\n"
+" the intended source branch).\n"
"\n"
" If a rebase is interrupted to manually resolve a merge, it can be\n"
" continued with --continue/-c or aborted with --abort/-a.\n"
@@ -5103,12 +5140,46 @@
"move a revisão (e descendentes) para um ramo diferente\n"
"\n"
" Rebase usa mesclagens repetidamente para migrar revisões de uma\n"
-" parte do histórico para outra. Isto pode ser útil para linearizar\n"
-" mudanças locais relativas a uma árvore mestra de desenvolvimento.\n"
+" parte do histórico (a origem) para outra (o destino). Isto pode\n"
+" ser útil para linearizar mudanças locais relativas a uma árvore\n"
+" mestra de desenvolvimento.\n"
+"\n"
+" Se você não especificar uma revisão de destino (opção ``-d/--dest``),\n"
+" rebase usará a como destino a cabeça mais recente do ramo nomeado\n"
+" atual. (A revisão de destino não é modificada pelo rebaseamento, mas\n"
+" novas revisões são acrescentadas como seus descendentes.)\n"
+"\n"
+" Você pode especificar quais revisões rebasear de duas maneiras:\n"
+" revisão de \"origem\" ou revisão \"base\". Ambas são formas de\n"
+" especificar um conjunto de revisões topologicamente relacionadas (o\n"
+" \"ramo de origem\"). Se você especificar a revisão de origem (com\n"
+" ``-s/--source``), o comando rebase irá rebasear tal revisão e todos\n"
+" os seus descendentes para o destino. Se você especificar a revisão\n"
+" base (com ``-b/--base``), o comando rebase selecionará ancestrais\n"
+" dessa revisão base até, mas sem incluir, seu ancestral comum com o\n"
+" destino. Assim, ``-b`` é menos preciso, mas mais conveniente, que\n"
+" ``-s``: você pode especificar qualquer revisão no ramo de origem,\n"
+" e rebase irá selecionar o ramo completo. Se você não especificar\n"
+" nem ``-s`` nem ``-b``, rebase usará o pai do diretório de trabalho\n"
+" como revisão base.\n"
+"\n"
+" Por padrão, rebase recria as revisões do ramo de origem como\n"
+" descendentes do destino, e destrói as originais. Use a opção\n"
+" ``--keep`` para preservar as revisões originais. Algumas revisões\n"
+" no ramo de origem (por exemplo, mesclagens com o ramo de destino)\n"
+" podem ser descartadas se elas não contribuÃrem mais com nenhuma\n"
+" mudança.\n"
+"\n"
+" Um resultado das regras de seleção da revisão de destino é que, ao\n"
+" contrário de ``merge``, rebase não fará nada se você estiver na última\n"
+" cabeça (a mais próxima da tip) de um ramo nomeado com duas ou mais\n"
+" cabeças. Nesse caso você precisa especificar explicitamente a origem\n"
+" e/ou o destino (ou fazer um ``update`` para outra cabeça, se for a\n"
+" cabeça do ramo de origem desejado).\n"
"\n"
" Se um rebaseamento for interrompido para resolver uma mesclagem\n"
-" manualmente, ele pode ser continuado com --continue/-c ou abortado\n"
-" com --abort/-a.\n"
+" manualmente, pode ser continuado com --continue/-c ou abortado com\n"
+" --abort/-a.\n"
" "
msgid "cannot use both abort and continue"
@@ -5188,14 +5259,18 @@
msgid "rebase working directory to branch head"
msgstr "rebaseia o diretório de trabalho para a cabeça do ramo"
-msgid "rebase from a given revision"
-msgstr "rebaseia a partir de uma revisão dada"
-
-msgid "rebase from the base of a given revision"
-msgstr "rebaseia a partir da base de uma revisão dada"
-
-msgid "rebase onto a given revision"
-msgstr "rebaseia para a revisão dada"
+msgid "rebase from the specified changeset"
+msgstr "rebaseia a partir da revisão especificada"
+
+msgid ""
+"rebase from the base of the specified changeset (up to greatest common "
+"ancestor of base and dest)"
+msgstr ""
+"rebaseia a partir da base da revisão especificada (até o maior ancestral "
+"comum de base e dest)"
+
+msgid "rebase onto the specified changeset"
+msgstr "rebaseia sobre a revisão especificada"
msgid "collapse the rebased changesets"
msgstr "colapsa as revisões rebaseadas"
@@ -5216,11 +5291,11 @@
msgstr "aborta um rebaseamento interrompido"
msgid ""
-"hg rebase [-s REV | -b REV] [-d REV] [--collapse] [--detach] [--keep] [--"
-"keepbranches] | [-c] | [-a]"
-msgstr ""
-"hg rebase [-s REV | -b REV] [-d REV] [--collapse] [--detach] [--keep] [--"
-"keepbranches] [-c] | [-a]"
+"hg rebase [-s REV | -b REV] [-d REV] [options]\n"
+"hg rebase {-a|-c}"
+msgstr ""
+"hg rebase [-s REV | -b REV] [-d REV] [opções]\n"
+"hg rebase {-a|-c}"
msgid "commands to interactively select changes for commit/qrefresh"
msgstr "comandos para selecionar interativamente mudanças em um commit ou qrefresh"
@@ -5431,8 +5506,8 @@
msgid " files"
msgstr " arquivos"
-msgid "relink"
-msgstr "relink"
+msgid "relinking"
+msgstr "restaurando hardlinks"
#, python-format
msgid "relinked %d files (%d bytes reclaimed)\n"
@@ -6651,8 +6726,8 @@
msgid "reset working directory to branch %s\n"
msgstr "redefine o diretório de trabalho para o ramo %s\n"
-msgid "a branch of the same name already exists (use --force to override)"
-msgstr "um ramo de mesmo nome já existe (use --force para forçar)"
+msgid "a branch of the same name already exists (use 'hg update' to switch to it)"
+msgstr "um ramo de mesmo nome já existe (use 'hg update' para mudar para ele)"
#, python-format
msgid "marked working directory as branch %s\n"
@@ -6794,20 +6869,6 @@
" .hg/hgrc and working directory will be created on the remote side.\n"
" Please see 'hg help urls' for important details about ``ssh://`` URLs.\n"
"\n"
-" If the -U/--noupdate option is specified, the new clone will contain\n"
-" only a repository (.hg) and no working copy (the working copy parent\n"
-" will be the null changeset). Otherwise, clone will initially check\n"
-" out (in order of precedence):\n"
-"\n"
-" a) the changeset, tag or branch specified with -u/--updaterev\n"
-" b) the changeset, tag or branch given with the first -r/--rev\n"
-" c) the branch given with the first -b/--branch\n"
-" d) the branch given with the url#branch source syntax\n"
-" e) the head of the default branch\n"
-"\n"
-" Use 'hg clone -u . src dst' to checkout the source repository's\n"
-" parent changeset (applicable for local source repositories only).\n"
-"\n"
" A set of changesets (tags, or branch names) to pull may be specified\n"
" by listing each changeset (tag, or branch name) with -r/--rev.\n"
" If -r/--rev is used, the cloned repository will contain only a subset\n"
@@ -6822,12 +6883,12 @@
"\n"
" For efficiency, hardlinks are used for cloning whenever the source\n"
" and destination are on the same filesystem (note this applies only\n"
-" to the repository data, not to the checked out files). Some\n"
+" to the repository data, not to the working directory). Some\n"
" filesystems, such as AFS, implement hardlinking incorrectly, but\n"
" do not report errors. In these cases, use the --pull option to\n"
" avoid hardlinking.\n"
"\n"
-" In some cases, you can clone repositories and checked out files\n"
+" In some cases, you can clone repositories and the working directory\n"
" using full hardlinks with ::\n"
"\n"
" $ cp -al REPO REPOCLONE\n"
@@ -6838,6 +6899,20 @@
" breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,\n"
" this is not compatible with certain extensions that place their\n"
" metadata under the .hg directory, such as mq.\n"
+"\n"
+" Mercurial will update the working directory to the first applicable\n"
+" revision from this list:\n"
+"\n"
+" a) null if -U or the source repository has no changesets\n"
+" b) if -u . and the source repository is local, the first parent of\n"
+" the source repository's working directory\n"
+" c) the changeset specified with -u (if a branch name, this means the\n"
+" latest head of that branch)\n"
+" d) the changeset specified with -r\n"
+" e) the tipmost head specified with -b\n"
+" f) the tipmost head specified with the url#branch source syntax\n"
+" g) the tipmost head of the default branch\n"
+" h) tip\n"
" "
msgstr ""
"cria uma cópia de um repositório existente\n"
@@ -6858,21 +6933,6 @@
" Por favor veja 'hg help URLs' para detalhes importantes sobre\n"
" URLs ``ssh://`` .\n"
"\n"
-" Se a opção -U/--noupdate for especificada, o novo clone irá conter\n"
-" apenas um repositório (diretório .hg) e nenhuma cópia de trabalho\n"
-" (o pai do diretório de trabalho será a revisão null). De outro modo,\n"
-" o comando clone irá obter inicialmente (em ordem de precedência):\n"
-"\n"
-" a) a revisão, etiqueta ou ramo especificados com -u/--updaterev\n"
-" b) a revisão, etiqueta ou ramo especificados com o primeiro -r/--rev\n"
-" c) o ramo especificado pelo primeiro parâmetro -b/--branch\n"
-" d) o ramo especificado pela sintaxe de origem url#ramo\n"
-" e) a cabeça do ramo default\n"
-"\n"
-" Use 'hg clone -u . origem destino' para atualizar para a revisão\n"
-" pai do diretório de trabalho do repositório de origem (aplicável\n"
-" apenas a repositórios locais).\n"
-"\n"
" Um conjunto de revisões (etiquetas, ou nomes de ramo) a serem trazidas\n"
" pode ser especificado listando cada revisão (etiqueta, ou nome de\n"
" ramo) com -r/--rev. Se -r/--rev for usado, o repositório clonado irá\n"
@@ -6893,8 +6953,8 @@
" erros. Nesses casos, use a opção --pull para evitar o uso de\n"
" hardlinks.\n"
"\n"
-" Em alguns casos, você pode clonar repositórios e arquivos da\n"
-" cópia de trabalho usando hardlinks completos com ::\n"
+" Em alguns casos, você pode clonar repositórios e o diretório de\n"
+" trabalho usando hardlinks completos com ::\n"
"\n"
" $ cp -al REPO REPOCLONE\n"
"\n"
@@ -6905,6 +6965,22 @@
" utilitários do kernel Linux fazem isso). Além disso, esse modo de\n"
" criar um clone não é compatÃvel com certas extensões que colocam\n"
" seus metadados sob o diretório hg, como a mq.\n"
+"\n"
+" O Mercurial atualizará o diretório de trabalho para a primeira revisão\n"
+" aplicável da seguinte lista:\n"
+"\n"
+" a) null, se for passada a opção -U ou se o repositório de origem não\n"
+" tiver revisões\n"
+" b) o primeiro pai do diretório de trabalho do repositório de origem,\n"
+" se for passada a opção -u e o repositório de origem for local\n"
+" c) a revisão especificada por -u (se for um nome de ramo, é a última\n"
+" cabeça de tal ramo)\n"
+" d) a revisão especificada com -r\n"
+" e) a cabeça mais recente especificada com -b\n"
+" f) a cabeça mais recente especificada com a sintaxe de origem\n"
+" url#ramo\n"
+" g) a cabeça mais recente do ramo default\n"
+" h) a tip\n"
" "
msgid "cannot specify both --noupdate and --updaterev"
@@ -6991,9 +7067,6 @@
msgid "find the ancestor revision of two revisions in a given index"
msgstr "encontra a revisão ancestral de duas revisões no Ãndice dado"
-msgid "There is no Mercurial repository here (.hg not found)"
-msgstr "Não há um repositório do Mercurial aqui (.hg não encontrado)"
-
msgid "either two or three arguments required"
msgstr "ou dois ou três argumentos necessários"
@@ -11382,20 +11455,20 @@
msgid "%d changesets found\n"
msgstr "%d revisões encontradas\n"
-msgid "bundle changes"
+msgid "bundling changes"
msgstr "empacotando mudanças"
msgid "chunks"
msgstr "trechos"
-msgid "bundle manifests"
+msgid "bundling manifests"
msgstr "empacotando manifestos"
#, python-format
msgid "empty or missing revlog for %s"
msgstr "revlog vazio ou não encontrado para %s"
-msgid "bundle files"
+msgid "bundling files"
msgstr "empacotando arquivos"
msgid "adding changesets\n"
@@ -12114,8 +12187,8 @@
msgid "checking changesets\n"
msgstr "checando revisões\n"
-msgid "changelog"
-msgstr "changelog"
+msgid "checking"
+msgstr "checando"
#, python-format
msgid "unpacking changeset %s"
@@ -12138,7 +12211,7 @@
msgid "crosschecking files in changesets and manifests\n"
msgstr "checagem cruzada de arquivos em revisões e no manifesto\n"
-msgid "crosscheck"
+msgid "crosschecking"
msgstr "checagem cruzada"
#, python-format
--- a/mercurial/cmdutil.py Sat Mar 06 10:02:45 2010 +0100
+++ b/mercurial/cmdutil.py Tue Mar 16 11:37:14 2010 -0300
@@ -289,11 +289,18 @@
'''find renamed files -- yields (before, after, score) tuples'''
copies = {}
ctx = repo['.']
- for r in removed:
+ for i, r in enumerate(removed):
+ repo.ui.progress(_('searching'), i, total=len(removed))
if r not in ctx:
continue
fctx = ctx.filectx(r)
+ # lazily load text
+ @util.cachefunc
+ def data():
+ orig = fctx.data()
+ return orig, mdiff.splitnewlines(orig)
+
def score(text):
if not len(text):
return 0.0
@@ -301,14 +308,13 @@
return 1.0
if threshold == 1.0:
return 0.0
- orig = fctx.data()
+ orig, lines = data()
# bdiff.blocks() returns blocks of matching lines
# count the number of bytes in each
equal = 0
- alines = mdiff.splitnewlines(text)
matches = bdiff.blocks(text, orig)
for x1, x2, y1, y2 in matches:
- for line in alines[x1:x2]:
+ for line in lines[y1:y2]:
equal += len(line)
lengths = len(text) + len(orig)
@@ -319,6 +325,7 @@
myscore = score(repo.wread(a))
if myscore >= bestscore:
copies[a] = (r, myscore)
+ repo.ui.progress(_('searching'), None, total=len(removed))
for dest, v in copies.iteritems():
source, score = v
@@ -356,9 +363,7 @@
removed.append(abs)
elif repo.dirstate[abs] == 'a':
added.append(abs)
- if not dry_run:
- repo.remove(deleted)
- repo.add(unknown)
+ copies = {}
if similarity > 0:
for old, new, score in findrenames(repo, added + unknown,
removed + deleted, similarity):
@@ -366,8 +371,17 @@
repo.ui.status(_('recording removal of %s as rename to %s '
'(%d%% similar)\n') %
(m.rel(old), m.rel(new), score * 100))
- if not dry_run:
+ copies[new] = old
+
+ if not dry_run:
+ wlock = repo.wlock()
+ try:
+ repo.remove(deleted)
+ repo.add(unknown)
+ for new, old in copies.iteritems():
repo.copy(old, new)
+ finally:
+ wlock.release()
def copy(ui, repo, pats, opts, rename=False):
# called with the repo lock held
@@ -646,6 +660,46 @@
if runfn:
return runfn()
+def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
+ opts=None):
+ '''export changesets as hg patches.'''
+
+ total = len(revs)
+ revwidth = max([len(str(rev)) for rev in revs])
+
+ def single(rev, seqno, fp):
+ ctx = repo[rev]
+ node = ctx.node()
+ parents = [p.node() for p in ctx.parents() if p]
+ branch = ctx.branch()
+ if switch_parent:
+ parents.reverse()
+ prev = (parents and parents[0]) or nullid
+
+ if not fp:
+ fp = make_file(repo, template, node, total=total, seqno=seqno,
+ revwidth=revwidth, mode='ab')
+ if fp != sys.stdout and hasattr(fp, 'name'):
+ repo.ui.note("%s\n" % fp.name)
+
+ fp.write("# HG changeset patch\n")
+ fp.write("# User %s\n" % ctx.user())
+ fp.write("# Date %d %d\n" % ctx.date())
+ if branch and (branch != 'default'):
+ fp.write("# Branch %s\n" % branch)
+ fp.write("# Node ID %s\n" % hex(node))
+ fp.write("# Parent %s\n" % hex(prev))
+ if len(parents) > 1:
+ fp.write("# Parent %s\n" % hex(parents[1]))
+ fp.write(ctx.description().rstrip())
+ fp.write("\n\n")
+
+ for chunk in patch.diff(repo, prev, node, opts=opts):
+ fp.write(chunk)
+
+ for seqno, rev in enumerate(revs):
+ single(rev, seqno + 1, fp)
+
class changeset_printer(object):
'''show changeset information when templating not requested.'''
--- a/mercurial/commands.py Sat Mar 06 10:02:45 2010 +0100
+++ b/mercurial/commands.py Tue Mar 16 11:37:14 2010 -0300
@@ -12,8 +12,8 @@
import hg, util, revlog, bundlerepo, extensions, copies, error
import patch, help, mdiff, url, encoding, templatekw
import archival, changegroup, cmdutil, sshserver, hbisect
-from hgweb import server
-import merge as merge_
+from hgweb import server, hgweb_mod, hgwebdir_mod
+import merge as mergemod
import minirst
# Commands start here, listed alphabetically
@@ -158,8 +158,10 @@
By default, the revision used is the parent of the working
directory; use -r/--rev to specify a different revision.
- To specify the type of archive to create, use -t/--type. Valid
- types are:
+ The archive type is automatically detected based on file
+ extension (or override using -t/--type).
+
+ Valid types are:
:``files``: a directory full of files (default)
:``tar``: tar archive, uncompressed
@@ -184,16 +186,32 @@
dest = cmdutil.make_filename(repo, dest, node)
if os.path.realpath(dest) == repo.root:
raise util.Abort(_('repository root cannot be destination'))
- matchfn = cmdutil.match(repo, [], opts)
- kind = opts.get('type') or 'files'
+
+ def guess_type():
+ exttypes = {
+ 'tar': ['.tar'],
+ 'tbz2': ['.tbz2', '.tar.bz2'],
+ 'tgz': ['.tgz', '.tar.gz'],
+ 'zip': ['.zip'],
+ }
+
+ for type, extensions in exttypes.items():
+ if util.any(dest.endswith(ext) for ext in extensions):
+ return type
+ return None
+
+ kind = opts.get('type') or guess_type() or 'files'
prefix = opts.get('prefix')
+
if dest == '-':
if kind == 'files':
raise util.Abort(_('cannot archive plain files to stdout'))
dest = sys.stdout
if not prefix:
prefix = os.path.basename(repo.root) + '-%h'
+
prefix = cmdutil.make_filename(repo, prefix, node)
+ matchfn = cmdutil.match(repo, [], opts)
archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
matchfn, prefix)
@@ -451,7 +469,7 @@
if not opts.get('force') and utflabel in repo.branchtags():
if label not in [p.branch() for p in repo.parents()]:
raise util.Abort(_('a branch of the same name already exists'
- ' (use --force to override)'))
+ " (use 'hg update' to switch to it)"))
repo.dirstate.setbranch(utflabel)
ui.status(_('marked working directory as branch %s\n') % label)
else:
@@ -564,6 +582,10 @@
revs, checkout = hg.addbranchrevs(repo, other, branches, revs)
o = repo.findoutgoing(other, force=opts.get('force'))
+ if not o:
+ ui.status(_("no changes found\n"))
+ return
+
if revs:
cg = repo.changegroupsubset(o, revs, 'bundle')
else:
@@ -621,20 +643,6 @@
.hg/hgrc and working directory will be created on the remote side.
Please see 'hg help urls' for important details about ``ssh://`` URLs.
- If the -U/--noupdate option is specified, the new clone will contain
- only a repository (.hg) and no working copy (the working copy parent
- will be the null changeset). Otherwise, clone will initially check
- out (in order of precedence):
-
- a) the changeset, tag or branch specified with -u/--updaterev
- b) the changeset, tag or branch given with the first -r/--rev
- c) the branch given with the first -b/--branch
- d) the branch given with the url#branch source syntax
- e) the head of the default branch
-
- Use 'hg clone -u . src dst' to checkout the source repository's
- parent changeset (applicable for local source repositories only).
-
A set of changesets (tags, or branch names) to pull may be specified
by listing each changeset (tag, or branch name) with -r/--rev.
If -r/--rev is used, the cloned repository will contain only a subset
@@ -649,12 +657,12 @@
For efficiency, hardlinks are used for cloning whenever the source
and destination are on the same filesystem (note this applies only
- to the repository data, not to the checked out files). Some
+ to the repository data, not to the working directory). Some
filesystems, such as AFS, implement hardlinking incorrectly, but
do not report errors. In these cases, use the --pull option to
avoid hardlinking.
- In some cases, you can clone repositories and checked out files
+ In some cases, you can clone repositories and the working directory
using full hardlinks with ::
$ cp -al REPO REPOCLONE
@@ -665,6 +673,20 @@
breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
this is not compatible with certain extensions that place their
metadata under the .hg directory, such as mq.
+
+ Mercurial will update the working directory to the first applicable
+ revision from this list:
+
+ a) null if -U or the source repository has no changesets
+ b) if -u . and the source repository is local, the first parent of
+ the source repository's working directory
+ c) the changeset specified with -u (if a branch name, this means the
+ latest head of that branch)
+ d) the changeset specified with -r
+ e) the tipmost head specified with -b
+ f) the tipmost head specified with the url#branch source syntax
+ g) the tipmost head of the default branch
+ h) tip
"""
if opts.get('noupdate') and opts.get('updaterev'):
raise util.Abort(_("cannot specify both --noupdate and --updaterev"))
@@ -1204,7 +1226,7 @@
ui.note(_('exporting patches:\n'))
else:
ui.note(_('exporting patch:\n'))
- patch.export(repo, revs, template=opts.get('output'),
+ cmdutil.export(repo, revs, template=opts.get('output'),
switch_parent=opts.get('switch_parent'),
opts=patch.diffopts(ui, opts))
@@ -2574,7 +2596,7 @@
raise util.Abort(_('no files or directories specified; '
'use --all to remerge all files'))
- ms = merge_.mergestate(repo)
+ ms = mergemod.mergestate(repo)
m = cmdutil.match(repo, pats, opts)
for f in ms:
@@ -2864,6 +2886,10 @@
By default, the server logs accesses to stdout and errors to
stderr. Use the -A/--accesslog and -E/--errorlog options to log to
files.
+
+ To have the server choose a free port number to listen on, specify
+ a port number of 0; in this case, the server will print the port
+ number it uses.
"""
if opts["stdio"]:
@@ -2873,25 +2899,35 @@
s = sshserver.sshserver(ui, repo)
s.serve_forever()
+ # this way we can check if something was given in the command-line
+ if opts.get('port'):
+ opts['port'] = int(opts.get('port'))
+
baseui = repo and repo.baseui or ui
optlist = ("name templates style address port prefix ipv6"
- " accesslog errorlog webdir_conf certificate encoding")
+ " accesslog errorlog certificate encoding")
for o in optlist.split():
- if opts.get(o, None):
- baseui.setconfig("web", o, str(opts[o]))
- if (repo is not None) and (repo.ui != baseui):
- repo.ui.setconfig("web", o, str(opts[o]))
-
- if repo is None and not ui.config("web", "webdir_conf"):
- raise error.RepoError(_("There is no Mercurial repository here"
- " (.hg not found)"))
+ val = opts.get(o, '')
+ if val in (None, ''): # should check against default options instead
+ continue
+ baseui.setconfig("web", o, val)
+ if repo and repo.ui != baseui:
+ repo.ui.setconfig("web", o, val)
+
+ if opts.get('webdir_conf'):
+ app = hgwebdir_mod.hgwebdir(opts['webdir_conf'], ui)
+ elif repo is not None:
+ app = hgweb_mod.hgweb(hg.repository(repo.ui, repo.root))
+ else:
+ raise error.RepoError(_("There is no Mercurial repository"
+ " here (.hg not found)"))
class service(object):
def init(self):
util.set_signal_handler()
- self.httpd = server.create_server(baseui, repo)
-
- if not ui.verbose:
+ self.httpd = server.create_server(ui, app)
+
+ if opts['port'] and not ui.verbose:
return
if self.httpd.prefix:
@@ -2912,8 +2948,12 @@
fqaddr = self.httpd.fqaddr
if ':' in fqaddr:
fqaddr = '[%s]' % fqaddr
- ui.status(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
- (fqaddr, port, prefix, bindaddr, self.httpd.port))
+ if opts['port']:
+ write = ui.status
+ else:
+ write = ui.write
+ write(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
+ (fqaddr, port, prefix, bindaddr, self.httpd.port))
def run(self):
self.httpd.serve_forever()
@@ -3043,7 +3083,7 @@
ui.status(m)
st = list(repo.status(unknown=True))[:6]
- ms = merge_.mergestate(repo)
+ ms = mergemod.mergestate(repo)
st.append([f for f in ms if ms[f] == 'u'])
labels = [_('%d modified'), _('%d added'), _('%d removed'),
_('%d deleted'), _('%d unknown'), _('%d ignored'),
@@ -3767,7 +3807,8 @@
('d', 'daemon', None, _('run server in background')),
('', 'daemon-pipefds', '', _('used internally by daemon mode')),
('E', 'errorlog', '', _('name of error log file to write to')),
- ('p', 'port', 0, _('port to listen on (default: 8000)')),
+ # use string type, then we can check if something was passed
+ ('p', 'port', '', _('port to listen on (default: 8000)')),
('a', 'address', '',
_('address to listen on (default: all interfaces)')),
('', 'prefix', '',
--- a/mercurial/demandimport.py Sat Mar 06 10:02:45 2010 +0100
+++ b/mercurial/demandimport.py Tue Mar 16 11:37:14 2010 -0300
@@ -130,6 +130,10 @@
'resource',
# this trips up many extension authors
'gtk',
+ # setuptools' pkg_resources.py expects "from __main__ import x" to
+ # raise ImportError if x not defined
+ '__main__',
+ '_ssl', # conditional imports in the stdlib, issue1964
]
def enable():
--- a/mercurial/dispatch.py Sat Mar 06 10:02:45 2010 +0100
+++ b/mercurial/dispatch.py Tue Mar 16 11:37:14 2010 -0300
@@ -9,7 +9,7 @@
import os, sys, atexit, signal, pdb, socket, errno, shlex, time
import util, commands, hg, fancyopts, extensions, hook, error
import cmdutil, encoding
-import ui as _ui
+import ui as uimod
def run():
"run the command in sys.argv"
@@ -18,7 +18,7 @@
def dispatch(args):
"run the command specified in args"
try:
- u = _ui.ui()
+ u = uimod.ui()
if '--traceback' in args:
u.setconfig('ui', 'traceback', 'on')
except util.Abort, inst:
--- a/mercurial/graphmod.py Sat Mar 06 10:02:45 2010 +0100
+++ b/mercurial/graphmod.py Tue Mar 16 11:37:14 2010 -0300
@@ -115,7 +115,7 @@
edges.append((ecol, next.index(eid), colors[eid]))
elif eid == cur:
for p in parents:
- edges.append((ecol, next.index(p), colors[p]))
+ edges.append((ecol, next.index(p), color))
# Yield and move on
yield (cur, type, data, (col, color), edges)
--- a/mercurial/hg.py Sat Mar 06 10:02:45 2010 +0100
+++ b/mercurial/hg.py Tue Mar 16 11:37:14 2010 -0300
@@ -10,8 +10,8 @@
from lock import release
import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo
import lock, util, extensions, error, encoding, node
-import merge as _merge
-import verify as _verify
+import merge as mergemod
+import verify as verifymod
import errno, os, shutil
def _local(path):
@@ -358,7 +358,7 @@
def update(repo, node):
"""update the working directory to node, merging linear changes"""
- stats = _merge.update(repo, node, False, False, None)
+ stats = mergemod.update(repo, node, False, False, None)
_showstats(repo, stats)
if stats[3]:
repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n"))
@@ -369,14 +369,14 @@
def clean(repo, node, show_stats=True):
"""forcibly switch the working directory to node, clobbering changes"""
- stats = _merge.update(repo, node, False, True, None)
+ stats = mergemod.update(repo, node, False, True, None)
if show_stats:
_showstats(repo, stats)
return stats[3] > 0
def merge(repo, node, force=None, remind=True):
"""branch merge with node, resolving changes"""
- stats = _merge.update(repo, node, True, force, False)
+ stats = mergemod.update(repo, node, True, force, False)
_showstats(repo, stats)
if stats[3]:
repo.ui.status(_("use 'hg resolve' to retry unresolved file merges "
@@ -387,8 +387,8 @@
def revert(repo, node, choose):
"""revert changes to revision in node without updating dirstate"""
- return _merge.update(repo, node, False, True, choose)[3] > 0
+ return mergemod.update(repo, node, False, True, choose)[3] > 0
def verify(repo):
"""verify the consistency of a repository"""
- return _verify.verify(repo)
+ return verifymod.verify(repo)
--- a/mercurial/hgweb/hgwebdir_mod.py Sat Mar 06 10:02:45 2010 +0100
+++ b/mercurial/hgweb/hgwebdir_mod.py Tue Mar 16 11:37:14 2010 -0300
@@ -6,7 +6,7 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-import os, re, time
+import os, re, time, urlparse
from mercurial.i18n import _
from mercurial import ui, hg, util, templater
from mercurial import error, encoding
@@ -195,11 +195,8 @@
yield {"type" : i[0], "extension": i[1],
"node": nodeid, "url": url}
- sortdefault = None, False
- def entries(sortcolumn="", descending=False, subdir="", **map):
+ def rawentries(subdir="", **map):
- rows = []
- parity = paritygen(self.stripecount)
descend = self.ui.configbool('web', 'descend', True)
for name, path in self.repos:
@@ -229,9 +226,7 @@
parts.insert(0, req.env['PATH_INFO'].rstrip('/'))
if req.env['SCRIPT_NAME']:
parts.insert(0, req.env['SCRIPT_NAME'])
- m = re.match('((?:https?://)?)(.*)', '/'.join(parts))
- # squish repeated slashes out of the path component
- url = m.group(1) + re.sub('/+', '/', m.group(2)) + '/'
+ url = re.sub(r'/+', '/', '/'.join(parts) + '/')
# update time with local timezone
try:
@@ -253,19 +248,19 @@
lastchange=d,
lastchange_sort=d[1]-d[0],
archives=archivelist(u, "tip", url))
- if (not sortcolumn or (sortcolumn, descending) == sortdefault):
- # fast path for unsorted output
- row['parity'] = parity.next()
- yield row
- else:
- rows.append((row["%s_sort" % sortcolumn], row))
- if rows:
- rows.sort()
- if descending:
- rows.reverse()
- for key, row in rows:
- row['parity'] = parity.next()
- yield row
+ yield row
+
+ sortdefault = None, False
+ def entries(sortcolumn="", descending=False, subdir="", **map):
+ rows = rawentries(subdir=subdir, **map)
+
+ if sortcolumn and sortdefault != (sortcolumn, descending):
+ sortkey = '%s_sort' % sortcolumn
+ rows = sorted(rows, key=lambda x: x[sortkey],
+ reverse=descending)
+ for row, parity in zip(rows, paritygen(self.stripecount)):
+ row['parity'] = parity
+ yield row
self.refresh()
sortable = ["name", "description", "contact", "lastchange"]
@@ -284,8 +279,7 @@
for column in sortable]
self.refresh()
- if self._baseurl is not None:
- req.env['SCRIPT_NAME'] = self._baseurl
+ self.updatereqenv(req.env)
return tmpl("index", entries=entries, subdir=subdir,
sortcolumn=sortcolumn, descending=descending,
@@ -308,8 +302,7 @@
def config(section, name, default=None, untrusted=True):
return self.ui.config(section, name, default, untrusted)
- if self._baseurl is not None:
- req.env['SCRIPT_NAME'] = self._baseurl
+ self.updatereqenv(req.env)
url = req.env.get('SCRIPT_NAME', '')
if not url.endswith('/'):
@@ -339,3 +332,19 @@
"staticurl": staticurl,
"sessionvars": sessionvars})
return tmpl
+
+ def updatereqenv(self, env):
+ def splitnetloc(netloc):
+ if ':' in netloc:
+ return netloc.split(':', 1)
+ else:
+ return (netloc, None)
+
+ if self._baseurl is not None:
+ urlcomp = urlparse.urlparse(self._baseurl)
+ host, port = splitnetloc(urlcomp[1])
+ path = urlcomp[2]
+ env['SERVER_NAME'] = host
+ if port:
+ env['SERVER_PORT'] = port
+ env['SCRIPT_NAME'] = path
--- a/mercurial/hgweb/server.py Sat Mar 06 10:02:45 2010 +0100
+++ b/mercurial/hgweb/server.py Tue Mar 16 11:37:14 2010 -0300
@@ -8,8 +8,6 @@
import os, sys, errno, urllib, BaseHTTPServer, socket, SocketServer, traceback
from mercurial import hg, util, error
-from hgweb_mod import hgweb
-from hgwebdir_mod import hgwebdir
from mercurial.i18n import _
def _splitURI(uri):
@@ -195,104 +193,85 @@
self.close_connection = True
pass
-def create_server(ui, repo):
- use_threads = True
-
- def openlog(opt, default):
- if opt and opt != '-':
- return open(opt, 'a')
- return default
-
- if repo is None:
- myui = ui
+try:
+ from threading import activeCount
+ _mixin = SocketServer.ThreadingMixIn
+except ImportError:
+ if hasattr(os, "fork"):
+ _mixin = SocketServer.ForkingMixIn
else:
- myui = repo.ui
- address = myui.config("web", "address", "")
- port = int(myui.config("web", "port", 8000))
- prefix = myui.config("web", "prefix", "")
- if prefix:
- prefix = "/" + prefix.strip("/")
- use_ipv6 = myui.configbool("web", "ipv6")
- webdir_conf = myui.config("web", "webdir_conf")
- ssl_cert = myui.config("web", "certificate")
- accesslog = openlog(myui.config("web", "accesslog", "-"), sys.stdout)
- errorlog = openlog(myui.config("web", "errorlog", "-"), sys.stderr)
+ class _mixin:
+ pass
+
+def openlog(opt, default):
+ if opt and opt != '-':
+ return open(opt, 'a')
+ return default
- if use_threads:
- try:
- from threading import activeCount
- except ImportError:
- use_threads = False
+class MercurialHTTPServer(object, _mixin, BaseHTTPServer.HTTPServer):
- if use_threads:
- _mixin = SocketServer.ThreadingMixIn
- else:
- if hasattr(os, "fork"):
- _mixin = SocketServer.ForkingMixIn
- else:
- class _mixin:
- pass
+ # SO_REUSEADDR has broken semantics on windows
+ if os.name == 'nt':
+ allow_reuse_address = 0
- class MercurialHTTPServer(object, _mixin, BaseHTTPServer.HTTPServer):
+ def __init__(self, ui, app, addr, handler, **kwargs):
+ BaseHTTPServer.HTTPServer.__init__(self, addr, handler, **kwargs)
+ self.daemon_threads = True
+ self.application = app
- # SO_REUSEADDR has broken semantics on windows
- if os.name == 'nt':
- allow_reuse_address = 0
+ ssl_cert = ui.config('web', 'certificate')
+ if ssl_cert:
+ try:
+ from OpenSSL import SSL
+ ctx = SSL.Context(SSL.SSLv23_METHOD)
+ except ImportError:
+ raise util.Abort(_("SSL support is unavailable"))
+ ctx.use_privatekey_file(ssl_cert)
+ ctx.use_certificate_file(ssl_cert)
+ sock = socket.socket(self.address_family, self.socket_type)
+ self.socket = SSL.Connection(ctx, sock)
+ self.server_bind()
+ self.server_activate()
- def __init__(self, *args, **kargs):
- BaseHTTPServer.HTTPServer.__init__(self, *args, **kargs)
- self.accesslog = accesslog
- self.errorlog = errorlog
- self.daemon_threads = True
- def make_handler():
- if webdir_conf:
- hgwebobj = hgwebdir(webdir_conf, ui)
- elif repo is not None:
- hgwebobj = hgweb(hg.repository(repo.ui, repo.root))
- else:
- raise error.RepoError(_("There is no Mercurial repository"
- " here (.hg not found)"))
- return hgwebobj
- self.application = make_handler()
+ prefix = ui.config('web', 'prefix', '')
+ if prefix:
+ prefix = '/' + prefix.strip('/')
+ self.prefix = prefix
- if ssl_cert:
- try:
- from OpenSSL import SSL
- ctx = SSL.Context(SSL.SSLv23_METHOD)
- except ImportError:
- raise util.Abort(_("SSL support is unavailable"))
- ctx.use_privatekey_file(ssl_cert)
- ctx.use_certificate_file(ssl_cert)
- sock = socket.socket(self.address_family, self.socket_type)
- self.socket = SSL.Connection(ctx, sock)
- self.server_bind()
- self.server_activate()
+ alog = openlog(ui.config('web', 'accesslog', '-'), sys.stdout)
+ elog = openlog(ui.config('web', 'errorlog', '-'), sys.stderr)
+ self.accesslog = alog
+ self.errorlog = elog
+
+ self.addr, self.port = self.socket.getsockname()[0:2]
+ self.fqaddr = socket.getfqdn(addr[0])
- self.addr, self.port = self.socket.getsockname()[0:2]
- self.prefix = prefix
- self.fqaddr = socket.getfqdn(address)
-
- class IPv6HTTPServer(MercurialHTTPServer):
- address_family = getattr(socket, 'AF_INET6', None)
+class IPv6HTTPServer(MercurialHTTPServer):
+ address_family = getattr(socket, 'AF_INET6', None)
+ def __init__(self, *args, **kwargs):
+ if self.address_family is None:
+ raise error.RepoError(_('IPv6 is not available on this system'))
+ super(IPv6HTTPServer, self).__init__(*args, **kwargs)
- def __init__(self, *args, **kwargs):
- if self.address_family is None:
- raise error.RepoError(_('IPv6 is not available on this system'))
- super(IPv6HTTPServer, self).__init__(*args, **kwargs)
+def create_server(ui, app):
- if ssl_cert:
+ if ui.config('web', 'certificate'):
handler = _shgwebhandler
else:
handler = _hgwebhandler
+ if ui.configbool('web', 'ipv6'):
+ cls = IPv6HTTPServer
+ else:
+ cls = MercurialHTTPServer
+
# ugly hack due to python issue5853 (for threaded use)
import mimetypes; mimetypes.init()
+ address = ui.config('web', 'address', '')
+ port = int(ui.config('web', 'port', 8000))
try:
- if use_ipv6:
- return IPv6HTTPServer((address, port), handler)
- else:
- return MercurialHTTPServer((address, port), handler)
+ return cls(ui, app, (address, port), handler)
except socket.error, inst:
raise util.Abort(_("cannot start server at '%s:%d': %s")
% (address, port, inst.args[1]))
--- a/mercurial/localrepo.py Sat Mar 06 10:02:45 2010 +0100
+++ b/mercurial/localrepo.py Tue Mar 16 11:37:14 2010 -0300
@@ -11,9 +11,9 @@
import changelog, dirstate, filelog, manifest, context
import lock, transaction, store, encoding
import util, extensions, hook, error
-import match as match_
-import merge as merge_
-import tags as tags_
+import match as matchmod
+import merge as mergemod
+import tags as tagsmod
from lock import release
import weakref, stat, errno, os, time, inspect
propertycache = util.propertycache
@@ -207,7 +207,7 @@
if '.hgtags' not in self.dirstate:
self.add(['.hgtags'])
- m = match_.exact(self.root, '', ['.hgtags'])
+ m = matchmod.exact(self.root, '', ['.hgtags'])
tagnode = self.commit(message, user, date, extra=extra, match=m)
for name in names:
@@ -268,8 +268,8 @@
alltags = {} # map tag name to (node, hist)
tagtypes = {}
- tags_.findglobaltags(self.ui, self, alltags, tagtypes)
- tags_.readlocaltags(self.ui, self, alltags, tagtypes)
+ tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
+ tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
# Build the return dicts. Have to re-encode tag names because
# the tags module always uses UTF-8 (in order not to lose info
@@ -503,7 +503,7 @@
for pat, cmd in self.ui.configitems(filter):
if cmd == '!':
continue
- mf = match_.match(self.root, '', [pat])
+ mf = matchmod.match(self.root, '', [pat])
fn = None
params = cmd
for name, filterfn in self._datafilters.iteritems():
@@ -767,7 +767,7 @@
raise util.Abort('%s: %s' % (f, msg))
if not match:
- match = match_.always(self.root, '')
+ match = matchmod.always(self.root, '')
if not force:
vdirs = []
@@ -824,7 +824,7 @@
and self[None].branch() == self['.'].branch()):
return None
- ms = merge_.mergestate(self)
+ ms = mergemod.mergestate(self)
for f in changes[0]:
if f in ms and ms[f] == 'u':
raise util.Abort(_("unresolved merge conflicts "
@@ -996,7 +996,7 @@
working = ctx2.rev() is None
parentworking = working and ctx1 == self['.']
- match = match or match_.always(self.root, self.getcwd())
+ match = match or matchmod.always(self.root, self.getcwd())
listignored, listclean, listunknown = ignored, clean, unknown
# load earliest manifest first for caching reasons
@@ -1826,9 +1826,9 @@
cnt = 0
for chnk in group:
yield chnk
- self.ui.progress(_('bundle changes'), cnt, unit=_('chunks'))
+ self.ui.progress(_('bundling changes'), cnt, unit=_('chunks'))
cnt += 1
- self.ui.progress(_('bundle changes'), None, unit=_('chunks'))
+ self.ui.progress(_('bundling changes'), None, unit=_('chunks'))
# Figure out which manifest nodes (of the ones we think might be
@@ -1854,9 +1854,9 @@
cnt = 0
for chnk in group:
yield chnk
- self.ui.progress(_('bundle manifests'), cnt, unit=_('chunks'))
+ self.ui.progress(_('bundling manifests'), cnt, unit=_('chunks'))
cnt += 1
- self.ui.progress(_('bundle manifests'), None, unit=_('chunks'))
+ self.ui.progress(_('bundling manifests'), None, unit=_('chunks'))
# These are no longer needed, dereference and toss the memory for
# them.
@@ -1897,7 +1897,7 @@
lookup_filenode_link_func(fname))
for chnk in group:
self.ui.progress(
- _('bundle files'), cnt, item=fname, unit=_('chunks'))
+ _('bundling files'), cnt, item=fname, unit=_('chunks'))
cnt += 1
yield chnk
if fname in msng_filenode_set:
@@ -1905,7 +1905,7 @@
del msng_filenode_set[fname]
# Signal that no more groups are left.
yield changegroup.closechunk()
- self.ui.progress(_('bundle files'), None, unit=_('chunks'))
+ self.ui.progress(_('bundling files'), None, unit=_('chunks'))
if msng_cl_lst:
self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
@@ -1954,19 +1954,19 @@
cnt = 0
for chnk in cl.group(nodes, identity, collect):
- self.ui.progress(_('bundle changes'), cnt, unit=_('chunks'))
+ self.ui.progress(_('bundling changes'), cnt, unit=_('chunks'))
cnt += 1
yield chnk
- self.ui.progress(_('bundle changes'), None, unit=_('chunks'))
+ self.ui.progress(_('bundling changes'), None, unit=_('chunks'))
mnfst = self.manifest
nodeiter = gennodelst(mnfst)
cnt = 0
for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
- self.ui.progress(_('bundle manifests'), cnt, unit=_('chunks'))
+ self.ui.progress(_('bundling manifests'), cnt, unit=_('chunks'))
cnt += 1
yield chnk
- self.ui.progress(_('bundle manifests'), None, unit=_('chunks'))
+ self.ui.progress(_('bundling manifests'), None, unit=_('chunks'))
cnt = 0
for fname in sorted(changedfiles):
@@ -1981,10 +1981,10 @@
lookup = lookuprevlink_func(filerevlog)
for chnk in filerevlog.group(nodeiter, lookup):
self.ui.progress(
- _('bundle files'), cnt, item=fname, unit=_('chunks'))
+ _('bundling files'), cnt, item=fname, unit=_('chunks'))
cnt += 1
yield chnk
- self.ui.progress(_('bundle files'), None, unit=_('chunks'))
+ self.ui.progress(_('bundling files'), None, unit=_('chunks'))
yield changegroup.closechunk()
--- a/mercurial/mdiff.py Sat Mar 06 10:02:45 2010 +0100
+++ b/mercurial/mdiff.py Tue Mar 16 11:37:14 2010 -0300
@@ -125,12 +125,12 @@
else:
al = splitnewlines(a)
bl = splitnewlines(b)
- l = list(bunidiff(a, b, al, bl, "a/" + fn1, "b/" + fn2, opts=opts))
+ l = list(_unidiff(a, b, al, bl, opts=opts))
if not l:
return ""
- # difflib uses a space, rather than a tab
- l[0] = "%s%s" % (l[0][:-2], datetag(ad))
- l[1] = "%s%s" % (l[1][:-2], datetag(bd))
+
+ l.insert(0, "--- a/%s%s" % (fn1, datetag(ad)))
+ l.insert(1, "+++ b/%s%s" % (fn2, datetag(bd)))
for ln in xrange(len(l)):
if l[ln][-1] != '\n':
@@ -141,11 +141,10 @@
return "".join(l)
-# somewhat self contained replacement for difflib.unified_diff
+# creates a headerless unified diff
# t1 and t2 are the text to be diffed
# l1 and l2 are the text broken up into lines
-# header1 and header2 are the filenames for the diff output
-def bunidiff(t1, t2, l1, l2, header1, header2, opts=defaultopts):
+def _unidiff(t1, t2, l1, l2, opts=defaultopts):
def contextend(l, len):
ret = l + opts.context
if ret > len:
@@ -158,10 +157,7 @@
return 0
return ret
- def yieldhunk(hunk, header):
- if header:
- for x in header:
- yield x
+ def yieldhunk(hunk):
(astart, a2, bstart, b2, delta) = hunk
aend = contextend(a2, len(l1))
alen = aend - astart
@@ -184,8 +180,6 @@
for x in xrange(a2, aend):
yield ' ' + l1[x]
- header = ["--- %s\t\n" % header1, "+++ %s\t\n" % header2]
-
if opts.showfunc:
funcre = re.compile('\w')
@@ -236,11 +230,8 @@
astart = hunk[1]
bstart = hunk[3]
else:
- for x in yieldhunk(hunk, header):
+ for x in yieldhunk(hunk):
yield x
- # we only want to yield the header if the files differ, and
- # we only want to yield it once.
- header = None
if prev:
# we've joined the previous hunk, record the new ending points.
hunk[1] = a2
@@ -255,7 +246,7 @@
delta[len(delta):] = ['+' + x for x in new]
if hunk:
- for x in yieldhunk(hunk, header):
+ for x in yieldhunk(hunk):
yield x
def patchtext(bin):
--- a/mercurial/patch.py Sat Mar 06 10:02:45 2010 +0100
+++ b/mercurial/patch.py Tue Mar 16 11:37:14 2010 -0300
@@ -1175,20 +1175,6 @@
return -1
return err
-def diffopts(ui, opts=None, untrusted=False):
- def get(key, name=None, getter=ui.configbool):
- return ((opts and opts.get(key)) or
- getter('diff', name or key, None, untrusted=untrusted))
- return mdiff.diffopts(
- text=opts and opts.get('text'),
- git=get('git'),
- nodates=get('nodates'),
- showfunc=get('show_function', 'showfunc'),
- ignorews=get('ignore_all_space', 'ignorews'),
- ignorewsamount=get('ignore_space_change', 'ignorewsamount'),
- ignoreblanklines=get('ignore_blank_lines', 'ignoreblanklines'),
- context=get('unified', getter=ui.config))
-
def updatedir(ui, repo, patches, similarity=0):
'''Update dirstate after patch application according to metadata'''
if not patches:
@@ -1376,6 +1362,20 @@
class GitDiffRequired(Exception):
pass
+def diffopts(ui, opts=None, untrusted=False):
+ def get(key, name=None, getter=ui.configbool):
+ return ((opts and opts.get(key)) or
+ getter('diff', name or key, None, untrusted=untrusted))
+ return mdiff.diffopts(
+ text=opts and opts.get('text'),
+ git=get('git'),
+ nodates=get('nodates'),
+ showfunc=get('show_function', 'showfunc'),
+ ignorews=get('ignore_all_space', 'ignorews'),
+ ignorewsamount=get('ignore_space_change', 'ignorewsamount'),
+ ignoreblanklines=get('ignore_blank_lines', 'ignoreblanklines'),
+ context=get('unified', getter=ui.config))
+
def diff(repo, node1=None, node2=None, match=None, changes=None, opts=None,
losedatafn=None):
'''yields diff of changes to files between two nodes, or node and
@@ -1551,47 +1551,6 @@
if text:
yield text
-def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
- opts=None):
- '''export changesets as hg patches.'''
-
- total = len(revs)
- revwidth = max([len(str(rev)) for rev in revs])
-
- def single(rev, seqno, fp):
- ctx = repo[rev]
- node = ctx.node()
- parents = [p.node() for p in ctx.parents() if p]
- branch = ctx.branch()
- if switch_parent:
- parents.reverse()
- prev = (parents and parents[0]) or nullid
-
- if not fp:
- fp = cmdutil.make_file(repo, template, node, total=total,
- seqno=seqno, revwidth=revwidth,
- mode='ab')
- if fp != sys.stdout and hasattr(fp, 'name'):
- repo.ui.note("%s\n" % fp.name)
-
- fp.write("# HG changeset patch\n")
- fp.write("# User %s\n" % ctx.user())
- fp.write("# Date %d %d\n" % ctx.date())
- if branch and (branch != 'default'):
- fp.write("# Branch %s\n" % branch)
- fp.write("# Node ID %s\n" % hex(node))
- fp.write("# Parent %s\n" % hex(prev))
- if len(parents) > 1:
- fp.write("# Parent %s\n" % hex(parents[1]))
- fp.write(ctx.description().rstrip())
- fp.write("\n\n")
-
- for chunk in diff(repo, prev, node, opts=opts):
- fp.write(chunk)
-
- for seqno, rev in enumerate(revs):
- single(rev, seqno + 1, fp)
-
def diffstatdata(lines):
filename, adds, removes = None, 0, 0
for line in lines:
--- a/mercurial/pure/osutil.py Sat Mar 06 10:02:45 2010 +0100
+++ b/mercurial/pure/osutil.py Tue Mar 16 11:37:14 2010 -0300
@@ -6,25 +6,25 @@
# GNU General Public License version 2 or any later version.
import os
-import stat as _stat
+import stat as statmod
posixfile = open
def _mode_to_kind(mode):
- if _stat.S_ISREG(mode):
- return _stat.S_IFREG
- if _stat.S_ISDIR(mode):
- return _stat.S_IFDIR
- if _stat.S_ISLNK(mode):
- return _stat.S_IFLNK
- if _stat.S_ISBLK(mode):
- return _stat.S_IFBLK
- if _stat.S_ISCHR(mode):
- return _stat.S_IFCHR
- if _stat.S_ISFIFO(mode):
- return _stat.S_IFIFO
- if _stat.S_ISSOCK(mode):
- return _stat.S_IFSOCK
+ if statmod.S_ISREG(mode):
+ return statmod.S_IFREG
+ if statmod.S_ISDIR(mode):
+ return statmod.S_IFDIR
+ if statmod.S_ISLNK(mode):
+ return statmod.S_IFLNK
+ if statmod.S_ISBLK(mode):
+ return statmod.S_IFBLK
+ if statmod.S_ISCHR(mode):
+ return statmod.S_IFCHR
+ if statmod.S_ISFIFO(mode):
+ return statmod.S_IFIFO
+ if statmod.S_ISSOCK(mode):
+ return statmod.S_IFSOCK
return mode
def listdir(path, stat=False, skip=None):
@@ -49,7 +49,7 @@
names.sort()
for fn in names:
st = os.lstat(prefix + fn)
- if fn == skip and _stat.S_ISDIR(st.st_mode):
+ if fn == skip and statmod.S_ISDIR(st.st_mode):
return []
if stat:
result.append((fn, _mode_to_kind(st.st_mode), st))
--- a/mercurial/subrepo.py Sat Mar 06 10:02:45 2010 +0100
+++ b/mercurial/subrepo.py Tue Mar 16 11:37:14 2010 -0300
@@ -131,7 +131,7 @@
source = repo._subsource
if source.startswith('/') or '://' in source:
return source
- parent = _abssource(repo._subparent)
+ parent = _abssource(repo._subparent, push)
if '://' in parent:
if parent[-1] == '/':
parent = parent[:-1]
@@ -177,24 +177,35 @@
self._state = state
r = ctx._repo
root = r.wjoin(path)
- if os.path.exists(os.path.join(root, '.hg')):
- self._repo = hg.repository(r.ui, root)
- else:
+ create = False
+ if not os.path.exists(os.path.join(root, '.hg')):
+ create = True
util.makedirs(root)
- self._repo = hg.repository(r.ui, root, create=True)
- f = file(os.path.join(root, '.hg', 'hgrc'), 'w')
- f.write('[paths]\ndefault = %s\n' % os.path.join(
- _abssource(ctx._repo), path))
- f.close()
+ self._repo = hg.repository(r.ui, root, create=create)
self._repo._subparent = r
self._repo._subsource = state[0]
+ if create:
+ fp = self._repo.opener("hgrc", "w", text=True)
+ fp.write('[paths]\n')
+
+ def addpathconfig(key, value):
+ fp.write('%s = %s\n' % (key, value))
+ self._repo.ui.setconfig('paths', key, value)
+
+ defpath = _abssource(self._repo)
+ defpushpath = _abssource(self._repo, True)
+ addpathconfig('default', defpath)
+ if defpath != defpushpath:
+ addpathconfig('default-push', defpushpath)
+ fp.close()
+
def dirty(self):
r = self._state[1]
if r == '':
return True
w = self._repo[None]
- if w.p1() != self._repo[r]: # version checked out changed
+ if w.p1() != self._repo[r]: # version checked out changed
return True
return w.dirty() # working directory changed
@@ -217,8 +228,9 @@
self._repo.lookup(revision)
except error.RepoError:
self._repo._subsource = source
- self._repo.ui.status(_('pulling subrepo %s\n') % self._path)
srcurl = _abssource(self._repo)
+ self._repo.ui.status(_('pulling subrepo %s from %s\n')
+ % (self._path, srcurl))
other = hg.repository(self._repo.ui, srcurl)
self._repo.pull(other)
--- a/mercurial/templatefilters.py Sat Mar 06 10:02:45 2010 +0100
+++ b/mercurial/templatefilters.py Tue Mar 16 11:37:14 2010 -0300
@@ -14,15 +14,13 @@
return "".join([stringify(t) for t in thing if t is not None])
return str(thing)
-agescales = [("second", 1),
- ("minute", 60),
- ("hour", 3600),
- ("day", 3600 * 24),
+agescales = [("year", 3600 * 24 * 365),
+ ("month", 3600 * 24 * 30),
("week", 3600 * 24 * 7),
- ("month", 3600 * 24 * 30),
- ("year", 3600 * 24 * 365)]
-
-agescales.reverse()
+ ("day", 3600 * 24),
+ ("hour", 3600),
+ ("minute", 60),
+ ("second", 1),]
def age(date):
'''turn a (timestamp, tzoff) tuple into an age string.'''
--- a/mercurial/verify.py Sat Mar 06 10:02:45 2010 +0100
+++ b/mercurial/verify.py Tue Mar 16 11:37:14 2010 -0300
@@ -122,7 +122,7 @@
checklog(cl, "changelog", 0)
total = len(repo)
for i in repo:
- ui.progress(_('changelog'), i, total=total)
+ ui.progress(_('checking'), i, total=total)
n = cl.node(i)
checkentry(cl, i, n, seen, [i], "changelog")
@@ -133,14 +133,14 @@
filelinkrevs.setdefault(f, []).append(i)
except Exception, inst:
exc(i, _("unpacking changeset %s") % short(n), inst)
- ui.progress(_('changelog'), None)
+ ui.progress(_('checking'), None)
ui.status(_("checking manifests\n"))
seen = {}
checklog(mf, "manifest", 0)
total = len(mf)
for i in mf:
- ui.progress(_('manifests'), i, total=total)
+ ui.progress(_('checking'), i, total=total)
n = mf.node(i)
lr = checkentry(mf, i, n, seen, mflinkrevs.get(n, []), "manifest")
if n in mflinkrevs:
@@ -156,7 +156,7 @@
filenodes.setdefault(f, {}).setdefault(fn, lr)
except Exception, inst:
exc(lr, _("reading manifest delta %s") % short(n), inst)
- ui.progress(_('manifests'), None)
+ ui.progress(_('checking'), None)
ui.status(_("crosschecking files in changesets and manifests\n"))
@@ -166,13 +166,13 @@
for c, m in sorted([(c, m) for m in mflinkrevs
for c in mflinkrevs[m]]):
count += 1
- ui.progress(_('crosscheck'), count, total=total)
+ ui.progress(_('crosschecking'), count, total=total)
err(c, _("changeset refers to unknown manifest %s") % short(m))
mflinkrevs = None # del is bad here due to scope issues
for f in sorted(filelinkrevs):
count += 1
- ui.progress(_('crosscheck'), count, total=total)
+ ui.progress(_('crosschecking'), count, total=total)
if f not in filenodes:
lr = filelinkrevs[f][0]
err(lr, _("in changeset but not in manifest"), f)
@@ -180,7 +180,7 @@
if havecl:
for f in sorted(filenodes):
count += 1
- ui.progress(_('crosscheck'), count, total=total)
+ ui.progress(_('crosschecking'), count, total=total)
if f not in filelinkrevs:
try:
fl = repo.file(f)
@@ -189,7 +189,7 @@
lr = None
err(lr, _("in manifest but not in changeset"), f)
- ui.progress(_('crosscheck'), None)
+ ui.progress(_('crosschecking'), None)
ui.status(_("checking files\n"))
@@ -203,7 +203,7 @@
files = sorted(set(filenodes) | set(filelinkrevs))
total = len(files)
for i, f in enumerate(files):
- ui.progress(_('files'), i, item=f, total=total)
+ ui.progress(_('checking'), i, item=f, total=total)
try:
linkrevs = filelinkrevs[f]
except KeyError:
@@ -281,7 +281,7 @@
fns = [(lr, n) for n, lr in filenodes[f].iteritems()]
for lr, node in sorted(fns):
err(lr, _("%s in manifests not found") % short(node), f)
- ui.progress(_('files'), None)
+ ui.progress(_('checking'), None)
for f in storefiles:
warn(_("warning: orphan revlog '%s'") % f)
--- a/tests/coverage.py Sat Mar 06 10:02:45 2010 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,1166 +0,0 @@
-#!/usr/bin/env python
-#
-# Perforce Defect Tracking Integration Project
-# <http://www.ravenbrook.com/project/p4dti/>
-#
-# COVERAGE.PY -- COVERAGE TESTING
-#
-# Gareth Rees, Ravenbrook Limited, 2001-12-04
-# Ned Batchelder, 2004-12-12
-# http://nedbatchelder.com/code/modules/coverage.html
-#
-#
-# 1. INTRODUCTION
-#
-# This module provides coverage testing for Python code.
-#
-# The intended readership is all Python developers.
-#
-# This document is not confidential.
-#
-# See [GDR 2001-12-04a] for the command-line interface, programmatic
-# interface and limitations. See [GDR 2001-12-04b] for requirements and
-# design.
-
-r"""Usage:
-
-coverage.py -x [-p] MODULE.py [ARG1 ARG2 ...]
- Execute module, passing the given command-line arguments, collecting
- coverage data. With the -p option, write to a temporary file containing
- the machine name and process ID.
-
-coverage.py -e
- Erase collected coverage data.
-
-coverage.py -c
- Collect data from multiple coverage files (as created by -p option above)
- and store it into a single file representing the union of the coverage.
-
-coverage.py -r [-m] [-o dir1,dir2,...] FILE1 FILE2 ...
- Report on the statement coverage for the given files. With the -m
- option, show line numbers of the statements that weren't executed.
-
-coverage.py -a [-d dir] [-o dir1,dir2,...] FILE1 FILE2 ...
- Make annotated copies of the given files, marking statements that
- are executed with > and statements that are missed with !. With
- the -d option, make the copies in that directory. Without the -d
- option, make each copy in the same directory as the original.
-
--o dir,dir2,...
- Omit reporting or annotating files when their filename path starts with
- a directory listed in the omit list.
- e.g. python coverage.py -i -r -o c:\python23,lib\enthought\traits
-
-Coverage data is saved in the file .coverage by default. Set the
-COVERAGE_FILE environment variable to save it somewhere else."""
-
-__version__ = "2.85.20080914" # see detailed history at the end of this file.
-
-import compiler
-import compiler.visitor
-import glob
-import os
-import re
-import string
-import symbol
-import sys
-import threading
-import token
-import types
-import zipimport
-from socket import gethostname
-
-# Python version compatibility
-try:
- strclass = basestring # new to 2.3
-except:
- strclass = str
-
-# 2. IMPLEMENTATION
-#
-# This uses the "singleton" pattern.
-#
-# The word "morf" means a module object (from which the source file can
-# be deduced by suitable manipulation of the __file__ attribute) or a
-# filename.
-#
-# When we generate a coverage report we have to canonicalize every
-# filename in the coverage dictionary just in case it refers to the
-# module we are reporting on. It seems a shame to throw away this
-# information so the data in the coverage dictionary is transferred to
-# the 'cexecuted' dictionary under the canonical filenames.
-#
-# The coverage dictionary is called "c" and the trace function "t". The
-# reason for these short names is that Python looks up variables by name
-# at runtime and so execution time depends on the length of variables!
-# In the bottleneck of this application it's appropriate to abbreviate
-# names to increase speed.
-
-class StatementFindingAstVisitor(compiler.visitor.ASTVisitor):
- """ A visitor for a parsed Abstract Syntax Tree which finds executable
- statements.
- """
- def __init__(self, statements, excluded, suite_spots):
- compiler.visitor.ASTVisitor.__init__(self)
- self.statements = statements
- self.excluded = excluded
- self.suite_spots = suite_spots
- self.excluding_suite = 0
-
- def doRecursive(self, node):
- for n in node.getChildNodes():
- self.dispatch(n)
-
- visitStmt = visitModule = doRecursive
-
- def doCode(self, node):
- if hasattr(node, 'decorators') and node.decorators:
- self.dispatch(node.decorators)
- self.recordAndDispatch(node.code)
- else:
- self.doSuite(node, node.code)
-
- visitFunction = visitClass = doCode
-
- def getFirstLine(self, node):
- # Find the first line in the tree node.
- lineno = node.lineno
- for n in node.getChildNodes():
- f = self.getFirstLine(n)
- if lineno and f:
- lineno = min(lineno, f)
- else:
- lineno = lineno or f
- return lineno
-
- def getLastLine(self, node):
- # Find the first line in the tree node.
- lineno = node.lineno
- for n in node.getChildNodes():
- lineno = max(lineno, self.getLastLine(n))
- return lineno
-
- def doStatement(self, node):
- self.recordLine(self.getFirstLine(node))
-
- visitAssert = visitAssign = visitAssTuple = visitPrint = \
- visitPrintnl = visitRaise = visitSubscript = visitDecorators = \
- doStatement
-
- def visitPass(self, node):
- # Pass statements have weird interactions with docstrings. If
- # this pass statement is part of one of those pairs, claim
- # that the statement is on the later of the two lines.
- l = node.lineno
- if l:
- lines = self.suite_spots.get(l, [l, l])
- self.statements[lines[1]] = 1
-
- def visitDiscard(self, node):
- # Discard nodes are statements that execute an expression, but then
- # discard the results. This includes function calls, so we can't
- # ignore them all. But if the expression is a constant, the statement
- # won't be "executed", so don't count it now.
- if node.expr.__class__.__name__ != 'Const':
- self.doStatement(node)
-
- def recordNodeLine(self, node):
- # Stmt nodes often have None, but shouldn't claim the first line of
- # their children (because the first child might be an ignorable line
- # like "global a").
- if node.__class__.__name__ != 'Stmt':
- return self.recordLine(self.getFirstLine(node))
- else:
- return 0
-
- def recordLine(self, lineno):
- # Returns a bool, whether the line is included or excluded.
- if lineno:
- # Multi-line tests introducing suites have to get charged to their
- # keyword.
- if lineno in self.suite_spots:
- lineno = self.suite_spots[lineno][0]
- # If we're inside an excluded suite, record that this line was
- # excluded.
- if self.excluding_suite:
- self.excluded[lineno] = 1
- return 0
- # If this line is excluded, or suite_spots maps this line to
- # another line that is exlcuded, then we're excluded.
- elif self.excluded.has_key(lineno) or \
- self.suite_spots.has_key(lineno) and \
- self.excluded.has_key(self.suite_spots[lineno][1]):
- return 0
- # Otherwise, this is an executable line.
- else:
- self.statements[lineno] = 1
- return 1
- return 0
-
- default = recordNodeLine
-
- def recordAndDispatch(self, node):
- self.recordNodeLine(node)
- self.dispatch(node)
-
- def doSuite(self, intro, body, exclude=0):
- exsuite = self.excluding_suite
- if exclude or (intro and not self.recordNodeLine(intro)):
- self.excluding_suite = 1
- self.recordAndDispatch(body)
- self.excluding_suite = exsuite
-
- def doPlainWordSuite(self, prevsuite, suite):
- # Finding the exclude lines for else's is tricky, because they aren't
- # present in the compiler parse tree. Look at the previous suite,
- # and find its last line. If any line between there and the else's
- # first line are excluded, then we exclude the else.
- lastprev = self.getLastLine(prevsuite)
- firstelse = self.getFirstLine(suite)
- for l in range(lastprev + 1, firstelse):
- if self.suite_spots.has_key(l):
- self.doSuite(None, suite, exclude=self.excluded.has_key(l))
- break
- else:
- self.doSuite(None, suite)
-
- def doElse(self, prevsuite, node):
- if node.else_:
- self.doPlainWordSuite(prevsuite, node.else_)
-
- def visitFor(self, node):
- self.doSuite(node, node.body)
- self.doElse(node.body, node)
-
- visitWhile = visitFor
-
- def visitIf(self, node):
- # The first test has to be handled separately from the rest.
- # The first test is credited to the line with the "if", but the others
- # are credited to the line with the test for the elif.
- self.doSuite(node, node.tests[0][1])
- for t, n in node.tests[1:]:
- self.doSuite(t, n)
- self.doElse(node.tests[-1][1], node)
-
- def visitTryExcept(self, node):
- self.doSuite(node, node.body)
- for i in range(len(node.handlers)):
- a, b, h = node.handlers[i]
- if not a:
- # It's a plain "except:". Find the previous suite.
- if i > 0:
- prev = node.handlers[i - 1][2]
- else:
- prev = node.body
- self.doPlainWordSuite(prev, h)
- else:
- self.doSuite(a, h)
- self.doElse(node.handlers[-1][2], node)
-
- def visitTryFinally(self, node):
- self.doSuite(node, node.body)
- self.doPlainWordSuite(node.body, node.final)
-
- def visitWith(self, node):
- self.doSuite(node, node.body)
-
- def visitGlobal(self, node):
- # "global" statements don't execute like others (they don't call the
- # trace function), so don't record their line numbers.
- pass
-
-the_coverage = None
-
-class CoverageException(Exception):
- pass
-
-class coverage:
- # Name of the cache file (unless environment variable is set).
- cache_default = ".coverage"
-
- # Environment variable naming the cache file.
- cache_env = "COVERAGE_FILE"
-
- # A dictionary with an entry for (Python source file name, line number
- # in that file) if that line has been executed.
- c = {}
-
- # A map from canonical Python source file name to a dictionary in
- # which there's an entry for each line number that has been
- # executed.
- cexecuted = {}
-
- # Cache of results of calling the analysis2() method, so that you can
- # specify both -r and -a without doing double work.
- analysis_cache = {}
-
- # Cache of results of calling the canonical_filename() method, to
- # avoid duplicating work.
- canonical_filename_cache = {}
-
- def __init__(self):
- global the_coverage
- if the_coverage:
- raise CoverageException("Only one coverage object allowed.")
- self.usecache = 1
- self.cache = None
- self.parallel_mode = False
- self.exclude_re = ''
- self.nesting = 0
- self.cstack = []
- self.xstack = []
- self.relative_dir = self.abs_file(os.curdir)+os.sep
- self.exclude('# *pragma[: ]*[nN][oO] *[cC][oO][vV][eE][rR]')
-
- # t(f, x, y). This method is passed to sys.settrace as a trace function.
- # See [van Rossum 2001-07-20b, 9.2] for an explanation of sys.settrace and
- # the arguments and return value of the trace function.
- # See [van Rossum 2001-07-20a, 3.2] for a description of frame and code
- # objects.
- def t(self, f, w, unused): #pragma: no cover
- if w == 'line':
- self.c[(f.f_code.co_filename, f.f_lineno)] = 1
- #-for c in self.cstack:
- #- c[(f.f_code.co_filename, f.f_lineno)] = 1
- return self.t
-
- def help(self, error=None): #pragma: no cover
- if error:
- print error
- print
- print __doc__
- sys.exit(1)
-
- def command_line(self, argv, help_fn=None):
- import getopt
- help_fn = help_fn or self.help
- settings = {}
- optmap = {
- '-a': 'annotate',
- '-c': 'collect',
- '-d:': 'directory=',
- '-e': 'erase',
- '-h': 'help',
- '-i': 'ignore-errors',
- '-m': 'show-missing',
- '-p': 'parallel-mode',
- '-r': 'report',
- '-x': 'execute',
- '-o:': 'omit=',
- }
- short_opts = string.join(map(lambda o: o[1:], optmap.keys()), '')
- long_opts = optmap.values()
- options, args = getopt.getopt(argv, short_opts, long_opts)
- for o, a in options:
- if optmap.has_key(o):
- settings[optmap[o]] = 1
- elif optmap.has_key(o + ':'):
- settings[optmap[o + ':']] = a
- elif o[2:] in long_opts:
- settings[o[2:]] = 1
- elif o[2:] + '=' in long_opts:
- settings[o[2:]+'='] = a
- else: #pragma: no cover
- # Can't get here, because getopt won't return anything unknown.
- pass
-
- if settings.get('help'):
- help_fn()
-
- for i in ['erase', 'execute']:
- for j in ['annotate', 'report', 'collect']:
- if settings.get(i) and settings.get(j):
- help_fn("You can't specify the '%s' and '%s' "
- "options at the same time." % (i, j))
-
- args_needed = (settings.get('execute')
- or settings.get('annotate')
- or settings.get('report'))
- action = (settings.get('erase')
- or settings.get('collect')
- or args_needed)
- if not action:
- help_fn("You must specify at least one of -e, -x, -c, -r, or -a.")
- if not args_needed and args:
- help_fn("Unexpected arguments: %s" % " ".join(args))
-
- self.parallel_mode = settings.get('parallel-mode')
- self.get_ready()
-
- if settings.get('erase'):
- self.erase()
- if settings.get('execute'):
- if not args:
- help_fn("Nothing to do.")
- sys.argv = args
- self.start()
- import __main__
- sys.path[0] = os.path.dirname(sys.argv[0])
- execfile(sys.argv[0], __main__.__dict__)
- if settings.get('collect'):
- self.collect()
- if not args:
- args = self.cexecuted.keys()
-
- ignore_errors = settings.get('ignore-errors')
- show_missing = settings.get('show-missing')
- directory = settings.get('directory=')
-
- omit = settings.get('omit=')
- if omit is not None:
- omit = [self.abs_file(p) for p in omit.split(',')]
- else:
- omit = []
-
- if settings.get('report'):
- self.report(args, show_missing, ignore_errors, omit_prefixes=omit)
- if settings.get('annotate'):
- self.annotate(args, directory, ignore_errors, omit_prefixes=omit)
-
- def use_cache(self, usecache, cache_file=None):
- self.usecache = usecache
- if cache_file and not self.cache:
- self.cache_default = cache_file
-
- def get_ready(self, parallel_mode=False):
- if self.usecache and not self.cache:
- self.cache = os.environ.get(self.cache_env, self.cache_default)
- if self.parallel_mode:
- self.cache += "." + gethostname() + "." + str(os.getpid())
- self.restore()
- self.analysis_cache = {}
-
- def start(self, parallel_mode=False):
- self.get_ready()
- if self.nesting == 0: #pragma: no cover
- sys.settrace(self.t)
- if hasattr(threading, 'settrace'):
- threading.settrace(self.t)
- self.nesting += 1
-
- def stop(self):
- self.nesting -= 1
- if self.nesting == 0: #pragma: no cover
- sys.settrace(None)
- if hasattr(threading, 'settrace'):
- threading.settrace(None)
-
- def erase(self):
- self.get_ready()
- self.c = {}
- self.analysis_cache = {}
- self.cexecuted = {}
- if self.cache and os.path.exists(self.cache):
- os.remove(self.cache)
-
- def exclude(self, re):
- if self.exclude_re:
- self.exclude_re += "|"
- self.exclude_re += "(" + re + ")"
-
- def begin_recursive(self):
- self.cstack.append(self.c)
- self.xstack.append(self.exclude_re)
-
- def end_recursive(self):
- self.c = self.cstack.pop()
- self.exclude_re = self.xstack.pop()
-
- # save(). Save coverage data to the coverage cache.
-
- def save(self):
- if self.usecache and self.cache:
- self.canonicalize_filenames()
- cache = open(self.cache, 'wb')
- import marshal
- marshal.dump(self.cexecuted, cache)
- cache.close()
-
- # restore(). Restore coverage data from the coverage cache (if it exists).
-
- def restore(self):
- self.c = {}
- self.cexecuted = {}
- assert self.usecache
- if os.path.exists(self.cache):
- self.cexecuted = self.restore_file(self.cache)
-
- def restore_file(self, file_name):
- try:
- cache = open(file_name, 'rb')
- import marshal
- cexecuted = marshal.load(cache)
- cache.close()
- if isinstance(cexecuted, types.DictType):
- return cexecuted
- else:
- return {}
- except:
- return {}
-
- # collect(). Collect data in multiple files produced by parallel mode
-
- def collect(self):
- cache_dir, local = os.path.split(self.cache)
- for f in os.listdir(cache_dir or '.'):
- if not f.startswith(local):
- continue
-
- full_path = os.path.join(cache_dir, f)
- cexecuted = self.restore_file(full_path)
- self.merge_data(cexecuted)
-
- def merge_data(self, new_data):
- for file_name, file_data in new_data.items():
- if self.cexecuted.has_key(file_name):
- self.merge_file_data(self.cexecuted[file_name], file_data)
- else:
- self.cexecuted[file_name] = file_data
-
- def merge_file_data(self, cache_data, new_data):
- for line_number in new_data.keys():
- if not cache_data.has_key(line_number):
- cache_data[line_number] = new_data[line_number]
-
- def abs_file(self, filename):
- """ Helper function to turn a filename into an absolute normalized
- filename.
- """
- return os.path.normcase(os.path.abspath(os.path.realpath(filename)))
-
- def get_zip_data(self, filename):
- """ Get data from `filename` if it is a zip file path, or return None
- if it is not.
- """
- markers = ['.zip'+os.sep, '.egg'+os.sep]
- for marker in markers:
- if marker in filename:
- parts = filename.split(marker)
- try:
- zi = zipimport.zipimporter(parts[0]+marker[:-1])
- except zipimport.ZipImportError:
- continue
- try:
- data = zi.get_data(parts[1])
- except IOError:
- continue
- return data
- return None
-
- # canonical_filename(filename). Return a canonical filename for the
- # file (that is, an absolute path with no redundant components and
- # normalized case). See [GDR 2001-12-04b, 3.3].
-
- def canonical_filename(self, filename):
- if not self.canonical_filename_cache.has_key(filename):
- f = filename
- if os.path.isabs(f) and not os.path.exists(f):
- if not self.get_zip_data(f):
- f = os.path.basename(f)
- if not os.path.isabs(f):
- for path in [os.curdir] + sys.path:
- g = os.path.join(path, f)
- if os.path.exists(g):
- f = g
- break
- cf = self.abs_file(f)
- self.canonical_filename_cache[filename] = cf
- return self.canonical_filename_cache[filename]
-
- # canonicalize_filenames(). Copy results from "c" to "cexecuted",
- # canonicalizing filenames on the way. Clear the "c" map.
-
- def canonicalize_filenames(self):
- for filename, lineno in self.c.keys():
- if filename == '<string>':
- # Can't do anything useful with exec'd strings, so skip them.
- continue
- f = self.canonical_filename(filename)
- if not self.cexecuted.has_key(f):
- self.cexecuted[f] = {}
- self.cexecuted[f][lineno] = 1
- self.c = {}
-
- # morf_filename(morf). Return the filename for a module or file.
-
- def morf_filename(self, morf):
- if hasattr(morf, '__file__'):
- f = morf.__file__
- else:
- f = morf
- return self.canonical_filename(f)
-
- # analyze_morf(morf). Analyze the module or filename passed as
- # the argument. If the source code can't be found, raise an error.
- # Otherwise, return a tuple of (1) the canonical filename of the
- # source code for the module, (2) a list of lines of statements
- # in the source code, (3) a list of lines of excluded statements,
- # and (4), a map of line numbers to multi-line line number ranges, for
- # statements that cross lines.
- def analyze_morf(self, morf):
- if self.analysis_cache.has_key(morf):
- return self.analysis_cache[morf]
- filename = self.morf_filename(morf)
- ext = os.path.splitext(filename)[1]
- source, sourcef = None, None
- if ext == '.pyc':
- if not os.path.exists(filename[:-1]):
- source = self.get_zip_data(filename[:-1])
- if not source:
- raise CoverageException(
- "No source for compiled code '%s'." % filename
- )
- filename = filename[:-1]
- if not source:
- sourcef = open(filename, 'rU')
- source = sourcef.read()
- try:
- lines, excluded_lines, line_map = self.find_executable_statements(
- source, exclude=self.exclude_re
- )
- except SyntaxError, synerr:
- raise CoverageException(
- "Couldn't parse '%s' as Python source: '%s' at line %d" %
- (filename, synerr.msg, synerr.lineno)
- )
- if sourcef:
- sourcef.close()
- result = filename, lines, excluded_lines, line_map
- self.analysis_cache[morf] = result
- return result
-
- def first_line_of_tree(self, tree):
- while True:
- if len(tree) == 3 and type(tree[2]) == type(1):
- return tree[2]
- tree = tree[1]
-
- def last_line_of_tree(self, tree):
- while True:
- if len(tree) == 3 and type(tree[2]) == type(1):
- return tree[2]
- tree = tree[-1]
-
- def find_docstring_pass_pair(self, tree, spots):
- for i in range(1, len(tree)):
- if (self.is_string_constant(tree[i]) and
- self.is_pass_stmt(tree[i + 1])):
- first_line = self.first_line_of_tree(tree[i])
- last_line = self.last_line_of_tree(tree[i + 1])
- self.record_multiline(spots, first_line, last_line)
-
- def is_string_constant(self, tree):
- try:
- return tree[0] == symbol.stmt and tree[1][1][1][0] == symbol.expr_stmt
- except:
- return False
-
- def is_pass_stmt(self, tree):
- try:
- return tree[0] == symbol.stmt and tree[1][1][1][0] == symbol.pass_stmt
- except:
- return False
-
- def record_multiline(self, spots, i, j):
- for l in range(i, j + 1):
- spots[l] = (i, j)
-
- def get_suite_spots(self, tree, spots):
- """ Analyze a parse tree to find suite introducers which span a number
- of lines.
- """
- for i in range(1, len(tree)):
- if type(tree[i]) == type(()):
- if tree[i][0] == symbol.suite:
- # Found a suite, look back for the colon and keyword.
- lineno_colon = lineno_word = None
- for j in range(i - 1, 0, -1):
- if tree[j][0] == token.COLON:
- # Colons are never executed themselves: we want the
- # line number of the last token before the colon.
- lineno_colon = self.last_line_of_tree(tree[j - 1])
- elif tree[j][0] == token.NAME:
- if tree[j][1] == 'elif':
- # Find the line number of the first
- # non-terminal after the keyword.
- t = tree[j + 1]
- while t and token.ISNONTERMINAL(t[0]):
- t = t[1]
- if t:
- lineno_word = t[2]
- else:
- lineno_word = tree[j][2]
- break
- elif tree[j][0] == symbol.except_clause:
- # "except" clauses look like:
- # ('except_clause', ('NAME', 'except', lineno),...)
- if tree[j][1][0] == token.NAME:
- lineno_word = tree[j][1][2]
- break
- if lineno_colon and lineno_word:
- # Found colon and keyword, mark all the lines
- # between the two with the two line numbers.
- self.record_multiline(spots, lineno_word, lineno_colon)
-
- # "pass" statements are tricky: different versions
- # of Python treat them differently, especially in
- # the common case of a function with a doc string
- # and a single pass statement.
- self.find_docstring_pass_pair(tree[i], spots)
- elif tree[i][0] == symbol.simple_stmt:
- first_line = self.first_line_of_tree(tree[i])
- last_line = self.last_line_of_tree(tree[i])
- if first_line != last_line:
- self.record_multiline(spots, first_line, last_line)
- self.get_suite_spots(tree[i], spots)
-
- def find_executable_statements(self, text, exclude=None):
- # Find lines which match an exclusion pattern.
- excluded = {}
- suite_spots = {}
- if exclude:
- reExclude = re.compile(exclude)
- lines = text.split('\n')
- for i in range(len(lines)):
- if reExclude.search(lines[i]):
- excluded[i + 1] = 1
-
- # Parse the code and analyze the parse tree to find out which statements
- # are multiline, and where suites begin and end.
- import parser
- tree = parser.suite(text+'\n\n').totuple(1)
- self.get_suite_spots(tree, suite_spots)
- #print "Suite spots:", suite_spots
-
- # Use the compiler module to parse the text and find the executable
- # statements. We add newlines to be impervious to final partial lines.
- statements = {}
- ast = compiler.parse(text+'\n\n')
- visitor = StatementFindingAstVisitor(statements, excluded, suite_spots)
- compiler.walk(ast, visitor, walker=visitor)
-
- lines = statements.keys()
- lines.sort()
- excluded_lines = excluded.keys()
- excluded_lines.sort()
- return lines, excluded_lines, suite_spots
-
- # format_lines(statements, lines). Format a list of line numbers
- # for printing by coalescing groups of lines as long as the lines
- # represent consecutive statements. This will coalesce even if
- # there are gaps between statements, so if statements =
- # [1,2,3,4,5,10,11,12,13,14] and lines = [1,2,5,10,11,13,14] then
- # format_lines will return "1-2, 5-11, 13-14".
-
- def format_lines(self, statements, lines):
- pairs = []
- i = 0
- j = 0
- start = None
- pairs = []
- while i < len(statements) and j < len(lines):
- if statements[i] == lines[j]:
- if start == None:
- start = lines[j]
- end = lines[j]
- j = j + 1
- elif start:
- pairs.append((start, end))
- start = None
- i = i + 1
- if start:
- pairs.append((start, end))
- def stringify(pair):
- start, end = pair
- if start == end:
- return "%d" % start
- else:
- return "%d-%d" % (start, end)
- ret = string.join(map(stringify, pairs), ", ")
- return ret
-
- # Backward compatibility with version 1.
- def analysis(self, morf):
- f, s, _, m, mf = self.analysis2(morf)
- return f, s, m, mf
-
- def analysis2(self, morf):
- filename, statements, excluded, line_map = self.analyze_morf(morf)
- self.canonicalize_filenames()
- if not self.cexecuted.has_key(filename):
- self.cexecuted[filename] = {}
- missing = []
- for line in statements:
- lines = line_map.get(line, [line, line])
- for l in range(lines[0], lines[1]+1):
- if self.cexecuted[filename].has_key(l):
- break
- else:
- missing.append(line)
- return (filename, statements, excluded, missing,
- self.format_lines(statements, missing))
-
- def relative_filename(self, filename):
- """ Convert filename to relative filename from self.relative_dir.
- """
- return filename.replace(self.relative_dir, "")
-
- def morf_name(self, morf):
- """ Return the name of morf as used in report.
- """
- if hasattr(morf, '__name__'):
- return morf.__name__
- else:
- return self.relative_filename(os.path.splitext(morf)[0])
-
- def filter_by_prefix(self, morfs, omit_prefixes):
- """ Return list of morfs where the morf name does not begin
- with any one of the omit_prefixes.
- """
- filtered_morfs = []
- for morf in morfs:
- for prefix in omit_prefixes:
- if self.morf_name(morf).startswith(prefix):
- break
- else:
- filtered_morfs.append(morf)
-
- return filtered_morfs
-
- def morf_name_compare(self, x, y):
- return cmp(self.morf_name(x), self.morf_name(y))
-
- def report(self, morfs, show_missing=1, ignore_errors=0, file=None,
- omit_prefixes=[]):
- if not isinstance(morfs, types.ListType):
- morfs = [morfs]
- # On windows, the shell doesn't expand wildcards. Do it here.
- globbed = []
- for morf in morfs:
- if isinstance(morf, strclass):
- globbed.extend(glob.glob(morf))
- else:
- globbed.append(morf)
- morfs = globbed
-
- morfs = self.filter_by_prefix(morfs, omit_prefixes)
- morfs.sort(self.morf_name_compare)
-
- max_name = max([5] + map(len, map(self.morf_name, morfs)))
- fmt_name = "%%- %ds " % max_name
- fmt_err = fmt_name + "%s: %s"
- header = fmt_name % "Name" + " Stmts Exec Cover"
- fmt_coverage = fmt_name + "% 6d % 6d % 5d%%"
- if show_missing:
- header = header + " Missing"
- fmt_coverage = fmt_coverage + " %s"
- if not file:
- file = sys.stdout
- print >> file, header
- print >> file, "-" * len(header)
- total_statements = 0
- total_executed = 0
- for morf in morfs:
- name = self.morf_name(morf)
- try:
- _, statements, _, missing, readable = self.analysis2(morf)
- n = len(statements)
- m = n - len(missing)
- if n > 0:
- pc = 100.0 * m / n
- else:
- pc = 100.0
- args = (name, n, m, pc)
- if show_missing:
- args = args + (readable,)
- print >>file, fmt_coverage % args
- total_statements = total_statements + n
- total_executed = total_executed + m
- except KeyboardInterrupt: #pragma: no cover
- raise
- except:
- if not ignore_errors:
- typ, msg = sys.exc_info()[:2]
- print >>file, fmt_err % (name, typ, msg)
- if len(morfs) > 1:
- print >>file, "-" * len(header)
- if total_statements > 0:
- pc = 100.0 * total_executed / total_statements
- else:
- pc = 100.0
- args = ("TOTAL", total_statements, total_executed, pc)
- if show_missing:
- args = args + ("",)
- print >>file, fmt_coverage % args
-
- # annotate(morfs, ignore_errors).
-
- blank_re = re.compile(r"\s*(#|$)")
- else_re = re.compile(r"\s*else\s*:\s*(#|$)")
-
- def annotate(self, morfs, directory=None, ignore_errors=0, omit_prefixes=[]):
- morfs = self.filter_by_prefix(morfs, omit_prefixes)
- for morf in morfs:
- try:
- filename, statements, excluded, missing, _ = self.analysis2(morf)
- self.annotate_file(filename, statements, excluded, missing,
- directory)
- except KeyboardInterrupt:
- raise
- except:
- if not ignore_errors:
- raise
-
- def annotate_file(self, filename, statements, excluded, missing,
- directory=None):
- source = open(filename, 'r')
- if directory:
- dest_file = os.path.join(directory,
- os.path.basename(filename)
- + ',cover')
- else:
- dest_file = filename + ',cover'
- dest = open(dest_file, 'w')
- lineno = 0
- i = 0
- j = 0
- covered = 1
- while 1:
- line = source.readline()
- if line == '':
- break
- lineno = lineno + 1
- while i < len(statements) and statements[i] < lineno:
- i = i + 1
- while j < len(missing) and missing[j] < lineno:
- j = j + 1
- if i < len(statements) and statements[i] == lineno:
- covered = j >= len(missing) or missing[j] > lineno
- if self.blank_re.match(line):
- dest.write(' ')
- elif self.else_re.match(line):
- # Special logic for lines containing only 'else:'.
- # See [GDR 2001-12-04b, 3.2].
- if i >= len(statements) and j >= len(missing):
- dest.write('! ')
- elif i >= len(statements) or j >= len(missing):
- dest.write('> ')
- elif statements[i] == missing[j]:
- dest.write('! ')
- else:
- dest.write('> ')
- elif lineno in excluded:
- dest.write('- ')
- elif covered:
- dest.write('> ')
- else:
- dest.write('! ')
- dest.write(line)
- source.close()
- dest.close()
-
-# Singleton object.
-the_coverage = coverage()
-
-# Module functions call methods in the singleton object.
-def use_cache(*args, **kw):
- return the_coverage.use_cache(*args, **kw)
-
-def start(*args, **kw):
- return the_coverage.start(*args, **kw)
-
-def stop(*args, **kw):
- return the_coverage.stop(*args, **kw)
-
-def erase(*args, **kw):
- return the_coverage.erase(*args, **kw)
-
-def begin_recursive(*args, **kw):
- return the_coverage.begin_recursive(*args, **kw)
-
-def end_recursive(*args, **kw):
- return the_coverage.end_recursive(*args, **kw)
-
-def exclude(*args, **kw):
- return the_coverage.exclude(*args, **kw)
-
-def analysis(*args, **kw):
- return the_coverage.analysis(*args, **kw)
-
-def analysis2(*args, **kw):
- return the_coverage.analysis2(*args, **kw)
-
-def report(*args, **kw):
- return the_coverage.report(*args, **kw)
-
-def annotate(*args, **kw):
- return the_coverage.annotate(*args, **kw)
-
-def annotate_file(*args, **kw):
- return the_coverage.annotate_file(*args, **kw)
-
-# Save coverage data when Python exits. (The atexit module wasn't
-# introduced until Python 2.0, so use sys.exitfunc when it's not
-# available.)
-try:
- import atexit
- atexit.register(the_coverage.save)
-except ImportError:
- sys.exitfunc = the_coverage.save
-
-def main():
- the_coverage.command_line(sys.argv[1:])
-
-# Command-line interface.
-if __name__ == '__main__':
- main()
-
-
-# A. REFERENCES
-#
-# [GDR 2001-12-04a] "Statement coverage for Python"; Gareth Rees;
-# Ravenbrook Limited; 2001-12-04;
-# <http://www.nedbatchelder.com/code/modules/rees-coverage.html>.
-#
-# [GDR 2001-12-04b] "Statement coverage for Python: design and
-# analysis"; Gareth Rees; Ravenbrook Limited; 2001-12-04;
-# <http://www.nedbatchelder.com/code/modules/rees-design.html>.
-#
-# [van Rossum 2001-07-20a] "Python Reference Manual (releae 2.1.1)";
-# Guide van Rossum; 2001-07-20;
-# <http://www.python.org/doc/2.1.1/ref/ref.html>.
-#
-# [van Rossum 2001-07-20b] "Python Library Reference"; Guido van Rossum;
-# 2001-07-20; <http://www.python.org/doc/2.1.1/lib/lib.html>.
-#
-#
-# B. DOCUMENT HISTORY
-#
-# 2001-12-04 GDR Created.
-#
-# 2001-12-06 GDR Added command-line interface and source code
-# annotation.
-#
-# 2001-12-09 GDR Moved design and interface to separate documents.
-#
-# 2001-12-10 GDR Open cache file as binary on Windows. Allow
-# simultaneous -e and -x, or -a and -r.
-#
-# 2001-12-12 GDR Added command-line help. Cache analysis so that it
-# only needs to be done once when you specify -a and -r.
-#
-# 2001-12-13 GDR Improved speed while recording. Portable between
-# Python 1.5.2 and 2.1.1.
-#
-# 2002-01-03 GDR Module-level functions work correctly.
-#
-# 2002-01-07 GDR Update sys.path when running a file with the -x option,
-# so that it matches the value the program would get if it were run on
-# its own.
-#
-# 2004-12-12 NMB Significant code changes.
-# - Finding executable statements has been rewritten so that docstrings and
-# other quirks of Python execution aren't mistakenly identified as missing
-# lines.
-# - Lines can be excluded from consideration, even entire suites of lines.
-# - The filesystem cache of covered lines can be disabled programmatically.
-# - Modernized the code.
-#
-# 2004-12-14 NMB Minor tweaks. Return 'analysis' to its original behavior
-# and add 'analysis2'. Add a global for 'annotate', and factor it, adding
-# 'annotate_file'.
-#
-# 2004-12-31 NMB Allow for keyword arguments in the module global functions.
-# Thanks, Allen.
-#
-# 2005-12-02 NMB Call threading.settrace so that all threads are measured.
-# Thanks Martin Fuzzey. Add a file argument to report so that reports can be
-# captured to a different destination.
-#
-# 2005-12-03 NMB coverage.py can now measure itself.
-#
-# 2005-12-04 NMB Adapted Greg Rogers' patch for using relative filenames,
-# and sorting and omitting files to report on.
-#
-# 2006-07-23 NMB Applied Joseph Tate's patch for function decorators.
-#
-# 2006-08-21 NMB Applied Sigve Tjora and Mark van der Wal's fixes for argument
-# handling.
-#
-# 2006-08-22 NMB Applied Geoff Bache's parallel mode patch.
-#
-# 2006-08-23 NMB Refactorings to improve testability. Fixes to command-line
-# logic for parallel mode and collect.
-#
-# 2006-08-25 NMB "#pragma: nocover" is excluded by default.
-#
-# 2006-09-10 NMB Properly ignore docstrings and other constant expressions that
-# appear in the middle of a function, a problem reported by Tim Leslie.
-# Minor changes to avoid lint warnings.
-#
-# 2006-09-17 NMB coverage.erase() shouldn't clobber the exclude regex.
-# Change how parallel mode is invoked, and fix erase() so that it erases the
-# cache when called programmatically.
-#
-# 2007-07-21 NMB In reports, ignore code executed from strings, since we can't
-# do anything useful with it anyway.
-# Better file handling on Linux, thanks Guillaume Chazarain.
-# Better shell support on Windows, thanks Noel O'Boyle.
-# Python 2.2 support maintained, thanks Catherine Proulx.
-#
-# 2007-07-22 NMB Python 2.5 now fully supported. The method of dealing with
-# multi-line statements is now less sensitive to the exact line that Python
-# reports during execution. Pass statements are handled specially so that their
-# disappearance during execution won't throw off the measurement.
-#
-# 2007-07-23 NMB Now Python 2.5 is *really* fully supported: the body of the
-# new with statement is counted as executable.
-#
-# 2007-07-29 NMB Better packaging.
-#
-# 2007-09-30 NMB Don't try to predict whether a file is Python source based on
-# the extension. Extensionless files are often Pythons scripts. Instead, simply
-# parse the file and catch the syntax errors. Hat tip to Ben Finney.
-#
-# 2008-05-25 NMB Open files in rU mode to avoid line ending craziness.
-# Thanks, Edward Loper.
-#
-# 2008-09-14 NMB Add support for finding source files in eggs.
-# Don't check for morf's being instances of ModuleType, instead use duck typing
-# so that pseudo-modules can participate. Thanks, Imri Goldberg.
-# Use os.realpath as part of the fixing of filenames so that symlinks won't
-# confuse things. Thanks, Patrick Mezard.
-#
-#
-# C. COPYRIGHT AND LICENCE
-#
-# Copyright 2001 Gareth Rees. All rights reserved.
-# Copyright 2004-2008 Ned Batchelder. All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# 1. Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#
-# 2. Redistributions in binary form must reproduce the above copyright
-# notice, this list of conditions and the following disclaimer in the
-# documentation and/or other materials provided with the
-# distribution.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# HOLDERS AND CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
-# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
-# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
-# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
-# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
-# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
-# DAMAGE.
-#
-# $Id: coverage.py 96 2008-09-14 18:34:13Z nedbat $
--- a/tests/run-tests.py Sat Mar 06 10:02:45 2010 +0100
+++ b/tests/run-tests.py Tue Mar 16 11:37:14 2010 -0300
@@ -41,6 +41,7 @@
# completes fairly quickly, includes both shell and Python scripts, and
# includes some scripts that run daemon processes.)
+from distutils import version
import difflib
import errno
import optparse
@@ -110,8 +111,6 @@
" (default: $%s or %d)" % defaults['port'])
parser.add_option("-r", "--retest", action="store_true",
help="retest failed tests")
- parser.add_option("-s", "--cover_stdlib", action="store_true",
- help="print a test coverage report inc. standard libraries")
parser.add_option("-S", "--noskips", action="store_true",
help="don't report skip tests verbosely")
parser.add_option("-t", "--timeout", type="int",
@@ -155,16 +154,20 @@
% hgbin)
options.with_hg = hgbin
- options.anycoverage = (options.cover or
- options.cover_stdlib or
- options.annotate)
+ options.anycoverage = options.cover or options.annotate
+ if options.anycoverage:
+ try:
+ import coverage
+ covver = version.StrictVersion(coverage.__version__).version
+ if covver < (3, 3):
+ parser.error('coverage options require coverage 3.3 or later')
+ except ImportError:
+ parser.error('coverage options now require the coverage package')
- if options.anycoverage and options.with_hg:
- # I'm not sure if this is a fundamental limitation or just a
- # bug. But I don't want to waste people's time and energy doing
- # test runs that don't give the results they want.
- parser.error("sorry, coverage options do not work when --with-hg "
- "or --local specified")
+ if options.anycoverage and options.local:
+ # this needs some path mangling somewhere, I guess
+ parser.error("sorry, coverage options do not work when --local "
+ "is specified")
global vlog
if options.verbose:
@@ -390,20 +393,15 @@
f.close()
if options.anycoverage:
- vlog("# Installing coverage wrapper")
- os.environ['COVERAGE_FILE'] = COVERAGE_FILE
- if os.path.exists(COVERAGE_FILE):
- os.unlink(COVERAGE_FILE)
- # Create a wrapper script to invoke hg via coverage.py
- os.rename(os.path.join(BINDIR, "hg"), os.path.join(BINDIR, "_hg.py"))
- f = open(os.path.join(BINDIR, 'hg'), 'w')
- f.write('#!' + sys.executable + '\n')
- f.write('import sys, os; os.execv(sys.executable, [sys.executable, '
- '"%s", "-x", "-p", "%s"] + sys.argv[1:])\n' %
- (os.path.join(TESTDIR, 'coverage.py'),
- os.path.join(BINDIR, '_hg.py')))
- f.close()
- os.chmod(os.path.join(BINDIR, 'hg'), 0700)
+ custom = os.path.join(TESTDIR, 'sitecustomize.py')
+ target = os.path.join(PYTHONDIR, 'sitecustomize.py')
+ vlog('# Installing coverage trigger to %s' % target)
+ shutil.copyfile(custom, target)
+ rc = os.path.join(TESTDIR, '.coveragerc')
+ vlog('# Installing coverage rc to %s' % rc)
+ os.environ['COVERAGE_PROCESS_START'] = rc
+ fn = os.path.join(INST, '..', '.coverage')
+ os.environ['COVERAGE_FILE'] = fn
def outputcoverage(options):
@@ -411,22 +409,15 @@
os.chdir(PYTHONDIR)
def covrun(*args):
- start = sys.executable, os.path.join(TESTDIR, 'coverage.py')
- cmd = '"%s" "%s" %s' % (start[0], start[1], ' '.join(args))
+ cmd = 'coverage %s' % ' '.join(args)
vlog('# Running: %s' % cmd)
os.system(cmd)
- omit = [BINDIR, TESTDIR, PYTHONDIR]
- if not options.cover_stdlib:
- # Exclude as system paths (ignoring empty strings seen on win)
- omit += [x for x in sys.path if x != '']
- omit = ','.join(omit)
+ if options.child:
+ return
- covrun('-c') # combine from parallel processes
- for fn in os.listdir(TESTDIR):
- if fn.startswith('.coverage.'):
- os.unlink(os.path.join(TESTDIR, fn))
-
+ covrun('-c')
+ omit = ','.join([BINDIR, TESTDIR])
covrun('-i', '-r', '"--omit=%s"' % omit) # report
if options.annotate:
adir = os.path.join(TESTDIR, 'annotated')
@@ -668,6 +659,8 @@
optcopy['jobs'] = 1
if optcopy['with_hg'] is None:
optcopy['with_hg'] = os.path.join(BINDIR, "hg")
+ optcopy.pop('anycoverage', None)
+
opts = []
for opt, value in optcopy.iteritems():
name = '--' + opt.replace('_', '-')
@@ -729,6 +722,9 @@
_checkhglib("Tested")
print "# Ran %d tests, %d skipped, %d failed." % (
tested, skipped, failed)
+
+ if options.anycoverage:
+ outputcoverage(options)
sys.exit(failures != 0)
def runtests(options, tests):
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/sitecustomize.py Tue Mar 16 11:37:14 2010 -0300
@@ -0,0 +1,6 @@
+try:
+ import coverage
+ if hasattr(coverage, 'process_startup'):
+ coverage.process_startup()
+except ImportError:
+ pass
--- a/tests/test-acl.out Sat Mar 06 10:02:45 2010 +0100
+++ b/tests/test-acl.out Tue Mar 16 11:37:14 2010 -0300
@@ -20,38 +20,38 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
adding changesets
-bundle changes: 0 chunks
-bundle changes: 1 chunks
-bundle changes: 2 chunks
-bundle changes: 3 chunks
-bundle changes: 4 chunks
-bundle changes: 5 chunks
-bundle changes: 6 chunks
-bundle changes: 7 chunks
-bundle changes: 8 chunks
-bundle changes: 9 chunks
-bundle manifests: 0 chunks
-bundle manifests: 1 chunks
-bundle manifests: 2 chunks
-bundle manifests: 3 chunks
-bundle manifests: 4 chunks
-bundle manifests: 5 chunks
-bundle manifests: 6 chunks
-bundle manifests: 7 chunks
-bundle manifests: 8 chunks
-bundle manifests: 9 chunks
-bundle files: foo/Bar/file.txt 0 chunks
-bundle files: foo/Bar/file.txt 1 chunks
-bundle files: foo/Bar/file.txt 2 chunks
-bundle files: foo/Bar/file.txt 3 chunks
-bundle files: foo/file.txt 4 chunks
-bundle files: foo/file.txt 5 chunks
-bundle files: foo/file.txt 6 chunks
-bundle files: foo/file.txt 7 chunks
-bundle files: quux/file.py 8 chunks
-bundle files: quux/file.py 9 chunks
-bundle files: quux/file.py 10 chunks
-bundle files: quux/file.py 11 chunks
+bundling changes: 0 chunks
+bundling changes: 1 chunks
+bundling changes: 2 chunks
+bundling changes: 3 chunks
+bundling changes: 4 chunks
+bundling changes: 5 chunks
+bundling changes: 6 chunks
+bundling changes: 7 chunks
+bundling changes: 8 chunks
+bundling changes: 9 chunks
+bundling manifests: 0 chunks
+bundling manifests: 1 chunks
+bundling manifests: 2 chunks
+bundling manifests: 3 chunks
+bundling manifests: 4 chunks
+bundling manifests: 5 chunks
+bundling manifests: 6 chunks
+bundling manifests: 7 chunks
+bundling manifests: 8 chunks
+bundling manifests: 9 chunks
+bundling files: foo/Bar/file.txt 0 chunks
+bundling files: foo/Bar/file.txt 1 chunks
+bundling files: foo/Bar/file.txt 2 chunks
+bundling files: foo/Bar/file.txt 3 chunks
+bundling files: foo/file.txt 4 chunks
+bundling files: foo/file.txt 5 chunks
+bundling files: foo/file.txt 6 chunks
+bundling files: foo/file.txt 7 chunks
+bundling files: quux/file.py 8 chunks
+bundling files: quux/file.py 9 chunks
+bundling files: quux/file.py 10 chunks
+bundling files: quux/file.py 11 chunks
changesets: 1 chunks
add changeset ef1ea85a6374
changesets: 2 chunks
@@ -90,38 +90,38 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
adding changesets
-bundle changes: 0 chunks
-bundle changes: 1 chunks
-bundle changes: 2 chunks
-bundle changes: 3 chunks
-bundle changes: 4 chunks
-bundle changes: 5 chunks
-bundle changes: 6 chunks
-bundle changes: 7 chunks
-bundle changes: 8 chunks
-bundle changes: 9 chunks
-bundle manifests: 0 chunks
-bundle manifests: 1 chunks
-bundle manifests: 2 chunks
-bundle manifests: 3 chunks
-bundle manifests: 4 chunks
-bundle manifests: 5 chunks
-bundle manifests: 6 chunks
-bundle manifests: 7 chunks
-bundle manifests: 8 chunks
-bundle manifests: 9 chunks
-bundle files: foo/Bar/file.txt 0 chunks
-bundle files: foo/Bar/file.txt 1 chunks
-bundle files: foo/Bar/file.txt 2 chunks
-bundle files: foo/Bar/file.txt 3 chunks
-bundle files: foo/file.txt 4 chunks
-bundle files: foo/file.txt 5 chunks
-bundle files: foo/file.txt 6 chunks
-bundle files: foo/file.txt 7 chunks
-bundle files: quux/file.py 8 chunks
-bundle files: quux/file.py 9 chunks
-bundle files: quux/file.py 10 chunks
-bundle files: quux/file.py 11 chunks
+bundling changes: 0 chunks
+bundling changes: 1 chunks
+bundling changes: 2 chunks
+bundling changes: 3 chunks
+bundling changes: 4 chunks
+bundling changes: 5 chunks
+bundling changes: 6 chunks
+bundling changes: 7 chunks
+bundling changes: 8 chunks
+bundling changes: 9 chunks
+bundling manifests: 0 chunks
+bundling manifests: 1 chunks
+bundling manifests: 2 chunks
+bundling manifests: 3 chunks
+bundling manifests: 4 chunks
+bundling manifests: 5 chunks
+bundling manifests: 6 chunks
+bundling manifests: 7 chunks
+bundling manifests: 8 chunks
+bundling manifests: 9 chunks
+bundling files: foo/Bar/file.txt 0 chunks
+bundling files: foo/Bar/file.txt 1 chunks
+bundling files: foo/Bar/file.txt 2 chunks
+bundling files: foo/Bar/file.txt 3 chunks
+bundling files: foo/file.txt 4 chunks
+bundling files: foo/file.txt 5 chunks
+bundling files: foo/file.txt 6 chunks
+bundling files: foo/file.txt 7 chunks
+bundling files: quux/file.py 8 chunks
+bundling files: quux/file.py 9 chunks
+bundling files: quux/file.py 10 chunks
+bundling files: quux/file.py 11 chunks
changesets: 1 chunks
add changeset ef1ea85a6374
changesets: 2 chunks
@@ -164,38 +164,38 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
adding changesets
-bundle changes: 0 chunks
-bundle changes: 1 chunks
-bundle changes: 2 chunks
-bundle changes: 3 chunks
-bundle changes: 4 chunks
-bundle changes: 5 chunks
-bundle changes: 6 chunks
-bundle changes: 7 chunks
-bundle changes: 8 chunks
-bundle changes: 9 chunks
-bundle manifests: 0 chunks
-bundle manifests: 1 chunks
-bundle manifests: 2 chunks
-bundle manifests: 3 chunks
-bundle manifests: 4 chunks
-bundle manifests: 5 chunks
-bundle manifests: 6 chunks
-bundle manifests: 7 chunks
-bundle manifests: 8 chunks
-bundle manifests: 9 chunks
-bundle files: foo/Bar/file.txt 0 chunks
-bundle files: foo/Bar/file.txt 1 chunks
-bundle files: foo/Bar/file.txt 2 chunks
-bundle files: foo/Bar/file.txt 3 chunks
-bundle files: foo/file.txt 4 chunks
-bundle files: foo/file.txt 5 chunks
-bundle files: foo/file.txt 6 chunks
-bundle files: foo/file.txt 7 chunks
-bundle files: quux/file.py 8 chunks
-bundle files: quux/file.py 9 chunks
-bundle files: quux/file.py 10 chunks
-bundle files: quux/file.py 11 chunks
+bundling changes: 0 chunks
+bundling changes: 1 chunks
+bundling changes: 2 chunks
+bundling changes: 3 chunks
+bundling changes: 4 chunks
+bundling changes: 5 chunks
+bundling changes: 6 chunks
+bundling changes: 7 chunks
+bundling changes: 8 chunks
+bundling changes: 9 chunks
+bundling manifests: 0 chunks
+bundling manifests: 1 chunks
+bundling manifests: 2 chunks
+bundling manifests: 3 chunks
+bundling manifests: 4 chunks
+bundling manifests: 5 chunks
+bundling manifests: 6 chunks
+bundling manifests: 7 chunks
+bundling manifests: 8 chunks
+bundling manifests: 9 chunks
+bundling files: foo/Bar/file.txt 0 chunks
+bundling files: foo/Bar/file.txt 1 chunks
+bundling files: foo/Bar/file.txt 2 chunks
+bundling files: foo/Bar/file.txt 3 chunks
+bundling files: foo/file.txt 4 chunks
+bundling files: foo/file.txt 5 chunks
+bundling files: foo/file.txt 6 chunks
+bundling files: foo/file.txt 7 chunks
+bundling files: quux/file.py 8 chunks
+bundling files: quux/file.py 9 chunks
+bundling files: quux/file.py 10 chunks
+bundling files: quux/file.py 11 chunks
changesets: 1 chunks
add changeset ef1ea85a6374
changesets: 2 chunks
@@ -243,38 +243,38 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
adding changesets
-bundle changes: 0 chunks
-bundle changes: 1 chunks
-bundle changes: 2 chunks
-bundle changes: 3 chunks
-bundle changes: 4 chunks
-bundle changes: 5 chunks
-bundle changes: 6 chunks
-bundle changes: 7 chunks
-bundle changes: 8 chunks
-bundle changes: 9 chunks
-bundle manifests: 0 chunks
-bundle manifests: 1 chunks
-bundle manifests: 2 chunks
-bundle manifests: 3 chunks
-bundle manifests: 4 chunks
-bundle manifests: 5 chunks
-bundle manifests: 6 chunks
-bundle manifests: 7 chunks
-bundle manifests: 8 chunks
-bundle manifests: 9 chunks
-bundle files: foo/Bar/file.txt 0 chunks
-bundle files: foo/Bar/file.txt 1 chunks
-bundle files: foo/Bar/file.txt 2 chunks
-bundle files: foo/Bar/file.txt 3 chunks
-bundle files: foo/file.txt 4 chunks
-bundle files: foo/file.txt 5 chunks
-bundle files: foo/file.txt 6 chunks
-bundle files: foo/file.txt 7 chunks
-bundle files: quux/file.py 8 chunks
-bundle files: quux/file.py 9 chunks
-bundle files: quux/file.py 10 chunks
-bundle files: quux/file.py 11 chunks
+bundling changes: 0 chunks
+bundling changes: 1 chunks
+bundling changes: 2 chunks
+bundling changes: 3 chunks
+bundling changes: 4 chunks
+bundling changes: 5 chunks
+bundling changes: 6 chunks
+bundling changes: 7 chunks
+bundling changes: 8 chunks
+bundling changes: 9 chunks
+bundling manifests: 0 chunks
+bundling manifests: 1 chunks
+bundling manifests: 2 chunks
+bundling manifests: 3 chunks
+bundling manifests: 4 chunks
+bundling manifests: 5 chunks
+bundling manifests: 6 chunks
+bundling manifests: 7 chunks
+bundling manifests: 8 chunks
+bundling manifests: 9 chunks
+bundling files: foo/Bar/file.txt 0 chunks
+bundling files: foo/Bar/file.txt 1 chunks
+bundling files: foo/Bar/file.txt 2 chunks
+bundling files: foo/Bar/file.txt 3 chunks
+bundling files: foo/file.txt 4 chunks
+bundling files: foo/file.txt 5 chunks
+bundling files: foo/file.txt 6 chunks
+bundling files: foo/file.txt 7 chunks
+bundling files: quux/file.py 8 chunks
+bundling files: quux/file.py 9 chunks
+bundling files: quux/file.py 10 chunks
+bundling files: quux/file.py 11 chunks
changesets: 1 chunks
add changeset ef1ea85a6374
changesets: 2 chunks
@@ -323,38 +323,38 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
adding changesets
-bundle changes: 0 chunks
-bundle changes: 1 chunks
-bundle changes: 2 chunks
-bundle changes: 3 chunks
-bundle changes: 4 chunks
-bundle changes: 5 chunks
-bundle changes: 6 chunks
-bundle changes: 7 chunks
-bundle changes: 8 chunks
-bundle changes: 9 chunks
-bundle manifests: 0 chunks
-bundle manifests: 1 chunks
-bundle manifests: 2 chunks
-bundle manifests: 3 chunks
-bundle manifests: 4 chunks
-bundle manifests: 5 chunks
-bundle manifests: 6 chunks
-bundle manifests: 7 chunks
-bundle manifests: 8 chunks
-bundle manifests: 9 chunks
-bundle files: foo/Bar/file.txt 0 chunks
-bundle files: foo/Bar/file.txt 1 chunks
-bundle files: foo/Bar/file.txt 2 chunks
-bundle files: foo/Bar/file.txt 3 chunks
-bundle files: foo/file.txt 4 chunks
-bundle files: foo/file.txt 5 chunks
-bundle files: foo/file.txt 6 chunks
-bundle files: foo/file.txt 7 chunks
-bundle files: quux/file.py 8 chunks
-bundle files: quux/file.py 9 chunks
-bundle files: quux/file.py 10 chunks
-bundle files: quux/file.py 11 chunks
+bundling changes: 0 chunks
+bundling changes: 1 chunks
+bundling changes: 2 chunks
+bundling changes: 3 chunks
+bundling changes: 4 chunks
+bundling changes: 5 chunks
+bundling changes: 6 chunks
+bundling changes: 7 chunks
+bundling changes: 8 chunks
+bundling changes: 9 chunks
+bundling manifests: 0 chunks
+bundling manifests: 1 chunks
+bundling manifests: 2 chunks
+bundling manifests: 3 chunks
+bundling manifests: 4 chunks
+bundling manifests: 5 chunks
+bundling manifests: 6 chunks
+bundling manifests: 7 chunks
+bundling manifests: 8 chunks
+bundling manifests: 9 chunks
+bundling files: foo/Bar/file.txt 0 chunks
+bundling files: foo/Bar/file.txt 1 chunks
+bundling files: foo/Bar/file.txt 2 chunks
+bundling files: foo/Bar/file.txt 3 chunks
+bundling files: foo/file.txt 4 chunks
+bundling files: foo/file.txt 5 chunks
+bundling files: foo/file.txt 6 chunks
+bundling files: foo/file.txt 7 chunks
+bundling files: quux/file.py 8 chunks
+bundling files: quux/file.py 9 chunks
+bundling files: quux/file.py 10 chunks
+bundling files: quux/file.py 11 chunks
changesets: 1 chunks
add changeset ef1ea85a6374
changesets: 2 chunks
@@ -406,38 +406,38 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
adding changesets
-bundle changes: 0 chunks
-bundle changes: 1 chunks
-bundle changes: 2 chunks
-bundle changes: 3 chunks
-bundle changes: 4 chunks
-bundle changes: 5 chunks
-bundle changes: 6 chunks
-bundle changes: 7 chunks
-bundle changes: 8 chunks
-bundle changes: 9 chunks
-bundle manifests: 0 chunks
-bundle manifests: 1 chunks
-bundle manifests: 2 chunks
-bundle manifests: 3 chunks
-bundle manifests: 4 chunks
-bundle manifests: 5 chunks
-bundle manifests: 6 chunks
-bundle manifests: 7 chunks
-bundle manifests: 8 chunks
-bundle manifests: 9 chunks
-bundle files: foo/Bar/file.txt 0 chunks
-bundle files: foo/Bar/file.txt 1 chunks
-bundle files: foo/Bar/file.txt 2 chunks
-bundle files: foo/Bar/file.txt 3 chunks
-bundle files: foo/file.txt 4 chunks
-bundle files: foo/file.txt 5 chunks
-bundle files: foo/file.txt 6 chunks
-bundle files: foo/file.txt 7 chunks
-bundle files: quux/file.py 8 chunks
-bundle files: quux/file.py 9 chunks
-bundle files: quux/file.py 10 chunks
-bundle files: quux/file.py 11 chunks
+bundling changes: 0 chunks
+bundling changes: 1 chunks
+bundling changes: 2 chunks
+bundling changes: 3 chunks
+bundling changes: 4 chunks
+bundling changes: 5 chunks
+bundling changes: 6 chunks
+bundling changes: 7 chunks
+bundling changes: 8 chunks
+bundling changes: 9 chunks
+bundling manifests: 0 chunks
+bundling manifests: 1 chunks
+bundling manifests: 2 chunks
+bundling manifests: 3 chunks
+bundling manifests: 4 chunks
+bundling manifests: 5 chunks
+bundling manifests: 6 chunks
+bundling manifests: 7 chunks
+bundling manifests: 8 chunks
+bundling manifests: 9 chunks
+bundling files: foo/Bar/file.txt 0 chunks
+bundling files: foo/Bar/file.txt 1 chunks
+bundling files: foo/Bar/file.txt 2 chunks
+bundling files: foo/Bar/file.txt 3 chunks
+bundling files: foo/file.txt 4 chunks
+bundling files: foo/file.txt 5 chunks
+bundling files: foo/file.txt 6 chunks
+bundling files: foo/file.txt 7 chunks
+bundling files: quux/file.py 8 chunks
+bundling files: quux/file.py 9 chunks
+bundling files: quux/file.py 10 chunks
+bundling files: quux/file.py 11 chunks
changesets: 1 chunks
add changeset ef1ea85a6374
changesets: 2 chunks
@@ -488,38 +488,38 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
adding changesets
-bundle changes: 0 chunks
-bundle changes: 1 chunks
-bundle changes: 2 chunks
-bundle changes: 3 chunks
-bundle changes: 4 chunks
-bundle changes: 5 chunks
-bundle changes: 6 chunks
-bundle changes: 7 chunks
-bundle changes: 8 chunks
-bundle changes: 9 chunks
-bundle manifests: 0 chunks
-bundle manifests: 1 chunks
-bundle manifests: 2 chunks
-bundle manifests: 3 chunks
-bundle manifests: 4 chunks
-bundle manifests: 5 chunks
-bundle manifests: 6 chunks
-bundle manifests: 7 chunks
-bundle manifests: 8 chunks
-bundle manifests: 9 chunks
-bundle files: foo/Bar/file.txt 0 chunks
-bundle files: foo/Bar/file.txt 1 chunks
-bundle files: foo/Bar/file.txt 2 chunks
-bundle files: foo/Bar/file.txt 3 chunks
-bundle files: foo/file.txt 4 chunks
-bundle files: foo/file.txt 5 chunks
-bundle files: foo/file.txt 6 chunks
-bundle files: foo/file.txt 7 chunks
-bundle files: quux/file.py 8 chunks
-bundle files: quux/file.py 9 chunks
-bundle files: quux/file.py 10 chunks
-bundle files: quux/file.py 11 chunks
+bundling changes: 0 chunks
+bundling changes: 1 chunks
+bundling changes: 2 chunks
+bundling changes: 3 chunks
+bundling changes: 4 chunks
+bundling changes: 5 chunks
+bundling changes: 6 chunks
+bundling changes: 7 chunks
+bundling changes: 8 chunks
+bundling changes: 9 chunks
+bundling manifests: 0 chunks
+bundling manifests: 1 chunks
+bundling manifests: 2 chunks
+bundling manifests: 3 chunks
+bundling manifests: 4 chunks
+bundling manifests: 5 chunks
+bundling manifests: 6 chunks
+bundling manifests: 7 chunks
+bundling manifests: 8 chunks
+bundling manifests: 9 chunks
+bundling files: foo/Bar/file.txt 0 chunks
+bundling files: foo/Bar/file.txt 1 chunks
+bundling files: foo/Bar/file.txt 2 chunks
+bundling files: foo/Bar/file.txt 3 chunks
+bundling files: foo/file.txt 4 chunks
+bundling files: foo/file.txt 5 chunks
+bundling files: foo/file.txt 6 chunks
+bundling files: foo/file.txt 7 chunks
+bundling files: quux/file.py 8 chunks
+bundling files: quux/file.py 9 chunks
+bundling files: quux/file.py 10 chunks
+bundling files: quux/file.py 11 chunks
changesets: 1 chunks
add changeset ef1ea85a6374
changesets: 2 chunks
@@ -573,38 +573,38 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
adding changesets
-bundle changes: 0 chunks
-bundle changes: 1 chunks
-bundle changes: 2 chunks
-bundle changes: 3 chunks
-bundle changes: 4 chunks
-bundle changes: 5 chunks
-bundle changes: 6 chunks
-bundle changes: 7 chunks
-bundle changes: 8 chunks
-bundle changes: 9 chunks
-bundle manifests: 0 chunks
-bundle manifests: 1 chunks
-bundle manifests: 2 chunks
-bundle manifests: 3 chunks
-bundle manifests: 4 chunks
-bundle manifests: 5 chunks
-bundle manifests: 6 chunks
-bundle manifests: 7 chunks
-bundle manifests: 8 chunks
-bundle manifests: 9 chunks
-bundle files: foo/Bar/file.txt 0 chunks
-bundle files: foo/Bar/file.txt 1 chunks
-bundle files: foo/Bar/file.txt 2 chunks
-bundle files: foo/Bar/file.txt 3 chunks
-bundle files: foo/file.txt 4 chunks
-bundle files: foo/file.txt 5 chunks
-bundle files: foo/file.txt 6 chunks
-bundle files: foo/file.txt 7 chunks
-bundle files: quux/file.py 8 chunks
-bundle files: quux/file.py 9 chunks
-bundle files: quux/file.py 10 chunks
-bundle files: quux/file.py 11 chunks
+bundling changes: 0 chunks
+bundling changes: 1 chunks
+bundling changes: 2 chunks
+bundling changes: 3 chunks
+bundling changes: 4 chunks
+bundling changes: 5 chunks
+bundling changes: 6 chunks
+bundling changes: 7 chunks
+bundling changes: 8 chunks
+bundling changes: 9 chunks
+bundling manifests: 0 chunks
+bundling manifests: 1 chunks
+bundling manifests: 2 chunks
+bundling manifests: 3 chunks
+bundling manifests: 4 chunks
+bundling manifests: 5 chunks
+bundling manifests: 6 chunks
+bundling manifests: 7 chunks
+bundling manifests: 8 chunks
+bundling manifests: 9 chunks
+bundling files: foo/Bar/file.txt 0 chunks
+bundling files: foo/Bar/file.txt 1 chunks
+bundling files: foo/Bar/file.txt 2 chunks
+bundling files: foo/Bar/file.txt 3 chunks
+bundling files: foo/file.txt 4 chunks
+bundling files: foo/file.txt 5 chunks
+bundling files: foo/file.txt 6 chunks
+bundling files: foo/file.txt 7 chunks
+bundling files: quux/file.py 8 chunks
+bundling files: quux/file.py 9 chunks
+bundling files: quux/file.py 10 chunks
+bundling files: quux/file.py 11 chunks
changesets: 1 chunks
add changeset ef1ea85a6374
changesets: 2 chunks
@@ -657,38 +657,38 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
adding changesets
-bundle changes: 0 chunks
-bundle changes: 1 chunks
-bundle changes: 2 chunks
-bundle changes: 3 chunks
-bundle changes: 4 chunks
-bundle changes: 5 chunks
-bundle changes: 6 chunks
-bundle changes: 7 chunks
-bundle changes: 8 chunks
-bundle changes: 9 chunks
-bundle manifests: 0 chunks
-bundle manifests: 1 chunks
-bundle manifests: 2 chunks
-bundle manifests: 3 chunks
-bundle manifests: 4 chunks
-bundle manifests: 5 chunks
-bundle manifests: 6 chunks
-bundle manifests: 7 chunks
-bundle manifests: 8 chunks
-bundle manifests: 9 chunks
-bundle files: foo/Bar/file.txt 0 chunks
-bundle files: foo/Bar/file.txt 1 chunks
-bundle files: foo/Bar/file.txt 2 chunks
-bundle files: foo/Bar/file.txt 3 chunks
-bundle files: foo/file.txt 4 chunks
-bundle files: foo/file.txt 5 chunks
-bundle files: foo/file.txt 6 chunks
-bundle files: foo/file.txt 7 chunks
-bundle files: quux/file.py 8 chunks
-bundle files: quux/file.py 9 chunks
-bundle files: quux/file.py 10 chunks
-bundle files: quux/file.py 11 chunks
+bundling changes: 0 chunks
+bundling changes: 1 chunks
+bundling changes: 2 chunks
+bundling changes: 3 chunks
+bundling changes: 4 chunks
+bundling changes: 5 chunks
+bundling changes: 6 chunks
+bundling changes: 7 chunks
+bundling changes: 8 chunks
+bundling changes: 9 chunks
+bundling manifests: 0 chunks
+bundling manifests: 1 chunks
+bundling manifests: 2 chunks
+bundling manifests: 3 chunks
+bundling manifests: 4 chunks
+bundling manifests: 5 chunks
+bundling manifests: 6 chunks
+bundling manifests: 7 chunks
+bundling manifests: 8 chunks
+bundling manifests: 9 chunks
+bundling files: foo/Bar/file.txt 0 chunks
+bundling files: foo/Bar/file.txt 1 chunks
+bundling files: foo/Bar/file.txt 2 chunks
+bundling files: foo/Bar/file.txt 3 chunks
+bundling files: foo/file.txt 4 chunks
+bundling files: foo/file.txt 5 chunks
+bundling files: foo/file.txt 6 chunks
+bundling files: foo/file.txt 7 chunks
+bundling files: quux/file.py 8 chunks
+bundling files: quux/file.py 9 chunks
+bundling files: quux/file.py 10 chunks
+bundling files: quux/file.py 11 chunks
changesets: 1 chunks
add changeset ef1ea85a6374
changesets: 2 chunks
@@ -742,38 +742,38 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
adding changesets
-bundle changes: 0 chunks
-bundle changes: 1 chunks
-bundle changes: 2 chunks
-bundle changes: 3 chunks
-bundle changes: 4 chunks
-bundle changes: 5 chunks
-bundle changes: 6 chunks
-bundle changes: 7 chunks
-bundle changes: 8 chunks
-bundle changes: 9 chunks
-bundle manifests: 0 chunks
-bundle manifests: 1 chunks
-bundle manifests: 2 chunks
-bundle manifests: 3 chunks
-bundle manifests: 4 chunks
-bundle manifests: 5 chunks
-bundle manifests: 6 chunks
-bundle manifests: 7 chunks
-bundle manifests: 8 chunks
-bundle manifests: 9 chunks
-bundle files: foo/Bar/file.txt 0 chunks
-bundle files: foo/Bar/file.txt 1 chunks
-bundle files: foo/Bar/file.txt 2 chunks
-bundle files: foo/Bar/file.txt 3 chunks
-bundle files: foo/file.txt 4 chunks
-bundle files: foo/file.txt 5 chunks
-bundle files: foo/file.txt 6 chunks
-bundle files: foo/file.txt 7 chunks
-bundle files: quux/file.py 8 chunks
-bundle files: quux/file.py 9 chunks
-bundle files: quux/file.py 10 chunks
-bundle files: quux/file.py 11 chunks
+bundling changes: 0 chunks
+bundling changes: 1 chunks
+bundling changes: 2 chunks
+bundling changes: 3 chunks
+bundling changes: 4 chunks
+bundling changes: 5 chunks
+bundling changes: 6 chunks
+bundling changes: 7 chunks
+bundling changes: 8 chunks
+bundling changes: 9 chunks
+bundling manifests: 0 chunks
+bundling manifests: 1 chunks
+bundling manifests: 2 chunks
+bundling manifests: 3 chunks
+bundling manifests: 4 chunks
+bundling manifests: 5 chunks
+bundling manifests: 6 chunks
+bundling manifests: 7 chunks
+bundling manifests: 8 chunks
+bundling manifests: 9 chunks
+bundling files: foo/Bar/file.txt 0 chunks
+bundling files: foo/Bar/file.txt 1 chunks
+bundling files: foo/Bar/file.txt 2 chunks
+bundling files: foo/Bar/file.txt 3 chunks
+bundling files: foo/file.txt 4 chunks
+bundling files: foo/file.txt 5 chunks
+bundling files: foo/file.txt 6 chunks
+bundling files: foo/file.txt 7 chunks
+bundling files: quux/file.py 8 chunks
+bundling files: quux/file.py 9 chunks
+bundling files: quux/file.py 10 chunks
+bundling files: quux/file.py 11 chunks
changesets: 1 chunks
add changeset ef1ea85a6374
changesets: 2 chunks
@@ -828,38 +828,38 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
adding changesets
-bundle changes: 0 chunks
-bundle changes: 1 chunks
-bundle changes: 2 chunks
-bundle changes: 3 chunks
-bundle changes: 4 chunks
-bundle changes: 5 chunks
-bundle changes: 6 chunks
-bundle changes: 7 chunks
-bundle changes: 8 chunks
-bundle changes: 9 chunks
-bundle manifests: 0 chunks
-bundle manifests: 1 chunks
-bundle manifests: 2 chunks
-bundle manifests: 3 chunks
-bundle manifests: 4 chunks
-bundle manifests: 5 chunks
-bundle manifests: 6 chunks
-bundle manifests: 7 chunks
-bundle manifests: 8 chunks
-bundle manifests: 9 chunks
-bundle files: foo/Bar/file.txt 0 chunks
-bundle files: foo/Bar/file.txt 1 chunks
-bundle files: foo/Bar/file.txt 2 chunks
-bundle files: foo/Bar/file.txt 3 chunks
-bundle files: foo/file.txt 4 chunks
-bundle files: foo/file.txt 5 chunks
-bundle files: foo/file.txt 6 chunks
-bundle files: foo/file.txt 7 chunks
-bundle files: quux/file.py 8 chunks
-bundle files: quux/file.py 9 chunks
-bundle files: quux/file.py 10 chunks
-bundle files: quux/file.py 11 chunks
+bundling changes: 0 chunks
+bundling changes: 1 chunks
+bundling changes: 2 chunks
+bundling changes: 3 chunks
+bundling changes: 4 chunks
+bundling changes: 5 chunks
+bundling changes: 6 chunks
+bundling changes: 7 chunks
+bundling changes: 8 chunks
+bundling changes: 9 chunks
+bundling manifests: 0 chunks
+bundling manifests: 1 chunks
+bundling manifests: 2 chunks
+bundling manifests: 3 chunks
+bundling manifests: 4 chunks
+bundling manifests: 5 chunks
+bundling manifests: 6 chunks
+bundling manifests: 7 chunks
+bundling manifests: 8 chunks
+bundling manifests: 9 chunks
+bundling files: foo/Bar/file.txt 0 chunks
+bundling files: foo/Bar/file.txt 1 chunks
+bundling files: foo/Bar/file.txt 2 chunks
+bundling files: foo/Bar/file.txt 3 chunks
+bundling files: foo/file.txt 4 chunks
+bundling files: foo/file.txt 5 chunks
+bundling files: foo/file.txt 6 chunks
+bundling files: foo/file.txt 7 chunks
+bundling files: quux/file.py 8 chunks
+bundling files: quux/file.py 9 chunks
+bundling files: quux/file.py 10 chunks
+bundling files: quux/file.py 11 chunks
changesets: 1 chunks
add changeset ef1ea85a6374
changesets: 2 chunks
@@ -918,38 +918,38 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
adding changesets
-bundle changes: 0 chunks
-bundle changes: 1 chunks
-bundle changes: 2 chunks
-bundle changes: 3 chunks
-bundle changes: 4 chunks
-bundle changes: 5 chunks
-bundle changes: 6 chunks
-bundle changes: 7 chunks
-bundle changes: 8 chunks
-bundle changes: 9 chunks
-bundle manifests: 0 chunks
-bundle manifests: 1 chunks
-bundle manifests: 2 chunks
-bundle manifests: 3 chunks
-bundle manifests: 4 chunks
-bundle manifests: 5 chunks
-bundle manifests: 6 chunks
-bundle manifests: 7 chunks
-bundle manifests: 8 chunks
-bundle manifests: 9 chunks
-bundle files: foo/Bar/file.txt 0 chunks
-bundle files: foo/Bar/file.txt 1 chunks
-bundle files: foo/Bar/file.txt 2 chunks
-bundle files: foo/Bar/file.txt 3 chunks
-bundle files: foo/file.txt 4 chunks
-bundle files: foo/file.txt 5 chunks
-bundle files: foo/file.txt 6 chunks
-bundle files: foo/file.txt 7 chunks
-bundle files: quux/file.py 8 chunks
-bundle files: quux/file.py 9 chunks
-bundle files: quux/file.py 10 chunks
-bundle files: quux/file.py 11 chunks
+bundling changes: 0 chunks
+bundling changes: 1 chunks
+bundling changes: 2 chunks
+bundling changes: 3 chunks
+bundling changes: 4 chunks
+bundling changes: 5 chunks
+bundling changes: 6 chunks
+bundling changes: 7 chunks
+bundling changes: 8 chunks
+bundling changes: 9 chunks
+bundling manifests: 0 chunks
+bundling manifests: 1 chunks
+bundling manifests: 2 chunks
+bundling manifests: 3 chunks
+bundling manifests: 4 chunks
+bundling manifests: 5 chunks
+bundling manifests: 6 chunks
+bundling manifests: 7 chunks
+bundling manifests: 8 chunks
+bundling manifests: 9 chunks
+bundling files: foo/Bar/file.txt 0 chunks
+bundling files: foo/Bar/file.txt 1 chunks
+bundling files: foo/Bar/file.txt 2 chunks
+bundling files: foo/Bar/file.txt 3 chunks
+bundling files: foo/file.txt 4 chunks
+bundling files: foo/file.txt 5 chunks
+bundling files: foo/file.txt 6 chunks
+bundling files: foo/file.txt 7 chunks
+bundling files: quux/file.py 8 chunks
+bundling files: quux/file.py 9 chunks
+bundling files: quux/file.py 10 chunks
+bundling files: quux/file.py 11 chunks
changesets: 1 chunks
add changeset ef1ea85a6374
changesets: 2 chunks
@@ -1007,38 +1007,38 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
adding changesets
-bundle changes: 0 chunks
-bundle changes: 1 chunks
-bundle changes: 2 chunks
-bundle changes: 3 chunks
-bundle changes: 4 chunks
-bundle changes: 5 chunks
-bundle changes: 6 chunks
-bundle changes: 7 chunks
-bundle changes: 8 chunks
-bundle changes: 9 chunks
-bundle manifests: 0 chunks
-bundle manifests: 1 chunks
-bundle manifests: 2 chunks
-bundle manifests: 3 chunks
-bundle manifests: 4 chunks
-bundle manifests: 5 chunks
-bundle manifests: 6 chunks
-bundle manifests: 7 chunks
-bundle manifests: 8 chunks
-bundle manifests: 9 chunks
-bundle files: foo/Bar/file.txt 0 chunks
-bundle files: foo/Bar/file.txt 1 chunks
-bundle files: foo/Bar/file.txt 2 chunks
-bundle files: foo/Bar/file.txt 3 chunks
-bundle files: foo/file.txt 4 chunks
-bundle files: foo/file.txt 5 chunks
-bundle files: foo/file.txt 6 chunks
-bundle files: foo/file.txt 7 chunks
-bundle files: quux/file.py 8 chunks
-bundle files: quux/file.py 9 chunks
-bundle files: quux/file.py 10 chunks
-bundle files: quux/file.py 11 chunks
+bundling changes: 0 chunks
+bundling changes: 1 chunks
+bundling changes: 2 chunks
+bundling changes: 3 chunks
+bundling changes: 4 chunks
+bundling changes: 5 chunks
+bundling changes: 6 chunks
+bundling changes: 7 chunks
+bundling changes: 8 chunks
+bundling changes: 9 chunks
+bundling manifests: 0 chunks
+bundling manifests: 1 chunks
+bundling manifests: 2 chunks
+bundling manifests: 3 chunks
+bundling manifests: 4 chunks
+bundling manifests: 5 chunks
+bundling manifests: 6 chunks
+bundling manifests: 7 chunks
+bundling manifests: 8 chunks
+bundling manifests: 9 chunks
+bundling files: foo/Bar/file.txt 0 chunks
+bundling files: foo/Bar/file.txt 1 chunks
+bundling files: foo/Bar/file.txt 2 chunks
+bundling files: foo/Bar/file.txt 3 chunks
+bundling files: foo/file.txt 4 chunks
+bundling files: foo/file.txt 5 chunks
+bundling files: foo/file.txt 6 chunks
+bundling files: foo/file.txt 7 chunks
+bundling files: quux/file.py 8 chunks
+bundling files: quux/file.py 9 chunks
+bundling files: quux/file.py 10 chunks
+bundling files: quux/file.py 11 chunks
changesets: 1 chunks
add changeset ef1ea85a6374
changesets: 2 chunks
@@ -1103,38 +1103,38 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
adding changesets
-bundle changes: 0 chunks
-bundle changes: 1 chunks
-bundle changes: 2 chunks
-bundle changes: 3 chunks
-bundle changes: 4 chunks
-bundle changes: 5 chunks
-bundle changes: 6 chunks
-bundle changes: 7 chunks
-bundle changes: 8 chunks
-bundle changes: 9 chunks
-bundle manifests: 0 chunks
-bundle manifests: 1 chunks
-bundle manifests: 2 chunks
-bundle manifests: 3 chunks
-bundle manifests: 4 chunks
-bundle manifests: 5 chunks
-bundle manifests: 6 chunks
-bundle manifests: 7 chunks
-bundle manifests: 8 chunks
-bundle manifests: 9 chunks
-bundle files: foo/Bar/file.txt 0 chunks
-bundle files: foo/Bar/file.txt 1 chunks
-bundle files: foo/Bar/file.txt 2 chunks
-bundle files: foo/Bar/file.txt 3 chunks
-bundle files: foo/file.txt 4 chunks
-bundle files: foo/file.txt 5 chunks
-bundle files: foo/file.txt 6 chunks
-bundle files: foo/file.txt 7 chunks
-bundle files: quux/file.py 8 chunks
-bundle files: quux/file.py 9 chunks
-bundle files: quux/file.py 10 chunks
-bundle files: quux/file.py 11 chunks
+bundling changes: 0 chunks
+bundling changes: 1 chunks
+bundling changes: 2 chunks
+bundling changes: 3 chunks
+bundling changes: 4 chunks
+bundling changes: 5 chunks
+bundling changes: 6 chunks
+bundling changes: 7 chunks
+bundling changes: 8 chunks
+bundling changes: 9 chunks
+bundling manifests: 0 chunks
+bundling manifests: 1 chunks
+bundling manifests: 2 chunks
+bundling manifests: 3 chunks
+bundling manifests: 4 chunks
+bundling manifests: 5 chunks
+bundling manifests: 6 chunks
+bundling manifests: 7 chunks
+bundling manifests: 8 chunks
+bundling manifests: 9 chunks
+bundling files: foo/Bar/file.txt 0 chunks
+bundling files: foo/Bar/file.txt 1 chunks
+bundling files: foo/Bar/file.txt 2 chunks
+bundling files: foo/Bar/file.txt 3 chunks
+bundling files: foo/file.txt 4 chunks
+bundling files: foo/file.txt 5 chunks
+bundling files: foo/file.txt 6 chunks
+bundling files: foo/file.txt 7 chunks
+bundling files: quux/file.py 8 chunks
+bundling files: quux/file.py 9 chunks
+bundling files: quux/file.py 10 chunks
+bundling files: quux/file.py 11 chunks
changesets: 1 chunks
add changeset ef1ea85a6374
changesets: 2 chunks
--- a/tests/test-archive Sat Mar 06 10:02:45 2010 +0100
+++ b/tests/test-archive Tue Mar 16 11:37:14 2010 -0300
@@ -74,6 +74,20 @@
hg archive -t tgz -p %b-%h test-%h.tar.gz
gzip -dc test-$QTIP.tar.gz | tar tf - 2>/dev/null | sed "s/$QTIP/TIP/"
+hg archive autodetected_test.tar
+tar tf autodetected_test.tar
+
+# The '-t' should override autodetection
+hg archive -t tar autodetect_override_test.zip
+tar tf autodetect_override_test.zip
+
+for ext in tar tar.gz tgz tar.bz2 tbz2 zip; do
+ hg archive auto_test.$ext
+ if [ -d auto_test.$ext ]; then
+ echo "extension $ext was not autodetected."
+ fi
+done
+
cat > md5comp.py <<EOF
try:
from hashlib import md5
--- a/tests/test-archive.out Sat Mar 06 10:02:45 2010 +0100
+++ b/tests/test-archive.out Tue Mar 16 11:37:14 2010 -0300
@@ -45,6 +45,14 @@
test-TIP/bar
test-TIP/baz/bletch
test-TIP/foo
+autodetected_test/.hg_archival.txt
+autodetected_test/bar
+autodetected_test/baz/bletch
+autodetected_test/foo
+autodetect_override_test.zip/.hg_archival.txt
+autodetect_override_test.zip/bar
+autodetect_override_test.zip/baz/bletch
+autodetect_override_test.zip/foo
True
abort: archive prefix contains illegal components
Archive: test.zip
--- a/tests/test-bundle-r Sat Mar 06 10:02:45 2010 +0100
+++ b/tests/test-bundle-r Tue Mar 16 11:37:14 2010 -0300
@@ -72,6 +72,8 @@
hg -R test bundle --base 2 -r 7 test-bundle-branch2.hg
hg -R test bundle --base 2 test-bundle-all.hg
hg -R test bundle --base 3 -r tip test-bundle-should-fail.hg
+# empty bundle
+hg -R test bundle --base 7 --base 8 test-bundle-empty.hg
# issue76 msg2163
hg -R test bundle --base 3 -r 3 -r 3 test-bundle-cset-3.hg
--- a/tests/test-bundle-r.out Sat Mar 06 10:02:45 2010 +0100
+++ b/tests/test-bundle-r.out Tue Mar 16 11:37:14 2010 -0300
@@ -162,6 +162,7 @@
4 changesets found
6 changesets found
1 changesets found
+no changes found
1 changesets found
4 changesets found
updating to branch default
--- a/tests/test-bundle.out Sat Mar 06 10:02:45 2010 +0100
+++ b/tests/test-bundle.out Tue Mar 16 11:37:14 2010 -0300
@@ -341,25 +341,25 @@
list of changesets:
d2ae7f538514cd87c17547b0de4cea71fe1af9fb
5ece8e77363e2b5269e27c66828b72da29e4341a
-bundle changes: 0 chunks
-bundle changes: 1 chunks
-bundle changes: 2 chunks
-bundle changes: 3 chunks
-bundle changes: 4 chunks
-bundle changes: 5 chunks
-bundle changes: 6 chunks
-bundle manifests: 0 chunks
-bundle manifests: 1 chunks
-bundle manifests: 2 chunks
-bundle manifests: 3 chunks
-bundle manifests: 4 chunks
-bundle manifests: 5 chunks
-bundle manifests: 6 chunks
-bundle files: b 0 chunks
-bundle files: b 1 chunks
-bundle files: b 2 chunks
-bundle files: b 3 chunks
-bundle files: b1 4 chunks
-bundle files: b1 5 chunks
-bundle files: b1 6 chunks
-bundle files: b1 7 chunks
+bundling changes: 0 chunks
+bundling changes: 1 chunks
+bundling changes: 2 chunks
+bundling changes: 3 chunks
+bundling changes: 4 chunks
+bundling changes: 5 chunks
+bundling changes: 6 chunks
+bundling manifests: 0 chunks
+bundling manifests: 1 chunks
+bundling manifests: 2 chunks
+bundling manifests: 3 chunks
+bundling manifests: 4 chunks
+bundling manifests: 5 chunks
+bundling manifests: 6 chunks
+bundling files: b 0 chunks
+bundling files: b 1 chunks
+bundling files: b 2 chunks
+bundling files: b 3 chunks
+bundling files: b1 4 chunks
+bundling files: b1 5 chunks
+bundling files: b1 6 chunks
+bundling files: b1 7 chunks
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-clone-update-order Tue Mar 16 11:37:14 2010 -0300
@@ -0,0 +1,66 @@
+#!/bin/sh
+
+echo
+echo % prepare repo a
+mkdir a
+cd a
+hg init
+echo foo > bar
+hg commit -Am default
+hg up -r null
+hg branch mine
+echo hello > world
+hg commit -Am hello
+hg up -r null
+hg branch other
+echo good > bye
+hg commit -Am other
+hg up -r mine
+
+echo % test -U -u
+hg clone -U -u . .#other ../b -r 0 -r 1 -r 2 -b other
+
+echo % test -U
+hg clone -U .#other ../b -r 0 -r 1 -r 2 -b other
+rm -rf ../b
+
+echo % test -u .
+hg clone -u . .#other ../b -r 0 -r 1 -r 2 -b other
+rm -rf ../b
+
+echo % test -u 0
+hg clone -u 0 .#other ../b -r 0 -r 1 -r 2 -b other
+rm -rf ../b
+
+echo % test -u 1
+hg clone -u 1 .#other ../b -r 0 -r 1 -r 2 -b other
+rm -rf ../b
+
+echo % test -u 2
+hg clone -u 2 .#other ../b -r 0 -r 1 -r 2 -b other
+rm -rf ../b
+
+echo % test -r 0
+hg clone -u 2 .#other ../b -r 0 -r 1 -r 2 -b other
+rm -rf ../b
+
+echo % test -r mine ... mine is ignored
+hg clone -u 2 .#other ../b -r mine -r 0 -r 1 -r 2 -b other
+rm -rf ../b
+
+echo % test -b default
+hg clone .#other ../b -b default -b mine
+rm -rf ../b
+
+echo % test #other
+hg clone .#other ../b
+rm -rf ../b
+
+echo % test tip
+hg clone -U . ../c -r 1 -r 2 > /dev/null
+hg clone ../c ../b
+rm -rf ../b ../c
+cd ..
+
+rm -rf a
+exit 0
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-clone-update-order.out Tue Mar 16 11:37:14 2010 -0300
@@ -0,0 +1,87 @@
+
+% prepare repo a
+adding bar
+0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+marked working directory as branch mine
+adding world
+created new head
+0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+marked working directory as branch other
+adding bye
+created new head
+1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+% test -U -u
+abort: cannot specify both --noupdate and --updaterev
+% test -U
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 3 changesets with 3 changes to 3 files (+2 heads)
+% test -u .
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 3 changesets with 3 changes to 3 files (+2 heads)
+updating to branch mine
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+% test -u 0
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 3 changesets with 3 changes to 3 files (+2 heads)
+updating to branch default
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+% test -u 1
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 3 changesets with 3 changes to 3 files (+2 heads)
+updating to branch mine
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+% test -u 2
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 3 changesets with 3 changes to 3 files (+2 heads)
+updating to branch other
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+% test -r 0
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 3 changesets with 3 changes to 3 files (+2 heads)
+updating to branch other
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+% test -r mine ... mine is ignored
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 3 changesets with 3 changes to 3 files (+2 heads)
+updating to branch other
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+% test -b default
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 3 changesets with 3 changes to 3 files (+2 heads)
+updating to branch default
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+% test
+requesting all changes
+adding changesets
+adding manifests
+adding file changes
+added 1 changesets with 1 changes to 1 files
+updating to branch other
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+% test tip
+updating to branch other
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
--- a/tests/test-convert-cvs Sat Mar 06 10:02:45 2010 +0100
+++ b/tests/test-convert-cvs Tue Mar 16 11:37:14 2010 -0300
@@ -75,6 +75,15 @@
hgcat b/c
hg -R src-filemap log --template '{rev} {desc} files: {files}\n'
+echo % 'convert full repository (issue1649)'
+cvscall -q -d "$CVSROOT" checkout -d srcfull "." | grep -v CVSROOT
+ls srcfull
+hg convert srcfull srcfull-hg \
+ | sed -e 's/connecting to.*cvsrepo/connecting to cvsrepo/g' \
+ | grep -v 'log entries' | grep -v 'hook:'
+hg cat -r tip srcfull-hg/src/a
+hg cat -r tip srcfull-hg/src/b/c
+
echo % commit new file revisions
cd src
echo a >> a
--- a/tests/test-convert-cvs.out Sat Mar 06 10:02:45 2010 +0100
+++ b/tests/test-convert-cvs.out Tue Mar 16 11:37:14 2010 -0300
@@ -53,6 +53,28 @@
2 update tags files: .hgtags
1 ci0 files: b/c
0 Initial revision files: b/c
+% convert full repository (issue1649)
+U srcfull/src/a
+U srcfull/src/b/c
+CVS
+CVSROOT
+src
+initializing destination srcfull-hg repository
+connecting to cvsrepo
+scanning source...
+collecting CVS rlog
+creating changesets
+4 changeset entries
+sorting...
+converting...
+3 Initial revision
+2 import
+1 initial checkin
+0 ci0
+updating tags
+a
+c
+c
% commit new file revisions
checking in src/a,v
checking in src/b/c,v
--- a/tests/test-diff-color.out Sat Mar 06 10:02:45 2010 +0100
+++ b/tests/test-diff-color.out Tue Mar 16 11:37:14 2010 -0300
@@ -31,7 +31,8 @@
[0;36;1mold mode 100644[0m
[0;36;1mnew mode 100755[0m
1 hunks, 2 lines changed
-examine changes to 'a'? [Ynsfdaq?] [0;35m@@ -2,7 +2,7 @@[0m
+examine changes to 'a'? [Ynsfdaq?]
+[0;35m@@ -2,7 +2,7 @@[0m
c
a
a
@@ -41,13 +42,15 @@
a
c
record this change to 'a'? [Ynsfdaq?]
+
rolling back last transaction
% qrecord
[0;1mdiff --git a/a b/a[0m
[0;36;1mold mode 100644[0m
[0;36;1mnew mode 100755[0m
1 hunks, 2 lines changed
-examine changes to 'a'? [Ynsfdaq?] [0;35m@@ -2,7 +2,7 @@[0m
+examine changes to 'a'? [Ynsfdaq?]
+[0;35m@@ -2,7 +2,7 @@[0m
c
a
a
@@ -57,3 +60,4 @@
a
c
record this change to 'a'? [Ynsfdaq?]
+
--- a/tests/test-extension Sat Mar 06 10:02:45 2010 +0100
+++ b/tests/test-extension Tue Mar 16 11:37:14 2010 -0300
@@ -168,12 +168,13 @@
cat > hgext/broken.py <<EOF
"broken extension'
EOF
-TMPPYTHONPATH="$PYTHONPATH"
-PYTHONPATH="`pwd`:$PYTHONPATH"
-export PYTHONPATH
-hg help broken
-hg help foo > /dev/null
-PYTHONPATH="$TMPPYTHONPATH"
-export PYTHONPATH
+cat > path.py <<EOF
+import os, sys
+sys.path.insert(0, os.environ['HGEXTPATH'])
+EOF
+HGEXTPATH=`pwd`
+export HGEXTPATH
+hg --config extensions.path=./path.py help broken
+hg --config extensions.path=./path.py help foo > /dev/null
exit 0
--- a/tests/test-hgwebdir Sat Mar 06 10:02:45 2010 +0100
+++ b/tests/test-hgwebdir Tue Mar 16 11:37:14 2010 -0300
@@ -51,6 +51,13 @@
echo % should give a 404 - repo is not published
"$TESTDIR/get-with-headers.py" localhost:$HGPORT '/c/file/tip/c?style=raw'
+echo % atom-log without basedir
+"$TESTDIR/get-with-headers.py" localhost:$HGPORT '/a/atom-log' \
+ | grep '<link' | sed 's|//[.a-zA-Z0-9\-_]*:[0-9][0-9]*/|//example.com:8080/|'
+
+echo % rss-log without basedir
+"$TESTDIR/get-with-headers.py" localhost:$HGPORT '/a/rss-log' \
+ | grep '<guid' | sed 's|//[.a-zA-Z0-9\-_]*:[0-9][0-9]*/|//example.com:8080/|'
cat > paths.conf <<EOF
[paths]
@@ -119,6 +126,28 @@
"$TESTDIR/get-with-headers.py" localhost:$HGPORT2 '/b/file/tip/b?style=raw'
"$TESTDIR/get-with-headers.py" localhost:$HGPORT2 '/c/file/tip/c?style=raw'
+echo % atom-log with basedir /
+"$TESTDIR/get-with-headers.py" localhost:$HGPORT2 '/a/atom-log' \
+ | grep '<link' | sed 's|//[.a-zA-Z0-9\-_]*:[0-9][0-9]*/|//example.com:8080/|'
+
+echo % rss-log with basedir /
+"$TESTDIR/get-with-headers.py" localhost:$HGPORT2 '/a/rss-log' \
+ | grep '<guid' | sed 's|//[.a-zA-Z0-9\-_]*:[0-9][0-9]*/|//example.com:8080/|'
+
+"$TESTDIR/killdaemons.py"
+
+hg serve --config web.baseurl=http://hg.example.com:8080/foo/ -p $HGPORT2 -d \
+ --pid-file=hg.pid --webdir-conf collections.conf \
+ -A access-collections-2.log -E error-collections-2.log
+cat hg.pid >> $DAEMON_PIDS
+
+echo % atom-log with basedir /foo/
+"$TESTDIR/get-with-headers.py" localhost:$HGPORT2 '/a/atom-log' \
+ | grep '<link' | sed 's|//[.a-zA-Z0-9\-_]*:[0-9][0-9]*/|//example.com:8080/|'
+
+echo % rss-log with basedir /foo/
+"$TESTDIR/get-with-headers.py" localhost:$HGPORT2 '/a/rss-log' \
+ | grep '<guid' | sed 's|//[.a-zA-Z0-9\-_]*:[0-9][0-9]*/|//example.com:8080/|'
echo % paths errors 1
cat error-paths-1.log
@@ -128,3 +157,5 @@
cat error-paths-3.log
echo % collections errors
cat error-collections.log
+echo % collections errors 2
+cat error-collections-2.log
--- a/tests/test-hgwebdir.out Sat Mar 06 10:02:45 2010 +0100
+++ b/tests/test-hgwebdir.out Tue Mar 16 11:37:14 2010 -0300
@@ -25,6 +25,12 @@
error: repository c not found
+% atom-log without basedir
+ <link rel="self" href="http://example.com:8080/a/atom-log"/>
+ <link rel="alternate" href="http://example.com:8080/a/"/>
+ <link href="http://example.com:8080/a/rev/8580ff50825a"/>
+% rss-log without basedir
+ <guid isPermaLink="true">http://example.com:8080/a/rev/8580ff50825a</guid>
% should succeed, slashy names
200 Script output follows
@@ -323,10 +329,10 @@
200 Script output follows
-http://hg.example.com:8080/a/
-http://hg.example.com:8080/a/.hg/patches/
-http://hg.example.com:8080/b/
-http://hg.example.com:8080/c/
+/a/
+/a/.hg/patches/
+/b/
+/c/
200 Script output follows
@@ -337,7 +343,20 @@
200 Script output follows
c
+% atom-log with basedir /
+ <link rel="self" href="http://example.com:8080/a/atom-log"/>
+ <link rel="alternate" href="http://example.com:8080/a/"/>
+ <link href="http://example.com:8080/a/rev/8580ff50825a"/>
+% rss-log with basedir /
+ <guid isPermaLink="true">http://example.com:8080/a/rev/8580ff50825a</guid>
+% atom-log with basedir /foo/
+ <link rel="self" href="http://example.com:8080/foo/a/atom-log"/>
+ <link rel="alternate" href="http://example.com:8080/foo/a/"/>
+ <link href="http://example.com:8080/foo/a/rev/8580ff50825a"/>
+% rss-log with basedir /foo/
+ <guid isPermaLink="true">http://example.com:8080/foo/a/rev/8580ff50825a</guid>
% paths errors 1
% paths errors 2
% paths errors 3
% collections errors
+% collections errors 2
--- a/tests/test-merge-default Sat Mar 06 10:02:45 2010 +0100
+++ b/tests/test-merge-default Tue Mar 16 11:37:14 2010 -0300
@@ -24,7 +24,7 @@
hg up
echo % should fail because \> 2 heads
-export HGMERGE=internal:other
+HGMERGE=internal:other; export HGMERGE
hg merge
echo % should succeed
--- a/tests/test-mq Sat Mar 06 10:02:45 2010 +0100
+++ b/tests/test-mq Tue Mar 16 11:37:14 2010 -0300
@@ -80,6 +80,19 @@
cat .hg/patches/series
cd ..
+echo '% init --mq without repo'
+mkdir f
+cd f
+hg init --mq
+cd ..
+
+echo '% init --mq with nonexistent directory'
+hg init --mq nonexistentdir
+
+echo '% init --mq with bundle (non "local")'
+hg -R a bundle --all a.bundle >/dev/null
+hg init --mq a.bundle
+
cd a
hg qnew -m 'foo bar' test.patch
--- a/tests/test-mq.out Sat Mar 06 10:02:45 2010 +0100
+++ b/tests/test-mq.out Tue Mar 16 11:37:14 2010 -0300
@@ -10,7 +10,6 @@
Common tasks (use "hg help command" for more details):
- prepare repository to work with patches qinit
create new patch qnew
import existing patch qimport
@@ -90,6 +89,12 @@
series:
A
B
+% init --mq without repo
+abort: There is no Mercurial repository here (.hg not found)
+% init --mq with nonexistent directory
+abort: repository nonexistentdir not found!
+% init --mq with bundle (non "local")
+abort: only a local queue repository may be initialized
% qrefresh
foo bar
@@ -513,7 +518,7 @@
summary: add foo
% qclone
-abort: versioned patch repository not found (see qinit -c)
+abort: versioned patch repository not found (see init --mq)
adding .hg/patches/patch1
main repo:
rev 1: change foo
--- a/tests/test-newbranch.out Sat Mar 06 10:02:45 2010 +0100
+++ b/tests/test-newbranch.out Tue Mar 16 11:37:14 2010 -0300
@@ -2,7 +2,7 @@
foo
marked working directory as branch bar
% branch shadowing
-abort: a branch of the same name already exists (use --force to override)
+abort: a branch of the same name already exists (use 'hg update' to switch to it)
marked working directory as branch default
% there should be only one default branch head
changeset: 3:bf1bc2f45e83
--- a/tests/test-qrecord.out Sat Mar 06 10:02:45 2010 +0100
+++ b/tests/test-qrecord.out Tue Mar 16 11:37:14 2010 -0300
@@ -80,28 +80,34 @@
% qrecord a.patch
diff --git a/1.txt b/1.txt
2 hunks, 4 lines changed
-examine changes to '1.txt'? [Ynsfdaq?] @@ -1,3 +1,3 @@
+examine changes to '1.txt'? [Ynsfdaq?]
+@@ -1,3 +1,3 @@
1
-2
+2 2
3
-record change 1/6 to '1.txt'? [Ynsfdaq?] @@ -3,3 +3,3 @@
+record change 1/6 to '1.txt'? [Ynsfdaq?]
+@@ -3,3 +3,3 @@
3
-4
+4 4
5
-record change 2/6 to '1.txt'? [Ynsfdaq?] diff --git a/2.txt b/2.txt
+record change 2/6 to '1.txt'? [Ynsfdaq?]
+diff --git a/2.txt b/2.txt
1 hunks, 2 lines changed
-examine changes to '2.txt'? [Ynsfdaq?] @@ -1,5 +1,5 @@
+examine changes to '2.txt'? [Ynsfdaq?]
+@@ -1,5 +1,5 @@
a
-b
+b b
c
d
e
-record change 4/6 to '2.txt'? [Ynsfdaq?] diff --git a/dir/a.txt b/dir/a.txt
+record change 4/6 to '2.txt'? [Ynsfdaq?]
+diff --git a/dir/a.txt b/dir/a.txt
1 hunks, 2 lines changed
examine changes to 'dir/a.txt'? [Ynsfdaq?]
+
% after qrecord a.patch 'tip'
changeset: 1:5d1ca63427ee
tag: qtip
@@ -157,22 +163,26 @@
% qrecord b.patch
diff --git a/1.txt b/1.txt
1 hunks, 2 lines changed
-examine changes to '1.txt'? [Ynsfdaq?] @@ -1,5 +1,5 @@
+examine changes to '1.txt'? [Ynsfdaq?]
+@@ -1,5 +1,5 @@
1
2 2
3
-4
+4 4
5
-record change 1/3 to '1.txt'? [Ynsfdaq?] diff --git a/dir/a.txt b/dir/a.txt
+record change 1/3 to '1.txt'? [Ynsfdaq?]
+diff --git a/dir/a.txt b/dir/a.txt
1 hunks, 2 lines changed
-examine changes to 'dir/a.txt'? [Ynsfdaq?] @@ -1,4 +1,4 @@
+examine changes to 'dir/a.txt'? [Ynsfdaq?]
+@@ -1,4 +1,4 @@
-hello world
+hello world!
someone
up
record change 3/3 to 'dir/a.txt'? [Ynsfdaq?]
+
% after qrecord b.patch 'tip'
changeset: 2:b056198bf878
tag: qtip
--- a/tests/test-rebase-mq Sat Mar 06 10:02:45 2010 +0100
+++ b/tests/test-rebase-mq Tue Mar 16 11:37:14 2010 -0300
@@ -45,6 +45,12 @@
hg rebase -s 1 -d 3
echo
+echo '% Rebase - same thing, but mq patch is default dest'
+hg update -q 1
+hg rebase
+hg update -q qtip
+
+echo
echo '% Rebase - generate a conflict'
hg rebase -s 2 -d 1
--- a/tests/test-rebase-mq.out Sat Mar 06 10:02:45 2010 +0100
+++ b/tests/test-rebase-mq.out Tue Mar 16 11:37:14 2010 -0300
@@ -11,6 +11,9 @@
% Rebase - try to rebase on an applied mq patch
abort: cannot rebase onto an applied mq patch
+% Rebase - same thing, but mq patch is default dest
+abort: cannot rebase onto an applied mq patch
+
% Rebase - generate a conflict
merging f
warning: conflicts during merge.
--- a/tests/test-rebase-parameters.out Sat Mar 06 10:02:45 2010 +0100
+++ b/tests/test-rebase-parameters.out Tue Mar 16 11:37:14 2010 -0300
@@ -2,22 +2,52 @@
% Use continue and abort
hg rebase: cannot use both abort and continue
-hg rebase [-s REV | -b REV] [-d REV] [--collapse] [--detach] [--keep] [--keepbranches] | [-c] | [-a]
+hg rebase [-s REV | -b REV] [-d REV] [options]
+hg rebase {-a|-c}
move changeset (and descendants) to a different branch
Rebase uses repeated merging to graft changesets from one part of history
- onto another. This can be useful for linearizing local changes relative to
- a master development tree.
+ (the source) onto another (the destination). This can be useful for
+ linearizing local changes relative to a master development tree.
+
+ If you don't specify a destination changeset ("-d/--dest"), rebase uses
+ the tipmost head of the current named branch as the destination. (The
+ destination changeset is not modified by rebasing, but new changesets are
+ added as its descendants.)
+
+ You can specify which changesets to rebase in two ways: as a "source"
+ changeset or as a "base" changeset. Both are shorthand for a topologically
+ related set of changesets (the "source branch"). If you specify source
+ ("-s/--source"), rebase will rebase that changeset and all of its
+ descendants onto dest. If you specify base ("-b/--base"), rebase will
+ select ancestors of base back to but not including the common ancestor
+ with dest. Thus, "-b" is less precise but more convenient than "-s": you
+ can specify any changeset in the source branch, and rebase will select the
+ whole branch. If you specify neither "-s" nor "-b", rebase uses the parent
+ of the working directory as the base.
+
+ By default, rebase recreates the changesets in the source branch as
+ descendants of dest and then destroys the originals. Use "--keep" to
+ preserve the original source changesets. Some changesets in the source
+ branch (e.g. merges from the destination branch) may be dropped if they no
+ longer contribute any change.
+
+ One result of the rules for selecting the destination changeset and source
+ branch is that, unlike "merge", rebase will do nothing if you are at the
+ latest (tipmost) head of a named branch with two heads. You need to
+ explicitly specify source and/or destination (or "update" to the other
+ head, if it's the head of the intended source branch).
If a rebase is interrupted to manually resolve a merge, it can be
continued with --continue/-c or aborted with --abort/-a.
options:
- -s --source rebase from a given revision
- -b --base rebase from the base of a given revision
- -d --dest rebase onto a given revision
+ -s --source rebase from the specified changeset
+ -b --base rebase from the base of the specified changeset (up to
+ greatest common ancestor of base and dest)
+ -d --dest rebase onto the specified changeset
--collapse collapse the rebased changesets
--keep keep original changesets
--keepbranches keep original branch names
@@ -31,22 +61,52 @@
% Use continue and collapse
hg rebase: cannot use collapse with continue or abort
-hg rebase [-s REV | -b REV] [-d REV] [--collapse] [--detach] [--keep] [--keepbranches] | [-c] | [-a]
+hg rebase [-s REV | -b REV] [-d REV] [options]
+hg rebase {-a|-c}
move changeset (and descendants) to a different branch
Rebase uses repeated merging to graft changesets from one part of history
- onto another. This can be useful for linearizing local changes relative to
- a master development tree.
+ (the source) onto another (the destination). This can be useful for
+ linearizing local changes relative to a master development tree.
+
+ If you don't specify a destination changeset ("-d/--dest"), rebase uses
+ the tipmost head of the current named branch as the destination. (The
+ destination changeset is not modified by rebasing, but new changesets are
+ added as its descendants.)
+
+ You can specify which changesets to rebase in two ways: as a "source"
+ changeset or as a "base" changeset. Both are shorthand for a topologically
+ related set of changesets (the "source branch"). If you specify source
+ ("-s/--source"), rebase will rebase that changeset and all of its
+ descendants onto dest. If you specify base ("-b/--base"), rebase will
+ select ancestors of base back to but not including the common ancestor
+ with dest. Thus, "-b" is less precise but more convenient than "-s": you
+ can specify any changeset in the source branch, and rebase will select the
+ whole branch. If you specify neither "-s" nor "-b", rebase uses the parent
+ of the working directory as the base.
+
+ By default, rebase recreates the changesets in the source branch as
+ descendants of dest and then destroys the originals. Use "--keep" to
+ preserve the original source changesets. Some changesets in the source
+ branch (e.g. merges from the destination branch) may be dropped if they no
+ longer contribute any change.
+
+ One result of the rules for selecting the destination changeset and source
+ branch is that, unlike "merge", rebase will do nothing if you are at the
+ latest (tipmost) head of a named branch with two heads. You need to
+ explicitly specify source and/or destination (or "update" to the other
+ head, if it's the head of the intended source branch).
If a rebase is interrupted to manually resolve a merge, it can be
continued with --continue/-c or aborted with --abort/-a.
options:
- -s --source rebase from a given revision
- -b --base rebase from the base of a given revision
- -d --dest rebase onto a given revision
+ -s --source rebase from the specified changeset
+ -b --base rebase from the base of the specified changeset (up to
+ greatest common ancestor of base and dest)
+ -d --dest rebase onto the specified changeset
--collapse collapse the rebased changesets
--keep keep original changesets
--keepbranches keep original branch names
@@ -60,22 +120,52 @@
% Use continue/abort and dest/source
hg rebase: abort and continue do not allow specifying revisions
-hg rebase [-s REV | -b REV] [-d REV] [--collapse] [--detach] [--keep] [--keepbranches] | [-c] | [-a]
+hg rebase [-s REV | -b REV] [-d REV] [options]
+hg rebase {-a|-c}
move changeset (and descendants) to a different branch
Rebase uses repeated merging to graft changesets from one part of history
- onto another. This can be useful for linearizing local changes relative to
- a master development tree.
+ (the source) onto another (the destination). This can be useful for
+ linearizing local changes relative to a master development tree.
+
+ If you don't specify a destination changeset ("-d/--dest"), rebase uses
+ the tipmost head of the current named branch as the destination. (The
+ destination changeset is not modified by rebasing, but new changesets are
+ added as its descendants.)
+
+ You can specify which changesets to rebase in two ways: as a "source"
+ changeset or as a "base" changeset. Both are shorthand for a topologically
+ related set of changesets (the "source branch"). If you specify source
+ ("-s/--source"), rebase will rebase that changeset and all of its
+ descendants onto dest. If you specify base ("-b/--base"), rebase will
+ select ancestors of base back to but not including the common ancestor
+ with dest. Thus, "-b" is less precise but more convenient than "-s": you
+ can specify any changeset in the source branch, and rebase will select the
+ whole branch. If you specify neither "-s" nor "-b", rebase uses the parent
+ of the working directory as the base.
+
+ By default, rebase recreates the changesets in the source branch as
+ descendants of dest and then destroys the originals. Use "--keep" to
+ preserve the original source changesets. Some changesets in the source
+ branch (e.g. merges from the destination branch) may be dropped if they no
+ longer contribute any change.
+
+ One result of the rules for selecting the destination changeset and source
+ branch is that, unlike "merge", rebase will do nothing if you are at the
+ latest (tipmost) head of a named branch with two heads. You need to
+ explicitly specify source and/or destination (or "update" to the other
+ head, if it's the head of the intended source branch).
If a rebase is interrupted to manually resolve a merge, it can be
continued with --continue/-c or aborted with --abort/-a.
options:
- -s --source rebase from a given revision
- -b --base rebase from the base of a given revision
- -d --dest rebase onto a given revision
+ -s --source rebase from the specified changeset
+ -b --base rebase from the base of the specified changeset (up to
+ greatest common ancestor of base and dest)
+ -d --dest rebase onto the specified changeset
--collapse collapse the rebased changesets
--keep keep original changesets
--keepbranches keep original branch names
@@ -89,22 +179,52 @@
% Use source and base
hg rebase: cannot specify both a revision and a base
-hg rebase [-s REV | -b REV] [-d REV] [--collapse] [--detach] [--keep] [--keepbranches] | [-c] | [-a]
+hg rebase [-s REV | -b REV] [-d REV] [options]
+hg rebase {-a|-c}
move changeset (and descendants) to a different branch
Rebase uses repeated merging to graft changesets from one part of history
- onto another. This can be useful for linearizing local changes relative to
- a master development tree.
+ (the source) onto another (the destination). This can be useful for
+ linearizing local changes relative to a master development tree.
+
+ If you don't specify a destination changeset ("-d/--dest"), rebase uses
+ the tipmost head of the current named branch as the destination. (The
+ destination changeset is not modified by rebasing, but new changesets are
+ added as its descendants.)
+
+ You can specify which changesets to rebase in two ways: as a "source"
+ changeset or as a "base" changeset. Both are shorthand for a topologically
+ related set of changesets (the "source branch"). If you specify source
+ ("-s/--source"), rebase will rebase that changeset and all of its
+ descendants onto dest. If you specify base ("-b/--base"), rebase will
+ select ancestors of base back to but not including the common ancestor
+ with dest. Thus, "-b" is less precise but more convenient than "-s": you
+ can specify any changeset in the source branch, and rebase will select the
+ whole branch. If you specify neither "-s" nor "-b", rebase uses the parent
+ of the working directory as the base.
+
+ By default, rebase recreates the changesets in the source branch as
+ descendants of dest and then destroys the originals. Use "--keep" to
+ preserve the original source changesets. Some changesets in the source
+ branch (e.g. merges from the destination branch) may be dropped if they no
+ longer contribute any change.
+
+ One result of the rules for selecting the destination changeset and source
+ branch is that, unlike "merge", rebase will do nothing if you are at the
+ latest (tipmost) head of a named branch with two heads. You need to
+ explicitly specify source and/or destination (or "update" to the other
+ head, if it's the head of the intended source branch).
If a rebase is interrupted to manually resolve a merge, it can be
continued with --continue/-c or aborted with --abort/-a.
options:
- -s --source rebase from a given revision
- -b --base rebase from the base of a given revision
- -d --dest rebase onto a given revision
+ -s --source rebase from the specified changeset
+ -b --base rebase from the base of the specified changeset (up to
+ greatest common ancestor of base and dest)
+ -d --dest rebase onto the specified changeset
--collapse collapse the rebased changesets
--keep keep original changesets
--keepbranches keep original branch names
--- a/tests/test-rebase-pull.out Sat Mar 06 10:02:45 2010 +0100
+++ b/tests/test-rebase-pull.out Tue Mar 16 11:37:14 2010 -0300
@@ -10,7 +10,6 @@
adding manifests
adding file changes
added 1 changesets with 1 changes to 1 files (+1 heads)
-(run 'hg heads' to see heads, 'hg merge' to merge)
saving bundle to
adding branch
adding changesets
@@ -39,8 +38,8 @@
adding manifests
adding file changes
added 1 changesets with 1 changes to 1 files
-(run 'hg update' to get a working copy)
nothing to rebase
+1 files updated, 0 files merged, 0 files removed, 0 files unresolved
@ 2
|
--- a/tests/test-record.out Sat Mar 06 10:02:45 2010 +0100
+++ b/tests/test-record.out Tue Mar 16 11:37:14 2010 -0300
@@ -39,7 +39,8 @@
% select no files
diff --git a/empty-rw b/empty-rw
new file mode 100644
-examine changes to 'empty-rw'? [Ynsfdaq?] no changes to record
+examine changes to 'empty-rw'? [Ynsfdaq?]
+no changes to record
changeset: -1:000000000000
tag: tip
@@ -50,7 +51,8 @@
% select files but no hunks
diff --git a/empty-rw b/empty-rw
new file mode 100644
-examine changes to 'empty-rw'? [Ynsfdaq?] abort: empty commit message
+examine changes to 'empty-rw'? [Ynsfdaq?]
+abort: empty commit message
changeset: -1:000000000000
tag: tip
@@ -62,6 +64,7 @@
diff --git a/empty-rw b/empty-rw
new file mode 100644
examine changes to 'empty-rw'? [Ynsfdaq?]
+
changeset: 0:c0708cf4e46e
tag: tip
user: test
@@ -74,6 +77,7 @@
rename from empty-rw
rename to empty-rename
examine changes to 'empty-rw' and 'empty-rename'? [Ynsfdaq?]
+
changeset: 1:d695e8dcb197
tag: tip
user: test
@@ -86,6 +90,7 @@
copy from empty-rename
copy to empty-copy
examine changes to 'empty-rename' and 'empty-copy'? [Ynsfdaq?]
+
changeset: 2:1d4b90bea524
tag: tip
user: test
@@ -97,6 +102,7 @@
diff --git a/empty-copy b/empty-copy
deleted file mode 100644
examine changes to 'empty-copy'? [Ynsfdaq?]
+
changeset: 3:b39a238f01a1
tag: tip
user: test
@@ -110,6 +116,7 @@
new file mode 100644
this is a binary file
examine changes to 'tip.bundle'? [Ynsfdaq?]
+
changeset: 4:ad816da3711e
tag: tip
user: test
@@ -124,6 +131,7 @@
diff --git a/tip.bundle b/tip.bundle
this modifies a binary file (all or nothing)
examine changes to 'tip.bundle'? [Ynsfdaq?]
+
changeset: 5:dccd6f3eb485
tag: tip
user: test
@@ -140,6 +148,7 @@
rename to top.bundle
this modifies a binary file (all or nothing)
examine changes to 'tip.bundle' and 'top.bundle'? [Ynsfdaq?]
+
changeset: 6:7fa44105f5b3
tag: tip
user: test
@@ -155,6 +164,7 @@
diff --git a/plain b/plain
new file mode 100644
examine changes to 'plain'? [Ynsfdaq?]
+
changeset: 7:11fb457c1be4
tag: tip
user: test
@@ -179,46 +189,55 @@
% modify end of plain file
diff --git a/plain b/plain
1 hunks, 1 lines changed
-examine changes to 'plain'? [Ynsfdaq?] @@ -8,3 +8,4 @@
+examine changes to 'plain'? [Ynsfdaq?]
+@@ -8,3 +8,4 @@
8
9
10
+11
-record this change to 'plain'? [Ynsfdaq?] % modify end of plain file, no EOL
+record this change to 'plain'? [Ynsfdaq?]
+% modify end of plain file, no EOL
diff --git a/plain b/plain
1 hunks, 1 lines changed
-examine changes to 'plain'? [Ynsfdaq?] @@ -9,3 +9,4 @@
+examine changes to 'plain'? [Ynsfdaq?]
+@@ -9,3 +9,4 @@
9
10
11
+7264f99c5f5ff3261504828afa4fb4d406c3af54
\ No newline at end of file
-record this change to 'plain'? [Ynsfdaq?] % modify end of plain file, add EOL
+record this change to 'plain'? [Ynsfdaq?]
+% modify end of plain file, add EOL
diff --git a/plain b/plain
1 hunks, 2 lines changed
-examine changes to 'plain'? [Ynsfdaq?] @@ -9,4 +9,4 @@
+examine changes to 'plain'? [Ynsfdaq?]
+@@ -9,4 +9,4 @@
9
10
11
-7264f99c5f5ff3261504828afa4fb4d406c3af54
\ No newline at end of file
+7264f99c5f5ff3261504828afa4fb4d406c3af54
-record this change to 'plain'? [Ynsfdaq?] % modify beginning, trim end, record both
+record this change to 'plain'? [Ynsfdaq?]
+% modify beginning, trim end, record both
diff --git a/plain b/plain
2 hunks, 4 lines changed
-examine changes to 'plain'? [Ynsfdaq?] @@ -1,4 +1,4 @@
+examine changes to 'plain'? [Ynsfdaq?]
+@@ -1,4 +1,4 @@
-1
+2
2
3
4
-record change 1/2 to 'plain'? [Ynsfdaq?] @@ -8,5 +8,3 @@
+record change 1/2 to 'plain'? [Ynsfdaq?]
+@@ -8,5 +8,3 @@
8
9
10
-11
-7264f99c5f5ff3261504828afa4fb4d406c3af54
record change 2/2 to 'plain'? [Ynsfdaq?]
+
changeset: 11:efca65c9b09e
tag: tip
user: test
@@ -245,7 +264,8 @@
% record end
diff --git a/plain b/plain
2 hunks, 5 lines changed
-examine changes to 'plain'? [Ynsfdaq?] @@ -1,9 +1,6 @@
+examine changes to 'plain'? [Ynsfdaq?]
+@@ -1,9 +1,6 @@
-2
-2
-3
@@ -255,7 +275,8 @@
7
8
9
-record change 1/2 to 'plain'? [Ynsfdaq?] @@ -4,7 +1,7 @@
+record change 1/2 to 'plain'? [Ynsfdaq?]
+@@ -4,7 +1,7 @@
4
5
6
@@ -265,6 +286,7 @@
-10
+10.new
record change 2/2 to 'plain'? [Ynsfdaq?]
+
changeset: 12:7d1e66983c15
tag: tip
user: test
@@ -284,7 +306,8 @@
% record beginning
diff --git a/plain b/plain
1 hunks, 3 lines changed
-examine changes to 'plain'? [Ynsfdaq?] @@ -1,6 +1,3 @@
+examine changes to 'plain'? [Ynsfdaq?]
+@@ -1,6 +1,3 @@
-2
-2
-3
@@ -292,6 +315,7 @@
5
6
record this change to 'plain'? [Ynsfdaq?]
+
changeset: 13:a09fc62a0e61
tag: tip
user: test
@@ -313,7 +337,8 @@
% record end
diff --git a/plain b/plain
2 hunks, 4 lines changed
-examine changes to 'plain'? [Ynsfdaq?] @@ -1,6 +1,9 @@
+examine changes to 'plain'? [Ynsfdaq?]
+@@ -1,6 +1,9 @@
+1
+2
+3
@@ -323,7 +348,8 @@
7
8
9
-record change 1/2 to 'plain'? [Ynsfdaq?] @@ -1,7 +4,6 @@
+record change 1/2 to 'plain'? [Ynsfdaq?]
+@@ -1,7 +4,6 @@
4
5
6
@@ -331,17 +357,20 @@
8
9
-10.new
-record change 2/2 to 'plain'? [Ynsfdaq?] % add to beginning, middle, end
+record change 2/2 to 'plain'? [Ynsfdaq?]
+% add to beginning, middle, end
% record beginning, middle
diff --git a/plain b/plain
3 hunks, 7 lines changed
-examine changes to 'plain'? [Ynsfdaq?] @@ -1,2 +1,5 @@
+examine changes to 'plain'? [Ynsfdaq?]
+@@ -1,2 +1,5 @@
+1
+2
+3
4
5
-record change 1/3 to 'plain'? [Ynsfdaq?] @@ -1,6 +4,8 @@
+record change 1/3 to 'plain'? [Ynsfdaq?]
+@@ -1,6 +4,8 @@
4
5
+5.new
@@ -350,7 +379,8 @@
7
8
9
-record change 2/3 to 'plain'? [Ynsfdaq?] @@ -3,4 +8,6 @@
+record change 2/3 to 'plain'? [Ynsfdaq?]
+@@ -3,4 +8,6 @@
6
7
8
@@ -358,6 +388,7 @@
+10
+11
record change 3/3 to 'plain'? [Ynsfdaq?]
+
changeset: 15:7d137997f3a6
tag: tip
user: test
@@ -382,13 +413,15 @@
% record end
diff --git a/plain b/plain
1 hunks, 2 lines changed
-examine changes to 'plain'? [Ynsfdaq?] @@ -9,3 +9,5 @@
+examine changes to 'plain'? [Ynsfdaq?]
+@@ -9,3 +9,5 @@
7
8
9
+10
+11
record this change to 'plain'? [Ynsfdaq?]
+
changeset: 16:4959e3ff13eb
tag: tip
user: test
@@ -408,10 +441,12 @@
adding subdir/a
diff --git a/subdir/a b/subdir/a
1 hunks, 1 lines changed
-examine changes to 'subdir/a'? [Ynsfdaq?] @@ -1,1 +1,2 @@
+examine changes to 'subdir/a'? [Ynsfdaq?]
+@@ -1,1 +1,2 @@
a
+a
record this change to 'subdir/a'? [Ynsfdaq?]
+
changeset: 18:40698cd490b2
tag: tip
user: test
@@ -428,7 +463,8 @@
% help, quit
diff --git a/subdir/f1 b/subdir/f1
1 hunks, 1 lines changed
-examine changes to 'subdir/f1'? [Ynsfdaq?] y - record this change
+examine changes to 'subdir/f1'? [Ynsfdaq?]
+y - record this change
n - skip this change
s - skip remaining changes to this file
f - record remaining changes to this file
@@ -436,31 +472,38 @@
a - record all changes to all remaining files
q - quit, recording no changes
? - display help
-examine changes to 'subdir/f1'? [Ynsfdaq?] abort: user quit
+examine changes to 'subdir/f1'? [Ynsfdaq?]
+abort: user quit
% skip
diff --git a/subdir/f1 b/subdir/f1
1 hunks, 1 lines changed
-examine changes to 'subdir/f1'? [Ynsfdaq?] diff --git a/subdir/f2 b/subdir/f2
+examine changes to 'subdir/f1'? [Ynsfdaq?]
+diff --git a/subdir/f2 b/subdir/f2
1 hunks, 1 lines changed
examine changes to 'subdir/f2'? [Ynsfdaq?] abort: response expected
% no
diff --git a/subdir/f1 b/subdir/f1
1 hunks, 1 lines changed
-examine changes to 'subdir/f1'? [Ynsfdaq?] diff --git a/subdir/f2 b/subdir/f2
+examine changes to 'subdir/f1'? [Ynsfdaq?]
+diff --git a/subdir/f2 b/subdir/f2
1 hunks, 1 lines changed
examine changes to 'subdir/f2'? [Ynsfdaq?] abort: response expected
% f, quit
diff --git a/subdir/f1 b/subdir/f1
1 hunks, 1 lines changed
-examine changes to 'subdir/f1'? [Ynsfdaq?] diff --git a/subdir/f2 b/subdir/f2
+examine changes to 'subdir/f1'? [Ynsfdaq?]
+diff --git a/subdir/f2 b/subdir/f2
1 hunks, 1 lines changed
-examine changes to 'subdir/f2'? [Ynsfdaq?] abort: user quit
+examine changes to 'subdir/f2'? [Ynsfdaq?]
+abort: user quit
% s, all
diff --git a/subdir/f1 b/subdir/f1
1 hunks, 1 lines changed
-examine changes to 'subdir/f1'? [Ynsfdaq?] diff --git a/subdir/f2 b/subdir/f2
+examine changes to 'subdir/f1'? [Ynsfdaq?]
+diff --git a/subdir/f2 b/subdir/f2
1 hunks, 1 lines changed
examine changes to 'subdir/f2'? [Ynsfdaq?]
+
changeset: 20:d2d8c25276a8
tag: tip
user: test
@@ -478,6 +521,7 @@
diff --git a/subdir/f1 b/subdir/f1
1 hunks, 1 lines changed
examine changes to 'subdir/f1'? [Ynsfdaq?]
+
changeset: 21:1013f51ce32f
tag: tip
user: test
@@ -496,11 +540,13 @@
old mode 100644
new mode 100755
1 hunks, 1 lines changed
-examine changes to 'subdir/f1'? [Ynsfdaq?] @@ -1,2 +1,3 @@
+examine changes to 'subdir/f1'? [Ynsfdaq?]
+@@ -1,2 +1,3 @@
a
a
+a
record this change to 'subdir/f1'? [Ynsfdaq?]
+
changeset: 22:5df857735621
tag: tip
user: test
@@ -520,12 +566,14 @@
% preserve execute permission on original
diff --git a/subdir/f1 b/subdir/f1
1 hunks, 1 lines changed
-examine changes to 'subdir/f1'? [Ynsfdaq?] @@ -1,3 +1,4 @@
+examine changes to 'subdir/f1'? [Ynsfdaq?]
+@@ -1,3 +1,4 @@
a
a
a
+b
record this change to 'subdir/f1'? [Ynsfdaq?]
+
changeset: 23:a4ae36a78715
tag: tip
user: test
@@ -546,12 +594,14 @@
old mode 100755
new mode 100644
1 hunks, 1 lines changed
-examine changes to 'subdir/f1'? [Ynsfdaq?] @@ -2,3 +2,4 @@
+examine changes to 'subdir/f1'? [Ynsfdaq?]
+@@ -2,3 +2,4 @@
a
a
b
+c
record this change to 'subdir/f1'? [Ynsfdaq?]
+
changeset: 24:1460f6e47966
tag: tip
user: test
@@ -572,12 +622,14 @@
% with win32ext
diff --git a/subdir/f1 b/subdir/f1
1 hunks, 1 lines changed
-examine changes to 'subdir/f1'? [Ynsfdaq?] @@ -3,3 +3,4 @@
+examine changes to 'subdir/f1'? [Ynsfdaq?]
+@@ -3,3 +3,4 @@
a
b
c
+d
record this change to 'subdir/f1'? [Ynsfdaq?]
+
changeset: 25:5bacc1f6e9cf
tag: tip
user: test
--- a/tests/test-relink.out Sat Mar 06 10:02:45 2010 +0100
+++ b/tests/test-relink.out Tue Mar 16 11:37:14 2010 -0300
@@ -17,7 +17,7 @@
not linkable: 00manifest.i
not linkable: data/b.i
pruned down to 2 probably relinkable files
-relink: data/a.i 1/2 files (50.00%)
+relinking: data/a.i 1/2 files (50.00%)
not linkable: data/dummy.i
relinked 1 files (136 bytes reclaimed)
% check hardlinks
--- a/tests/test-serve Sat Mar 06 10:02:45 2010 +0100
+++ b/tests/test-serve Tue Mar 16 11:37:14 2010 -0300
@@ -2,8 +2,10 @@
hgserve()
{
- hg serve -a localhost -p $HGPORT1 -d --pid-file=hg.pid -E errors.log -v $@ \
- | sed -e 's/:[0-9][0-9]*//g' -e 's/http:\/\/[^/]*\//http:\/\/localhost\//'
+ hg serve -a localhost -d --pid-file=hg.pid -E errors.log -v $@ \
+ | sed -e "s/:$HGPORT1\\([^0-9]\\)/:HGPORT1\1/g" \
+ -e "s/:$HGPORT2\\([^0-9]\\)/:HGPORT2\1/g" \
+ -e 's/http:\/\/[^/]*\//http:\/\/localhost\//'
cat hg.pid >> "$DAEMON_PIDS"
echo % errors
cat errors.log
@@ -17,6 +19,7 @@
echo '[web]' > .hg/hgrc
echo 'accesslog = access.log' >> .hg/hgrc
+echo "port = $HGPORT1" >> .hg/hgrc
echo % Without -v
hg serve -a localhost -p $HGPORT -d --pid-file=hg.pid -E errors.log
@@ -30,6 +33,9 @@
echo % With -v
hgserve
+echo % With -v and -p HGPORT2
+hgserve -p "$HGPORT2"
+
echo % With --prefix foo
hgserve --prefix foo
--- a/tests/test-serve.out Sat Mar 06 10:02:45 2010 +0100
+++ b/tests/test-serve.out Tue Mar 16 11:37:14 2010 -0300
@@ -2,17 +2,20 @@
access log created - .hg/hgrc respected
% errors
% With -v
-listening at http://localhost/ (bound to 127.0.0.1)
+listening at http://localhost/ (bound to 127.0.0.1:HGPORT1)
+% errors
+% With -v and -p HGPORT2
+listening at http://localhost/ (bound to 127.0.0.1:HGPORT2)
% errors
% With --prefix foo
-listening at http://localhost/foo/ (bound to 127.0.0.1)
+listening at http://localhost/foo/ (bound to 127.0.0.1:HGPORT1)
% errors
% With --prefix /foo
-listening at http://localhost/foo/ (bound to 127.0.0.1)
+listening at http://localhost/foo/ (bound to 127.0.0.1:HGPORT1)
% errors
% With --prefix foo/
-listening at http://localhost/foo/ (bound to 127.0.0.1)
+listening at http://localhost/foo/ (bound to 127.0.0.1:HGPORT1)
% errors
% With --prefix /foo/
-listening at http://localhost/foo/ (bound to 127.0.0.1)
+listening at http://localhost/foo/ (bound to 127.0.0.1:HGPORT1)
% errors
--- a/tests/test-subrepo Sat Mar 06 10:02:45 2010 +0100
+++ b/tests/test-subrepo Tue Mar 16 11:37:14 2010 -0300
@@ -78,7 +78,7 @@
echo % clone
cd ..
-hg clone t tc
+hg clone t tc | sed 's|from .*/sub|from .../sub|g'
cd tc
hg debugsub
@@ -102,7 +102,8 @@
echo % pull
cd ../tc
hg pull | sed 's/ .*sub/ ...sub/g'
-hg up # should pull t
+# should pull t
+hg up | sed 's|from .*/sub|from .../sub|g'
cat t/t
echo % bogus subrepo path aborts
--- a/tests/test-subrepo.out Sat Mar 06 10:02:45 2010 +0100
+++ b/tests/test-subrepo.out Tue Mar 16 11:37:14 2010 -0300
@@ -108,19 +108,19 @@
>>>>>>> other
% clone
updating to branch default
-pulling subrepo s
+pulling subrepo s from .../sub/t/s
requesting all changes
adding changesets
adding manifests
adding file changes
added 4 changesets with 5 changes to 3 files
-pulling subrepo ss
+pulling subrepo ss from .../sub/t/s/ss
requesting all changes
adding changesets
adding manifests
adding file changes
added 1 changesets with 1 changes to 1 files
-pulling subrepo t
+pulling subrepo t from .../sub/t/t
requesting all changes
adding changesets
adding manifests
@@ -197,7 +197,7 @@
adding file changes
added 1 changesets with 1 changes to 1 files
(run 'hg update' to get a working copy)
-pulling subrepo t
+pulling subrepo t from .../sub/t/t
searching for changes
adding changesets
adding manifests
@@ -263,6 +263,6 @@
updating to branch default
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
[paths]
-default = $HGTMP/test-subrepo/sub/mercurial/main/nested_absolute
+default = $HGTMP/test-subrepo/sub/mercurial/nested_absolute
[paths]
-default = $HGTMP/test-subrepo/sub/mercurial/main/nested_relative
+default = $HGTMP/test-subrepo/sub/mercurial/main/../nested_relative