Merge histedit fixes from stable.
--- a/contrib/casesmash.py Mon Jul 15 15:04:42 2013 +0200
+++ b/contrib/casesmash.py Tue Jul 16 11:13:18 2013 -0500
@@ -1,4 +1,4 @@
-import sys, os, __builtin__
+import os, __builtin__
from mercurial import util
def lowerwrap(scope, funcname):
@@ -7,7 +7,7 @@
d, base = os.path.split(fname)
try:
files = os.listdir(d or '.')
- except OSError, inst:
+ except OSError:
files = []
if base in files:
return f(fname, *args, **kwargs)
--- a/contrib/check-code.py Mon Jul 15 15:04:42 2013 +0200
+++ b/contrib/check-code.py Tue Jul 16 11:13:18 2013 -0500
@@ -10,6 +10,20 @@
import re, glob, os, sys
import keyword
import optparse
+try:
+ import re2
+except ImportError:
+ re2 = None
+
+def compilere(pat, multiline=False):
+ if multiline:
+ pat = '(?m)' + pat
+ if re2:
+ try:
+ return re2.compile(pat)
+ except re2.error:
+ pass
+ return re.compile(pat)
def repquote(m):
t = re.sub(r"\w", "x", m.group('text'))
@@ -54,8 +68,8 @@
(r'head -c', "don't use 'head -c', use 'dd'"),
(r'sha1sum', "don't use sha1sum, use $TESTDIR/md5sum.py"),
(r'ls.*-\w*R', "don't use 'ls -R', use 'find'"),
- (r'printf.*\\([1-9]|0\d)', "don't use 'printf \NNN', use Python"),
- (r'printf.*\\x', "don't use printf \\x, use Python"),
+ (r'printf.*[^\\]\\([1-9]|0\d)', "don't use 'printf \NNN', use Python"),
+ (r'printf.*[^\\]\\x', "don't use printf \\x, use Python"),
(r'\$\(.*\)', "don't use $(expr), use `expr`"),
(r'rm -rf \*', "don't use naked rm -rf, target a directory"),
(r'(^|\|\s*)grep (-\w\s+)*[^|]*[(|]\w',
@@ -109,6 +123,16 @@
(r'^ changeset .* references (corrupted|missing) \$TESTTMP/.*[^)]$',
winglobmsg),
(r'^ pulling from \$TESTTMP/.*[^)]$', winglobmsg, '\$TESTTMP/unix-repo$'),
+ (r'^ reverting .*/.*[^)]$', winglobmsg, '\$TESTTMP/unix-repo$'),
+ (r'^ cloning subrepo \S+/.*[^)]$', winglobmsg, '\$TESTTMP/unix-repo$'),
+ (r'^ pushing to \$TESTTMP/.*[^)]$', winglobmsg, '\$TESTTMP/unix-repo$'),
+ (r'^ pushing subrepo \S+/\S+ to.*[^)]$', winglobmsg,
+ '\$TESTTMP/unix-repo$'),
+ (r'^ moving \S+/.*[^)]$', winglobmsg),
+ (r'^ no changes made to subrepo since.*/.*[^)]$',
+ winglobmsg, '\$TESTTMP/unix-repo$'),
+ (r'^ .*: largefile \S+ not available from file:.*/.*[^)]$',
+ winglobmsg, '\$TESTTMP/unix-repo$'),
],
# warnings
[
@@ -307,6 +331,24 @@
('txt', r'.*\.txt$', txtfilters, txtpats),
]
+def _preparepats():
+ for c in checks:
+ failandwarn = c[-1]
+ for pats in failandwarn:
+ for i, pseq in enumerate(pats):
+ # fix-up regexes for multi-line searches
+ p = pseq[0]
+ # \s doesn't match \n
+ p = re.sub(r'(?<!\\)\\s', r'[ \\t]', p)
+ # [^...] doesn't match newline
+ p = re.sub(r'(?<!\\)\[\^', r'[^\\n', p)
+
+ pats[i] = (re.compile(p, re.MULTILINE),) + pseq[1:]
+ filters = c[2]
+ for i, flt in enumerate(filters):
+ filters[i] = re.compile(flt[0]), flt[1]
+_preparepats()
+
class norepeatlogger(object):
def __init__(self):
self._lastseen = None
@@ -368,9 +410,9 @@
fp = open(f)
pre = post = fp.read()
fp.close()
- if "no-" + "check-code" in pre:
+ if "no-" "check-code" in pre:
if debug:
- print "Skipping %s for %s it has no- and check-code" % (
+ print "Skipping %s for %s it has no-" " check-code" % (
name, f)
break
for p, r in filters:
@@ -393,18 +435,9 @@
p, msg = pat
ignore = None
- # fix-up regexes for multi-line searches
- po = p
- # \s doesn't match \n
- p = re.sub(r'(?<!\\)\\s', r'[ \\t]', p)
- # [^...] doesn't match newline
- p = re.sub(r'(?<!\\)\[\^', r'[^\\n', p)
-
- #print po, '=>', p
-
pos = 0
n = 0
- for m in re.finditer(p, post, re.MULTILINE):
+ for m in p.finditer(post):
if prelines is None:
prelines = pre.splitlines()
postlines = post.splitlines(True)
@@ -418,9 +451,9 @@
n += 1
l = prelines[n]
- if "check-code" + "-ignore" in l:
+ if "check-code" "-ignore" in l:
if debug:
- print "Skipping %s for %s:%s (check-code -ignore)" % (
+ print "Skipping %s for %s:%s (check-code" "-ignore)" % (
name, f, n)
continue
elif ignore and re.search(ignore, l, re.MULTILINE):
--- a/contrib/hgfixes/fix_leftover_imports.py Mon Jul 15 15:04:42 2013 +0200
+++ b/contrib/hgfixes/fix_leftover_imports.py Tue Jul 16 11:13:18 2013 -0500
@@ -48,7 +48,6 @@
mod_list = ' | '.join(["'%s' '.' ('%s')" %
(key, "' | '".join(packages[key])) for key in packages])
mod_list = '(' + mod_list + ' )'
- bare_names = alternates(mapping.keys())
yield """name_import=import_name< 'import' module_name=dotted_name< %s > >
""" % mod_list
--- a/contrib/perf.py Mon Jul 15 15:04:42 2013 +0200
+++ b/contrib/perf.py Tue Jul 16 11:13:18 2013 -0500
@@ -1,7 +1,7 @@
# perf.py - performance test routines
'''helper extension to measure performance'''
-from mercurial import cmdutil, scmutil, util, match, commands, obsolete
+from mercurial import cmdutil, scmutil, util, commands, obsolete
from mercurial import repoview, branchmap, merge, copies
import time, os, sys
@@ -45,6 +45,11 @@
except Exception:
timer(lambda: len(list(cmdutil.walk(repo, pats, {}))))
+@command('perfannotate')
+def perfannotate(ui, repo, f):
+ fc = repo['.'][f]
+ timer(lambda: len(fc.annotate(True)))
+
@command('perfstatus',
[('u', 'unknown', False,
'ask status to look for unknown files')])
@@ -170,7 +175,7 @@
def perfmanifest(ui, repo):
def d():
t = repo.manifest.tip()
- m = repo.manifest.read(t)
+ repo.manifest.read(t)
repo.manifest.mapcache = None
repo.manifest._cache = None
timer(d)
@@ -179,7 +184,7 @@
def perfchangeset(ui, repo, rev):
n = repo[rev].node()
def d():
- c = repo.changelog.read(n)
+ repo.changelog.read(n)
#repo.changelog._cache = None
timer(d)
--- a/contrib/setup3k.py Mon Jul 15 15:04:42 2013 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,373 +0,0 @@
-#
-# This is an experimental py3k-enabled mercurial setup script.
-#
-# 'python setup.py install', or
-# 'python setup.py --help' for more options
-
-from distutils.command.build_py import build_py_2to3
-from lib2to3.refactor import get_fixers_from_package as getfixers
-
-import sys
-if getattr(sys, 'version_info', (0, 0, 0)) < (2, 4, 0, 'final'):
- raise SystemExit("Mercurial requires Python 2.4 or later.")
-
-if sys.version_info[0] >= 3:
- def b(s):
- '''A helper function to emulate 2.6+ bytes literals using string
- literals.'''
- return s.encode('latin1')
-else:
- def b(s):
- '''A helper function to emulate 2.6+ bytes literals using string
- literals.'''
- return s
-
-# Solaris Python packaging brain damage
-try:
- import hashlib
- sha = hashlib.sha1()
-except ImportError:
- try:
- import sha
- except ImportError:
- raise SystemExit(
- "Couldn't import standard hashlib (incomplete Python install).")
-
-try:
- import zlib
-except ImportError:
- raise SystemExit(
- "Couldn't import standard zlib (incomplete Python install).")
-
-try:
- import bz2
-except ImportError:
- raise SystemExit(
- "Couldn't import standard bz2 (incomplete Python install).")
-
-import os, subprocess, time
-import shutil
-import tempfile
-from distutils import log
-from distutils.core import setup, Extension
-from distutils.dist import Distribution
-from distutils.command.build import build
-from distutils.command.build_ext import build_ext
-from distutils.command.build_py import build_py
-from distutils.spawn import spawn, find_executable
-from distutils.ccompiler import new_compiler
-from distutils.errors import CCompilerError
-
-scripts = ['hg']
-if os.name == 'nt':
- scripts.append('contrib/win32/hg.bat')
-
-# simplified version of distutils.ccompiler.CCompiler.has_function
-# that actually removes its temporary files.
-def hasfunction(cc, funcname):
- tmpdir = tempfile.mkdtemp(prefix='hg-install-')
- devnull = oldstderr = None
- try:
- try:
- fname = os.path.join(tmpdir, 'funcname.c')
- f = open(fname, 'w')
- f.write('int main(void) {\n')
- f.write(' %s();\n' % funcname)
- f.write('}\n')
- f.close()
- # Redirect stderr to /dev/null to hide any error messages
- # from the compiler.
- # This will have to be changed if we ever have to check
- # for a function on Windows.
- devnull = open('/dev/null', 'w')
- oldstderr = os.dup(sys.stderr.fileno())
- os.dup2(devnull.fileno(), sys.stderr.fileno())
- objects = cc.compile([fname], output_dir=tmpdir)
- cc.link_executable(objects, os.path.join(tmpdir, "a.out"))
- except Exception:
- return False
- return True
- finally:
- if oldstderr is not None:
- os.dup2(oldstderr, sys.stderr.fileno())
- if devnull is not None:
- devnull.close()
- shutil.rmtree(tmpdir)
-
-# py2exe needs to be installed to work
-try:
- import py2exe
- py2exeloaded = True
-
- # Help py2exe to find win32com.shell
- try:
- import modulefinder
- import win32com
- for p in win32com.__path__[1:]: # Take the path to win32comext
- modulefinder.AddPackagePath("win32com", p)
- pn = "win32com.shell"
- __import__(pn)
- m = sys.modules[pn]
- for p in m.__path__[1:]:
- modulefinder.AddPackagePath(pn, p)
- except ImportError:
- pass
-
-except ImportError:
- py2exeloaded = False
- pass
-
-def runcmd(cmd, env):
- p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
- stderr=subprocess.PIPE, env=env)
- out, err = p.communicate()
- # If root is executing setup.py, but the repository is owned by
- # another user (as in "sudo python setup.py install") we will get
- # trust warnings since the .hg/hgrc file is untrusted. That is
- # fine, we don't want to load it anyway. Python may warn about
- # a missing __init__.py in mercurial/locale, we also ignore that.
- err = [e for e in err.splitlines()
- if not e.startswith(b('Not trusting file')) \
- and not e.startswith(b('warning: Not importing'))]
- if err:
- return ''
- return out
-
-version = ''
-
-if os.path.isdir('.hg'):
- # Execute hg out of this directory with a custom environment which
- # includes the pure Python modules in mercurial/pure. We also take
- # care to not use any hgrc files and do no localization.
- pypath = ['mercurial', os.path.join('mercurial', 'pure')]
- env = {'PYTHONPATH': os.pathsep.join(pypath),
- 'HGRCPATH': '',
- 'LANGUAGE': 'C'}
- if 'LD_LIBRARY_PATH' in os.environ:
- env['LD_LIBRARY_PATH'] = os.environ['LD_LIBRARY_PATH']
- if 'SystemRoot' in os.environ:
- # Copy SystemRoot into the custom environment for Python 2.6
- # under Windows. Otherwise, the subprocess will fail with
- # error 0xc0150004. See: http://bugs.python.org/issue3440
- env['SystemRoot'] = os.environ['SystemRoot']
- cmd = [sys.executable, 'hg', 'id', '-i', '-t']
- l = runcmd(cmd, env).split()
- while len(l) > 1 and l[-1][0].isalpha(): # remove non-numbered tags
- l.pop()
- if len(l) > 1: # tag found
- version = l[-1]
- if l[0].endswith('+'): # propagate the dirty status to the tag
- version += '+'
- elif len(l) == 1: # no tag found
- cmd = [sys.executable, 'hg', 'parents', '--template',
- '{latesttag}+{latesttagdistance}-']
- version = runcmd(cmd, env) + l[0]
- if version.endswith('+'):
- version += time.strftime('%Y%m%d')
-elif os.path.exists('.hg_archival.txt'):
- kw = dict([[t.strip() for t in l.split(':', 1)]
- for l in open('.hg_archival.txt')])
- if 'tag' in kw:
- version = kw['tag']
- elif 'latesttag' in kw:
- version = '%(latesttag)s+%(latesttagdistance)s-%(node).12s' % kw
- else:
- version = kw.get('node', '')[:12]
-
-if version:
- f = open("mercurial/__version__.py", "w")
- f.write('# this file is autogenerated by setup.py\n')
- f.write('version = "%s"\n' % version)
- f.close()
-
-
-try:
- from mercurial import __version__
- version = __version__.version
-except ImportError:
- version = 'unknown'
-
-class hgbuildmo(build):
-
- description = "build translations (.mo files)"
-
- def run(self):
- if not find_executable('msgfmt'):
- self.warn("could not find msgfmt executable, no translations "
- "will be built")
- return
-
- podir = 'i18n'
- if not os.path.isdir(podir):
- self.warn("could not find %s/ directory" % podir)
- return
-
- join = os.path.join
- for po in os.listdir(podir):
- if not po.endswith('.po'):
- continue
- pofile = join(podir, po)
- modir = join('locale', po[:-3], 'LC_MESSAGES')
- mofile = join(modir, 'hg.mo')
- mobuildfile = join('mercurial', mofile)
- cmd = ['msgfmt', '-v', '-o', mobuildfile, pofile]
- if sys.platform != 'sunos5':
- # msgfmt on Solaris does not know about -c
- cmd.append('-c')
- self.mkpath(join('mercurial', modir))
- self.make_file([pofile], mobuildfile, spawn, (cmd,))
-
-# Insert hgbuildmo first so that files in mercurial/locale/ are found
-# when build_py is run next.
-build.sub_commands.insert(0, ('build_mo', None))
-# We also need build_ext before build_py. Otherwise, when 2to3 is called (in
-# build_py), it will not find osutil & friends, thinking that those modules are
-# global and, consequently, making a mess, now that all module imports are
-# global.
-build.sub_commands.insert(1, ('build_ext', None))
-
-Distribution.pure = 0
-Distribution.global_options.append(('pure', None, "use pure (slow) Python "
- "code instead of C extensions"))
-
-class hgbuildext(build_ext):
-
- def build_extension(self, ext):
- try:
- build_ext.build_extension(self, ext)
- except CCompilerError:
- if getattr(ext, 'optional', False):
- raise
- log.warn("Failed to build optional extension '%s' (skipping)",
- ext.name)
-
-class hgbuildpy(build_py_2to3):
- fixer_names = sorted(set(getfixers("lib2to3.fixes") +
- getfixers("hgfixes")))
-
- def finalize_options(self):
- build_py.finalize_options(self)
-
- if self.distribution.pure:
- if self.py_modules is None:
- self.py_modules = []
- for ext in self.distribution.ext_modules:
- if ext.name.startswith("mercurial."):
- self.py_modules.append("mercurial.pure.%s" % ext.name[10:])
- self.distribution.ext_modules = []
-
- def find_modules(self):
- modules = build_py.find_modules(self)
- for module in modules:
- if module[0] == "mercurial.pure":
- if module[1] != "__init__":
- yield ("mercurial", module[1], module[2])
- else:
- yield module
-
- def run(self):
- # In the build_py_2to3 class, self.updated_files = [], but I couldn't
- # see when that variable was updated to point to the updated files, as
- # its names suggests. Thus, I decided to just find_all_modules and feed
- # them to 2to3. Unfortunately, subsequent calls to setup3k.py will
- # incur in 2to3 analysis overhead.
- self.updated_files = [i[2] for i in self.find_all_modules()]
-
- # Base class code
- if self.py_modules:
- self.build_modules()
- if self.packages:
- self.build_packages()
- self.build_package_data()
-
- # 2to3
- self.run_2to3(self.updated_files)
-
- # Remaining base class code
- self.byte_compile(self.get_outputs(include_bytecode=0))
-
-cmdclass = {'build_mo': hgbuildmo,
- 'build_ext': hgbuildext,
- 'build_py': hgbuildpy}
-
-packages = ['mercurial', 'mercurial.hgweb', 'hgext', 'hgext.convert',
- 'hgext.highlight', 'hgext.zeroconf']
-
-pymodules = []
-
-extmodules = [
- Extension('mercurial.base85', ['mercurial/base85.c']),
- Extension('mercurial.bdiff', ['mercurial/bdiff.c']),
- Extension('mercurial.diffhelpers', ['mercurial/diffhelpers.c']),
- Extension('mercurial.mpatch', ['mercurial/mpatch.c']),
- Extension('mercurial.parsers', ['mercurial/parsers.c']),
- ]
-
-# disable osutil.c under windows + python 2.4 (issue1364)
-if sys.platform == 'win32' and sys.version_info < (2, 5, 0, 'final'):
- pymodules.append('mercurial.pure.osutil')
-else:
- extmodules.append(Extension('mercurial.osutil', ['mercurial/osutil.c']))
-
-if sys.platform.startswith('linux') and os.uname()[2] > '2.6':
- # The inotify extension is only usable with Linux 2.6 kernels.
- # You also need a reasonably recent C library.
- # In any case, if it fails to build the error will be skipped ('optional').
- cc = new_compiler()
- if hasfunction(cc, 'inotify_add_watch'):
- inotify = Extension('hgext.inotify.linux._inotify',
- ['hgext/inotify/linux/_inotify.c'],
- ['mercurial'])
- inotify.optional = True
- extmodules.append(inotify)
- packages.extend(['hgext.inotify', 'hgext.inotify.linux'])
-
-packagedata = {'mercurial': ['locale/*/LC_MESSAGES/hg.mo',
- 'help/*.txt']}
-
-def ordinarypath(p):
- return p and p[0] != '.' and p[-1] != '~'
-
-for root in ('templates',):
- for curdir, dirs, files in os.walk(os.path.join('mercurial', root)):
- curdir = curdir.split(os.sep, 1)[1]
- dirs[:] = filter(ordinarypath, dirs)
- for f in filter(ordinarypath, files):
- f = os.path.join(curdir, f)
- packagedata['mercurial'].append(f)
-
-datafiles = []
-setupversion = version
-extra = {}
-
-if py2exeloaded:
- extra['console'] = [
- {'script':'hg',
- 'copyright':'Copyright (C) 2005-2010 Matt Mackall and others',
- 'product_version':version}]
-
-if os.name == 'nt':
- # Windows binary file versions for exe/dll files must have the
- # form W.X.Y.Z, where W,X,Y,Z are numbers in the range 0..65535
- setupversion = version.split('+', 1)[0]
-
-setup(name='mercurial',
- version=setupversion,
- author='Matt Mackall',
- author_email='mpm@selenic.com',
- url='http://mercurial.selenic.com/',
- description='Scalable distributed SCM',
- license='GNU GPLv2+',
- scripts=scripts,
- packages=packages,
- py_modules=pymodules,
- ext_modules=extmodules,
- data_files=datafiles,
- package_data=packagedata,
- cmdclass=cmdclass,
- options=dict(py2exe=dict(packages=['hgext', 'email']),
- bdist_mpkg=dict(zipdist=True,
- license='COPYING',
- readme='contrib/macosx/Readme.html',
- welcome='contrib/macosx/Welcome.html')),
- **extra)
--- a/contrib/shrink-revlog.py Mon Jul 15 15:04:42 2013 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,294 +0,0 @@
-"""reorder a revlog (the manifest by default) to save space
-
-Specifically, this topologically sorts the revisions in the revlog so that
-revisions on the same branch are adjacent as much as possible. This is a
-workaround for the fact that Mercurial computes deltas relative to the
-previous revision rather than relative to a parent revision.
-
-This is *not* safe to run on a changelog.
-"""
-
-# Originally written by Benoit Boissinot <benoit.boissinot at ens-lyon.org>
-# as a patch to rewrite-log. Cleaned up, refactored, documented, and
-# renamed by Greg Ward <greg at gerg.ca>.
-
-# XXX would be nice to have a way to verify the repository after shrinking,
-# e.g. by comparing "before" and "after" states of random changesets
-# (maybe: export before, shrink, export after, diff).
-
-import os, errno
-from mercurial import revlog, transaction, node, util, scmutil
-from mercurial import changegroup
-from mercurial.i18n import _
-
-
-def postorder(start, edges):
- result = []
- visit = list(start)
- finished = set()
-
- while visit:
- cur = visit[-1]
- for p in edges[cur]:
- # defend against node.nullrev because it's occasionally
- # possible for a node to have parents (null, something)
- # rather than (something, null)
- if p not in finished and p != node.nullrev:
- visit.append(p)
- break
- else:
- result.append(cur)
- finished.add(cur)
- visit.pop()
-
- return result
-
-def toposort_reversepostorder(ui, rl):
- # postorder of the reverse directed graph
-
- # map rev to list of parent revs (p2 first)
- parents = {}
- heads = set()
- ui.status(_('reading revs\n'))
- try:
- for rev in rl:
- ui.progress(_('reading'), rev, total=len(rl))
- (p1, p2) = rl.parentrevs(rev)
- if p1 == p2 == node.nullrev:
- parents[rev] = () # root node
- elif p1 == p2 or p2 == node.nullrev:
- parents[rev] = (p1,) # normal node
- else:
- parents[rev] = (p2, p1) # merge node
- heads.add(rev)
- for p in parents[rev]:
- heads.discard(p)
- finally:
- ui.progress(_('reading'), None)
-
- heads = list(heads)
- heads.sort(reverse=True)
-
- ui.status(_('sorting revs\n'))
- return postorder(heads, parents)
-
-def toposort_postorderreverse(ui, rl):
- # reverse-postorder of the reverse directed graph
-
- children = {}
- roots = set()
- ui.status(_('reading revs\n'))
- try:
- for rev in rl:
- ui.progress(_('reading'), rev, total=len(rl))
- (p1, p2) = rl.parentrevs(rev)
- if p1 == p2 == node.nullrev:
- roots.add(rev)
- children[rev] = []
- if p1 != node.nullrev:
- children[p1].append(rev)
- if p2 != node.nullrev:
- children[p2].append(rev)
- finally:
- ui.progress(_('reading'), None)
-
- roots = list(roots)
- roots.sort()
-
- ui.status(_('sorting revs\n'))
- result = postorder(roots, children)
- result.reverse()
- return result
-
-def writerevs(ui, r1, r2, order, tr):
-
- ui.status(_('writing revs\n'))
-
-
- order = [r1.node(r) for r in order]
-
- # this is a bit ugly, but it works
- count = [0]
- def lookup(revl, x):
- count[0] += 1
- ui.progress(_('writing'), count[0], total=len(order))
- return "%020d" % revl.linkrev(revl.rev(x))
-
- unlookup = lambda x: int(x, 10)
-
- try:
- bundler = changegroup.bundle10(lookup)
- group = util.chunkbuffer(r1.group(order, bundler))
- group = changegroup.unbundle10(group, "UN")
- r2.addgroup(group, unlookup, tr)
- finally:
- ui.progress(_('writing'), None)
-
-def report(ui, r1, r2):
- def getsize(r):
- s = 0
- for fn in (r.indexfile, r.datafile):
- try:
- s += os.stat(fn).st_size
- except OSError, inst:
- if inst.errno != errno.ENOENT:
- raise
- return s
-
- oldsize = float(getsize(r1))
- newsize = float(getsize(r2))
-
- # argh: have to pass an int to %d, because a float >= 2^32
- # blows up under Python 2.5 or earlier
- ui.write(_('old file size: %12d bytes (%6.1f MiB)\n')
- % (int(oldsize), oldsize / 1024 / 1024))
- ui.write(_('new file size: %12d bytes (%6.1f MiB)\n')
- % (int(newsize), newsize / 1024 / 1024))
-
- shrink_percent = (oldsize - newsize) / oldsize * 100
- shrink_factor = oldsize / newsize
- ui.write(_('shrinkage: %.1f%% (%.1fx)\n')
- % (shrink_percent, shrink_factor))
-
-def shrink(ui, repo, **opts):
- """shrink a revlog by reordering revisions
-
- Rewrites all the entries in some revlog of the current repository
- (by default, the manifest log) to save space.
-
- Different sort algorithms have different performance
- characteristics. Use ``--sort`` to select a sort algorithm so you
- can determine which works best for your data.
- """
-
- if not repo.local():
- raise util.Abort(_('not a local repository: %s') % repo.root)
-
- fn = opts.get('revlog')
- if not fn:
- indexfn = repo.sjoin('00manifest.i')
- else:
- if not fn.endswith('.i'):
- raise util.Abort(_('--revlog option must specify the revlog index '
- 'file (*.i), not %s') % opts.get('revlog'))
-
- indexfn = os.path.realpath(fn)
- store = repo.sjoin('')
- if not indexfn.startswith(store):
- raise util.Abort(_('--revlog option must specify a revlog in %s, '
- 'not %s') % (store, indexfn))
-
- sortname = opts['sort']
- try:
- toposort = globals()['toposort_' + sortname]
- except KeyError:
- raise util.Abort(_('no such toposort algorithm: %s') % sortname)
-
- if not os.path.exists(indexfn):
- raise util.Abort(_('no such file: %s') % indexfn)
- if '00changelog' in indexfn:
- raise util.Abort(_('shrinking the changelog '
- 'will corrupt your repository'))
-
- ui.write(_('shrinking %s\n') % indexfn)
- tmpindexfn = util.mktempcopy(indexfn, emptyok=True)
-
- r1 = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), indexfn)
- r2 = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), tmpindexfn)
-
- datafn, tmpdatafn = r1.datafile, r2.datafile
-
- oldindexfn = indexfn + '.old'
- olddatafn = datafn + '.old'
- if os.path.exists(oldindexfn) or os.path.exists(olddatafn):
- raise util.Abort(_('one or both of\n'
- ' %s\n'
- ' %s\n'
- 'exists from a previous run; please clean up '
- 'before running again') % (oldindexfn, olddatafn))
-
- # Don't use repo.transaction(), because then things get hairy with
- # paths: some need to be relative to .hg, and some need to be
- # absolute. Doing it this way keeps things simple: everything is an
- # absolute path.
- lock = repo.lock(wait=False)
- tr = transaction.transaction(ui.warn,
- open,
- repo.sjoin('journal'))
-
- def ignoremissing(func):
- def f(*args, **kw):
- try:
- return func(*args, **kw)
- except OSError, inst:
- if inst.errno != errno.ENOENT:
- raise
- return f
-
- try:
- try:
- order = toposort(ui, r1)
-
- suboptimal = 0
- for i in xrange(1, len(order)):
- parents = [p for p in r1.parentrevs(order[i])
- if p != node.nullrev]
- if parents and order[i - 1] not in parents:
- suboptimal += 1
- ui.note(_('%d suboptimal nodes\n') % suboptimal)
-
- writerevs(ui, r1, r2, order, tr)
- report(ui, r1, r2)
- tr.close()
- except: # re-raises
- # Abort transaction first, so we truncate the files before
- # deleting them.
- tr.abort()
- for fn in (tmpindexfn, tmpdatafn):
- ignoremissing(os.unlink)(fn)
- raise
- if not opts.get('dry_run'):
- # racy, both files cannot be renamed atomically
- # copy files
- util.oslink(indexfn, oldindexfn)
- ignoremissing(util.oslink)(datafn, olddatafn)
-
- # rename
- util.rename(tmpindexfn, indexfn)
- try:
- os.chmod(tmpdatafn, os.stat(datafn).st_mode)
- util.rename(tmpdatafn, datafn)
- except OSError, inst:
- if inst.errno != errno.ENOENT:
- raise
- ignoremissing(os.unlink)(datafn)
- else:
- for fn in (tmpindexfn, tmpdatafn):
- ignoremissing(os.unlink)(fn)
- finally:
- lock.release()
-
- if not opts.get('dry_run'):
- ui.write(
- _('note: old revlog saved in:\n'
- ' %s\n'
- ' %s\n'
- '(You can delete those files when you are satisfied that your\n'
- 'repository is still sane. '
- 'Running \'hg verify\' is strongly recommended.)\n')
- % (oldindexfn, olddatafn))
-
-cmdtable = {
- 'shrink': (shrink,
- [('', 'revlog', '',
- _('the revlog to shrink (.i)')),
- ('n', 'dry-run', None,
- _('do not shrink, simulate only')),
- ('', 'sort', 'reversepostorder',
- _('name of sort algorithm to use')),
- ],
- _('hg shrink [--revlog PATH]'))
-}
-
-if __name__ == "__main__":
- print "shrink-revlog.py is now an extension (see hg help extensions)"
--- a/contrib/simplemerge Mon Jul 15 15:04:42 2013 +0200
+++ b/contrib/simplemerge Tue Jul 16 11:13:18 2013 -0500
@@ -3,7 +3,7 @@
from mercurial import demandimport
demandimport.enable()
-import os, sys
+import sys
from mercurial.i18n import _
from mercurial import simplemerge, fancyopts, util, ui
--- a/contrib/synthrepo.py Mon Jul 15 15:04:42 2013 +0200
+++ b/contrib/synthrepo.py Tue Jul 16 11:13:18 2013 -0500
@@ -36,7 +36,7 @@
'''
import bisect, collections, json, os, random, time, sys
-from mercurial import cmdutil, context, patch, scmutil, url, util, hg
+from mercurial import cmdutil, context, patch, scmutil, util, hg
from mercurial.i18n import _
from mercurial.node import nullrev, nullid
--- a/doc/gendoc.py Mon Jul 15 15:04:42 2013 +0200
+++ b/doc/gendoc.py Tue Jul 16 11:13:18 2013 -0500
@@ -4,10 +4,9 @@
# fall back to pure modules if required C extensions are not available
sys.path.append(os.path.join('..', 'mercurial', 'pure'))
from mercurial import demandimport; demandimport.enable()
-from mercurial import encoding
from mercurial import minirst
from mercurial.commands import table, globalopts
-from mercurial.i18n import _
+from mercurial.i18n import gettext, _
from mercurial.help import helptable
from mercurial import extensions
from mercurial import util
@@ -51,7 +50,7 @@
d['cmd'] = cmds[0]
d['aliases'] = cmd.split("|")[1:]
- d['desc'] = get_desc(attr[0].__doc__)
+ d['desc'] = get_desc(gettext(attr[0].__doc__))
d['opts'] = list(get_opts(attr[1]))
s = 'hg ' + cmds[0]
@@ -74,20 +73,9 @@
ui.write(minirst.section(_("Commands")))
commandprinter(ui, table, minirst.subsection)
- # print topics
- for names, sec, doc in helptable:
- if names[0] == "config":
- # The config help topic is included in the hgrc.5 man
- # page.
- continue
- for name in names:
- ui.write(".. _%s:\n" % name)
- ui.write("\n")
- ui.write(minirst.section(sec))
- if util.safehasattr(doc, '__call__'):
- doc = doc()
- ui.write(doc)
- ui.write("\n")
+ # print help topics
+ # The config help topic is included in the hgrc.5 man page.
+ helpprinter(ui, helptable, minirst.section, exclude=['config'])
ui.write(minirst.section(_("Extensions")))
ui.write(_("This section contains help for extensions that are "
@@ -102,12 +90,28 @@
for extensionname in sorted(allextensionnames()):
mod = extensions.load(None, extensionname, None)
ui.write(minirst.subsection(extensionname))
- ui.write("%s\n\n" % mod.__doc__)
+ ui.write("%s\n\n" % gettext(mod.__doc__))
cmdtable = getattr(mod, 'cmdtable', None)
if cmdtable:
ui.write(minirst.subsubsection(_('Commands')))
commandprinter(ui, cmdtable, minirst.subsubsubsection)
+def helpprinter(ui, helptable, sectionfunc, include=[], exclude=[]):
+ for names, sec, doc in helptable:
+ if exclude and names[0] in exclude:
+ continue
+ if include and names[0] not in include:
+ continue
+ for name in names:
+ ui.write(".. _%s:\n" % name)
+ ui.write("\n")
+ if sectionfunc:
+ ui.write(sectionfunc(sec))
+ if util.safehasattr(doc, '__call__'):
+ doc = doc()
+ ui.write(doc)
+ ui.write("\n")
+
def commandprinter(ui, cmdtable, sectionfunc):
h = {}
for c, attr in cmdtable.items():
--- a/doc/hgmanpage.py Mon Jul 15 15:04:42 2013 +0200
+++ b/doc/hgmanpage.py Tue Jul 16 11:13:18 2013 -0500
@@ -981,7 +981,6 @@
# Level is too low to display:
# raise nodes.SkipNode
attr = {}
- backref_text = ''
if node.hasattr('id'):
attr['name'] = node['id']
if node.hasattr('line'):
--- a/hgext/color.py Mon Jul 15 15:04:42 2013 +0200
+++ b/hgext/color.py Tue Jul 16 11:13:18 2013 -0500
@@ -60,6 +60,11 @@
tags.normal = green
tags.local = black bold
+ rebase.rebased = blue
+ rebase.remaining = red bold
+
+ histedit.remaining = red bold
+
The available effects in terminfo mode are 'blink', 'bold', 'dim',
'inverse', 'invisible', 'italic', 'standout', and 'underline'; in
ECMA-48 mode, the options are 'bold', 'inverse', 'italic', and
@@ -154,10 +159,9 @@
"ECMA-48 color\n"))
_terminfo_params = {}
-def _modesetup(ui, opts):
+def _modesetup(ui, coloropt):
global _terminfo_params
- coloropt = opts['color']
auto = coloropt == 'auto'
always = not auto and util.parsebool(coloropt)
if not always and not auto:
@@ -248,8 +252,11 @@
'diff.trailingwhitespace': 'bold red_background',
'diffstat.deleted': 'red',
'diffstat.inserted': 'green',
+ 'histedit.remaining': 'red bold',
'ui.prompt': 'yellow',
'log.changeset': 'yellow',
+ 'rebase.rebased': 'blue',
+ 'rebase.remaining': 'red bold',
'resolve.resolved': 'green bold',
'resolve.unresolved': 'red bold',
'status.added': 'green bold',
@@ -393,11 +400,11 @@
def uisetup(ui):
if ui.plain():
return
- if not issubclass(ui.__class__, colorui):
+ if not isinstance(ui, colorui):
colorui.__bases__ = (ui.__class__,)
ui.__class__ = colorui
def colorcmd(orig, ui_, opts, cmd, cmdfunc):
- mode = _modesetup(ui_, opts)
+ mode = _modesetup(ui_, opts['color'])
colorui._colormode = mode
if mode:
extstyles()
--- a/hgext/convert/common.py Mon Jul 15 15:04:42 2013 +0200
+++ b/hgext/convert/common.py Tue Jul 16 11:13:18 2013 -0500
@@ -5,7 +5,7 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-import base64, errno, subprocess, os, datetime
+import base64, errno, subprocess, os, datetime, re
import cPickle as pickle
from mercurial import util
from mercurial.i18n import _
@@ -63,6 +63,14 @@
self.encoding = 'utf-8'
+ def checkhexformat(self, revstr):
+ """ fails if revstr is not a 40 byte hex. mercurial and git both uses
+ such format for their revision numbering
+ """
+ if not re.match(r'[0-9a-fA-F]{40,40}$', revstr):
+ raise util.Abort(_('splicemap entry %s is not a valid revision'
+ ' identifier') % revstr)
+
def before(self):
pass
@@ -164,6 +172,13 @@
"""
return {}
+ def checkrevformat(self, revstr):
+ """revstr is a string that describes a revision in the given
+ source control system. Return true if revstr has correct
+ format.
+ """
+ return True
+
class converter_sink(object):
"""Conversion sink (target) interface"""
@@ -424,34 +439,6 @@
self.fp.close()
self.fp = None
-def parsesplicemap(path):
- """Parse a splicemap, return a child/parents dictionary."""
- if not path:
- return {}
- m = {}
- try:
- fp = open(path, 'r')
- for i, line in enumerate(fp):
- line = line.splitlines()[0].rstrip()
- if not line:
- # Ignore blank lines
- continue
- try:
- child, parents = line.split(' ', 1)
- parents = parents.replace(',', ' ').split()
- except ValueError:
- raise util.Abort(_('syntax error in %s(%d): child parent1'
- '[,parent2] expected') % (path, i + 1))
- pp = []
- for p in parents:
- if p not in pp:
- pp.append(p)
- m[child] = pp
- except IOError, e:
- if e.errno != errno.ENOENT:
- raise
- return m
-
def makedatetimestamp(t):
"""Like util.makedate() but for time t instead of current time"""
delta = (datetime.datetime.utcfromtimestamp(t) -
--- a/hgext/convert/convcmd.py Mon Jul 15 15:04:42 2013 +0200
+++ b/hgext/convert/convcmd.py Tue Jul 16 11:13:18 2013 -0500
@@ -15,9 +15,9 @@
from gnuarch import gnuarch_source
from bzr import bzr_source
from p4 import p4_source
-import filemap, common
+import filemap
-import os, shutil
+import os, shutil, shlex
from mercurial import hg, util, encoding
from mercurial.i18n import _
@@ -118,9 +118,53 @@
self.readauthormap(opts.get('authormap'))
self.authorfile = self.dest.authorfile()
- self.splicemap = common.parsesplicemap(opts.get('splicemap'))
+ self.splicemap = self.parsesplicemap(opts.get('splicemap'))
self.branchmap = mapfile(ui, opts.get('branchmap'))
+ def parsesplicemap(self, path):
+ """ check and validate the splicemap format and
+ return a child/parents dictionary.
+ Format checking has two parts.
+ 1. generic format which is same across all source types
+ 2. specific format checking which may be different for
+ different source types. This logic is implemented in
+ checkrevformat function in source files like
+ hg.py, subversion.py etc.
+ """
+
+ if not path:
+ return {}
+ m = {}
+ try:
+ fp = open(path, 'r')
+ for i, line in enumerate(fp):
+ line = line.splitlines()[0].rstrip()
+ if not line:
+ # Ignore blank lines
+ continue
+ # split line
+ lex = shlex.shlex(line, posix=True)
+ lex.whitespace_split = True
+ lex.whitespace += ','
+ line = list(lex)
+ # check number of parents
+ if not (2 <= len(line) <= 3):
+ raise util.Abort(_('syntax error in %s(%d): child parent1'
+ '[,parent2] expected') % (path, i + 1))
+ for part in line:
+ self.source.checkrevformat(part)
+ child, p1, p2 = line[0], line[1:2], line[2:]
+ if p1 == p2:
+ m[child] = p1
+ else:
+ m[child] = p1 + p2
+ # if file does not exist or error reading, exit
+ except IOError:
+ raise util.Abort(_('splicemap file not found or error reading %s:')
+ % path)
+ return m
+
+
def walktree(self, heads):
'''Return a mapping that identifies the uncommitted parents of every
uncommitted changeset.'''
--- a/hgext/convert/git.py Mon Jul 15 15:04:42 2013 +0200
+++ b/hgext/convert/git.py Tue Jul 16 11:13:18 2013 -0500
@@ -296,3 +296,8 @@
pass
return bookmarks
+
+ def checkrevformat(self, revstr):
+ """ git revision string is a 40 byte hex """
+ self.checkhexformat(revstr)
+
--- a/hgext/convert/hg.py Mon Jul 15 15:04:42 2013 +0200
+++ b/hgext/convert/hg.py Tue Jul 16 11:13:18 2013 -0500
@@ -397,3 +397,7 @@
def getbookmarks(self):
return bookmarks.listbookmarks(self.repo)
+
+ def checkrevformat(self, revstr):
+ """ Mercurial, revision string is a 40 byte hex """
+ self.checkhexformat(revstr)
--- a/hgext/convert/subversion.py Mon Jul 15 15:04:42 2013 +0200
+++ b/hgext/convert/subversion.py Tue Jul 16 11:13:18 2013 -0500
@@ -452,6 +452,14 @@
del self.commits[rev]
return commit
+ def checkrevformat(self, revstr):
+ """ fails if revision format does not match the correct format"""
+ if not re.match(r'svn:[0-9a-f]{8,8}-[0-9a-f]{4,4}-'
+ '[0-9a-f]{4,4}-[0-9a-f]{4,4}-[0-9a-f]'
+ '{12,12}(.*)\@[0-9]+$',revstr):
+ raise util.Abort(_('splicemap entry %s is not a valid revision'
+ ' identifier') % revstr)
+
def gettags(self):
tags = {}
if self.tags is None:
--- a/hgext/histedit.py Mon Jul 15 15:04:42 2013 +0200
+++ b/hgext/histedit.py Tue Jul 16 11:13:18 2013 -0500
@@ -857,3 +857,16 @@
repair.strip(ui, repo, c)
finally:
lockmod.release(lock)
+
+def summaryhook(ui, repo):
+ if not os.path.exists(repo.join('histedit-state')):
+ return
+ (parentctxnode, rules, keep, topmost, replacements) = readstate(repo)
+ if rules:
+ # i18n: column positioning for "hg summary"
+ ui.write(_('hist: %s (histedit --continue)\n') %
+ (ui.label(_('%d remaining'), 'histedit.remaining') %
+ len(rules)))
+
+def extsetup(ui):
+ cmdutil.summaryhooks.add('histedit', summaryhook)
--- a/hgext/inotify/client.py Mon Jul 15 15:04:42 2013 +0200
+++ b/hgext/inotify/client.py Tue Jul 16 11:13:18 2013 -0500
@@ -159,7 +159,8 @@
vdirs = cs.read(nbytes)
if vdirs:
for vdir in vdirs.split('\0'):
- match.dir(vdir)
+ if match.explicitdir:
+ match.explicitdir(vdir)
return results
--- a/hgext/largefiles/overrides.py Mon Jul 15 15:04:42 2013 +0200
+++ b/hgext/largefiles/overrides.py Tue Jul 16 11:13:18 2013 -0500
@@ -376,8 +376,6 @@
continue
f, m, args, msg = action
- choices = (_('&Largefile'), _('&Normal file'))
-
splitstandin = lfutil.splitstandin(f)
if (m == "g" and splitstandin is not None and
splitstandin in p1 and f in p2):
@@ -386,8 +384,9 @@
lfile = splitstandin
standin = f
msg = _('%s has been turned into a largefile\n'
- 'use (l)argefile or keep as (n)ormal file?') % lfile
- if repo.ui.promptchoice(msg, choices, 0) == 0:
+ 'use (l)argefile or keep as (n)ormal file?'
+ '$$ &Largefile $$ &Normal file') % lfile
+ if repo.ui.promptchoice(msg, 0) == 0:
processed.append((lfile, "r", None, msg))
processed.append((standin, "g", (p2.flags(standin),), msg))
else:
@@ -398,8 +397,9 @@
standin = lfutil.standin(f)
lfile = f
msg = _('%s has been turned into a normal file\n'
- 'keep as (l)argefile or use (n)ormal file?') % lfile
- if repo.ui.promptchoice(msg, choices, 0) == 0:
+ 'keep as (l)argefile or use (n)ormal file?'
+ '$$ &Largefile $$ &Normal file') % lfile
+ if repo.ui.promptchoice(msg, 0) == 0:
processed.append((lfile, "r", None, msg))
else:
processed.append((standin, "r", None, msg))
@@ -444,9 +444,9 @@
return 0
if repo.ui.promptchoice(_('largefile %s has a merge conflict\n'
- 'keep (l)ocal or take (o)ther?') %
- lfutil.splitstandin(orig),
- (_('&Local'), _('&Other')), 0) == 0:
+ 'keep (l)ocal or take (o)ther?'
+ '$$ &Local $$ &Other') %
+ lfutil.splitstandin(orig), 0) == 0:
return 0
else:
repo.wwrite(fcdest.path(), fcother.data(), fcother.flags())
--- a/hgext/mq.py Mon Jul 15 15:04:42 2013 +0200
+++ b/hgext/mq.py Tue Jul 16 11:13:18 2013 -0500
@@ -3533,8 +3533,7 @@
raise util.Abort(_('no queue repository'))
return orig(r.ui, r, *args, **kwargs)
-def summary(orig, ui, repo, *args, **kwargs):
- r = orig(ui, repo, *args, **kwargs)
+def summaryhook(ui, repo):
q = repo.mq
m = []
a, u = len(q.applied), len(q.unapplied(repo))
@@ -3548,7 +3547,6 @@
else:
# i18n: column positioning for "hg summary"
ui.note(_("mq: (empty queue)\n"))
- return r
def revsetmq(repo, subset, x):
"""``mq()``
@@ -3567,7 +3565,7 @@
mqopt = [('', 'mq', None, _("operate on patch repository"))]
extensions.wrapcommand(commands.table, 'import', mqimport)
- extensions.wrapcommand(commands.table, 'summary', summary)
+ cmdutil.summaryhooks.add('mq', summaryhook)
entry = extensions.wrapcommand(commands.table, 'init', mqinit)
entry[1].extend(mqopt)
--- a/hgext/patchbomb.py Mon Jul 15 15:04:42 2013 +0200
+++ b/hgext/patchbomb.py Tue Jul 16 11:13:18 2013 -0500
@@ -482,8 +482,8 @@
if ds:
ui.write(ds)
ui.write('\n')
- if ui.promptchoice(_('are you sure you want to send (yn)?'),
- (_('&Yes'), _('&No'))):
+ if ui.promptchoice(_('are you sure you want to send (yn)?'
+ '$$ &Yes $$ &No')):
raise util.Abort(_('patchbomb canceled'))
ui.write('\n')
--- a/hgext/purge.py Mon Jul 15 15:04:42 2013 +0200
+++ b/hgext/purge.py Tue Jul 16 11:13:18 2013 -0500
@@ -97,7 +97,7 @@
directories = []
match = scmutil.match(repo[None], dirs, opts)
- match.dir = directories.append
+ match.explicitdir = match.traversedir = directories.append
status = repo.status(match=match, ignored=opts['all'], unknown=True)
for f in sorted(status[4] + status[5]):
--- a/hgext/rebase.py Mon Jul 15 15:04:42 2013 +0200
+++ b/hgext/rebase.py Tue Jul 16 11:13:18 2013 -0500
@@ -779,6 +779,17 @@
raise util.Abort(_('--tool can only be used with --rebase'))
orig(ui, repo, *args, **opts)
+def summaryhook(ui, repo):
+ if not os.path.exists(repo.join('rebasestate')):
+ return
+ state = restorestatus(repo)[2]
+ numrebased = len([i for i in state.itervalues() if i != -1])
+ # i18n: column positioning for "hg summary"
+ ui.write(_('rebase: %s, %s (rebase --continue)\n') %
+ (ui.label(_('%d rebased'), 'rebase.rebased') % numrebased,
+ ui.label(_('%d remaining'), 'rebase.remaining') %
+ (len(state) - numrebased)))
+
def uisetup(ui):
'Replace pull with a decorator to provide --rebase option'
entry = extensions.wrapcommand(commands.table, 'pull', pullrebase)
@@ -786,3 +797,4 @@
_("rebase working directory to branch head")))
entry[1].append(('t', 'tool', '',
_("specify merge tool for rebase")))
+ cmdutil.summaryhooks.add('rebase', summaryhook)
--- a/hgext/record.py Mon Jul 15 15:04:42 2013 +0200
+++ b/hgext/record.py Tue Jul 16 11:13:18 2013 -0500
@@ -283,17 +283,17 @@
if skipfile is not None:
return skipfile, skipfile, skipall, newpatches
while True:
- resps = _('[Ynesfdaq?]')
- choices = (_('&Yes, record this change'),
- _('&No, skip this change'),
- _('&Edit the change manually'),
- _('&Skip remaining changes to this file'),
- _('Record remaining changes to this &file'),
- _('&Done, skip remaining changes and files'),
- _('Record &all changes to all remaining files'),
- _('&Quit, recording no changes'),
- _('&?'))
- r = ui.promptchoice("%s %s" % (query, resps), choices)
+ resps = _('[Ynesfdaq?]'
+ '$$ &Yes, record this change'
+ '$$ &No, skip this change'
+ '$$ &Edit the change manually'
+ '$$ &Skip remaining changes to this file'
+ '$$ Record remaining changes to this &file'
+ '$$ &Done, skip remaining changes and files'
+ '$$ Record &all changes to all remaining files'
+ '$$ &Quit, recording no changes'
+ '$$ &?')
+ r = ui.promptchoice("%s %s" % (query, resps))
ui.write("\n")
if r == 8: # ?
doc = gettext(record.__doc__)
--- a/hgext/win32mbcs.py Mon Jul 15 15:04:42 2013 +0200
+++ b/hgext/win32mbcs.py Tue Jul 16 11:13:18 2013 -0500
@@ -140,7 +140,8 @@
os.path.normpath os.makedirs
mercurial.util.endswithsep mercurial.util.splitpath mercurial.util.checkcase
mercurial.util.fspath mercurial.util.pconvert mercurial.util.normpath
- mercurial.util.checkwinfilename mercurial.util.checkosfilename'''
+ mercurial.util.checkwinfilename mercurial.util.checkosfilename
+ mercurial.util.split'''
# These functions are required to be called with local encoded string
# because they expects argument is local encoded string and cause
--- a/mercurial/changegroup.py Mon Jul 15 15:04:42 2013 +0200
+++ b/mercurial/changegroup.py Tue Jul 16 11:13:18 2013 -0500
@@ -6,8 +6,8 @@
# GNU General Public License version 2 or any later version.
from i18n import _
-from node import nullrev
-import mdiff, util
+from node import nullrev, hex
+import mdiff, util, dagutil
import struct, os, bz2, zlib, tempfile
_BUNDLE10_DELTA_HEADER = "20s20s20s20s"
@@ -225,13 +225,188 @@
class bundle10(object):
deltaheader = _BUNDLE10_DELTA_HEADER
- def __init__(self, lookup):
- self._lookup = lookup
+ def __init__(self, repo, bundlecaps=None):
+ """Given a source repo, construct a bundler.
+
+ bundlecaps is optional and can be used to specify the set of
+ capabilities which can be used to build the bundle.
+ """
+ # Set of capabilities we can use to build the bundle.
+ if bundlecaps is None:
+ bundlecaps = set()
+ self._bundlecaps = bundlecaps
+ self._changelog = repo.changelog
+ self._manifest = repo.manifest
+ reorder = repo.ui.config('bundle', 'reorder', 'auto')
+ if reorder == 'auto':
+ reorder = None
+ else:
+ reorder = util.parsebool(reorder)
+ self._repo = repo
+ self._reorder = reorder
+ self._progress = repo.ui.progress
def close(self):
return closechunk()
+
def fileheader(self, fname):
return chunkheader(len(fname)) + fname
- def revchunk(self, revlog, rev, prev):
+
+ def group(self, nodelist, revlog, lookup, units=None, reorder=None):
+ """Calculate a delta group, yielding a sequence of changegroup chunks
+ (strings).
+
+ Given a list of changeset revs, return a set of deltas and
+ metadata corresponding to nodes. The first delta is
+ first parent(nodelist[0]) -> nodelist[0], the receiver is
+ guaranteed to have this parent as it has all history before
+ these changesets. In the case firstparent is nullrev the
+ changegroup starts with a full revision.
+
+ If units is not None, progress detail will be generated, units specifies
+ the type of revlog that is touched (changelog, manifest, etc.).
+ """
+ # if we don't have any revisions touched by these changesets, bail
+ if len(nodelist) == 0:
+ yield self.close()
+ return
+
+ # for generaldelta revlogs, we linearize the revs; this will both be
+ # much quicker and generate a much smaller bundle
+ if (revlog._generaldelta and reorder is not False) or reorder:
+ dag = dagutil.revlogdag(revlog)
+ revs = set(revlog.rev(n) for n in nodelist)
+ revs = dag.linearize(revs)
+ else:
+ revs = sorted([revlog.rev(n) for n in nodelist])
+
+ # add the parent of the first rev
+ p = revlog.parentrevs(revs[0])[0]
+ revs.insert(0, p)
+
+ # build deltas
+ total = len(revs) - 1
+ msgbundling = _('bundling')
+ for r in xrange(len(revs) - 1):
+ if units is not None:
+ self._progress(msgbundling, r + 1, unit=units, total=total)
+ prev, curr = revs[r], revs[r + 1]
+ linknode = lookup(revlog.node(curr))
+ for c in self.revchunk(revlog, curr, prev, linknode):
+ yield c
+
+ yield self.close()
+
+ # filter any nodes that claim to be part of the known set
+ def prune(self, revlog, missing, commonrevs, source):
+ rr, rl = revlog.rev, revlog.linkrev
+ return [n for n in missing if rl(rr(n)) not in commonrevs]
+
+ def generate(self, commonrevs, clnodes, fastpathlinkrev, source):
+ '''yield a sequence of changegroup chunks (strings)'''
+ repo = self._repo
+ cl = self._changelog
+ mf = self._manifest
+ reorder = self._reorder
+ progress = self._progress
+
+ # for progress output
+ msgbundling = _('bundling')
+
+ mfs = {} # needed manifests
+ fnodes = {} # needed file nodes
+ changedfiles = set()
+
+ # Callback for the changelog, used to collect changed files and manifest
+ # nodes.
+ # Returns the linkrev node (identity in the changelog case).
+ def lookupcl(x):
+ c = cl.read(x)
+ changedfiles.update(c[3])
+ # record the first changeset introducing this manifest version
+ mfs.setdefault(c[0], x)
+ return x
+
+ # Callback for the manifest, used to collect linkrevs for filelog
+ # revisions.
+ # Returns the linkrev node (collected in lookupcl).
+ def lookupmf(x):
+ clnode = mfs[x]
+ if not fastpathlinkrev:
+ mdata = mf.readfast(x)
+ for f, n in mdata.iteritems():
+ if f in changedfiles:
+ # record the first changeset introducing this filelog
+ # version
+ fnodes[f].setdefault(n, clnode)
+ return clnode
+
+ for chunk in self.group(clnodes, cl, lookupcl, units=_('changesets'),
+ reorder=reorder):
+ yield chunk
+ progress(msgbundling, None)
+
+ for f in changedfiles:
+ fnodes[f] = {}
+ mfnodes = self.prune(mf, mfs, commonrevs, source)
+ for chunk in self.group(mfnodes, mf, lookupmf, units=_('manifests'),
+ reorder=reorder):
+ yield chunk
+ progress(msgbundling, None)
+
+ mfs.clear()
+
+ def linknodes(filerevlog, fname):
+ if fastpathlinkrev:
+ ln, llr = filerevlog.node, filerevlog.linkrev
+ needed = set(cl.rev(x) for x in clnodes)
+ def genfilenodes():
+ for r in filerevlog:
+ linkrev = llr(r)
+ if linkrev in needed:
+ yield filerevlog.node(r), cl.node(linkrev)
+ fnodes[fname] = dict(genfilenodes())
+ return fnodes.get(fname, {})
+
+ for chunk in self.generatefiles(changedfiles, linknodes, commonrevs,
+ source):
+ yield chunk
+
+ yield self.close()
+ progress(msgbundling, None)
+
+ if clnodes:
+ repo.hook('outgoing', node=hex(clnodes[0]), source=source)
+
+ def generatefiles(self, changedfiles, linknodes, commonrevs, source):
+ repo = self._repo
+ progress = self._progress
+ reorder = self._reorder
+ msgbundling = _('bundling')
+
+ total = len(changedfiles)
+ # for progress output
+ msgfiles = _('files')
+ for i, fname in enumerate(sorted(changedfiles)):
+ filerevlog = repo.file(fname)
+ if not filerevlog:
+ raise util.Abort(_("empty or missing revlog for %s") % fname)
+
+ linkrevnodes = linknodes(filerevlog, fname)
+ # Lookup for filenodes, we collected the linkrev nodes above in the
+ # fastpath case and with lookupmf in the slowpath case.
+ def lookupfilelog(x):
+ return linkrevnodes[x]
+
+ filenodes = self.prune(filerevlog, linkrevnodes, commonrevs, source)
+ if filenodes:
+ progress(msgbundling, i + 1, item=fname, unit=msgfiles,
+ total=total)
+ yield self.fileheader(fname)
+ for chunk in self.group(filenodes, filerevlog, lookupfilelog,
+ reorder=reorder):
+ yield chunk
+
+ def revchunk(self, revlog, rev, prev, linknode):
node = revlog.node(rev)
p1, p2 = revlog.parentrevs(rev)
base = prev
@@ -242,7 +417,6 @@
prefix = mdiff.trivialdiffheader(len(delta))
else:
delta = revlog.revdiff(base, rev)
- linknode = self._lookup(revlog, node)
p1n, p2n = revlog.parents(node)
basenode = revlog.node(base)
meta = self.builddeltaheader(node, p1n, p2n, basenode, linknode)
--- a/mercurial/cmdutil.py Mon Jul 15 15:04:42 2013 +0200
+++ b/mercurial/cmdutil.py Tue Jul 16 11:13:18 2013 -0500
@@ -1005,6 +1005,107 @@
if windowsize < sizelimit:
windowsize *= 2
+class FileWalkError(Exception):
+ pass
+
+def walkfilerevs(repo, match, follow, revs, fncache):
+ '''Walks the file history for the matched files.
+
+ Returns the changeset revs that are involved in the file history.
+
+ Throws FileWalkError if the file history can't be walked using
+ filelogs alone.
+ '''
+ wanted = set()
+ copies = []
+ minrev, maxrev = min(revs), max(revs)
+ def filerevgen(filelog, last):
+ """
+ Only files, no patterns. Check the history of each file.
+
+ Examines filelog entries within minrev, maxrev linkrev range
+ Returns an iterator yielding (linkrev, parentlinkrevs, copied)
+ tuples in backwards order
+ """
+ cl_count = len(repo)
+ revs = []
+ for j in xrange(0, last + 1):
+ linkrev = filelog.linkrev(j)
+ if linkrev < minrev:
+ continue
+ # only yield rev for which we have the changelog, it can
+ # happen while doing "hg log" during a pull or commit
+ if linkrev >= cl_count:
+ break
+
+ parentlinkrevs = []
+ for p in filelog.parentrevs(j):
+ if p != nullrev:
+ parentlinkrevs.append(filelog.linkrev(p))
+ n = filelog.node(j)
+ revs.append((linkrev, parentlinkrevs,
+ follow and filelog.renamed(n)))
+
+ return reversed(revs)
+ def iterfiles():
+ pctx = repo['.']
+ for filename in match.files():
+ if follow:
+ if filename not in pctx:
+ raise util.Abort(_('cannot follow file not in parent '
+ 'revision: "%s"') % filename)
+ yield filename, pctx[filename].filenode()
+ else:
+ yield filename, None
+ for filename_node in copies:
+ yield filename_node
+
+ for file_, node in iterfiles():
+ filelog = repo.file(file_)
+ if not len(filelog):
+ if node is None:
+ # A zero count may be a directory or deleted file, so
+ # try to find matching entries on the slow path.
+ if follow:
+ raise util.Abort(
+ _('cannot follow nonexistent file: "%s"') % file_)
+ raise FileWalkError("Cannot walk via filelog")
+ else:
+ continue
+
+ if node is None:
+ last = len(filelog) - 1
+ else:
+ last = filelog.rev(node)
+
+
+ # keep track of all ancestors of the file
+ ancestors = set([filelog.linkrev(last)])
+
+ # iterate from latest to oldest revision
+ for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
+ if not follow:
+ if rev > maxrev:
+ continue
+ else:
+ # Note that last might not be the first interesting
+ # rev to us:
+ # if the file has been changed after maxrev, we'll
+ # have linkrev(last) > maxrev, and we still need
+ # to explore the file graph
+ if rev not in ancestors:
+ continue
+ # XXX insert 1327 fix here
+ if flparentlinkrevs:
+ ancestors.update(flparentlinkrevs)
+
+ fncache.setdefault(rev, []).append(file_)
+ wanted.add(rev)
+ if copied:
+ copies.append(copied)
+
+ return wanted
+
def walkchangerevs(repo, match, opts, prepare):
'''Iterate over files and the revs in which they changed.
@@ -1044,101 +1145,18 @@
if not slowpath and not match.files():
# No files, no patterns. Display all revs.
wanted = set(revs)
- copies = []
if not slowpath and match.files():
# We only have to read through the filelog to find wanted revisions
- minrev, maxrev = min(revs), max(revs)
- def filerevgen(filelog, last):
- """
- Only files, no patterns. Check the history of each file.
-
- Examines filelog entries within minrev, maxrev linkrev range
- Returns an iterator yielding (linkrev, parentlinkrevs, copied)
- tuples in backwards order
- """
- cl_count = len(repo)
- revs = []
- for j in xrange(0, last + 1):
- linkrev = filelog.linkrev(j)
- if linkrev < minrev:
- continue
- # only yield rev for which we have the changelog, it can
- # happen while doing "hg log" during a pull or commit
- if linkrev >= cl_count:
- break
-
- parentlinkrevs = []
- for p in filelog.parentrevs(j):
- if p != nullrev:
- parentlinkrevs.append(filelog.linkrev(p))
- n = filelog.node(j)
- revs.append((linkrev, parentlinkrevs,
- follow and filelog.renamed(n)))
+ try:
+ wanted = walkfilerevs(repo, match, follow, revs, fncache)
+ except FileWalkError:
+ slowpath = True
- return reversed(revs)
- def iterfiles():
- pctx = repo['.']
- for filename in match.files():
- if follow:
- if filename not in pctx:
- raise util.Abort(_('cannot follow file not in parent '
- 'revision: "%s"') % filename)
- yield filename, pctx[filename].filenode()
- else:
- yield filename, None
- for filename_node in copies:
- yield filename_node
- for file_, node in iterfiles():
- filelog = repo.file(file_)
- if not len(filelog):
- if node is None:
- # A zero count may be a directory or deleted file, so
- # try to find matching entries on the slow path.
- if follow:
- raise util.Abort(
- _('cannot follow nonexistent file: "%s"') % file_)
- slowpath = True
- break
- else:
- continue
-
- if node is None:
- last = len(filelog) - 1
- else:
- last = filelog.rev(node)
-
-
- # keep track of all ancestors of the file
- ancestors = set([filelog.linkrev(last)])
-
- # iterate from latest to oldest revision
- for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
- if not follow:
- if rev > maxrev:
- continue
- else:
- # Note that last might not be the first interesting
- # rev to us:
- # if the file has been changed after maxrev, we'll
- # have linkrev(last) > maxrev, and we still need
- # to explore the file graph
- if rev not in ancestors:
- continue
- # XXX insert 1327 fix here
- if flparentlinkrevs:
- ancestors.update(flparentlinkrevs)
-
- fncache.setdefault(rev, []).append(file_)
- wanted.add(rev)
- if copied:
- copies.append(copied)
-
- # We decided to fall back to the slowpath because at least one
- # of the paths was not a file. Check to see if at least one of them
- # existed in history, otherwise simply return
- if slowpath:
+ # We decided to fall back to the slowpath because at least one
+ # of the paths was not a file. Check to see if at least one of them
+ # existed in history, otherwise simply return
for path in match.files():
if path == '.' or path in repo.store:
break
@@ -1320,7 +1338,7 @@
raise util.Abort(_('cannot follow file not in parent '
'revision: "%s"') % f)
filelog = repo.file(f)
- if not len(filelog):
+ if not filelog:
# A zero count may be a directory or deleted file, so
# try to find matching entries on the slow path.
if follow:
@@ -2082,3 +2100,6 @@
return decorator
return cmd
+
+# a list of (ui, repo) functions called by commands.summary
+summaryhooks = util.hooks()
--- a/mercurial/commands.py Mon Jul 15 15:04:42 2013 +0200
+++ b/mercurial/commands.py Tue Jul 16 11:13:18 2013 -0500
@@ -767,9 +767,8 @@
('d', 'delete', False, _('delete a given bookmark')),
('m', 'rename', '', _('rename a given bookmark'), _('NAME')),
('i', 'inactive', False, _('mark a bookmark inactive'))],
- _('hg bookmarks [-f] [-d] [-i] [-m NAME] [-r REV] [NAME]'))
-def bookmark(ui, repo, mark=None, rev=None, force=False, delete=False,
- rename=None, inactive=False):
+ _('hg bookmarks [OPTIONS]... [NAME]...'))
+def bookmark(ui, repo, *names, **opts):
'''track a line of development with movable markers
Bookmarks are pointers to certain commits that move when committing.
@@ -796,6 +795,12 @@
active even if -i/--inactive is not given. If no NAME is given, the
current active bookmark will be marked inactive.
'''
+ force = opts.get('force')
+ rev = opts.get('rev')
+ delete = opts.get('delete')
+ rename = opts.get('rename')
+ inactive = opts.get('inactive')
+
hexfn = ui.debugflag and hex or short
marks = repo._bookmarks
cur = repo.changectx('.').node()
@@ -846,21 +851,24 @@
raise util.Abort(_("--rev is incompatible with --delete"))
if rename and rev:
raise util.Abort(_("--rev is incompatible with --rename"))
- if mark is None and (delete or rev):
+ if not names and (delete or rev):
raise util.Abort(_("bookmark name required"))
if delete:
- if mark not in marks:
- raise util.Abort(_("bookmark '%s' does not exist") % mark)
- if mark == repo._bookmarkcurrent:
- bookmarks.setcurrent(repo, None)
- del marks[mark]
+ for mark in names:
+ if mark not in marks:
+ raise util.Abort(_("bookmark '%s' does not exist") % mark)
+ if mark == repo._bookmarkcurrent:
+ bookmarks.setcurrent(repo, None)
+ del marks[mark]
marks.write()
elif rename:
- if mark is None:
+ if not names:
raise util.Abort(_("new bookmark name required"))
- mark = checkformat(mark)
+ elif len(names) > 1:
+ raise util.Abort(_("only one new bookmark name allowed"))
+ mark = checkformat(names[0])
if rename not in marks:
raise util.Abort(_("bookmark '%s' does not exist") % rename)
checkconflict(repo, mark, force)
@@ -870,19 +878,23 @@
del marks[rename]
marks.write()
- elif mark is not None:
- mark = checkformat(mark)
- if inactive and mark == repo._bookmarkcurrent:
- bookmarks.setcurrent(repo, None)
- return
- tgt = cur
- if rev:
- tgt = scmutil.revsingle(repo, rev).node()
- checkconflict(repo, mark, force, tgt)
- marks[mark] = tgt
- if not inactive and cur == marks[mark] and not rev:
- bookmarks.setcurrent(repo, mark)
- elif cur != tgt and mark == repo._bookmarkcurrent:
+ elif names:
+ newact = None
+ for mark in names:
+ mark = checkformat(mark)
+ if newact is None:
+ newact = mark
+ if inactive and mark == repo._bookmarkcurrent:
+ bookmarks.setcurrent(repo, None)
+ return
+ tgt = cur
+ if rev:
+ tgt = scmutil.revsingle(repo, rev).node()
+ checkconflict(repo, mark, force, tgt)
+ marks[mark] = tgt
+ if not inactive and cur == marks[newact] and not rev:
+ bookmarks.setcurrent(repo, newact)
+ elif cur != tgt and newact == repo._bookmarkcurrent:
bookmarks.setcurrent(repo, None)
marks.write()
@@ -1084,13 +1096,16 @@
base = ['null']
else:
base = scmutil.revrange(repo, opts.get('base'))
+ # TODO: get desired bundlecaps from command line.
+ bundlecaps = None
if base:
if dest:
raise util.Abort(_("--base is incompatible with specifying "
"a destination"))
common = [repo.lookup(rev) for rev in base]
heads = revs and map(repo.lookup, revs) or revs
- cg = repo.getbundle('bundle', heads=heads, common=common)
+ cg = repo.getbundle('bundle', heads=heads, common=common,
+ bundlecaps=bundlecaps)
outgoing = None
else:
dest = ui.expandpath(dest or 'default-push', dest or 'default')
@@ -1102,7 +1117,7 @@
onlyheads=heads,
force=opts.get('force'),
portable=True)
- cg = repo.getlocalbundle('bundle', outgoing)
+ cg = repo.getlocalbundle('bundle', outgoing, bundlecaps)
if not cg:
scmutil.nochangesfound(ui, repo, outgoing and outgoing.excluded)
return 1
@@ -1914,6 +1929,8 @@
args['common'] = [bin(s) for s in common]
if head:
args['heads'] = [bin(s) for s in head]
+ # TODO: get desired bundlecaps from command line.
+ args['bundlecaps'] = None
bundle = repo.getbundle('debug', **args)
bundletype = opts.get('type', 'bzip2').lower()
@@ -4339,8 +4356,10 @@
pass
if not filenodes:
raise util.Abort(_("'%s' not found in manifest!") % file_)
- fl = repo.file(file_)
- p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
+ p = []
+ for fn in filenodes:
+ fctx = repo.filectx(file_, fileid=fn)
+ p.append(fctx.node())
else:
p = [cp.node() for cp in ctx.parents()]
@@ -5485,13 +5504,14 @@
ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
(new, len(bheads)))
+ cmdutil.summaryhooks(ui, repo)
+
if opts.get('remote'):
t = []
source, branches = hg.parseurl(ui.expandpath('default'))
sbranch = branches[0]
other = hg.peer(repo, {}, source)
- revs, checkout = hg.addbranchrevs(repo, other, branches,
- opts.get('rev'))
+ revs, checkout = hg.addbranchrevs(repo, other, branches, None)
if revs:
revs = [other.lookup(rev) for rev in revs]
ui.debug('comparing with %s\n' % util.hidepassword(source))
--- a/mercurial/context.py Mon Jul 15 15:04:42 2013 +0200
+++ b/mercurial/context.py Tue Jul 16 11:13:18 2013 -0500
@@ -398,7 +398,7 @@
("bad args: changeid=%r, fileid=%r, changectx=%r"
% (changeid, fileid, changectx))
- if filelog:
+ if filelog is not None:
self._filelog = filelog
if changeid is not None:
@@ -437,7 +437,9 @@
@propertycache
def _changeid(self):
- if '_changectx' in self.__dict__:
+ if '_changeid' in self.__dict__:
+ return self._changeid
+ elif '_changectx' in self.__dict__:
return self._changectx.rev()
else:
return self._filelog.linkrev(self._filerev)
@@ -501,14 +503,8 @@
return self._changectx.flags(self._path)
def filelog(self):
return self._filelog
-
def rev(self):
- if '_changectx' in self.__dict__:
- return self._changectx.rev()
- if '_changeid' in self.__dict__:
- return self._changectx.rev()
- return self._filelog.linkrev(self._filerev)
-
+ return self._changeid
def linkrev(self):
return self._filelog.linkrev(self._filerev)
def node(self):
@@ -653,29 +649,25 @@
return child
getlog = util.lrucachefunc(lambda x: self._repo.file(x))
- def getctx(path, fileid):
- log = path == self._path and self._filelog or getlog(path)
- return filectx(self._repo, path, fileid=fileid, filelog=log)
- getctx = util.lrucachefunc(getctx)
def parents(f):
- # we want to reuse filectx objects as much as possible
- p = f._path
- if f._filerev is None: # working dir
- pl = [(n.path(), n.filerev()) for n in f.parents()]
- else:
- pl = [(p, n) for n in f._filelog.parentrevs(f._filerev)]
+ pl = f.parents()
+
+ # Don't return renamed parents if we aren't following.
+ if not follow:
+ pl = [p for p in pl if p.path() == f.path()]
- if follow:
- r = f.renamed()
- if r:
- pl[0] = (r[0], getlog(r[0]).rev(r[1]))
+ # renamed filectx won't have a filelog yet, so set it
+ # from the cache to save time
+ for p in pl:
+ if not '_filelog' in p.__dict__:
+ p._filelog = getlog(p.path())
- return [getctx(p, n) for p, n in pl if n != nullrev]
+ return pl
# use linkrev to find the first changeset where self appeared
if self.rev() != self.linkrev():
- base = self.filectx(self.filerev())
+ base = self.filectx(self.filenode())
else:
base = self
@@ -744,7 +736,7 @@
# prime the ancestor cache for the working directory
acache = {}
for c in (self, fc2):
- if c._filerev is None:
+ if c.filenode() is None:
pl = [(n.path(), n.filenode()) for n in c.parents()]
acache[(c._path, None)] = pl
@@ -1167,7 +1159,7 @@
self._changeid = None
self._filerev = self._filenode = None
- if filelog:
+ if filelog is not None:
self._filelog = filelog
if workingctx:
self._changectx = workingctx
--- a/mercurial/copies.py Mon Jul 15 15:04:42 2013 +0200
+++ b/mercurial/copies.py Tue Jul 16 11:13:18 2013 -0500
@@ -222,65 +222,8 @@
fullcopy = {}
diverge = {}
- def related(f1, f2, limit):
- # Walk back to common ancestor to see if the two files originate
- # from the same file. Since workingfilectx's rev() is None it messes
- # up the integer comparison logic, hence the pre-step check for
- # None (f1 and f2 can only be workingfilectx's initially).
-
- if f1 == f2:
- return f1 # a match
-
- g1, g2 = f1.ancestors(), f2.ancestors()
- try:
- f1r, f2r = f1.rev(), f2.rev()
-
- if f1r is None:
- f1 = g1.next()
- if f2r is None:
- f2 = g2.next()
-
- while True:
- f1r, f2r = f1.rev(), f2.rev()
- if f1r > f2r:
- f1 = g1.next()
- elif f2r > f1r:
- f2 = g2.next()
- elif f1 == f2:
- return f1 # a match
- elif f1r == f2r or f1r < limit or f2r < limit:
- return False # copy no longer relevant
- except StopIteration:
- return False
-
- def checkcopies(f, m1, m2):
- '''check possible copies of f from m1 to m2'''
- of = None
- seen = set([f])
- for oc in ctx(f, m1[f]).ancestors():
- ocr = oc.rev()
- of = oc.path()
- if of in seen:
- # check limit late - grab last rename before
- if ocr < limit:
- break
- continue
- seen.add(of)
-
- fullcopy[f] = of # remember for dir rename detection
- if of not in m2:
- continue # no match, keep looking
- if m2[of] == ma.get(of):
- break # no merge needed, quit early
- c2 = ctx(of, m2[of])
- cr = related(oc, c2, ca.rev())
- if cr and (of == f or of == c2.path()): # non-divergent
- copy[f] = of
- of = None
- break
-
- if of in ma:
- diverge.setdefault(of, []).append(f)
+ def _checkcopies(f, m1, m2):
+ checkcopies(ctx, f, m1, m2, ca, limit, diverge, copy, fullcopy)
repo.ui.debug(" searching for copies back to rev %d\n" % limit)
@@ -295,9 +238,9 @@
% "\n ".join(u2))
for f in u1:
- checkcopies(f, m1, m2)
+ _checkcopies(f, m1, m2)
for f in u2:
- checkcopies(f, m2, m1)
+ _checkcopies(f, m2, m1)
renamedelete = {}
renamedelete2 = set()
@@ -386,3 +329,78 @@
break
return copy, movewithdir, diverge, renamedelete
+
+def checkcopies(ctx, f, m1, m2, ca, limit, diverge, copy, fullcopy):
+ """
+ check possible copies of f from m1 to m2
+
+ ctx = function accepting (filename, node) that returns a filectx.
+ f = the filename to check
+ m1 = the source manifest
+ m2 = the destination manifest
+ ca = the changectx of the common ancestor
+ limit = the rev number to not search beyond
+ diverge = record all diverges in this dict
+ copy = record all non-divergent copies in this dict
+ fullcopy = record all copies in this dict
+ """
+
+ ma = ca.manifest()
+
+ def _related(f1, f2, limit):
+ # Walk back to common ancestor to see if the two files originate
+ # from the same file. Since workingfilectx's rev() is None it messes
+ # up the integer comparison logic, hence the pre-step check for
+ # None (f1 and f2 can only be workingfilectx's initially).
+
+ if f1 == f2:
+ return f1 # a match
+
+ g1, g2 = f1.ancestors(), f2.ancestors()
+ try:
+ f1r, f2r = f1.rev(), f2.rev()
+
+ if f1r is None:
+ f1 = g1.next()
+ if f2r is None:
+ f2 = g2.next()
+
+ while True:
+ f1r, f2r = f1.rev(), f2.rev()
+ if f1r > f2r:
+ f1 = g1.next()
+ elif f2r > f1r:
+ f2 = g2.next()
+ elif f1 == f2:
+ return f1 # a match
+ elif f1r == f2r or f1r < limit or f2r < limit:
+ return False # copy no longer relevant
+ except StopIteration:
+ return False
+
+ of = None
+ seen = set([f])
+ for oc in ctx(f, m1[f]).ancestors():
+ ocr = oc.rev()
+ of = oc.path()
+ if of in seen:
+ # check limit late - grab last rename before
+ if ocr < limit:
+ break
+ continue
+ seen.add(of)
+
+ fullcopy[f] = of # remember for dir rename detection
+ if of not in m2:
+ continue # no match, keep looking
+ if m2[of] == ma.get(of):
+ break # no merge needed, quit early
+ c2 = ctx(of, m2[of])
+ cr = _related(oc, c2, ca.rev())
+ if cr and (of == f or of == c2.path()): # non-divergent
+ copy[f] = of
+ of = None
+ break
+
+ if of in ma:
+ diverge.setdefault(of, []).append(f)
--- a/mercurial/dirstate.py Mon Jul 15 15:04:42 2013 +0200
+++ b/mercurial/dirstate.py Tue Jul 16 11:13:18 2013 -0500
@@ -522,18 +522,15 @@
return True
return False
- def walk(self, match, subrepos, unknown, ignored):
- '''
- Walk recursively through the directory tree, finding all files
- matched by match.
+ def _walkexplicit(self, match, subrepos):
+ '''Get stat data about the files explicitly specified by match.
- Return a dict mapping filename to stat-like object (either
- mercurial.osutil.stat instance or return value of os.stat()).
- '''
-
- def fwarn(f, msg):
- self._ui.warn('%s: %s\n' % (self.pathto(f), msg))
- return False
+ Return a triple (results, dirsfound, dirsnotfound).
+ - results is a mapping from filename to stat result. It also contains
+ listings mapping subrepos and .hg to None.
+ - dirsfound is a list of files found to be directories.
+ - dirsnotfound is a list of files that the dirstate thinks are
+ directories and that were not found.'''
def badtype(mode):
kind = _('unknown')
@@ -549,41 +546,23 @@
kind = _('directory')
return _('unsupported file type (type is %s)') % kind
- ignore = self._ignore
- dirignore = self._dirignore
- if ignored:
- ignore = util.never
- dirignore = util.never
- elif not unknown:
- # if unknown and ignored are False, skip step 2
- ignore = util.always
- dirignore = util.always
-
- matchfn = match.matchfn
- matchalways = match.always()
+ matchedir = match.explicitdir
badfn = match.bad
dmap = self._map
normpath = util.normpath
- listdir = osutil.listdir
lstat = os.lstat
getkind = stat.S_IFMT
dirkind = stat.S_IFDIR
regkind = stat.S_IFREG
lnkkind = stat.S_IFLNK
join = self._join
- work = []
- wadd = work.append
+ dirsfound = []
+ foundadd = dirsfound.append
+ dirsnotfound = []
+ notfoundadd = dirsnotfound.append
- exact = skipstep3 = False
- if matchfn == match.exact: # match.exact
- exact = True
- dirignore = util.always # skip step 2
- elif match.files() and not match.anypats(): # match.match, no patterns
- skipstep3 = True
-
- if not exact and self._checkcase:
+ if match.matchfn != match.exact and self._checkcase:
normalize = self._normalize
- skipstep3 = False
else:
normalize = None
@@ -604,7 +583,6 @@
results = dict.fromkeys(subrepos)
results['.hg'] = None
- # step 1: find all explicit files
for ff in files:
if normalize:
nf = normalize(normpath(ff), False, True)
@@ -617,13 +595,12 @@
st = lstat(join(nf))
kind = getkind(st.st_mode)
if kind == dirkind:
- skipstep3 = False
if nf in dmap:
#file deleted on disk but still in dirstate
results[nf] = None
- match.dir(nf)
- if not dirignore(nf):
- wadd(nf)
+ if matchedir:
+ matchedir(nf)
+ foundadd(nf)
elif kind == regkind or kind == lnkkind:
results[nf] = st
else:
@@ -637,12 +614,75 @@
prefix = nf + "/"
for fn in dmap:
if fn.startswith(prefix):
- match.dir(nf)
- skipstep3 = False
+ if matchedir:
+ matchedir(nf)
+ notfoundadd(nf)
break
else:
badfn(ff, inst.strerror)
+ return results, dirsfound, dirsnotfound
+
+ def walk(self, match, subrepos, unknown, ignored, full=True):
+ '''
+ Walk recursively through the directory tree, finding all files
+ matched by match.
+
+ If full is False, maybe skip some known-clean files.
+
+ Return a dict mapping filename to stat-like object (either
+ mercurial.osutil.stat instance or return value of os.stat()).
+
+ '''
+ # full is a flag that extensions that hook into walk can use -- this
+ # implementation doesn't use it at all. This satisfies the contract
+ # because we only guarantee a "maybe".
+
+ def fwarn(f, msg):
+ self._ui.warn('%s: %s\n' % (self.pathto(f), msg))
+ return False
+
+ ignore = self._ignore
+ dirignore = self._dirignore
+ if ignored:
+ ignore = util.never
+ dirignore = util.never
+ elif not unknown:
+ # if unknown and ignored are False, skip step 2
+ ignore = util.always
+ dirignore = util.always
+
+ matchfn = match.matchfn
+ matchalways = match.always()
+ matchtdir = match.traversedir
+ dmap = self._map
+ listdir = osutil.listdir
+ lstat = os.lstat
+ dirkind = stat.S_IFDIR
+ regkind = stat.S_IFREG
+ lnkkind = stat.S_IFLNK
+ join = self._join
+
+ exact = skipstep3 = False
+ if matchfn == match.exact: # match.exact
+ exact = True
+ dirignore = util.always # skip step 2
+ elif match.files() and not match.anypats(): # match.match, no patterns
+ skipstep3 = True
+
+ if not exact and self._checkcase:
+ normalize = self._normalize
+ skipstep3 = False
+ else:
+ normalize = None
+
+ # step 1: find all explicit files
+ results, work, dirsnotfound = self._walkexplicit(match, subrepos)
+
+ skipstep3 = skipstep3 and not (work or dirsnotfound)
+ work = [d for d in work if not dirignore(d)]
+ wadd = work.append
+
# step 2: visit subdirectories
while work:
nd = work.pop()
@@ -666,7 +706,8 @@
if nf not in results:
if kind == dirkind:
if not ignore(nf):
- match.dir(nf)
+ if matchtdir:
+ matchtdir(nf)
wadd(nf)
if nf in dmap and (matchalways or matchfn(nf)):
results[nf] = None
@@ -766,8 +807,13 @@
lnkkind = stat.S_IFLNK
- for fn, st in self.walk(match, subrepos, listunknown,
- listignored).iteritems():
+ # We need to do full walks when either
+ # - we're listing all clean files, or
+ # - match.traversedir does something, because match.traversedir should
+ # be called for every dir in the working dir
+ full = listclean or match.traversedir is not None
+ for fn, st in self.walk(match, subrepos, listunknown, listignored,
+ full=full).iteritems():
if fn not in dmap:
if (listignored or mexact(fn)) and dirignore(fn):
if listignored:
--- a/mercurial/filelog.py Mon Jul 15 15:04:42 2013 +0200
+++ b/mercurial/filelog.py Tue Jul 16 11:13:18 2013 -0500
@@ -31,7 +31,7 @@
class filelog(revlog.revlog):
def __init__(self, opener, path):
- revlog.revlog.__init__(self, opener,
+ super(filelog, self).__init__(opener,
"/".join(("data", path + ".i")))
def read(self, node):
@@ -64,7 +64,7 @@
return len(self.read(node))
# XXX if self.read(node).startswith("\1\n"), this returns (size+4)
- return revlog.revlog.size(self, rev)
+ return super(filelog, self).size(rev)
def cmp(self, node, text):
"""compare text with a given file revision
@@ -76,7 +76,7 @@
if text.startswith('\1\n'):
t = '\1\n\1\n' + text
- samehashes = not revlog.revlog.cmp(self, node, t)
+ samehashes = not super(filelog, self).cmp(node, t)
if samehashes:
return False
--- a/mercurial/filemerge.py Mon Jul 15 15:04:42 2013 +0200
+++ b/mercurial/filemerge.py Tue Jul 16 11:13:18 2013 -0500
@@ -144,8 +144,8 @@
fd = fcd.path()
if ui.promptchoice(_(" no tool found to merge %s\n"
- "keep (l)ocal or take (o)ther?") % fd,
- (_("&Local"), _("&Other")), 0):
+ "keep (l)ocal or take (o)ther?"
+ "$$ &Local $$ &Other") % fd, 0):
return _iother(repo, mynode, orig, fcd, fco, fca, toolconf)
else:
return _ilocal(repo, mynode, orig, fcd, fco, fca, toolconf)
@@ -348,16 +348,16 @@
checked = False
if 'prompt' in _toollist(ui, tool, "check"):
checked = True
- if ui.promptchoice(_("was merge of '%s' successful (yn)?") % fd,
- (_("&Yes"), _("&No")), 1):
+ if ui.promptchoice(_("was merge of '%s' successful (yn)?"
+ "$$ &Yes $$ &No") % fd, 1):
r = 1
if not r and not checked and (_toolbool(ui, tool, "checkchanged") or
'changed' in _toollist(ui, tool, "check")):
if filecmp.cmp(a, back):
if ui.promptchoice(_(" output file %s appears unchanged\n"
- "was merge successful (yn)?") % fd,
- (_("&Yes"), _("&No")), 1):
+ "was merge successful (yn)?"
+ "$$ &Yes $$ &No") % fd, 1):
r = 1
if _toolbool(ui, tool, "fixeol"):
--- a/mercurial/fileset.py Mon Jul 15 15:04:42 2013 +0200
+++ b/mercurial/fileset.py Tue Jul 16 11:13:18 2013 -0500
@@ -263,23 +263,10 @@
raise error.ParseError(_('invalid match pattern: %s') % e)
return [f for f in mctx.existing() if r.search(mctx.ctx[f].data())]
-_units = dict(k=2**10, K=2**10, kB=2**10, KB=2**10,
- M=2**20, MB=2**20, G=2**30, GB=2**30)
-
-def _sizetoint(s):
- try:
- s = s.strip()
- for k, v in _units.items():
- if s.endswith(k):
- return int(float(s[:-len(k)]) * v)
- return int(s)
- except ValueError:
- raise error.ParseError(_("couldn't parse size: %s") % s)
-
def _sizetomax(s):
try:
s = s.strip()
- for k, v in _units.items():
+ for k, v in util._sizeunits:
if s.endswith(k):
# max(4k) = 5k - 1, max(4.5k) = 4.6k - 1
n = s[:-len(k)]
@@ -306,23 +293,23 @@
expr = getstring(x, _("size requires an expression")).strip()
if '-' in expr: # do we have a range?
a, b = expr.split('-', 1)
- a = _sizetoint(a)
- b = _sizetoint(b)
+ a = util.sizetoint(a)
+ b = util.sizetoint(b)
m = lambda x: x >= a and x <= b
elif expr.startswith("<="):
- a = _sizetoint(expr[2:])
+ a = util.sizetoint(expr[2:])
m = lambda x: x <= a
elif expr.startswith("<"):
- a = _sizetoint(expr[1:])
+ a = util.sizetoint(expr[1:])
m = lambda x: x < a
elif expr.startswith(">="):
- a = _sizetoint(expr[2:])
+ a = util.sizetoint(expr[2:])
m = lambda x: x >= a
elif expr.startswith(">"):
- a = _sizetoint(expr[1:])
+ a = util.sizetoint(expr[1:])
m = lambda x: x > a
elif expr[0].isdigit or expr[0] == '.':
- a = _sizetoint(expr)
+ a = util.sizetoint(expr)
b = _sizetomax(expr)
m = lambda x: x >= a and x <= b
else:
--- a/mercurial/help/templates.txt Mon Jul 15 15:04:42 2013 +0200
+++ b/mercurial/help/templates.txt Tue Jul 16 11:13:18 2013 -0500
@@ -6,8 +6,8 @@
You can customize output for any "log-like" command: log,
outgoing, incoming, tip, parents, heads and glog.
-Four styles are packaged with Mercurial: default (the style used
-when no explicit preference is passed), compact, changelog,
+Five styles are packaged with Mercurial: default (the style used
+when no explicit preference is passed), compact, changelog, phases
and xml.
Usage::
@@ -58,9 +58,11 @@
- label(label, expr)
-- sub(pat, repl, expr)
+- rstdoc(text, style)
-- rstdoc(text, style)
+- strip(text[, chars])
+
+- sub(pat, repl, expr)
Also, for any expression that returns a list, there is a list operator:
--- a/mercurial/hg.py Mon Jul 15 15:04:42 2013 +0200
+++ b/mercurial/hg.py Tue Jul 16 11:13:18 2013 -0500
@@ -437,14 +437,13 @@
_update(destrepo, uprev)
if update in destrepo._bookmarks:
bookmarks.setcurrent(destrepo, update)
-
- return srcpeer, destpeer
finally:
release(srclock, destlock)
if cleandir is not None:
shutil.rmtree(cleandir, True)
if srcpeer is not None:
srcpeer.close()
+ return srcpeer, destpeer
def _showstats(repo, stats):
repo.ui.status(_("%d files updated, %d files merged, "
--- a/mercurial/httpclient/__init__.py Mon Jul 15 15:04:42 2013 +0200
+++ b/mercurial/httpclient/__init__.py Tue Jul 16 11:13:18 2013 -0500
@@ -37,6 +37,9 @@
* implements ssl inline instead of in a different class
"""
+# Many functions in this file have too many arguments.
+# pylint: disable=R0913
+
import cStringIO
import errno
import httplib
@@ -117,6 +120,8 @@
def _close(self):
if self._reader is not None:
+ # We're a friend of the reader class here.
+ # pylint: disable=W0212
self._reader._close()
def readline(self):
@@ -137,6 +142,7 @@
return ''.join(blocks)
def read(self, length=None):
+ """Read data from the response body."""
# if length is None, unbounded read
while (not self.complete() # never select on a finished read
and (not length # unbounded, so we wait for complete()
@@ -150,7 +156,8 @@
return r
def _select(self):
- r, _, _ = select.select([self.sock], [], [], self._timeout)
+ r, unused_write, unused_err = select.select(
+ [self.sock], [], [], self._timeout)
if not r:
# socket was not readable. If the response is not
# complete, raise a timeout.
@@ -170,13 +177,16 @@
# raise an exception if this is an invalid situation.
if not data:
if self._reader:
+ # We're a friend of the reader class here.
+ # pylint: disable=W0212
self._reader._close()
return False
else:
self._load_response(data)
return True
- def _load_response(self, data):
+ # This method gets replaced by _load later, which confuses pylint.
+ def _load_response(self, data): # pylint: disable=E0202
# Being here implies we're not at the end of the headers yet,
# since at the end of this method if headers were completely
# loaded we replace this method with the load() method of the
@@ -201,7 +211,7 @@
# handle 100-continue response
hdrs, body = self.raw_response.split(self._end_headers, 1)
- http_ver, status = hdrs.split(' ', 1)
+ unused_http_ver, status = hdrs.split(' ', 1)
if status.startswith('100'):
self.raw_response = body
self.continued = True
@@ -260,9 +270,13 @@
self.will_close = True
if body:
+ # We're a friend of the reader class here.
+ # pylint: disable=W0212
self._reader._load(body)
logger.debug('headers complete')
self.headers = headers
+ # We're a friend of the reader class here.
+ # pylint: disable=W0212
self._load_response = self._reader._load
@@ -335,9 +349,9 @@
self._proxy_port))
if self.ssl:
# TODO proxy header support
- data = self.buildheaders('CONNECT', '%s:%d' % (self.host,
- self.port),
- {}, HTTP_VER_1_0)
+ data = self._buildheaders('CONNECT', '%s:%d' % (self.host,
+ self.port),
+ {}, HTTP_VER_1_0)
sock.send(data)
sock.setblocking(0)
r = self.response_class(sock, self.timeout, 'CONNECT')
@@ -345,6 +359,9 @@
'Timed out waiting for CONNECT response from proxy')
while not r.complete():
try:
+ # We're a friend of the response class, so let
+ # us use the private attribute.
+ # pylint: disable=W0212
if not r._select():
if not r.complete():
raise timeout_exc
@@ -376,7 +393,7 @@
sock.setblocking(0)
self.sock = sock
- def buildheaders(self, method, path, headers, http_ver):
+ def _buildheaders(self, method, path, headers, http_ver):
if self.ssl and self.port == 443 or self.port == 80:
# default port for protocol, so leave it out
hdrhost = self.host
@@ -437,6 +454,11 @@
return True
return False
+ def _reconnect(self, where):
+ logger.info('reconnecting during %s', where)
+ self.close()
+ self._connect()
+
def request(self, method, path, body=None, headers={},
expect_continue=False):
"""Send a request to the server.
@@ -474,16 +496,11 @@
raise BadRequestData('body has no __len__() nor read()')
self._connect()
- outgoing_headers = self.buildheaders(
+ outgoing_headers = self._buildheaders(
method, path, hdrs, self.http_version)
response = None
first = True
- def reconnect(where):
- logger.info('reconnecting during %s', where)
- self.close()
- self._connect()
-
while ((outgoing_headers or body)
and not (response and response.complete())):
select_timeout = self.timeout
@@ -523,14 +540,17 @@
except socket.sslerror, e:
if e.args[0] != socket.SSL_ERROR_WANT_READ:
raise
- logger.debug(
- 'SSL_ERROR_WANT_READ while sending data, retrying...')
+ logger.debug('SSL_ERROR_WANT_READ while sending '
+ 'data, retrying...')
continue
if not data:
logger.info('socket appears closed in read')
self.sock = None
self._current_response = None
if response is not None:
+ # We're a friend of the response class, so let
+ # us use the private attribute.
+ # pylint: disable=W0212
response._close()
# This if/elif ladder is a bit subtle,
# comments in each branch should help.
@@ -550,7 +570,7 @@
logger.info(
'Connection appeared closed in read on first'
' request loop iteration, will retry.')
- reconnect('read')
+ self._reconnect('read')
continue
else:
# We didn't just send the first data hunk,
@@ -563,7 +583,11 @@
'response was missing or incomplete!')
logger.debug('read %d bytes in request()', len(data))
if response is None:
- response = self.response_class(r[0], self.timeout, method)
+ response = self.response_class(
+ r[0], self.timeout, method)
+ # We're a friend of the response class, so let us
+ # use the private attribute.
+ # pylint: disable=W0212
response._load_response(data)
# Jump to the next select() call so we load more
# data if the server is still sending us content.
@@ -576,6 +600,8 @@
if w and out:
try:
if getattr(out, 'read', False):
+ # pylint guesses the type of out incorrectly here
+ # pylint: disable=E1103
data = out.read(OUTGOING_BUFFER_SIZE)
if not data:
continue
@@ -599,14 +625,10 @@
elif (e[0] not in (errno.ECONNRESET, errno.EPIPE)
and not first):
raise
- reconnect('write')
+ self._reconnect('write')
amt = self.sock.send(out)
logger.debug('sent %d', amt)
first = False
- # stash data we think we sent in case the socket breaks
- # when we read from it
- if was_first:
- sent_data = out[:amt]
if out is body:
body = out[amt:]
else:
@@ -616,7 +638,6 @@
# the whole request
if response is None:
response = self.response_class(self.sock, self.timeout, method)
- complete = response.complete()
data_left = bool(outgoing_headers or body)
if data_left:
logger.info('stopped sending request early, '
@@ -629,10 +650,14 @@
self._current_response = response
def getresponse(self):
+ """Returns the response to the most recent request."""
if self._current_response is None:
raise httplib.ResponseNotReady()
r = self._current_response
while r.headers is None:
+ # We're a friend of the response class, so let us use the
+ # private attribute.
+ # pylint: disable=W0212
if not r._select() and not r.complete():
raise _readers.HTTPRemoteClosedError()
if r.will_close:
--- a/mercurial/httpclient/_readers.py Mon Jul 15 15:04:42 2013 +0200
+++ b/mercurial/httpclient/_readers.py Tue Jul 16 11:13:18 2013 -0500
@@ -33,7 +33,6 @@
"""
import httplib
-import itertools
import logging
logger = logging.getLogger(__name__)
@@ -59,33 +58,35 @@
self._done_chunks = []
self.available_data = 0
- def addchunk(self, data):
+ def _addchunk(self, data):
self._done_chunks.append(data)
self.available_data += len(data)
- def pushchunk(self, data):
+ def _pushchunk(self, data):
self._done_chunks.insert(0, data)
self.available_data += len(data)
- def popchunk(self):
+ def _popchunk(self):
b = self._done_chunks.pop(0)
self.available_data -= len(b)
return b
def done(self):
+ """Returns true if the response body is entirely read."""
return self._finished
def read(self, amt):
+ """Read amt bytes from the response body."""
if self.available_data < amt and not self._finished:
raise ReadNotReady()
blocks = []
need = amt
while self._done_chunks:
- b = self.popchunk()
+ b = self._popchunk()
if len(b) > need:
nb = b[:need]
- self.pushchunk(b[need:])
+ self._pushchunk(b[need:])
b = nb
blocks.append(b)
need -= len(b)
@@ -107,11 +108,11 @@
blocks = []
while self._done_chunks:
- b = self.popchunk()
+ b = self._popchunk()
i = b.find(delimstr) + len(delimstr)
if i:
if i < len(b):
- self.pushchunk(b[i:])
+ self._pushchunk(b[i:])
blocks.append(b[:i])
break
else:
@@ -154,8 +155,9 @@
if data:
assert not self._finished, (
'tried to add data (%r) to a closed reader!' % data)
- logger.debug('%s read an additional %d data', self.name, len(data))
- self.addchunk(data)
+ logger.debug('%s read an additional %d data',
+ self.name, len(data)) # pylint: disable=E1101
+ self._addchunk(data)
class CloseIsEndReader(AbstractSimpleReader):
@@ -172,7 +174,7 @@
name = 'content-length'
def __init__(self, amount):
- AbstractReader.__init__(self)
+ AbstractSimpleReader.__init__(self)
self._amount = amount
if amount == 0:
self._finished = True
@@ -199,7 +201,8 @@
logger.debug('chunked read an additional %d data', len(data))
position = 0
if self._leftover_data:
- logger.debug('chunked reader trying to finish block from leftover data')
+ logger.debug(
+ 'chunked reader trying to finish block from leftover data')
# TODO: avoid this string concatenation if possible
data = self._leftover_data + data
position = self._leftover_skip_amt
@@ -224,6 +227,6 @@
self._finished = True
logger.debug('closing chunked reader due to chunk of length 0')
return
- self.addchunk(data[block_start:block_start + amt])
+ self._addchunk(data[block_start:block_start + amt])
position = block_start + amt + len(self._eol)
# no-check-code
--- a/mercurial/httpclient/socketutil.py Mon Jul 15 15:04:42 2013 +0200
+++ b/mercurial/httpclient/socketutil.py Tue Jul 16 11:13:18 2013 -0500
@@ -39,7 +39,8 @@
try:
import ssl
- ssl.wrap_socket # make demandimporters load the module
+ # make demandimporters load the module
+ ssl.wrap_socket # pylint: disable=W0104
have_ssl = True
except ImportError:
import httplib
@@ -52,12 +53,13 @@
create_connection = socket.create_connection
except AttributeError:
def create_connection(address):
+ """Backport of socket.create_connection from Python 2.6."""
host, port = address
msg = "getaddrinfo returns an empty list"
sock = None
for res in socket.getaddrinfo(host, port, 0,
socket.SOCK_STREAM):
- af, socktype, proto, _canonname, sa = res
+ af, socktype, proto, unused_canonname, sa = res
try:
sock = socket.socket(af, socktype, proto)
logger.info("connect: (%s, %s)", host, port)
@@ -80,8 +82,11 @@
CERT_REQUIRED = ssl.CERT_REQUIRED
else:
class FakeSocket(httplib.FakeSocket):
- """Socket wrapper that supports SSL.
- """
+ """Socket wrapper that supports SSL."""
+
+ # Silence lint about this goofy backport class
+ # pylint: disable=W0232,E1101,R0903,R0913,C0111
+
# backport the behavior from Python 2.6, which is to busy wait
# on the socket instead of anything nice. Sigh.
# See http://bugs.python.org/issue3890 for more info.
@@ -107,11 +112,16 @@
CERT_OPTIONAL = 1
CERT_REQUIRED = 2
+ # Disable unused-argument because we're making a dumb wrapper
+ # that's like an upstream method.
+ #
+ # pylint: disable=W0613,R0913
def wrap_socket(sock, keyfile=None, certfile=None,
server_side=False, cert_reqs=CERT_NONE,
ssl_version=_PROTOCOL_SSLv23, ca_certs=None,
do_handshake_on_connect=True,
suppress_ragged_eofs=True):
+ """Backport of ssl.wrap_socket from Python 2.6."""
if cert_reqs != CERT_NONE and ca_certs:
raise CertificateValidationUnsupported(
'SSL certificate validation requires the ssl module'
@@ -120,6 +130,7 @@
# borrow httplib's workaround for no ssl.wrap_socket
sock = FakeSocket(sock, sslob)
return sock
+ # pylint: enable=W0613,R0913
class CertificateValidationUnsupported(Exception):
--- a/mercurial/localrepo.py Mon Jul 15 15:04:42 2013 +0200
+++ b/mercurial/localrepo.py Tue Jul 16 11:13:18 2013 -0500
@@ -99,8 +99,9 @@
def known(self, nodes):
return self._repo.known(nodes)
- def getbundle(self, source, heads=None, common=None):
- return self._repo.getbundle(source, heads=heads, common=common)
+ def getbundle(self, source, heads=None, common=None, bundlecaps=None):
+ return self._repo.getbundle(source, heads=heads, common=common,
+ bundlecaps=None)
# TODO We might want to move the next two calls into legacypeer and add
# unbundle instead.
@@ -1145,7 +1146,7 @@
if not force:
vdirs = []
- match.dir = vdirs.append
+ match.explicitdir = vdirs.append
match.bad = fail
wlock = self.wlock()
@@ -1674,6 +1675,7 @@
heads = rheads
if remote.capable('getbundle'):
+ # TODO: get bundlecaps from remote
cg = remote.getbundle('pull', common=common,
heads=heads or rheads)
elif heads is None:
@@ -1836,13 +1838,19 @@
remoteheads, newbranch,
bool(inc))
+ # TODO: get bundlecaps from remote
+ bundlecaps = None
# create a changegroup from local
if revs is None and not outgoing.excluded:
# push everything,
# use the fast path, no race possible on push
- cg = self._changegroup(outgoing.missing, 'push')
+ bundler = changegroup.bundle10(self, bundlecaps)
+ cg = self._changegroupsubset(outgoing,
+ bundler,
+ 'push',
+ fastpath=True)
else:
- cg = self.getlocalbundle('push', outgoing)
+ cg = self.getlocalbundle('push', outgoing, bundlecaps)
# apply changegroup to remote
if unbundle:
@@ -1983,24 +1991,24 @@
cl = self.changelog
if not bases:
bases = [nullid]
+ # TODO: remove call to nodesbetween.
csets, bases, heads = cl.nodesbetween(bases, heads)
- # We assume that all ancestors of bases are known
- common = cl.ancestors([cl.rev(n) for n in bases])
- return self._changegroupsubset(common, csets, heads, source)
+ bases = [p for n in bases for p in cl.parents(n) if p != nullid]
+ outgoing = discovery.outgoing(cl, bases, heads)
+ bundler = changegroup.bundle10(self)
+ return self._changegroupsubset(outgoing, bundler, source)
- def getlocalbundle(self, source, outgoing):
+ def getlocalbundle(self, source, outgoing, bundlecaps=None):
"""Like getbundle, but taking a discovery.outgoing as an argument.
This is only implemented for local repos and reuses potentially
precomputed sets in outgoing."""
if not outgoing.missing:
return None
- return self._changegroupsubset(outgoing.common,
- outgoing.missing,
- outgoing.missingheads,
- source)
+ bundler = changegroup.bundle10(self, bundlecaps)
+ return self._changegroupsubset(outgoing, bundler, source)
- def getbundle(self, source, heads=None, common=None):
+ def getbundle(self, source, heads=None, common=None, bundlecaps=None):
"""Like changegroupsubset, but returns the set difference between the
ancestors of heads and the ancestors common.
@@ -2018,215 +2026,32 @@
if not heads:
heads = cl.heads()
return self.getlocalbundle(source,
- discovery.outgoing(cl, common, heads))
+ discovery.outgoing(cl, common, heads),
+ bundlecaps=bundlecaps)
@unfilteredmethod
- def _changegroupsubset(self, commonrevs, csets, heads, source):
+ def _changegroupsubset(self, outgoing, bundler, source,
+ fastpath=False):
+ commonrevs = outgoing.common
+ csets = outgoing.missing
+ heads = outgoing.missingheads
+ # We go through the fast path if we get told to, or if all (unfiltered
+ # heads have been requested (since we then know there all linkrevs will
+ # be pulled by the client).
+ heads.sort()
+ fastpathlinkrev = fastpath or (
+ self.filtername is None and heads == sorted(self.heads()))
- cl = self.changelog
- mf = self.manifest
- mfs = {} # needed manifests
- fnodes = {} # needed file nodes
- changedfiles = set()
- fstate = ['', {}]
- count = [0, 0]
-
- # can we go through the fast path ?
- heads.sort()
- if heads == sorted(self.heads()):
- return self._changegroup(csets, source)
-
- # slow path
self.hook('preoutgoing', throw=True, source=source)
self.changegroupinfo(csets, source)
-
- # filter any nodes that claim to be part of the known set
- def prune(revlog, missing):
- rr, rl = revlog.rev, revlog.linkrev
- return [n for n in missing
- if rl(rr(n)) not in commonrevs]
-
- progress = self.ui.progress
- _bundling = _('bundling')
- _changesets = _('changesets')
- _manifests = _('manifests')
- _files = _('files')
-
- def lookup(revlog, x):
- if revlog == cl:
- c = cl.read(x)
- changedfiles.update(c[3])
- mfs.setdefault(c[0], x)
- count[0] += 1
- progress(_bundling, count[0],
- unit=_changesets, total=count[1])
- return x
- elif revlog == mf:
- clnode = mfs[x]
- mdata = mf.readfast(x)
- for f, n in mdata.iteritems():
- if f in changedfiles:
- fnodes[f].setdefault(n, clnode)
- count[0] += 1
- progress(_bundling, count[0],
- unit=_manifests, total=count[1])
- return clnode
- else:
- progress(_bundling, count[0], item=fstate[0],
- unit=_files, total=count[1])
- return fstate[1][x]
-
- bundler = changegroup.bundle10(lookup)
- reorder = self.ui.config('bundle', 'reorder', 'auto')
- if reorder == 'auto':
- reorder = None
- else:
- reorder = util.parsebool(reorder)
-
- def gengroup():
- # Create a changenode group generator that will call our functions
- # back to lookup the owning changenode and collect information.
- count[:] = [0, len(csets)]
- for chunk in cl.group(csets, bundler, reorder=reorder):
- yield chunk
- progress(_bundling, None)
-
- # Create a generator for the manifestnodes that calls our lookup
- # and data collection functions back.
- for f in changedfiles:
- fnodes[f] = {}
- count[:] = [0, len(mfs)]
- for chunk in mf.group(prune(mf, mfs), bundler, reorder=reorder):
- yield chunk
- progress(_bundling, None)
-
- mfs.clear()
-
- # Go through all our files in order sorted by name.
- count[:] = [0, len(changedfiles)]
- for fname in sorted(changedfiles):
- filerevlog = self.file(fname)
- if not len(filerevlog):
- raise util.Abort(_("empty or missing revlog for %s")
- % fname)
- fstate[0] = fname
- fstate[1] = fnodes.pop(fname, {})
-
- nodelist = prune(filerevlog, fstate[1])
- if nodelist:
- count[0] += 1
- yield bundler.fileheader(fname)
- for chunk in filerevlog.group(nodelist, bundler, reorder):
- yield chunk
-
- # Signal that no more groups are left.
- yield bundler.close()
- progress(_bundling, None)
-
- if csets:
- self.hook('outgoing', node=hex(csets[0]), source=source)
-
- return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
+ gengroup = bundler.generate(commonrevs, csets, fastpathlinkrev, source)
+ return changegroup.unbundle10(util.chunkbuffer(gengroup), 'UN')
def changegroup(self, basenodes, source):
# to avoid a race we use changegroupsubset() (issue1320)
return self.changegroupsubset(basenodes, self.heads(), source)
@unfilteredmethod
- def _changegroup(self, nodes, source):
- """Compute the changegroup of all nodes that we have that a recipient
- doesn't. Return a chunkbuffer object whose read() method will return
- successive changegroup chunks.
-
- This is much easier than the previous function as we can assume that
- the recipient has any changenode we aren't sending them.
-
- nodes is the set of nodes to send"""
-
- cl = self.changelog
- mf = self.manifest
- mfs = {}
- changedfiles = set()
- fstate = ['']
- count = [0, 0]
-
- self.hook('preoutgoing', throw=True, source=source)
- self.changegroupinfo(nodes, source)
-
- revset = set([cl.rev(n) for n in nodes])
-
- def gennodelst(log):
- ln, llr = log.node, log.linkrev
- return [ln(r) for r in log if llr(r) in revset]
-
- progress = self.ui.progress
- _bundling = _('bundling')
- _changesets = _('changesets')
- _manifests = _('manifests')
- _files = _('files')
-
- def lookup(revlog, x):
- if revlog == cl:
- c = cl.read(x)
- changedfiles.update(c[3])
- mfs.setdefault(c[0], x)
- count[0] += 1
- progress(_bundling, count[0],
- unit=_changesets, total=count[1])
- return x
- elif revlog == mf:
- count[0] += 1
- progress(_bundling, count[0],
- unit=_manifests, total=count[1])
- return cl.node(revlog.linkrev(revlog.rev(x)))
- else:
- progress(_bundling, count[0], item=fstate[0],
- total=count[1], unit=_files)
- return cl.node(revlog.linkrev(revlog.rev(x)))
-
- bundler = changegroup.bundle10(lookup)
- reorder = self.ui.config('bundle', 'reorder', 'auto')
- if reorder == 'auto':
- reorder = None
- else:
- reorder = util.parsebool(reorder)
-
- def gengroup():
- '''yield a sequence of changegroup chunks (strings)'''
- # construct a list of all changed files
-
- count[:] = [0, len(nodes)]
- for chunk in cl.group(nodes, bundler, reorder=reorder):
- yield chunk
- progress(_bundling, None)
-
- count[:] = [0, len(mfs)]
- for chunk in mf.group(gennodelst(mf), bundler, reorder=reorder):
- yield chunk
- progress(_bundling, None)
-
- count[:] = [0, len(changedfiles)]
- for fname in sorted(changedfiles):
- filerevlog = self.file(fname)
- if not len(filerevlog):
- raise util.Abort(_("empty or missing revlog for %s")
- % fname)
- fstate[0] = fname
- nodelist = gennodelst(filerevlog)
- if nodelist:
- count[0] += 1
- yield bundler.fileheader(fname)
- for chunk in filerevlog.group(nodelist, bundler, reorder):
- yield chunk
- yield bundler.close()
- progress(_bundling, None)
-
- if nodes:
- self.hook('outgoing', node=hex(nodes[0]), source=source)
-
- return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
-
- @unfilteredmethod
def addchangegroup(self, source, srctype, url, emptyok=False):
"""Add the changegroup returned by source.read() to this repo.
srctype is a string like 'push', 'pull', or 'unbundle'. url is
@@ -2318,41 +2143,10 @@
pr.total = efiles
source.callback = None
- while True:
- chunkdata = source.filelogheader()
- if not chunkdata:
- break
- f = chunkdata["filename"]
- self.ui.debug("adding %s revisions\n" % f)
- pr()
- fl = self.file(f)
- o = len(fl)
- if not fl.addgroup(source, revmap, trp):
- raise util.Abort(_("received file revlog group is empty"))
- revisions += len(fl) - o
- files += 1
- if f in needfiles:
- needs = needfiles[f]
- for new in xrange(o, len(fl)):
- n = fl.node(new)
- if n in needs:
- needs.remove(n)
- else:
- raise util.Abort(
- _("received spurious file revlog entry"))
- if not needs:
- del needfiles[f]
- self.ui.progress(_('files'), None)
-
- for f, needs in needfiles.iteritems():
- fl = self.file(f)
- for n in needs:
- try:
- fl.rev(n)
- except error.LookupError:
- raise util.Abort(
- _('missing file data for %s:%s - run hg verify') %
- (f, hex(n)))
+ newrevs, newfiles = self.addchangegroupfiles(source, revmap, trp,
+ pr, needfiles)
+ revisions += newrevs
+ files += newfiles
dh = 0
if oldheads:
@@ -2432,6 +2226,47 @@
else:
return dh + 1
+ def addchangegroupfiles(self, source, revmap, trp, pr, needfiles):
+ revisions = 0
+ files = 0
+ while True:
+ chunkdata = source.filelogheader()
+ if not chunkdata:
+ break
+ f = chunkdata["filename"]
+ self.ui.debug("adding %s revisions\n" % f)
+ pr()
+ fl = self.file(f)
+ o = len(fl)
+ if not fl.addgroup(source, revmap, trp):
+ raise util.Abort(_("received file revlog group is empty"))
+ revisions += len(fl) - o
+ files += 1
+ if f in needfiles:
+ needs = needfiles[f]
+ for new in xrange(o, len(fl)):
+ n = fl.node(new)
+ if n in needs:
+ needs.remove(n)
+ else:
+ raise util.Abort(
+ _("received spurious file revlog entry"))
+ if not needs:
+ del needfiles[f]
+ self.ui.progress(_('files'), None)
+
+ for f, needs in needfiles.iteritems():
+ fl = self.file(f)
+ for n in needs:
+ try:
+ fl.rev(n)
+ except error.LookupError:
+ raise util.Abort(
+ _('missing file data for %s:%s - run hg verify') %
+ (f, hex(n)))
+
+ return revisions, files
+
def stream_in(self, remote, requirements):
lock = self.lock()
try:
--- a/mercurial/match.py Mon Jul 15 15:04:42 2013 +0200
+++ b/mercurial/match.py Tue Jul 16 11:13:18 2013 -0500
@@ -119,8 +119,12 @@
found/accessed, with an error message
'''
pass
- def dir(self, f):
- pass
+ # If this is set, it will be called when an explicitly listed directory is
+ # visited.
+ explicitdir = None
+ # If this is set, it will be called when a directory discovered by recursive
+ # traversal is visited.
+ traversedir = None
def missing(self, f):
pass
def exact(self, f):
--- a/mercurial/merge.py Mon Jul 15 15:04:42 2013 +0200
+++ b/mercurial/merge.py Tue Jul 16 11:13:18 2013 -0500
@@ -61,6 +61,8 @@
l.sort()
for f in l:
yield f
+ def files(self):
+ return self._state.keys()
def mark(self, dfile, state):
self._state[dfile][0] = state
self._dirty = True
@@ -95,6 +97,7 @@
def _checkunknownfile(repo, wctx, mctx, f):
return (not repo.dirstate._ignore(f)
and os.path.isfile(repo.wjoin(f))
+ and repo.wopener.audit.check(f)
and repo.dirstate.normalize(f) not in repo.dirstate
and mctx[f].cmp(wctx[f]))
@@ -364,8 +367,8 @@
actions.append((f, "r", None, "remote delete"))
elif repo.ui.promptchoice(
_("local changed %s which remote deleted\n"
- "use (c)hanged version or (d)elete?") % f,
- (_("&Changed"), _("&Delete")), 0):
+ "use (c)hanged version or (d)elete?"
+ "$$ &Changed $$ &Delete") % f, 0):
actions.append((f, "r", None, "prompt delete"))
else:
actions.append((f, "a", None, "prompt keep"))
@@ -374,8 +377,8 @@
actions.append((f, "g", (m2.flags(f),), "remote recreating"))
elif repo.ui.promptchoice(
_("remote changed %s which local deleted\n"
- "use (c)hanged version or leave (d)eleted?") % f,
- (_("&Changed"), _("&Deleted")), 0) == 0:
+ "use (c)hanged version or leave (d)eleted?"
+ "$$ &Changed $$ &Deleted") % f, 0) == 0:
actions.append((f, "g", (m2.flags(f),), "prompt recreating"))
else: assert False, m
return actions
--- a/mercurial/patch.py Mon Jul 15 15:04:42 2013 +0200
+++ b/mercurial/patch.py Tue Jul 16 11:13:18 2013 -0500
@@ -481,7 +481,7 @@
def close(self):
wctx = self.repo[None]
- addremoved = set(self.changed)
+ changed = set(self.changed)
for src, dst in self.copied:
scmutil.dirstatecopy(self.ui, self.repo, wctx, src, dst)
if self.removed:
@@ -491,14 +491,10 @@
# File was deleted and no longer belongs to the
# dirstate, it was probably marked added then
# deleted, and should not be considered by
- # addremove().
- addremoved.discard(f)
- if addremoved:
- cwd = self.repo.getcwd()
- if cwd:
- addremoved = [util.pathto(self.repo.root, cwd, f)
- for f in addremoved]
- scmutil.addremove(self.repo, addremoved, similarity=self.similarity)
+ # marktouched().
+ changed.discard(f)
+ if changed:
+ scmutil.marktouched(self.repo, changed, self.similarity)
return sorted(self.changed)
class filestore(object):
@@ -1397,12 +1393,7 @@
ui.warn(line + '\n')
finally:
if files:
- cfiles = list(files)
- cwd = repo.getcwd()
- if cwd:
- cfiles = [util.pathto(repo.root, cwd, f)
- for f in cfiles]
- scmutil.addremove(repo, cfiles, similarity=similarity)
+ scmutil.marktouched(repo, files, similarity)
code = fp.close()
if code:
raise PatchError(_("patch command failed: %s") %
--- a/mercurial/revlog.py Mon Jul 15 15:04:42 2013 +0200
+++ b/mercurial/revlog.py Tue Jul 16 11:13:18 2013 -0500
@@ -14,7 +14,7 @@
# import stuff from node for others to import from revlog
from node import bin, hex, nullid, nullrev
from i18n import _
-import ancestor, mdiff, parsers, error, util, dagutil
+import ancestor, mdiff, parsers, error, util
import struct, zlib, errno
_pack = struct.pack
@@ -991,6 +991,9 @@
p1, p2 - the parent nodeids of the revision
cachedelta - an optional precomputed delta
"""
+ if link == nullrev:
+ raise RevlogError(_("attempted to add linkrev -1 to %s")
+ % self.indexfile)
node = hash(text, p1, p2)
if node in self.nodemap:
return node
@@ -1143,44 +1146,6 @@
self._basecache = (curr, chainbase)
return node
- def group(self, nodelist, bundler, reorder=None):
- """Calculate a delta group, yielding a sequence of changegroup chunks
- (strings).
-
- Given a list of changeset revs, return a set of deltas and
- metadata corresponding to nodes. The first delta is
- first parent(nodelist[0]) -> nodelist[0], the receiver is
- guaranteed to have this parent as it has all history before
- these changesets. In the case firstparent is nullrev the
- changegroup starts with a full revision.
- """
-
- # if we don't have any revisions touched by these changesets, bail
- if len(nodelist) == 0:
- yield bundler.close()
- return
-
- # for generaldelta revlogs, we linearize the revs; this will both be
- # much quicker and generate a much smaller bundle
- if (self._generaldelta and reorder is not False) or reorder:
- dag = dagutil.revlogdag(self)
- revs = set(self.rev(n) for n in nodelist)
- revs = dag.linearize(revs)
- else:
- revs = sorted([self.rev(n) for n in nodelist])
-
- # add the parent of the first rev
- p = self.parentrevs(revs[0])[0]
- revs.insert(0, p)
-
- # build deltas
- for r in xrange(len(revs) - 1):
- prev, curr = revs[r], revs[r + 1]
- for c in bundler.revchunk(self, curr, prev):
- yield c
-
- yield bundler.close()
-
def addgroup(self, bundle, linkmapper, transaction):
"""
add a delta group
--- a/mercurial/scmutil.py Mon Jul 15 15:04:42 2013 +0200
+++ b/mercurial/scmutil.py Tue Jul 16 11:13:18 2013 -0500
@@ -685,26 +685,11 @@
if similarity is None:
similarity = float(opts.get('similarity') or 0)
# we'd use status here, except handling of symlinks and ignore is tricky
- added, unknown, deleted, removed = [], [], [], []
- audit_path = pathauditor(repo.root)
m = match(repo[None], pats, opts)
rejected = []
m.bad = lambda x, y: rejected.append(x)
- ctx = repo[None]
- dirstate = repo.dirstate
- walkresults = dirstate.walk(m, sorted(ctx.substate), True, False)
- for abs, st in walkresults.iteritems():
- dstate = dirstate[abs]
- if dstate == '?' and audit_path.check(abs):
- unknown.append(abs)
- elif dstate != 'r' and not st:
- deleted.append(abs)
- # for finding renames
- elif dstate == 'r':
- removed.append(abs)
- elif dstate == 'a':
- added.append(abs)
+ added, unknown, deleted, removed = _interestingfiles(repo, m)
unknownset = set(unknown)
toprint = unknownset.copy()
@@ -718,32 +703,101 @@
status = _('removing %s\n') % ((pats and rel) or abs)
repo.ui.status(status)
- copies = {}
- if similarity > 0:
- for old, new, score in similar.findrenames(repo,
- added + unknown, removed + deleted, similarity):
- if repo.ui.verbose or not m.exact(old) or not m.exact(new):
- repo.ui.status(_('recording removal of %s as rename to %s '
- '(%d%% similar)\n') %
- (m.rel(old), m.rel(new), score * 100))
- copies[new] = old
+ renames = _findrenames(repo, m, added + unknown, removed + deleted,
+ similarity)
if not dry_run:
- wctx = repo[None]
- wlock = repo.wlock()
- try:
- wctx.forget(deleted)
- wctx.add(unknown)
- for new, old in copies.iteritems():
- wctx.copy(old, new)
- finally:
- wlock.release()
+ _markchanges(repo, unknown, deleted, renames)
+
+ for f in rejected:
+ if f in m.files():
+ return 1
+ return 0
+
+def marktouched(repo, files, similarity=0.0):
+ '''Assert that files have somehow been operated upon. files are relative to
+ the repo root.'''
+ m = matchfiles(repo, files)
+ rejected = []
+ m.bad = lambda x, y: rejected.append(x)
+
+ added, unknown, deleted, removed = _interestingfiles(repo, m)
+
+ if repo.ui.verbose:
+ unknownset = set(unknown)
+ toprint = unknownset.copy()
+ toprint.update(deleted)
+ for abs in sorted(toprint):
+ if abs in unknownset:
+ status = _('adding %s\n') % abs
+ else:
+ status = _('removing %s\n') % abs
+ repo.ui.status(status)
+
+ renames = _findrenames(repo, m, added + unknown, removed + deleted,
+ similarity)
+
+ _markchanges(repo, unknown, deleted, renames)
for f in rejected:
if f in m.files():
return 1
return 0
+def _interestingfiles(repo, matcher):
+ '''Walk dirstate with matcher, looking for files that addremove would care
+ about.
+
+ This is different from dirstate.status because it doesn't care about
+ whether files are modified or clean.'''
+ added, unknown, deleted, removed = [], [], [], []
+ audit_path = pathauditor(repo.root)
+
+ ctx = repo[None]
+ dirstate = repo.dirstate
+ walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False)
+ for abs, st in walkresults.iteritems():
+ dstate = dirstate[abs]
+ if dstate == '?' and audit_path.check(abs):
+ unknown.append(abs)
+ elif dstate != 'r' and not st:
+ deleted.append(abs)
+ # for finding renames
+ elif dstate == 'r':
+ removed.append(abs)
+ elif dstate == 'a':
+ added.append(abs)
+
+ return added, unknown, deleted, removed
+
+def _findrenames(repo, matcher, added, removed, similarity):
+ '''Find renames from removed files to added ones.'''
+ renames = {}
+ if similarity > 0:
+ for old, new, score in similar.findrenames(repo, added, removed,
+ similarity):
+ if (repo.ui.verbose or not matcher.exact(old)
+ or not matcher.exact(new)):
+ repo.ui.status(_('recording removal of %s as rename to %s '
+ '(%d%% similar)\n') %
+ (matcher.rel(old), matcher.rel(new),
+ score * 100))
+ renames[new] = old
+ return renames
+
+def _markchanges(repo, unknown, deleted, renames):
+ '''Marks the files in unknown as added, the files in deleted as removed,
+ and the files in renames as copied.'''
+ wctx = repo[None]
+ wlock = repo.wlock()
+ try:
+ wctx.forget(deleted)
+ wctx.add(unknown)
+ for new, old in renames.iteritems():
+ wctx.copy(old, new)
+ finally:
+ wlock.release()
+
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
"""Update the dirstate to reflect the intent of copying src to dst. For
different reasons it might not end with dst being marked as copied from src.
--- a/mercurial/store.py Mon Jul 15 15:04:42 2013 +0200
+++ b/mercurial/store.py Tue Jul 16 11:13:18 2013 -0500
@@ -322,13 +322,16 @@
def datafiles(self):
return self._walk('data', True)
+ def topfiles(self):
+ # yield manifest before changelog
+ return reversed(self._walk('', False))
+
def walk(self):
'''yields (unencoded, encoded, size)'''
# yield data files first
for x in self.datafiles():
yield x
- # yield manifest before changelog
- for x in reversed(self._walk('', False)):
+ for x in self.topfiles():
yield x
def copylist(self):
--- a/mercurial/subrepo.py Mon Jul 15 15:04:42 2013 +0200
+++ b/mercurial/subrepo.py Tue Jul 16 11:13:18 2013 -0500
@@ -191,9 +191,8 @@
elif ld[0] != r[0]: # sources differ
if repo.ui.promptchoice(
_(' subrepository sources for %s differ\n'
- 'use (l)ocal source (%s) or (r)emote source (%s)?')
- % (s, l[0], r[0]),
- (_('&Local'), _('&Remote')), 0):
+ 'use (l)ocal source (%s) or (r)emote source (%s)?'
+ '$$ &Local $$ &Remote') % (s, l[0], r[0]), 0):
debug(s, "prompt changed, get", r)
wctx.sub(s).get(r, overwrite)
sm[s] = r
@@ -215,8 +214,8 @@
else:
if repo.ui.promptchoice(
_(' local changed subrepository %s which remote removed\n'
- 'use (c)hanged version or (d)elete?') % s,
- (_('&Changed'), _('&Delete')), 0):
+ 'use (c)hanged version or (d)elete?'
+ '$$ &Changed $$ &Delete') % s, 0):
debug(s, "prompt remove")
wctx.sub(s).remove()
@@ -230,8 +229,8 @@
elif r != sa[s]:
if repo.ui.promptchoice(
_(' remote changed subrepository %s which local removed\n'
- 'use (c)hanged version or (d)elete?') % s,
- (_('&Changed'), _('&Delete')), 0) == 0:
+ 'use (c)hanged version or (d)elete?'
+ '$$ &Changed $$ &Delete') % s, 0) == 0:
debug(s, "prompt recreate", r)
wctx.sub(s).get(r)
sm[s] = r
@@ -242,14 +241,16 @@
def _updateprompt(ui, sub, dirty, local, remote):
if dirty:
msg = (_(' subrepository sources for %s differ\n'
- 'use (l)ocal source (%s) or (r)emote source (%s)?\n')
+ 'use (l)ocal source (%s) or (r)emote source (%s)?\n'
+ '$$ &Local $$ &Remote')
% (subrelpath(sub), local, remote))
else:
msg = (_(' subrepository sources for %s differ (in checked out '
'version)\n'
- 'use (l)ocal source (%s) or (r)emote source (%s)?\n')
+ 'use (l)ocal source (%s) or (r)emote source (%s)?\n'
+ '$$ &Local $$ &Remote')
% (subrelpath(sub), local, remote))
- return ui.promptchoice(msg, (_('&Local'), _('&Remote')), 0)
+ return ui.promptchoice(msg, 0)
def reporelpath(repo):
"""return path to this (sub)repo as seen from outermost repo"""
--- a/mercurial/templatefilters.py Mon Jul 15 15:04:42 2013 +0200
+++ b/mercurial/templatefilters.py Tue Jul 16 11:13:18 2013 -0500
@@ -5,9 +5,8 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from i18n import _
import cgi, re, os, time, urllib
-import encoding, node, util, error
+import encoding, node, util
import hbisect
def addbreaks(text):
@@ -100,8 +99,8 @@
para_re = None
space_re = None
-def fill(text, width):
- '''fill many paragraphs.'''
+def fill(text, width, initindent = '', hangindent = ''):
+ '''fill many paragraphs with optional indentation.'''
global para_re, space_re
if para_re is None:
para_re = re.compile('(\n\n|\n\\s*[-*]\\s*)', re.M)
@@ -122,7 +121,8 @@
yield text[start:m.start(0)], m.group(1)
start = m.end(1)
- return "".join([space_re.sub(' ', util.wrap(para, width=width)) + rest
+ return "".join([util.wrap(space_re.sub(' ', util.wrap(para, width)),
+ width, initindent, hangindent) + rest
for para, rest in findparas()])
def fill68(text):
@@ -401,34 +401,5 @@
text = regexp.sub(format, text)
return text
-def fillfunc(context, mapping, args):
- if not (1 <= len(args) <= 2):
- raise error.ParseError(_("fill expects one or two arguments"))
-
- text = stringify(args[0][0](context, mapping, args[0][1]))
- width = 76
- if len(args) == 2:
- try:
- width = int(stringify(args[1][0](context, mapping, args[1][1])))
- except ValueError:
- raise error.ParseError(_("fill expects an integer width"))
-
- return fill(text, width)
-
-def datefunc(context, mapping, args):
- if not (1 <= len(args) <= 2):
- raise error.ParseError(_("date expects one or two arguments"))
-
- date = args[0][0](context, mapping, args[0][1])
- if len(args) == 2:
- fmt = stringify(args[1][0](context, mapping, args[1][1]))
- return util.datestr(date, fmt)
- return util.datestr(date)
-
-funcs = {
- "fill": fillfunc,
- "date": datefunc,
-}
-
# tell hggettext to extract docstrings from these functions:
i18nfunctions = filters.values()
--- a/mercurial/templater.py Mon Jul 15 15:04:42 2013 +0200
+++ b/mercurial/templater.py Tue Jul 16 11:13:18 2013 -0500
@@ -199,15 +199,47 @@
if n in funcs:
f = funcs[n]
return (f, args)
- if n in templatefilters.funcs:
- f = templatefilters.funcs[n]
- return (f, args)
if n in context._filters:
if len(args) != 1:
raise error.ParseError(_("filter %s expects one argument") % n)
f = context._filters[n]
return (runfilter, (args[0][0], args[0][1], f))
+def date(context, mapping, args):
+ if not (1 <= len(args) <= 2):
+ raise error.ParseError(_("date expects one or two arguments"))
+
+ date = args[0][0](context, mapping, args[0][1])
+ if len(args) == 2:
+ fmt = stringify(args[1][0](context, mapping, args[1][1]))
+ return util.datestr(date, fmt)
+ return util.datestr(date)
+
+def fill(context, mapping, args):
+ if not (1 <= len(args) <= 4):
+ raise error.ParseError(_("fill expects one to four arguments"))
+
+ text = stringify(args[0][0](context, mapping, args[0][1]))
+ width = 76
+ initindent = ''
+ hangindent = ''
+ if 2 <= len(args) <= 4:
+ try:
+ width = int(stringify(args[1][0](context, mapping, args[1][1])))
+ except ValueError:
+ raise error.ParseError(_("fill expects an integer width"))
+ try:
+ initindent = stringify(args[2][0](context, mapping, args[2][1]))
+ initindent = stringify(runtemplate(context, mapping,
+ compiletemplate(initindent, context)))
+ hangindent = stringify(args[3][0](context, mapping, args[3][1]))
+ hangindent = stringify(runtemplate(context, mapping,
+ compiletemplate(hangindent, context)))
+ except IndexError:
+ pass
+
+ return templatefilters.fill(text, width, initindent, hangindent)
+
def get(context, mapping, args):
if len(args) != 2:
# i18n: "get" is a keyword
@@ -221,40 +253,6 @@
key = args[1][0](context, mapping, args[1][1])
yield dictarg.get(key)
-def join(context, mapping, args):
- if not (1 <= len(args) <= 2):
- # i18n: "join" is a keyword
- raise error.ParseError(_("join expects one or two arguments"))
-
- joinset = args[0][0](context, mapping, args[0][1])
- if util.safehasattr(joinset, '__call__'):
- jf = joinset.joinfmt
- joinset = [jf(x) for x in joinset()]
-
- joiner = " "
- if len(args) > 1:
- joiner = args[1][0](context, mapping, args[1][1])
-
- first = True
- for x in joinset:
- if first:
- first = False
- else:
- yield joiner
- yield x
-
-def sub(context, mapping, args):
- if len(args) != 3:
- # i18n: "sub" is a keyword
- raise error.ParseError(_("sub expects three arguments"))
-
- pat = stringify(args[0][0](context, mapping, args[0][1]))
- rpl = stringify(args[1][0](context, mapping, args[1][1]))
- src = stringify(args[2][0](context, mapping, args[2][1]))
- src = stringify(runtemplate(context, mapping,
- compiletemplate(src, context)))
- yield re.sub(pat, rpl, src)
-
def if_(context, mapping, args):
if not (2 <= len(args) <= 3):
# i18n: "if" is a keyword
@@ -282,6 +280,28 @@
t = stringify(args[3][0](context, mapping, args[3][1]))
yield runtemplate(context, mapping, compiletemplate(t, context))
+def join(context, mapping, args):
+ if not (1 <= len(args) <= 2):
+ # i18n: "join" is a keyword
+ raise error.ParseError(_("join expects one or two arguments"))
+
+ joinset = args[0][0](context, mapping, args[0][1])
+ if util.safehasattr(joinset, '__call__'):
+ jf = joinset.joinfmt
+ joinset = [jf(x) for x in joinset()]
+
+ joiner = " "
+ if len(args) > 1:
+ joiner = args[1][0](context, mapping, args[1][1])
+
+ first = True
+ for x in joinset:
+ if first:
+ first = False
+ else:
+ yield joiner
+ yield x
+
def label(context, mapping, args):
if len(args) != 2:
# i18n: "label" is a keyword
@@ -301,6 +321,28 @@
return minirst.format(text, style=style, keep=['verbose'])
+def strip(context, mapping, args):
+ if not (1 <= len(args) <= 2):
+ raise error.ParseError(_("strip expects one or two arguments"))
+
+ text = args[0][0](context, mapping, args[0][1])
+ if len(args) == 2:
+ chars = args[1][0](context, mapping, args[1][1])
+ return text.strip(chars)
+ return text.strip()
+
+def sub(context, mapping, args):
+ if len(args) != 3:
+ # i18n: "sub" is a keyword
+ raise error.ParseError(_("sub expects three arguments"))
+
+ pat = stringify(args[0][0](context, mapping, args[0][1]))
+ rpl = stringify(args[1][0](context, mapping, args[1][1]))
+ src = stringify(args[2][0](context, mapping, args[2][1]))
+ src = stringify(runtemplate(context, mapping,
+ compiletemplate(src, context)))
+ yield re.sub(pat, rpl, src)
+
methods = {
"string": lambda e, c: (runstring, e[1]),
"symbol": lambda e, c: (runsymbol, e[1]),
@@ -312,12 +354,15 @@
}
funcs = {
+ "date": date,
+ "fill": fill,
"get": get,
"if": if_,
"ifeq": ifeq,
"join": join,
"label": label,
"rstdoc": rstdoc,
+ "strip": strip,
"sub": sub,
}
@@ -394,6 +439,16 @@
engines = {'default': engine}
+def stylelist():
+ path = templatepath()[0]
+ dirlist = os.listdir(path)
+ stylelist = []
+ for file in dirlist:
+ split = file.split(".")
+ if split[0] == "map-cmdline":
+ stylelist.append(split[1])
+ return ", ".join(sorted(stylelist))
+
class templater(object):
def __init__(self, mapfile, filters={}, defaults={}, cache={},
@@ -415,7 +470,8 @@
if not mapfile:
return
if not os.path.exists(mapfile):
- raise util.Abort(_('style not found: %s') % mapfile)
+ raise util.Abort(_("style '%s' not found") % mapfile,
+ hint=_("available styles: %s") % stylelist())
conf = config.config()
conf.read(mapfile)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/templates/map-cmdline.phases Tue Jul 16 11:13:18 2013 -0500
@@ -0,0 +1,25 @@
+changeset = 'changeset: {rev}:{node|short}\n{branches}{bookmarks}{tags}phase: {phase}\n{parents}user: {author}\ndate: {date|date}\nsummary: {desc|firstline}\n\n'
+changeset_quiet = '{rev}:{node|short}\n'
+changeset_verbose = 'changeset: {rev}:{node|short}\n{branches}{bookmarks}{tags}{parents}user: {author}\ndate: {date|date}\n{files}{file_copies_switch}description:\n{desc|strip}\n\n\n'
+changeset_debug = 'changeset: {rev}:{node}\n{branches}{bookmarks}{tags}phase: {phase}\n{parents}{manifest}user: {author}\ndate: {date|date}\n{file_mods}{file_adds}{file_dels}{file_copies_switch}{extras}description:\n{desc|strip}\n\n\n'
+start_files = 'files: '
+file = ' {file}'
+end_files = '\n'
+start_file_mods = 'files: '
+file_mod = ' {file_mod}'
+end_file_mods = '\n'
+start_file_adds = 'files+: '
+file_add = ' {file_add}'
+end_file_adds = '\n'
+start_file_dels = 'files-: '
+file_del = ' {file_del}'
+end_file_dels = '\n'
+start_file_copies = 'copies: '
+file_copy = ' {name} ({source})'
+end_file_copies = '\n'
+parent = 'parent: {rev}:{node|formatnode}\n'
+manifest = 'manifest: {rev}:{node}\n'
+branch = 'branch: {branch}\n'
+tag = 'tag: {tag}\n'
+bookmark = 'bookmark: {bookmark}\n'
+extra = 'extra: {key}={value|stringescape}\n'
--- a/mercurial/templates/paper/filerevision.tmpl Mon Jul 15 15:04:42 2013 +0200
+++ b/mercurial/templates/paper/filerevision.tmpl Tue Jul 16 11:13:18 2013 -0500
@@ -68,7 +68,7 @@
<div class="overflow">
<div class="sourcefirst"> line source</div>
-{text%fileline}
+<pre class="sourcelines">{text%fileline}</pre>
<div class="sourcelast"></div>
</div>
</div>
--- a/mercurial/templates/paper/map Mon Jul 15 15:04:42 2013 +0200
+++ b/mercurial/templates/paper/map Tue Jul 16 11:13:18 2013 -0500
@@ -72,7 +72,7 @@
filecomparison = filecomparison.tmpl
filelog = filelog.tmpl
fileline = '
- <div class="parity{parity} source"><a href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</div>'
+ <span id="{lineid}">{strip(line|escape, '\r\n')}</span><a href="#{lineid}"></a>'
filelogentry = filelogentry.tmpl
annotateline = '
--- a/mercurial/templates/static/style-paper.css Mon Jul 15 15:04:42 2013 +0200
+++ b/mercurial/templates/static/style-paper.css Tue Jul 16 11:13:18 2013 -0500
@@ -209,6 +209,49 @@
.source a { color: #999; font-size: smaller; font-family: monospace;}
.bottomline { border-bottom: 1px solid #999; }
+.sourcelines {
+ font-size: 90%;
+ position: relative;
+}
+
+.sourcelines > span {
+ display: inline-block;
+ width: 100%;
+ padding: 1px 0px;
+ white-space: pre-wrap;
+ counter-increment: lineno;
+}
+
+.sourcelines > span:before {
+ -moz-user-select: -moz-none;
+ -khtml-user-select: none;
+ -webkit-user-select: none;
+ -ms-user-select: none;
+ user-select: none;
+ display: inline-block;
+ width: 4em;
+ margin-right: 1em;
+ font-size: smaller;
+ color: #999;
+ text-align: right;
+ content: counter(lineno);
+}
+
+.sourcelines > span:nth-child(4n+1) { background-color: #f0f0f0; }
+.sourcelines > span:nth-child(4n+3) { background-color: white; }
+
+.sourcelines > span:target {
+ background-color: #ffff99;
+}
+
+.sourcelines > a {
+ display: inline-block;
+ position: absolute;
+ left: 0px;
+ width: 4em;
+ height: 1em;
+}
+
.fileline { font-family: monospace; }
.fileline img { border: 0; }
--- a/mercurial/ui.py Mon Jul 15 15:04:42 2013 +0200
+++ b/mercurial/ui.py Tue Jul 16 11:13:18 2013 -0500
@@ -6,7 +6,7 @@
# GNU General Public License version 2 or any later version.
from i18n import _
-import errno, getpass, os, re, socket, sys, tempfile, traceback
+import errno, getpass, os, socket, sys, tempfile, traceback
import config, scmutil, util, error, formatter
class ui(object):
@@ -284,22 +284,16 @@
ConfigError: foo.invalid is not a byte quantity ('somevalue')
"""
- orig = string = self.config(section, name)
- if orig is None:
+ value = self.config(section, name)
+ if value is None:
if not isinstance(default, str):
return default
- orig = string = default
- multiple = 1
- m = re.match(r'([^kmbg]+?)\s*([kmg]?)b?$', string, re.I)
- if m:
- string, key = m.groups()
- key = key.lower()
- multiple = dict(k=1024, m=1048576, g=1073741824).get(key, 1)
+ value = default
try:
- return int(float(string) * multiple)
- except ValueError:
+ return util.sizetoint(value)
+ except error.ParseError:
raise error.ConfigError(_("%s.%s is not a byte quantity ('%s')")
- % (section, name, orig))
+ % (section, name, value))
def configlist(self, section, name, default=None, untrusted=False):
"""parse a configuration element as a list of comma/space separated
@@ -645,13 +639,20 @@
except EOFError:
raise util.Abort(_('response expected'))
- def promptchoice(self, msg, choices, default=0):
- """Prompt user with msg, read response, and ensure it matches
- one of the provided choices. The index of the choice is returned.
- choices is a sequence of acceptable responses with the format:
- ('&None', 'E&xec', 'Sym&link') Responses are case insensitive.
- If ui is not interactive, the default is returned.
+ def promptchoice(self, prompt, default=0):
+ """Prompt user with a message, read response, and ensure it matches
+ one of the provided choices. The prompt is formatted as follows:
+
+ "would you like fries with that (Yn)? $$ &Yes $$ &No"
+
+ The index of the choice is returned. Responses are case
+ insensitive. If ui is not interactive, the default is
+ returned.
"""
+
+ parts = prompt.split('$$')
+ msg = parts[0].rstrip(' ')
+ choices = [p.strip(' ') for p in parts[1:]]
resps = [s[s.index('&') + 1].lower() for s in choices]
while True:
r = self.prompt(msg, resps[default])
--- a/mercurial/util.py Mon Jul 15 15:04:42 2013 +0200
+++ b/mercurial/util.py Tue Jul 16 11:13:18 2013 -0500
@@ -997,15 +997,18 @@
limit -= len(s)
yield s
-def makedate():
- ct = time.time()
- if ct < 0:
+def makedate(timestamp=None):
+ '''Return a unix timestamp (or the current time) as a (unixtime,
+ offset) tuple based off the local timezone.'''
+ if timestamp is None:
+ timestamp = time.time()
+ if timestamp < 0:
hint = _("check your clock")
- raise Abort(_("negative timestamp: %d") % ct, hint=hint)
- delta = (datetime.datetime.utcfromtimestamp(ct) -
- datetime.datetime.fromtimestamp(ct))
+ raise Abort(_("negative timestamp: %d") % timestamp, hint=hint)
+ delta = (datetime.datetime.utcfromtimestamp(timestamp) -
+ datetime.datetime.fromtimestamp(timestamp))
tz = delta.days * 86400 + delta.seconds
- return ct, tz
+ return timestamp, tz
def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
"""represent a (unixtime, offset) tuple as a localized time.
@@ -1924,3 +1927,41 @@
(' ' * _timenesting[0], func.__name__,
timecount(elapsed)))
return wrapper
+
+_sizeunits = (('m', 2**20), ('k', 2**10), ('g', 2**30),
+ ('kb', 2**10), ('mb', 2**20), ('gb', 2**30), ('b', 1))
+
+def sizetoint(s):
+ '''Convert a space specifier to a byte count.
+
+ >>> sizetoint('30')
+ 30
+ >>> sizetoint('2.2kb')
+ 2252
+ >>> sizetoint('6M')
+ 6291456
+ '''
+ t = s.strip().lower()
+ try:
+ for k, u in _sizeunits:
+ if t.endswith(k):
+ return int(float(t[:-len(k)]) * u)
+ return int(t)
+ except ValueError:
+ raise error.ParseError(_("couldn't parse size: %s") % s)
+
+class hooks(object):
+ '''A collection of hook functions that can be used to extend a
+ function's behaviour. Hooks are called in lexicographic order,
+ based on the names of their sources.'''
+
+ def __init__(self):
+ self._hooks = []
+
+ def add(self, source, hook):
+ self._hooks.append((source, hook))
+
+ def __call__(self, *args):
+ self._hooks.sort(key=lambda x: x[0])
+ for source, hook in self._hooks:
+ hook(*args)
--- a/mercurial/wireproto.py Mon Jul 15 15:04:42 2013 +0200
+++ b/mercurial/wireproto.py Tue Jul 16 11:13:18 2013 -0500
@@ -281,13 +281,15 @@
bases=bases, heads=heads)
return changegroupmod.unbundle10(self._decompress(f), 'UN')
- def getbundle(self, source, heads=None, common=None):
+ def getbundle(self, source, heads=None, common=None, bundlecaps=None):
self.requirecap('getbundle', _('look up remote changes'))
opts = {}
if heads is not None:
opts['heads'] = encodelist(heads)
if common is not None:
opts['common'] = encodelist(common)
+ if bundlecaps is not None:
+ opts['bundlecaps'] = ','.join(bundlecaps)
f = self._callstream("getbundle", **opts)
return changegroupmod.unbundle10(self._decompress(f), 'UN')
@@ -449,9 +451,12 @@
return repo.debugwireargs(one, two, **opts)
def getbundle(repo, proto, others):
- opts = options('getbundle', ['heads', 'common'], others)
+ opts = options('getbundle', ['heads', 'common', 'bundlecaps'], others)
for k, v in opts.iteritems():
- opts[k] = decodelist(v)
+ if k in ('heads', 'common'):
+ opts[k] = decodelist(v)
+ elif k == 'bundlecaps':
+ opts[k] = set(v.split(','))
cg = repo.getbundle('serve', **opts)
return streamres(proto.groupchunks(cg))
@@ -523,6 +528,10 @@
def _allowstream(ui):
return ui.configbool('server', 'uncompressed', True, untrusted=True)
+def _walkstreamfiles(repo):
+    # this is its own function so extensions can override it
+ return repo.store.walk()
+
def stream(repo, proto):
'''If the server supports streaming clone, it advertises the "stream"
capability with a value representing the version and flags of the repo
@@ -544,7 +553,7 @@
lock = repo.lock()
try:
repo.ui.debug('scanning\n')
- for name, ename, size in repo.store.walk():
+ for name, ename, size in _walkstreamfiles(repo):
if size:
entries.append((name, size))
total_bytes += size
--- a/tests/dummyssh Mon Jul 15 15:04:42 2013 +0200
+++ b/tests/dummyssh Tue Jul 16 11:13:18 2013 -0500
@@ -13,7 +13,7 @@
log = open("dummylog", "ab")
log.write("Got arguments")
for i, arg in enumerate(sys.argv[1:]):
- log.write(" %d:%s" % (i+1, arg))
+ log.write(" %d:%s" % (i + 1, arg))
log.write("\n")
log.close()
hgcmd = sys.argv[2]
--- a/tests/filterpyflakes.py Mon Jul 15 15:04:42 2013 +0200
+++ b/tests/filterpyflakes.py Tue Jul 16 11:13:18 2013 -0500
@@ -4,35 +4,48 @@
import sys, re, os
-def makekey(message):
- # "path/file:line: message"
- match = re.search(r"(line \d+)", message)
- line = ''
- if match:
- line = match.group(0)
- message = re.sub(r"(line \d+)", '', message)
- return re.sub(r"([^:]*):([^:]+):([^']*)('[^']*')(.*)$",
- r'\3:\5:\4:\1:\2:' + line,
- message)
+def makekey(typeandline):
+ """
+ for sorting lines by: msgtype, path/to/file, lineno, message
+
+ typeandline is a sequence of a message type and the entire message line
+ the message line format is path/to/file:line: message
+
+ >>> makekey((3, 'example.py:36: any message'))
+ (3, 'example.py', 36, ' any message')
+ >>> makekey((7, 'path/to/file.py:68: dummy message'))
+ (7, 'path/to/file.py', 68, ' dummy message')
+ >>> makekey((2, 'fn:88: m')) > makekey((2, 'fn:9: m'))
+ True
+ """
+
+ msgtype, line = typeandline
+ fname, line, message = line.split(":", 2)
+ # line as int for ordering 9 before 88
+ return msgtype, fname, int(line), message
+
lines = []
for line in sys.stdin:
- # We whitelist tests
+ # We whitelist tests (see more messages in pyflakes.messages)
pats = [
r"imported but unused",
r"local variable '.*' is assigned to but never used",
r"unable to detect undefined names",
]
- if not re.search('|'.join(pats), line):
- continue
+ for msgtype, pat in enumerate(pats):
+ if re.search(pat, line):
+ break # pattern matches
+ else:
+ continue # no pattern matched, next line
fn = line.split(':', 1)[0]
f = open(os.path.join(os.path.dirname(os.path.dirname(__file__)), fn))
data = f.read()
f.close()
- if 'no-check-code' in data:
+ if 'no-' 'check-code' in data:
continue
- lines.append(line)
+ lines.append((msgtype, line))
-for line in sorted(lines, key = makekey):
+for msgtype, line in sorted(lines, key = makekey):
sys.stdout.write(line)
print
--- a/tests/hghave.py Mon Jul 15 15:04:42 2013 +0200
+++ b/tests/hghave.py Tue Jul 16 11:13:18 2013 -0500
@@ -105,7 +105,7 @@
sock = socket.socket(socket.AF_UNIX)
try:
sock.bind(name)
- except socket.error, err:
+ except socket.error:
return False
sock.close()
os.unlink(name)
--- a/tests/run-tests.py Mon Jul 15 15:04:42 2013 +0200
+++ b/tests/run-tests.py Tue Jul 16 11:13:18 2013 -0500
@@ -56,15 +56,14 @@
import re
import threading
import killdaemons as killmod
-import cPickle as pickle
import Queue as queue
processlock = threading.Lock()
closefds = os.name == 'posix'
-def Popen4(cmd, wd, timeout):
+def Popen4(cmd, wd, timeout, env=None):
processlock.acquire()
- p = subprocess.Popen(cmd, shell=True, bufsize=-1, cwd=wd,
+ p = subprocess.Popen(cmd, shell=True, bufsize=-1, cwd=wd, env=env,
close_fds=closefds,
stdin=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
@@ -137,8 +136,6 @@
help="always run tests listed in the specified whitelist file")
parser.add_option("-C", "--annotate", action="store_true",
help="output files annotated with coverage")
- parser.add_option("--child", type="int",
- help="run as child process, summary to given fd")
parser.add_option("-c", "--cover", action="store_true",
help="print a test coverage report")
parser.add_option("-d", "--debug", action="store_true",
@@ -161,6 +158,8 @@
help="run tests matching keywords")
parser.add_option("-l", "--local", action="store_true",
help="shortcut for --with-hg=<testdir>/../hg")
+ parser.add_option("--loop", action="store_true",
+ help="loop tests repeatedly")
parser.add_option("-n", "--nodiff", action="store_true",
help="skip showing test changes")
parser.add_option("-p", "--port", type="int",
@@ -240,32 +239,15 @@
parser.error("sorry, coverage options do not work when --local "
"is specified")
- global vlog
+ global verbose
if options.verbose:
- if options.jobs > 1 or options.child is not None:
- pid = "[%d]" % os.getpid()
- else:
- pid = None
- def vlog(*msg):
- iolock.acquire()
- if pid:
- print pid,
- for m in msg:
- print m,
- print
- sys.stdout.flush()
- iolock.release()
- else:
- vlog = lambda *msg: None
+ verbose = ''
if options.tmpdir:
options.tmpdir = os.path.expanduser(options.tmpdir)
if options.jobs < 1:
parser.error('--jobs must be positive')
- if options.interactive and options.jobs > 1:
- print '(--interactive overrides --jobs)'
- options.jobs = 1
if options.interactive and options.debug:
parser.error("-i/--interactive and -d/--debug are incompatible")
if options.debug:
@@ -273,18 +255,13 @@
sys.stderr.write(
'warning: --timeout option ignored with --debug\n')
options.timeout = 0
- if options.time:
- sys.stderr.write(
- 'warning: --time option ignored with --debug\n')
- options.time = False
if options.py3k_warnings:
if sys.version_info[:2] < (2, 6) or sys.version_info[:2] >= (3, 0):
parser.error('--py3k-warnings can only be used on Python 2.6+')
if options.blacklist:
options.blacklist = parselistfiles(options.blacklist, 'blacklist')
if options.whitelist:
- options.whitelisted = parselistfiles(options.whitelist, 'whitelist',
- warn=options.child is None)
+ options.whitelisted = parselistfiles(options.whitelist, 'whitelist')
else:
options.whitelisted = {}
@@ -319,6 +296,28 @@
for line in difflib.unified_diff(expected, output, ref, err):
sys.stdout.write(line)
+verbose = False
+def vlog(*msg):
+ if verbose is not False:
+ iolock.acquire()
+ if verbose:
+ print verbose,
+ for m in msg:
+ print m,
+ print
+ sys.stdout.flush()
+ iolock.release()
+
+def log(*msg):
+ iolock.acquire()
+ if verbose:
+ print verbose,
+ for m in msg:
+ print m,
+ print
+ sys.stdout.flush()
+ iolock.release()
+
def findprogram(program):
"""Search PATH for a executable program"""
for p in os.environ.get('PATH', os.defpath).split(os.pathsep):
@@ -327,6 +326,65 @@
return name
return None
+def createhgrc(path, options):
+ # create a fresh hgrc
+ hgrc = open(path, 'w')
+ hgrc.write('[ui]\n')
+ hgrc.write('slash = True\n')
+ hgrc.write('interactive = False\n')
+ hgrc.write('[defaults]\n')
+ hgrc.write('backout = -d "0 0"\n')
+ hgrc.write('commit = -d "0 0"\n')
+ hgrc.write('tag = -d "0 0"\n')
+ if options.inotify:
+ hgrc.write('[extensions]\n')
+ hgrc.write('inotify=\n')
+ hgrc.write('[inotify]\n')
+ hgrc.write('pidfile=daemon.pids')
+ hgrc.write('appendpid=True\n')
+ if options.extra_config_opt:
+ for opt in options.extra_config_opt:
+ section, key = opt.split('.', 1)
+ assert '=' in key, ('extra config opt %s must '
+ 'have an = for assignment' % opt)
+ hgrc.write('[%s]\n%s\n' % (section, key))
+ hgrc.close()
+
+def createenv(options, testtmp, threadtmp, port):
+ env = os.environ.copy()
+ env['TESTTMP'] = testtmp
+ env['HOME'] = testtmp
+ env["HGPORT"] = str(port)
+ env["HGPORT1"] = str(port + 1)
+ env["HGPORT2"] = str(port + 2)
+ env["HGRCPATH"] = os.path.join(threadtmp, '.hgrc')
+ env["DAEMON_PIDS"] = os.path.join(threadtmp, 'daemon.pids')
+ env["HGEDITOR"] = sys.executable + ' -c "import sys; sys.exit(0)"'
+ env["HGMERGE"] = "internal:merge"
+ env["HGUSER"] = "test"
+ env["HGENCODING"] = "ascii"
+ env["HGENCODINGMODE"] = "strict"
+
+ # Reset some environment variables to well-known values so that
+ # the tests produce repeatable output.
+ env['LANG'] = env['LC_ALL'] = env['LANGUAGE'] = 'C'
+ env['TZ'] = 'GMT'
+ env["EMAIL"] = "Foo Bar <foo.bar@example.com>"
+ env['COLUMNS'] = '80'
+ env['TERM'] = 'xterm'
+
+ for k in ('HG HGPROF CDPATH GREP_OPTIONS http_proxy no_proxy ' +
+ 'NO_PROXY').split():
+ if k in env:
+ del env[k]
+
+ # unset env related to hooks
+ for k in env.keys():
+ if k.startswith('HG_'):
+ del env[k]
+
+ return env
+
def checktools():
# Before we go any further, check for pre-requisite tools
# stuff from coreutils (cat, rm, etc) are not tested
@@ -347,8 +405,8 @@
except OSError:
pass
-def killdaemons():
- return killmod.killdaemons(DAEMON_PIDS, tryhard=False, remove=True,
+def killdaemons(pidfile):
+ return killmod.killdaemons(pidfile, tryhard=False, remove=True,
logfn=vlog)
def cleanup(options):
@@ -498,9 +556,6 @@
vlog('# Running: %s' % cmd)
os.system(cmd)
- if options.child:
- return
-
covrun('-c')
omit = ','.join(os.path.join(x, '*') for x in [BINDIR, TESTDIR])
covrun('-i', '-r', '"--omit=%s"' % omit) # report
@@ -513,13 +568,13 @@
os.mkdir(adir)
covrun('-i', '-a', '"--directory=%s"' % adir, '"--omit=%s"' % omit)
-def pytest(test, wd, options, replacements):
+def pytest(test, wd, options, replacements, env):
py3kswitch = options.py3k_warnings and ' -3' or ''
cmd = '%s%s "%s"' % (PYTHON, py3kswitch, test)
vlog("# Running", cmd)
if os.name == 'nt':
replacements.append((r'\r\n', '\n'))
- return run(cmd, wd, options, replacements)
+ return run(cmd, wd, options, replacements, env)
needescape = re.compile(r'[\x00-\x08\x0b-\x1f\x7f-\xff]').search
escapesub = re.compile(r'[\x00-\x08\x0b-\x1f\\\x7f-\xff]').sub
@@ -546,9 +601,7 @@
if el + '\n' == l:
if os.name == 'nt':
# matching on "/" is not needed for this line
- iolock.acquire()
- print "\nInfo, unnecessary glob: %s (glob)" % el
- iolock.release()
+ log("\nInfo, unnecessary glob: %s (glob)" % el)
return True
i, n = 0, len(el)
res = ''
@@ -581,7 +634,7 @@
return True
return False
-def tsttest(test, wd, options, replacements):
+def tsttest(test, wd, options, replacements, env):
# We generate a shell script which outputs unique markers to line
# up script results with our source. These markers include input
# line number and the last return code
@@ -707,7 +760,7 @@
cmd = '%s "%s"' % (options.shell, name)
vlog("# Running", cmd)
- exitcode, output = run(cmd, wd, options, replacements)
+ exitcode, output = run(cmd, wd, options, replacements, env)
# do not merge output if skipped, return hghave message instead
# similarly, with --debug, output is None
if exitcode == SKIPPED_STATUS or output is None:
@@ -757,22 +810,22 @@
return exitcode, postout
wifexited = getattr(os, "WIFEXITED", lambda x: False)
-def run(cmd, wd, options, replacements):
+def run(cmd, wd, options, replacements, env):
"""Run command in a sub-process, capturing the output (stdout and stderr).
Return a tuple (exitcode, output). output is None in debug mode."""
# TODO: Use subprocess.Popen if we're running on Python 2.4
if options.debug:
- proc = subprocess.Popen(cmd, shell=True, cwd=wd)
+ proc = subprocess.Popen(cmd, shell=True, cwd=wd, env=env)
ret = proc.wait()
return (ret, None)
- proc = Popen4(cmd, wd, options.timeout)
+ proc = Popen4(cmd, wd, options.timeout, env)
def cleanup():
terminate(proc)
ret = proc.wait()
if ret == 0:
ret = signal.SIGTERM << 8
- killdaemons()
+ killdaemons(env['DAEMON_PIDS'])
return ret
output = ''
@@ -793,41 +846,26 @@
ret = 'timeout'
if ret:
- killdaemons()
+ killdaemons(env['DAEMON_PIDS'])
+
+ if abort:
+ raise KeyboardInterrupt()
for s, r in replacements:
output = re.sub(s, r, output)
return ret, output.splitlines(True)
-def runone(options, test):
- '''tristate output:
- None -> skipped
- True -> passed
- False -> failed'''
-
- global results, resultslock, iolock
-
- testpath = os.path.join(TESTDIR, test)
-
- def result(l, e):
- resultslock.acquire()
- results[l].append(e)
- resultslock.release()
+def runone(options, test, count):
+ '''returns a result element: (code, test, msg)'''
def skip(msg):
- if not options.verbose:
- result('s', (test, msg))
- else:
- iolock.acquire()
- print "\nSkipping %s: %s" % (testpath, msg)
- iolock.release()
- return None
+ if options.verbose:
+ log("\nSkipping %s: %s" % (testpath, msg))
+ return 's', test, msg
def fail(msg, ret):
if not options.nodiff:
- iolock.acquire()
- print "\nERROR: %s %s" % (testpath, msg)
- iolock.release()
+ log("\nERROR: %s %s" % (testpath, msg))
if (not ret and options.interactive
and os.path.exists(testpath + ".err")):
iolock.acquire()
@@ -839,34 +877,33 @@
rename(testpath + ".err", testpath)
else:
rename(testpath + ".err", testpath + ".out")
- result('p', test)
- return
- result('f', (test, msg))
+ return '.', test, ''
+ return '!', test, msg
def success():
- result('p', test)
+ return '.', test, ''
def ignore(msg):
- result('i', (test, msg))
+ return 'i', test, msg
+
+ def describe(ret):
+ if ret < 0:
+ return 'killed by signal %d' % -ret
+ return 'returned error code %d' % ret
- if (os.path.basename(test).startswith("test-") and '~' not in test and
- ('.' not in test or test.endswith('.py') or
- test.endswith('.bat') or test.endswith('.t'))):
- if not os.path.exists(test):
- skip("doesn't exist")
- return None
- else:
- vlog('# Test file', test, 'not supported, ignoring')
- return None # not a supported test, don't record
+ testpath = os.path.join(TESTDIR, test)
+ err = os.path.join(TESTDIR, test + ".err")
+ lctest = test.lower()
+
+ if not os.path.exists(testpath):
+ return skip("doesn't exist")
if not (options.whitelisted and test in options.whitelisted):
if options.blacklist and test in options.blacklist:
- skip("blacklisted")
- return None
+ return skip("blacklisted")
if options.retest and not os.path.exists(test + ".err"):
- ignore("not retesting")
- return None
+ return ignore("not retesting")
if options.keywords:
fp = open(test)
@@ -876,62 +913,32 @@
if k in t:
break
else:
- ignore("doesn't match keyword")
- return None
+ return ignore("doesn't match keyword")
+
+ for ext, func, out in testtypes:
+ if lctest.startswith("test-") and lctest.endswith(ext):
+ runner = func
+ ref = os.path.join(TESTDIR, test + out)
+ break
+ else:
+ return skip("unknown test type")
vlog("# Test", test)
- # create a fresh hgrc
- hgrc = open(HGRCPATH, 'w+')
- hgrc.write('[ui]\n')
- hgrc.write('slash = True\n')
- hgrc.write('interactive = False\n')
- hgrc.write('[defaults]\n')
- hgrc.write('backout = -d "0 0"\n')
- hgrc.write('commit = -d "0 0"\n')
- hgrc.write('tag = -d "0 0"\n')
- if options.inotify:
- hgrc.write('[extensions]\n')
- hgrc.write('inotify=\n')
- hgrc.write('[inotify]\n')
- hgrc.write('pidfile=%s\n' % DAEMON_PIDS)
- hgrc.write('appendpid=True\n')
- if options.extra_config_opt:
- for opt in options.extra_config_opt:
- section, key = opt.split('.', 1)
- assert '=' in key, ('extra config opt %s must '
- 'have an = for assignment' % opt)
- hgrc.write('[%s]\n%s\n' % (section, key))
- hgrc.close()
-
- ref = os.path.join(TESTDIR, test+".out")
- err = os.path.join(TESTDIR, test+".err")
if os.path.exists(err):
os.remove(err) # Remove any previous output files
- try:
- tf = open(testpath)
- firstline = tf.readline().rstrip()
- tf.close()
- except IOError:
- firstline = ''
- lctest = test.lower()
-
- if lctest.endswith('.py') or firstline == '#!/usr/bin/env python':
- runner = pytest
- elif lctest.endswith('.t'):
- runner = tsttest
- ref = testpath
- else:
- return skip("unknown test type")
# Make a tmp subdirectory to work in
- testtmp = os.environ["TESTTMP"] = os.environ["HOME"] = \
- os.path.join(HGTMP, os.path.basename(test))
+ threadtmp = os.path.join(HGTMP, "child%d" % count)
+ testtmp = os.path.join(threadtmp, os.path.basename(test))
+ os.mkdir(threadtmp)
+ os.mkdir(testtmp)
+ port = options.port + count * 3
replacements = [
- (r':%s\b' % options.port, ':$HGPORT'),
- (r':%s\b' % (options.port + 1), ':$HGPORT1'),
- (r':%s\b' % (options.port + 2), ':$HGPORT2'),
+ (r':%s\b' % port, ':$HGPORT'),
+ (r':%s\b' % (port + 1), ':$HGPORT1'),
+ (r':%s\b' % (port + 2), ':$HGPORT2'),
]
if os.name == 'nt':
replacements.append(
@@ -943,18 +950,21 @@
else:
replacements.append((re.escape(testtmp), '$TESTTMP'))
- os.mkdir(testtmp)
- if options.time:
- starttime = time.time()
- ret, out = runner(testpath, testtmp, options, replacements)
- if options.time:
+ env = createenv(options, testtmp, threadtmp, port)
+ createhgrc(env['HGRCPATH'], options)
+
+ starttime = time.time()
+ try:
+ ret, out = runner(testpath, testtmp, options, replacements, env)
+ except KeyboardInterrupt:
endtime = time.time()
- times.append((test, endtime - starttime))
+ log('INTERRUPTED: %s (after %d seconds)' % (test, endtime - starttime))
+ raise
+ endtime = time.time()
+ times.append((test, endtime - starttime))
vlog("# Ret was:", ret)
- killdaemons()
-
- mark = '.'
+ killdaemons(env['DAEMON_PIDS'])
skipped = (ret == SKIPPED_STATUS)
@@ -976,13 +986,7 @@
f.write(line)
f.close()
- def describe(ret):
- if ret < 0:
- return 'killed by signal %d' % -ret
- return 'returned error code %d' % ret
-
if skipped:
- mark = 's'
if out is None: # debug mode: nothing to parse
missing = ['unknown']
failed = None
@@ -991,15 +995,13 @@
if not missing:
missing = ['irrelevant']
if failed:
- fail("hghave failed checking for %s" % failed[-1], ret)
+ result = fail("hghave failed checking for %s" % failed[-1], ret)
skipped = False
else:
- skip(missing[-1])
+ result = skip(missing[-1])
elif ret == 'timeout':
- mark = 't'
- fail("timed out", ret)
+ result = fail("timed out", ret)
elif out != refout:
- mark = '!'
if not options.nodiff:
iolock.acquire()
if options.view:
@@ -1008,27 +1010,23 @@
showdiff(refout, out, ref, err)
iolock.release()
if ret:
- fail("output changed and " + describe(ret), ret)
+ result = fail("output changed and " + describe(ret), ret)
else:
- fail("output changed", ret)
- ret = 1
+ result = fail("output changed", ret)
elif ret:
- mark = '!'
- fail(describe(ret), ret)
+ result = fail(describe(ret), ret)
else:
- success()
+ result = success()
if not options.verbose:
iolock.acquire()
- sys.stdout.write(mark)
+ sys.stdout.write(result[0])
sys.stdout.flush()
iolock.release()
if not options.keep_tmpdir:
- shutil.rmtree(testtmp, True)
- if skipped:
- return None
- return ret == 0
+ shutil.rmtree(threadtmp, True)
+ return result
_hgpath = None
@@ -1057,135 +1055,47 @@
' (expected %s)\n'
% (verb, actualhg, expecthg))
-def runchildren(options, tests):
- if INST:
- installhg(options)
- _checkhglib("Testing")
- else:
- usecorrectpython()
-
- optcopy = dict(options.__dict__)
- optcopy['jobs'] = 1
-
- # Because whitelist has to override keyword matches, we have to
- # actually load the whitelist in the children as well, so we allow
- # the list of whitelist files to pass through and be parsed in the
- # children, but not the dict of whitelisted tests resulting from
- # the parse, used here to override blacklisted tests.
- whitelist = optcopy['whitelisted'] or []
- del optcopy['whitelisted']
-
- blacklist = optcopy['blacklist'] or []
- del optcopy['blacklist']
- blacklisted = []
-
- if optcopy['with_hg'] is None:
- optcopy['with_hg'] = os.path.join(BINDIR, "hg")
- optcopy.pop('anycoverage', None)
-
- opts = []
- for opt, value in optcopy.iteritems():
- name = '--' + opt.replace('_', '-')
- if value is True:
- opts.append(name)
- elif isinstance(value, list):
- for v in value:
- opts.append(name + '=' + str(v))
- elif value is not None:
- opts.append(name + '=' + str(value))
-
- tests.reverse()
- jobs = [[] for j in xrange(options.jobs)]
- while tests:
- for job in jobs:
- if not tests:
- break
- test = tests.pop()
- if test not in whitelist and test in blacklist:
- blacklisted.append(test)
- else:
- job.append(test)
-
- waitq = queue.Queue()
-
- # windows lacks os.wait, so we must emulate it
- def waitfor(proc, rfd):
- fp = os.fdopen(rfd, 'rb')
- return lambda: waitq.put((proc.pid, proc.wait(), fp))
-
- for j, job in enumerate(jobs):
- if not job:
- continue
- rfd, wfd = os.pipe()
- childopts = ['--child=%d' % wfd, '--port=%d' % (options.port + j * 3)]
- childtmp = os.path.join(HGTMP, 'child%d' % j)
- childopts += ['--tmpdir', childtmp]
- cmdline = [PYTHON, sys.argv[0]] + opts + childopts + job
- vlog(' '.join(cmdline))
- proc = subprocess.Popen(cmdline, executable=cmdline[0])
- threading.Thread(target=waitfor(proc, rfd)).start()
- os.close(wfd)
- signal.signal(signal.SIGINT, signal.SIG_IGN)
- failures = 0
- passed, skipped, failed = 0, 0, 0
- skips = []
- fails = []
- for job in jobs:
- if not job:
- continue
- pid, status, fp = waitq.get()
- try:
- childresults = pickle.load(fp)
- except (pickle.UnpicklingError, EOFError):
- sys.exit(255)
- else:
- passed += len(childresults['p'])
- skipped += len(childresults['s'])
- failed += len(childresults['f'])
- skips.extend(childresults['s'])
- fails.extend(childresults['f'])
- if options.time:
- childtimes = pickle.load(fp)
- times.extend(childtimes)
-
- vlog('pid %d exited, status %d' % (pid, status))
- failures |= status
- print
- skipped += len(blacklisted)
- if not options.noskips:
- for s in skips:
- print "Skipped %s: %s" % (s[0], s[1])
- for s in blacklisted:
- print "Skipped %s: blacklisted" % s
- for s in fails:
- print "Failed %s: %s" % (s[0], s[1])
-
- _checkhglib("Tested")
- print "# Ran %d tests, %d skipped, %d failed." % (
- passed + failed, skipped, failed)
-
- if options.time:
- outputtimes(options)
- if options.anycoverage:
- outputcoverage(options)
- sys.exit(failures != 0)
-
-results = dict(p=[], f=[], s=[], i=[])
-resultslock = threading.Lock()
+results = {'.':[], '!':[], 's':[], 'i':[]}
times = []
iolock = threading.Lock()
+abort = False
-def runqueue(options, tests):
- for test in tests:
- ret = runone(options, test)
- if options.first and ret is not None and not ret:
- break
+def scheduletests(options, tests):
+ jobs = options.jobs
+ done = queue.Queue()
+ running = 0
+ count = 0
+ global abort
+
+ def job(test, count):
+ try:
+ done.put(runone(options, test, count))
+ except KeyboardInterrupt:
+ pass
+
+ try:
+ while tests or running:
+ if not done.empty() or running == jobs or not tests:
+ try:
+ code, test, msg = done.get(True, 1)
+ results[code].append((test, msg))
+ if options.first and code not in '.si':
+ break
+ except queue.Empty:
+ continue
+ running -= 1
+ if tests and not running == jobs:
+ test = tests.pop(0)
+ if options.loop:
+ tests.append(test)
+ t = threading.Thread(target=job, args=(test, count))
+ t.start()
+ running += 1
+ count += 1
+ except KeyboardInterrupt:
+ abort = True
def runtests(options, tests):
- global DAEMON_PIDS, HGRCPATH
- DAEMON_PIDS = os.environ["DAEMON_PIDS"] = os.path.join(HGTMP, 'daemon.pids')
- HGRCPATH = os.environ["HGRCPATH"] = os.path.join(HGTMP, '.hgrc')
-
try:
if INST:
installhg(options)
@@ -1203,88 +1113,75 @@
print "running all tests"
tests = orig
- runqueue(options, tests)
+ scheduletests(options, tests)
- failed = len(results['f'])
- tested = len(results['p']) + failed
+ failed = len(results['!'])
+ tested = len(results['.']) + failed
skipped = len(results['s'])
ignored = len(results['i'])
- if options.child:
- fp = os.fdopen(options.child, 'wb')
- pickle.dump(results, fp, pickle.HIGHEST_PROTOCOL)
- if options.time:
- pickle.dump(times, fp, pickle.HIGHEST_PROTOCOL)
- fp.close()
- else:
- print
+ print
+ if not options.noskips:
for s in results['s']:
print "Skipped %s: %s" % s
- for s in results['f']:
- print "Failed %s: %s" % s
- _checkhglib("Tested")
- print "# Ran %d tests, %d skipped, %d failed." % (
- tested, skipped + ignored, failed)
- if options.time:
- outputtimes(options)
+ for s in results['!']:
+ print "Failed %s: %s" % s
+ _checkhglib("Tested")
+ print "# Ran %d tests, %d skipped, %d failed." % (
+ tested, skipped + ignored, failed)
+ if options.time:
+ outputtimes(options)
if options.anycoverage:
outputcoverage(options)
except KeyboardInterrupt:
failed = True
- if not options.child:
- print "\ninterrupted!"
+ print "\ninterrupted!"
if failed:
sys.exit(1)
+testtypes = [('.py', pytest, '.out'),
+ ('.t', tsttest, '')]
+
def main():
(options, args) = parseargs()
- if not options.child:
- os.umask(022)
+ os.umask(022)
+
+ checktools()
- checktools()
-
- if len(args) == 0:
- args = sorted(os.listdir("."))
+ if len(args) == 0:
+ args = [t for t in os.listdir(".")
+ if t.startswith("test-")
+ and (t.endswith(".py") or t.endswith(".t"))]
tests = args
if options.random:
random.shuffle(tests)
+ else:
+ # keywords for slow tests
+ slow = 'svn gendoc check-code-hg'.split()
+ def sortkey(f):
+ # run largest tests first, as they tend to take the longest
+ try:
+ val = -os.stat(f).st_size
+ except OSError, e:
+ if e.errno != errno.ENOENT:
+ raise
+ return -1e9 # file does not exist, tell early
+ for kw in slow:
+ if kw in f:
+ val *= 10
+ return val
+ tests.sort(key=sortkey)
- # Reset some environment variables to well-known values so that
- # the tests produce repeatable output.
- os.environ['LANG'] = os.environ['LC_ALL'] = os.environ['LANGUAGE'] = 'C'
- os.environ['TZ'] = 'GMT'
- os.environ["EMAIL"] = "Foo Bar <foo.bar@example.com>"
- os.environ['CDPATH'] = ''
- os.environ['COLUMNS'] = '80'
- os.environ['GREP_OPTIONS'] = ''
- os.environ['http_proxy'] = ''
- os.environ['no_proxy'] = ''
- os.environ['NO_PROXY'] = ''
- os.environ['TERM'] = 'xterm'
if 'PYTHONHASHSEED' not in os.environ:
# use a random python hash seed all the time
# we do the randomness ourself to know what seed is used
os.environ['PYTHONHASHSEED'] = str(random.getrandbits(32))
print 'python hash seed:', os.environ['PYTHONHASHSEED']
- # unset env related to hooks
- for k in os.environ.keys():
- if k.startswith('HG_'):
- # can't remove on solaris
- os.environ[k] = ''
- del os.environ[k]
- if 'HG' in os.environ:
- # can't remove on solaris
- os.environ['HG'] = ''
- del os.environ['HG']
- if 'HGPROF' in os.environ:
- os.environ['HGPROF'] = ''
- del os.environ['HGPROF']
-
global TESTDIR, HGTMP, INST, BINDIR, PYTHONDIR, COVERAGE_FILE
TESTDIR = os.environ["TESTDIR"] = os.getcwd()
if options.tmpdir:
@@ -1310,17 +1207,6 @@
d = os.getenv('TMP')
tmpdir = tempfile.mkdtemp('', 'hgtests.', d)
HGTMP = os.environ['HGTMP'] = os.path.realpath(tmpdir)
- DAEMON_PIDS = None
- HGRCPATH = None
-
- os.environ["HGEDITOR"] = sys.executable + ' -c "import sys; sys.exit(0)"'
- os.environ["HGMERGE"] = "internal:merge"
- os.environ["HGUSER"] = "test"
- os.environ["HGENCODING"] = "ascii"
- os.environ["HGENCODINGMODE"] = "strict"
- os.environ["HGPORT"] = str(options.port)
- os.environ["HGPORT1"] = str(options.port + 1)
- os.environ["HGPORT2"] = str(options.port + 2)
if options.with_hg:
INST = None
@@ -1340,22 +1226,21 @@
os.environ["BINDIR"] = BINDIR
os.environ["PYTHON"] = PYTHON
- if not options.child:
- path = [BINDIR] + os.environ["PATH"].split(os.pathsep)
- os.environ["PATH"] = os.pathsep.join(path)
+ path = [BINDIR] + os.environ["PATH"].split(os.pathsep)
+ os.environ["PATH"] = os.pathsep.join(path)
- # Include TESTDIR in PYTHONPATH so that out-of-tree extensions
- # can run .../tests/run-tests.py test-foo where test-foo
- # adds an extension to HGRC
- pypath = [PYTHONDIR, TESTDIR]
- # We have to augment PYTHONPATH, rather than simply replacing
- # it, in case external libraries are only available via current
- # PYTHONPATH. (In particular, the Subversion bindings on OS X
- # are in /opt/subversion.)
- oldpypath = os.environ.get(IMPL_PATH)
- if oldpypath:
- pypath.append(oldpypath)
- os.environ[IMPL_PATH] = os.pathsep.join(pypath)
+ # Include TESTDIR in PYTHONPATH so that out-of-tree extensions
+ # can run .../tests/run-tests.py test-foo where test-foo
+ # adds an extension to HGRC
+ pypath = [PYTHONDIR, TESTDIR]
+ # We have to augment PYTHONPATH, rather than simply replacing
+ # it, in case external libraries are only available via current
+ # PYTHONPATH. (In particular, the Subversion bindings on OS X
+ # are in /opt/subversion.)
+ oldpypath = os.environ.get(IMPL_PATH)
+ if oldpypath:
+ pypath.append(oldpypath)
+ os.environ[IMPL_PATH] = os.pathsep.join(pypath)
COVERAGE_FILE = os.path.join(TESTDIR, ".coverage")
@@ -1365,10 +1250,7 @@
vlog("# Using", IMPL_PATH, os.environ[IMPL_PATH])
try:
- if len(tests) > 1 and options.jobs > 1:
- runchildren(options, tests)
- else:
- runtests(options, tests)
+ runtests(options, tests)
finally:
time.sleep(.1)
cleanup(options)
--- a/tests/test-bisect.t Mon Jul 15 15:04:42 2013 +0200
+++ b/tests/test-bisect.t Tue Jul 16 11:13:18 2013 -0500
@@ -184,6 +184,12 @@
$ hg bisect -r
$ hg bisect -b
+ $ hg summary
+ parent: 31:58c80a7c8a40 tip
+ msg 31
+ branch: default
+ commit: (clean)
+ update: (current)
$ hg bisect -g 1
Testing changeset 16:a2e6ea4973e9 (30 changesets remaining, ~4 tests)
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
--- a/tests/test-bookmarks-current.t Mon Jul 15 15:04:42 2013 +0200
+++ b/tests/test-bookmarks-current.t Tue Jul 16 11:13:18 2013 -0500
@@ -43,16 +43,19 @@
$ hg bookmarks
* Z -1:000000000000
-new bookmark Y
+new bookmarks X and Y, first one made active
- $ hg bookmark Y
+ $ hg bookmark Y X
list bookmarks
$ hg bookmark
+ X -1:000000000000
* Y -1:000000000000
Z -1:000000000000
+ $ hg bookmark -d X
+
commit
$ echo 'b' > b
--- a/tests/test-bookmarks.t Mon Jul 15 15:04:42 2013 +0200
+++ b/tests/test-bookmarks.t Tue Jul 16 11:13:18 2013 -0500
@@ -168,11 +168,14 @@
$ hg bookmark -d REVSET
$ hg bookmark -d TIP
-rename without new name
+rename without new name or multiple names
$ hg bookmark -m Y
abort: new bookmark name required
[255]
+ $ hg bookmark -m Y Y2 Y3
+ abort: only one new bookmark name allowed
+ [255]
delete without name
@@ -417,8 +420,9 @@
a@ 2:db815d6d32e6
x y 2:db815d6d32e6
- $ hg bookmark -d @
- $ hg bookmark -d a@
+delete multiple bookmarks at once
+
+ $ hg bookmark -d @ a@
test clone with a bookmark named "default" (issue3677)
--- a/tests/test-check-code-hg.t Mon Jul 15 15:04:42 2013 +0200
+++ b/tests/test-check-code-hg.t Tue Jul 16 11:13:18 2013 -0500
@@ -6,13 +6,7 @@
> exit 80
> fi
-New errors are not allowed. Warnings are strongly discouraged.
-
- $ hg manifest 2>/dev/null \
- > | xargs "$check_code" --warnings --nolineno --per-file=0 \
- > || false
-
-Check Python files without py extension
+Prepare check for Python files without py extension
$ cp \
> hg \
@@ -23,7 +17,15 @@
> contrib/hgweb.wsgi \
> contrib/simplemerge \
> contrib/undumprevlog \
+ > i18n/hggettext \
+ > i18n/posplit \
+ > tests/hghave \
+ > tests/dummyssh \
> "$TESTTMP"/
- $ for f in "$TESTTMP"/*; do cp "$f" "$f.py"; done
- $ "$check_code" --warnings --nolineno --per-file=0 "$TESTTMP"/*.py \
+ $ for f in "$TESTTMP"/*; do mv "$f" "$f.py"; done
+
+New errors are not allowed. Warnings are strongly discouraged.
+
+ $ { hg manifest 2>/dev/null; ls "$TESTTMP"/*.py; } \
+ > | xargs "$check_code" --warnings --per-file=0 \
> || false
--- a/tests/test-check-pyflakes.t Mon Jul 15 15:04:42 2013 +0200
+++ b/tests/test-check-pyflakes.t Tue Jul 16 11:13:18 2013 -0500
@@ -1,6 +1,21 @@
$ "$TESTDIR/hghave" pyflakes || exit 80
$ cd "`dirname "$TESTDIR"`"
- $ pyflakes mercurial hgext 2>&1 | "$TESTDIR/filterpyflakes.py"
+
+run pyflakes on all tracked files ending in .py or without a file ending
+(skipping binary file random-seed)
+ $ hg manifest 2>/dev/null | egrep "\.py$|^[^.]*$" | grep -v /random_seed$ \
+ > | xargs pyflakes 2>/dev/null | "$TESTDIR/filterpyflakes.py"
+ contrib/win32/hgwebdir_wsgi.py:*: 'win32traceutil' imported but unused (glob)
+ setup.py:*: 'sha' imported but unused (glob)
+ setup.py:*: 'zlib' imported but unused (glob)
+ setup.py:*: 'bz2' imported but unused (glob)
+ setup.py:*: 'py2exe' imported but unused (glob)
+ tests/hghave.py:*: 'hgext' imported but unused (glob)
+ tests/hghave.py:*: '_lsprof' imported but unused (glob)
+ tests/hghave.py:*: 'publish_cmdline' imported but unused (glob)
+ tests/hghave.py:*: 'pygments' imported but unused (glob)
+ tests/hghave.py:*: 'ssl' imported but unused (glob)
+ contrib/win32/hgwebdir_wsgi.py:*: 'from isapi.install import *' used; unable to detect undefined names (glob)
hgext/inotify/linux/__init__.py:*: 'from _inotify import *' used; unable to detect undefined names (glob)
--- a/tests/test-command-template.t Mon Jul 15 15:04:42 2013 +0200
+++ b/tests/test-command-template.t Tue Jul 16 11:13:18 2013 -0500
@@ -458,7 +458,8 @@
Error if no style:
$ hg log --style notexist
- abort: style not found: notexist
+ abort: style 'notexist' not found
+ (available styles: bisect, changelog, compact, default, phases, xml)
[255]
Error if style missing key:
@@ -1535,3 +1536,31 @@
$ hg log -R latesttag -r 10 --template '{sub("[0-9]", "x", "{rev}")}\n'
xx
+
+Test the strip function with chars specified:
+
+ $ hg log -R latesttag --template '{desc}\n'
+ at3
+ t5
+ t3
+ t2
+ t1
+ merge
+ h2e
+ h2d
+ h1c
+ b
+ a
+
+ $ hg log -R latesttag --template '{strip(desc, "te")}\n'
+ at3
+ 5
+ 3
+ 2
+ 1
+ merg
+ h2
+ h2d
+ h1c
+ b
+ a
--- a/tests/test-commandserver.py Mon Jul 15 15:04:42 2013 +0200
+++ b/tests/test-commandserver.py Tue Jul 16 11:13:18 2013 -0500
@@ -25,7 +25,11 @@
else:
return channel, server.stdout.read(length)
-def runcommand(server, args, output=sys.stdout, error=sys.stderr, input=None):
+def sep(text):
+ return text.replace('\\', '/')
+
+def runcommand(server, args, output=sys.stdout, error=sys.stderr, input=None,
+ outfilter=lambda x: x):
print ' runcommand', ' '.join(args)
sys.stdout.flush()
server.stdin.write('runcommand\n')
@@ -37,7 +41,7 @@
while True:
ch, data = readchannel(server)
if ch == 'o':
- output.write(data)
+ output.write(outfilter(data))
output.flush()
elif ch == 'e':
error.write(data)
@@ -249,7 +253,8 @@
# make it public; draft marker moves to 4:7966c8e3734d
runcommand(server, ['phase', '-p', '.'])
- runcommand(server, ['phase', '.']) # load _phasecache.phaseroots
+ # load _phasecache.phaseroots
+ runcommand(server, ['phase', '.'], outfilter=sep)
# strip 1::4 outside server
os.system('hg -q --config extensions.mq= strip 1')
--- a/tests/test-contrib.t Mon Jul 15 15:04:42 2013 +0200
+++ b/tests/test-contrib.t Tue Jul 16 11:13:18 2013 -0500
@@ -103,34 +103,6 @@
no changes found
[1]
-
-#if hardlink
-
-Test shrink-revlog:
- $ cd repo-a
- $ hg --config extensions.shrink="$CONTRIBDIR/shrink-revlog.py" shrink
- shrinking $TESTTMP/repo-a/.hg/store/00manifest.i (glob)
- reading revs
- sorting revs
- writing revs
- old file size: 324 bytes ( 0.0 MiB)
- new file size: 324 bytes ( 0.0 MiB)
- shrinkage: 0.0% (1.0x)
- note: old revlog saved in:
- $TESTTMP/repo-a/.hg/store/00manifest.i.old (glob)
- $TESTTMP/repo-a/.hg/store/00manifest.d.old (glob)
- (You can delete those files when you are satisfied that your
- repository is still sane. Running 'hg verify' is strongly recommended.)
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- 1 files, 3 changesets, 3 total revisions
- $ cd ..
-
-#endif
-
Test simplemerge command:
$ cp "$CONTRIBDIR/simplemerge" .
--- a/tests/test-convert-git.t Mon Jul 15 15:04:42 2013 +0200
+++ b/tests/test-convert-git.t Tue Jul 16 11:13:18 2013 -0500
@@ -13,6 +13,10 @@
$ GIT_COMMITTER_NAME="$GIT_AUTHOR_NAME"; export GIT_COMMITTER_NAME
$ GIT_COMMITTER_EMAIL="$GIT_AUTHOR_EMAIL"; export GIT_COMMITTER_EMAIL
$ GIT_COMMITTER_DATE="$GIT_AUTHOR_DATE"; export GIT_COMMITTER_DATE
+ $ INVALIDID1=afd12345af
+ $ INVALIDID2=28173x36ddd1e67bf7098d541130558ef5534a86
+ $ VALIDID1=39b3d83f9a69a9ba4ebb111461071a0af0027357
+ $ VALIDID2=8dd6476bd09d9c7776355dc454dafe38efaec5da
$ count=10
$ commit()
> {
@@ -298,6 +302,36 @@
$ commit -a -m 'addsubmodule' >/dev/null 2>/dev/null
$ cd ..
+test invalid splicemap1
+
+ $ cat > splicemap <<EOF
+ > $VALIDID1
+ > EOF
+ $ hg convert --splicemap splicemap git-repo2 git-repo2-splicemap1-hg
+ initializing destination git-repo2-splicemap1-hg repository
+ abort: syntax error in splicemap(1): child parent1[,parent2] expected
+ [255]
+
+test invalid splicemap2
+
+ $ cat > splicemap <<EOF
+ > $VALIDID1 $VALIDID2, $VALIDID2, $VALIDID2
+ > EOF
+ $ hg convert --splicemap splicemap git-repo2 git-repo2-splicemap2-hg
+ initializing destination git-repo2-splicemap2-hg repository
+ abort: syntax error in splicemap(1): child parent1[,parent2] expected
+ [255]
+
+test invalid splicemap3
+
+ $ cat > splicemap <<EOF
+ > $INVALIDID1 $INVALIDID2
+ > EOF
+ $ hg convert --splicemap splicemap git-repo2 git-repo2-splicemap3-hg
+ initializing destination git-repo2-splicemap3-hg repository
+ abort: splicemap entry afd12345af is not a valid revision identifier
+ [255]
+
convert sub modules
$ hg convert git-repo6 git-repo6-hg
initializing destination git-repo6-hg repository
--- a/tests/test-convert-splicemap.t Mon Jul 15 15:04:42 2013 +0200
+++ b/tests/test-convert-splicemap.t Tue Jul 16 11:13:18 2013 -0500
@@ -37,6 +37,8 @@
$ hg ci -Am addaandd
adding a
adding d
+ $ INVALIDID1=afd12345af
+ $ INVALIDID2=28173x36ddd1e67bf7098d541130558ef5534a86
$ CHILDID1=`hg id --debug -i`
$ echo d >> d
$ hg ci -Am changed
@@ -53,7 +55,7 @@
o 0:527cdedf31fb "addaandd" files: a d
-test invalid splicemap
+test invalid splicemap1
$ cat > splicemap <<EOF
> $CHILDID2
@@ -62,6 +64,24 @@
abort: syntax error in splicemap(1): child parent1[,parent2] expected
[255]
+test invalid splicemap2
+
+ $ cat > splicemap <<EOF
+ > $CHILDID2 $PARENTID1, $PARENTID2, $PARENTID2
+ > EOF
+ $ hg convert --splicemap splicemap repo2 repo1
+ abort: syntax error in splicemap(1): child parent1[,parent2] expected
+ [255]
+
+test invalid splicemap3
+
+ $ cat > splicemap <<EOF
+ > $INVALIDID1 $INVALIDID2
+ > EOF
+ $ hg convert --splicemap splicemap repo2 repo1
+ abort: splicemap entry afd12345af is not a valid revision identifier
+ [255]
+
splice repo2 on repo1
$ cat > splicemap <<EOF
--- a/tests/test-convert-svn-source.t Mon Jul 15 15:04:42 2013 +0200
+++ b/tests/test-convert-svn-source.t Tue Jul 16 11:13:18 2013 -0500
@@ -16,6 +16,8 @@
#else
$ SVNREPOURL=file://`python -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"`
#endif
+ $ INVALIDREVISIONID=svn:x2147622-4a9f-4db4-a8d3-13562ff547b2/proj%20B/mytrunk@1
+ $ VALIDREVISIONID=svn:a2147622-4a9f-4db4-a8d3-13562ff547b2/proj%20B/mytrunk/mytrunk@1
Now test that it works with trunk/tags layout, but no branches yet.
@@ -168,6 +170,15 @@
|
o 0 second letter files: letter2.txt
+test invalid splicemap1
+
+ $ cat > splicemap <<EOF
+ > $INVALIDREVISIONID $VALIDREVISIONID
+ > EOF
+ $ hg convert --splicemap splicemap "$SVNREPOURL/proj%20B/mytrunk" smap
+ initializing destination smap repository
+ abort: splicemap entry svn:x2147622-4a9f-4db4-a8d3-13562ff547b2/proj%20B/mytrunk@1 is not a valid revision identifier
+ [255]
Test stop revision
$ hg convert --rev 1 "$SVNREPOURL/proj%20B/mytrunk" stoprev
--- a/tests/test-hgweb-auth.py Mon Jul 15 15:04:42 2013 +0200
+++ b/tests/test-hgweb-auth.py Tue Jul 16 11:13:18 2013 -0500
@@ -41,7 +41,7 @@
if authinfo is not None:
pm.add_password(*authinfo)
print ' ', pm.find_user_password('test', u)
- except Abort, e:
+ except Abort:
print 'abort'
if not urls:
--- a/tests/test-hgweb-commands.t Mon Jul 15 15:04:42 2013 +0200
+++ b/tests/test-hgweb-commands.t Tue Jul 16 11:13:18 2013 -0500
@@ -668,9 +668,8 @@
<div class="overflow">
<div class="sourcefirst"> line source</div>
-
- <div class="parity0 source"><a href="#l1" id="l1"> 1</a> foo
- </div>
+ <pre class="sourcelines">
+ <span id="l1">foo</span><a href="#l1"></a></pre>
<div class="sourcelast"></div>
</div>
</div>
--- a/tests/test-highlight.t Mon Jul 15 15:04:42 2013 +0200
+++ b/tests/test-highlight.t Tue Jul 16 11:13:18 2013 -0500
@@ -137,39 +137,39 @@
<div class="overflow">
<div class="sourcefirst"> line source</div>
-
- <div class="parity0 source"><a href="#l1" id="l1"> 1</a> <span class="c">#!/usr/bin/env python</span></div>
- <div class="parity1 source"><a href="#l2" id="l2"> 2</a> </div>
- <div class="parity0 source"><a href="#l3" id="l3"> 3</a> <span class="sd">"""Fun with generators. Corresponding Haskell implementation:</span></div>
- <div class="parity1 source"><a href="#l4" id="l4"> 4</a> </div>
- <div class="parity0 source"><a href="#l5" id="l5"> 5</a> <span class="sd">primes = 2 : sieve [3, 5..]</span></div>
- <div class="parity1 source"><a href="#l6" id="l6"> 6</a> <span class="sd"> where sieve (p:ns) = p : sieve [n | n <- ns, mod n p /= 0]</span></div>
- <div class="parity0 source"><a href="#l7" id="l7"> 7</a> <span class="sd">"""</span></div>
- <div class="parity1 source"><a href="#l8" id="l8"> 8</a> </div>
- <div class="parity0 source"><a href="#l9" id="l9"> 9</a> <span class="kn">from</span> <span class="nn">itertools</span> <span class="kn">import</span> <span class="n">dropwhile</span><span class="p">,</span> <span class="n">ifilter</span><span class="p">,</span> <span class="n">islice</span><span class="p">,</span> <span class="n">count</span><span class="p">,</span> <span class="n">chain</span></div>
- <div class="parity1 source"><a href="#l10" id="l10"> 10</a> </div>
- <div class="parity0 source"><a href="#l11" id="l11"> 11</a> <span class="kn">def</span> <span class="nf">primes</span><span class="p">():</span></div>
- <div class="parity1 source"><a href="#l12" id="l12"> 12</a> <span class="sd">"""Generate all primes."""</span></div>
- <div class="parity0 source"><a href="#l13" id="l13"> 13</a> <span class="kn">def</span> <span class="nf">sieve</span><span class="p">(</span><span class="n">ns</span><span class="p">):</span></div>
- <div class="parity1 source"><a href="#l14" id="l14"> 14</a> <span class="n">p</span> <span class="o">=</span> <span class="n">ns</span><span class="o">.</span><span class="n">next</span><span class="p">()</span></div>
- <div class="parity0 source"><a href="#l15" id="l15"> 15</a> <span class="c"># It is important to yield *here* in order to stop the</span></div>
- <div class="parity1 source"><a href="#l16" id="l16"> 16</a> <span class="c"># infinite recursion.</span></div>
- <div class="parity0 source"><a href="#l17" id="l17"> 17</a> <span class="kn">yield</span> <span class="n">p</span></div>
- <div class="parity1 source"><a href="#l18" id="l18"> 18</a> <span class="n">ns</span> <span class="o">=</span> <span class="n">ifilter</span><span class="p">(</span><span class="kn">lambda</span> <span class="n">n</span><span class="p">:</span> <span class="n">n</span> <span class="o">%</span> <span class="n">p</span> <span class="o">!=</span> <span class="mi">0</span><span class="p">,</span> <span class="n">ns</span><span class="p">)</span></div>
- <div class="parity0 source"><a href="#l19" id="l19"> 19</a> <span class="kn">for</span> <span class="n">n</span> <span class="ow">in</span> <span class="n">sieve</span><span class="p">(</span><span class="n">ns</span><span class="p">):</span></div>
- <div class="parity1 source"><a href="#l20" id="l20"> 20</a> <span class="kn">yield</span> <span class="n">n</span></div>
- <div class="parity0 source"><a href="#l21" id="l21"> 21</a> </div>
- <div class="parity1 source"><a href="#l22" id="l22"> 22</a> <span class="n">odds</span> <span class="o">=</span> <span class="n">ifilter</span><span class="p">(</span><span class="kn">lambda</span> <span class="n">i</span><span class="p">:</span> <span class="n">i</span> <span class="o">%</span> <span class="mi">2</span> <span class="o">==</span> <span class="mi">1</span><span class="p">,</span> <span class="n">count</span><span class="p">())</span></div>
- <div class="parity0 source"><a href="#l23" id="l23"> 23</a> <span class="kn">return</span> <span class="n">chain</span><span class="p">([</span><span class="mi">2</span><span class="p">],</span> <span class="n">sieve</span><span class="p">(</span><span class="n">dropwhile</span><span class="p">(</span><span class="kn">lambda</span> <span class="n">n</span><span class="p">:</span> <span class="n">n</span> <span class="o"><</span> <span class="mi">3</span><span class="p">,</span> <span class="n">odds</span><span class="p">)))</span></div>
- <div class="parity1 source"><a href="#l24" id="l24"> 24</a> </div>
- <div class="parity0 source"><a href="#l25" id="l25"> 25</a> <span class="kn">if</span> <span class="n">__name__</span> <span class="o">==</span> <span class="s">"__main__"</span><span class="p">:</span></div>
- <div class="parity1 source"><a href="#l26" id="l26"> 26</a> <span class="kn">import</span> <span class="nn">sys</span></div>
- <div class="parity0 source"><a href="#l27" id="l27"> 27</a> <span class="kn">try</span><span class="p">:</span></div>
- <div class="parity1 source"><a href="#l28" id="l28"> 28</a> <span class="n">n</span> <span class="o">=</span> <span class="nb">int</span><span class="p">(</span><span class="n">sys</span><span class="o">.</span><span class="n">argv</span><span class="p">[</span><span class="mi">1</span><span class="p">])</span></div>
- <div class="parity0 source"><a href="#l29" id="l29"> 29</a> <span class="kn">except</span> <span class="p">(</span><span class="ne">ValueError</span><span class="p">,</span> <span class="ne">IndexError</span><span class="p">):</span></div>
- <div class="parity1 source"><a href="#l30" id="l30"> 30</a> <span class="n">n</span> <span class="o">=</span> <span class="mi">10</span></div>
- <div class="parity0 source"><a href="#l31" id="l31"> 31</a> <span class="n">p</span> <span class="o">=</span> <span class="n">primes</span><span class="p">()</span></div>
- <div class="parity1 source"><a href="#l32" id="l32"> 32</a> <span class="kn">print</span> <span class="s">"The first </span><span class="si">%d</span><span class="s"> primes: </span><span class="si">%s</span><span class="s">"</span> <span class="o">%</span> <span class="p">(</span><span class="n">n</span><span class="p">,</span> <span class="nb">list</span><span class="p">(</span><span class="n">islice</span><span class="p">(</span><span class="n">p</span><span class="p">,</span> <span class="n">n</span><span class="p">)))</span></div>
+ <pre class="sourcelines">
+ <span id="l1"><span class="c">#!/usr/bin/env python</span></span><a href="#l1"></a>
+ <span id="l2"></span><a href="#l2"></a>
+ <span id="l3"><span class="sd">"""Fun with generators. Corresponding Haskell implementation:</span></span><a href="#l3"></a>
+ <span id="l4"></span><a href="#l4"></a>
+ <span id="l5"><span class="sd">primes = 2 : sieve [3, 5..]</span></span><a href="#l5"></a>
+ <span id="l6"><span class="sd"> where sieve (p:ns) = p : sieve [n | n <- ns, mod n p /= 0]</span></span><a href="#l6"></a>
+ <span id="l7"><span class="sd">"""</span></span><a href="#l7"></a>
+ <span id="l8"></span><a href="#l8"></a>
+ <span id="l9"><span class="kn">from</span> <span class="nn">itertools</span> <span class="kn">import</span> <span class="n">dropwhile</span><span class="p">,</span> <span class="n">ifilter</span><span class="p">,</span> <span class="n">islice</span><span class="p">,</span> <span class="n">count</span><span class="p">,</span> <span class="n">chain</span></span><a href="#l9"></a>
+ <span id="l10"></span><a href="#l10"></a>
+ <span id="l11"><span class="kn">def</span> <span class="nf">primes</span><span class="p">():</span></span><a href="#l11"></a>
+ <span id="l12"> <span class="sd">"""Generate all primes."""</span></span><a href="#l12"></a>
+ <span id="l13"> <span class="kn">def</span> <span class="nf">sieve</span><span class="p">(</span><span class="n">ns</span><span class="p">):</span></span><a href="#l13"></a>
+ <span id="l14"> <span class="n">p</span> <span class="o">=</span> <span class="n">ns</span><span class="o">.</span><span class="n">next</span><span class="p">()</span></span><a href="#l14"></a>
+ <span id="l15"> <span class="c"># It is important to yield *here* in order to stop the</span></span><a href="#l15"></a>
+ <span id="l16"> <span class="c"># infinite recursion.</span></span><a href="#l16"></a>
+ <span id="l17"> <span class="kn">yield</span> <span class="n">p</span></span><a href="#l17"></a>
+ <span id="l18"> <span class="n">ns</span> <span class="o">=</span> <span class="n">ifilter</span><span class="p">(</span><span class="kn">lambda</span> <span class="n">n</span><span class="p">:</span> <span class="n">n</span> <span class="o">%</span> <span class="n">p</span> <span class="o">!=</span> <span class="mi">0</span><span class="p">,</span> <span class="n">ns</span><span class="p">)</span></span><a href="#l18"></a>
+ <span id="l19"> <span class="kn">for</span> <span class="n">n</span> <span class="ow">in</span> <span class="n">sieve</span><span class="p">(</span><span class="n">ns</span><span class="p">):</span></span><a href="#l19"></a>
+ <span id="l20"> <span class="kn">yield</span> <span class="n">n</span></span><a href="#l20"></a>
+ <span id="l21"></span><a href="#l21"></a>
+ <span id="l22"> <span class="n">odds</span> <span class="o">=</span> <span class="n">ifilter</span><span class="p">(</span><span class="kn">lambda</span> <span class="n">i</span><span class="p">:</span> <span class="n">i</span> <span class="o">%</span> <span class="mi">2</span> <span class="o">==</span> <span class="mi">1</span><span class="p">,</span> <span class="n">count</span><span class="p">())</span></span><a href="#l22"></a>
+ <span id="l23"> <span class="kn">return</span> <span class="n">chain</span><span class="p">([</span><span class="mi">2</span><span class="p">],</span> <span class="n">sieve</span><span class="p">(</span><span class="n">dropwhile</span><span class="p">(</span><span class="kn">lambda</span> <span class="n">n</span><span class="p">:</span> <span class="n">n</span> <span class="o"><</span> <span class="mi">3</span><span class="p">,</span> <span class="n">odds</span><span class="p">)))</span></span><a href="#l23"></a>
+ <span id="l24"></span><a href="#l24"></a>
+ <span id="l25"><span class="kn">if</span> <span class="n">__name__</span> <span class="o">==</span> <span class="s">"__main__"</span><span class="p">:</span></span><a href="#l25"></a>
+ <span id="l26"> <span class="kn">import</span> <span class="nn">sys</span></span><a href="#l26"></a>
+ <span id="l27"> <span class="kn">try</span><span class="p">:</span></span><a href="#l27"></a>
+ <span id="l28"> <span class="n">n</span> <span class="o">=</span> <span class="nb">int</span><span class="p">(</span><span class="n">sys</span><span class="o">.</span><span class="n">argv</span><span class="p">[</span><span class="mi">1</span><span class="p">])</span></span><a href="#l28"></a>
+ <span id="l29"> <span class="kn">except</span> <span class="p">(</span><span class="ne">ValueError</span><span class="p">,</span> <span class="ne">IndexError</span><span class="p">):</span></span><a href="#l29"></a>
+ <span id="l30"> <span class="n">n</span> <span class="o">=</span> <span class="mi">10</span></span><a href="#l30"></a>
+ <span id="l31"> <span class="n">p</span> <span class="o">=</span> <span class="n">primes</span><span class="p">()</span></span><a href="#l31"></a>
+ <span id="l32"> <span class="kn">print</span> <span class="s">"The first </span><span class="si">%d</span><span class="s"> primes: </span><span class="si">%s</span><span class="s">"</span> <span class="o">%</span> <span class="p">(</span><span class="n">n</span><span class="p">,</span> <span class="nb">list</span><span class="p">(</span><span class="n">islice</span><span class="p">(</span><span class="n">p</span><span class="p">,</span> <span class="n">n</span><span class="p">)))</span></span><a href="#l32"></a></pre>
<div class="sourcelast"></div>
</div>
</div>
@@ -593,17 +593,14 @@
$ hgserveget euc-jp eucjp.txt
% HGENCODING=euc-jp hg serve
% hgweb filerevision, html
- <div class="parity0 source"><a href="#l1" id="l1"> 1</a> \xb5\xfe</div> (esc)
% errors encountered
$ hgserveget utf-8 eucjp.txt
% HGENCODING=utf-8 hg serve
% hgweb filerevision, html
- <div class="parity0 source"><a href="#l1" id="l1"> 1</a> \xef\xbf\xbd\xef\xbf\xbd</div> (esc)
% errors encountered
$ hgserveget us-ascii eucjp.txt
% HGENCODING=us-ascii hg serve
% hgweb filerevision, html
- <div class="parity0 source"><a href="#l1" id="l1"> 1</a> ??</div>
% errors encountered
$ cd ..
--- a/tests/test-histedit-edit.t Mon Jul 15 15:04:42 2013 +0200
+++ b/tests/test-histedit-edit.t Tue Jul 16 11:13:18 2013 -0500
@@ -152,6 +152,15 @@
When you are finished, run hg histedit --continue to resume.
$ hg status
A f
+
+ $ hg summary
+ parent: 5:a5e1ba2f7afb
+ foobaz
+ branch: default
+ commit: 1 added (new branch head)
+ update: 1 new changesets (update)
+ hist: 1 remaining (histedit --continue)
+
$ HGEDITOR='true' hg histedit --continue
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
saved backup bundle to $TESTTMP/r/.hg/strip-backup/b5f70786f9b0-backup.hg (glob)
--- a/tests/test-log.t Mon Jul 15 15:04:42 2013 +0200
+++ b/tests/test-log.t Tue Jul 16 11:13:18 2013 -0500
@@ -84,6 +84,25 @@
abort: cannot follow file not in parent revision: "dir"
[255]
+-f, a wrong style
+
+ $ hg log -f -l1 --style something
+ abort: style 'something' not found
+ (available styles: bisect, changelog, compact, default, phases, xml)
+ [255]
+
+-f, phases style
+
+
+ $ hg log -f -l1 --style phases
+ changeset: 4:7e4639b4691b
+ tag: tip
+ phase: draft
+ user: test
+ date: Thu Jan 01 00:00:05 1970 +0000
+ summary: e
+
+
-f, but no args
$ hg log -f
--- a/tests/test-merge-tools.t Mon Jul 15 15:04:42 2013 +0200
+++ b/tests/test-merge-tools.t Tue Jul 16 11:13:18 2013 -0500
@@ -281,7 +281,7 @@
environment variables in true.executable are handled:
- $ echo 'echo "custom merge tool"' > "$HGTMP/merge.sh"
+ $ echo 'echo "custom merge tool"' > .hg/merge.sh
$ beforemerge
[merge-tools]
false.whatever=
@@ -289,7 +289,7 @@
true.executable=cat
# hg update -C 1
$ hg --config merge-tools.true.executable='sh' \
- > --config merge-tools.true.args="$HGTMP/merge.sh" \
+ > --config merge-tools.true.args=.hg/merge.sh \
> merge -r 2
merging f
custom merge tool
--- a/tests/test-nested-repo.t Mon Jul 15 15:04:42 2013 +0200
+++ b/tests/test-nested-repo.t Tue Jul 16 11:13:18 2013 -0500
@@ -8,6 +8,9 @@
$ hg add b
$ hg st
+ $ echo y > b/y
+ $ hg st
+
Should fail:
$ hg st b/x
--- a/tests/test-pathencode.py Mon Jul 15 15:04:42 2013 +0200
+++ b/tests/test-pathencode.py Tue Jul 16 11:13:18 2013 -0500
@@ -5,7 +5,6 @@
# that have proven likely to expose bugs and divergent behaviour in
# different encoding implementations.
-from mercurial import parsers
from mercurial import store
import binascii, itertools, math, os, random, sys, time
import collections
--- a/tests/test-rebase-cache.t Mon Jul 15 15:04:42 2013 +0200
+++ b/tests/test-rebase-cache.t Tue Jul 16 11:13:18 2013 -0500
@@ -396,27 +396,6 @@
> [ui]
> logtemplate={rev} {desc} {phase}\n
> EOF
- $ cat $HGRCPATH
- [ui]
- slash = True
- interactive = False
- [defaults]
- backout = -d "0 0"
- commit = -d "0 0"
- tag = -d "0 0"
- [extensions]
- graphlog=
- rebase=
- mq=
-
- [phases]
- publish=False
-
- [alias]
- tglog = log -G --template "{rev}: '{desc}' {branches}\n"
- theads = heads --template "{rev}: '{desc}' {branches}\n"
- [ui]
- logtemplate={rev} {desc} {phase}\n
$ hg init c4
--- a/tests/test-rebase-parameters.t Mon Jul 15 15:04:42 2013 +0200
+++ b/tests/test-rebase-parameters.t Tue Jul 16 11:13:18 2013 -0500
@@ -419,6 +419,16 @@
unresolved conflicts (see hg resolve, then hg rebase --continue)
[1]
+ $ hg summary
+ parent: 1:56daeba07f4b
+ c2
+ parent: 2:e4e3f3546619 tip
+ c2b
+ branch: default
+ commit: 1 modified, 1 unresolved (merge)
+ update: (current)
+ rebase: 0 rebased, 1 remaining (rebase --continue)
+
$ hg resolve -l
U c2
--- a/tests/test-remove.t Mon Jul 15 15:04:42 2013 +0200
+++ b/tests/test-remove.t Tue Jul 16 11:13:18 2013 -0500
@@ -1,6 +1,6 @@
$ remove() {
> hg rm $@
- > echo "exit code: $?" # no-check-code
+ > echo "exit code: $?"
> hg st
> # do not use ls -R, which recurses in .hg subdirs on Mac OS X 10.5
> find . -name .hg -prune -o -type f -print | sort
--- a/tests/test-subrepo-paths.t Mon Jul 15 15:04:42 2013 +0200
+++ b/tests/test-subrepo-paths.t Tue Jul 16 11:13:18 2013 -0500
@@ -17,7 +17,7 @@
hg debugsub with remapping
$ echo '[subpaths]' >> .hg/hgrc
- $ printf 'http://example.net/lib(.*) = C:\\libs\\\\1-lib\\\n' >> .hg/hgrc # no-check-code
+ $ printf 'http://example.net/lib(.*) = C:\\libs\\\\1-lib\\\n' >> .hg/hgrc
$ hg debugsub
path sub
--- a/tests/test-symlinks.t Mon Jul 15 15:04:42 2013 +0200
+++ b/tests/test-symlinks.t Tue Jul 16 11:13:18 2013 -0500
@@ -160,6 +160,15 @@
adding bar/a
adding foo
removing foo/a
+
+commit and update back
+
+ $ hg ci -mb
+ $ hg up '.^'
+ 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
+ $ hg up tip
+ 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
+
$ cd ..
== root of repository is symlinked ==
--- a/tests/test-ui-color.py Mon Jul 15 15:04:42 2013 +0200
+++ b/tests/test-ui-color.py Tue Jul 16 11:13:18 2013 -0500
@@ -1,4 +1,4 @@
-import os, sys
+import os
from hgext import color
from mercurial import dispatch, ui