changeset 20111:9bfa86746c9c

Merge
author Bryan O'Sullivan <bryano@fb.com>
date Tue, 26 Nov 2013 21:55:21 -0800
parents 40b7c6e4b993 (diff) e57c532c3835 (current diff)
children 169f8141ba00
files mercurial/parsers.c
diffstat 72 files changed, 1220 insertions(+), 540 deletions(-)
--- a/.hgignore	Tue Nov 26 16:14:22 2013 -0800
+++ b/.hgignore	Tue Nov 26 21:55:21 2013 -0800
@@ -1,6 +1,7 @@
 syntax: glob
 
 *.elc
+*.tmp
 *.orig
 *.rej
 *~
--- a/Makefile	Tue Nov 26 16:14:22 2013 -0800
+++ b/Makefile	Tue Nov 26 21:55:21 2013 -0800
@@ -53,7 +53,8 @@
 
 clean:
 	-$(PYTHON) setup.py clean --all # ignore errors from this command
-	find . \( -name '*.py[cdo]' -o -name '*.so' \) -exec rm -f '{}' ';'
+	find contrib doc hgext i18n mercurial tests \
+		\( -name '*.py[cdo]' -o -name '*.so' \) -exec rm -f '{}' ';'
 	rm -f $(addprefix mercurial/,$(notdir $(wildcard mercurial/pure/[a-z]*.py)))
 	rm -f MANIFEST MANIFEST.in mercurial/__version__.py tests/*.err
 	rm -rf build mercurial/locale
@@ -107,7 +108,7 @@
 	  mercurial/fileset.py mercurial/revset.py \
 	  mercurial/templatefilters.py mercurial/templatekw.py \
 	  mercurial/filemerge.py \
-	  $(DOCFILES) > i18n/hg.pot
+	  $(DOCFILES) > i18n/hg.pot~
         # All strings marked for translation in Mercurial contain
         # ASCII characters only. But some files contain string
         # literals like this '\037\213'. xgettext thinks it has to
@@ -119,11 +120,17 @@
 	  --msgid-bugs-address "<mercurial-devel@selenic.com>" \
 	  --copyright-holder "Matt Mackall <mpm@selenic.com> and others" \
 	  --from-code ISO-8859-1 --join --sort-by-file --add-comments=i18n: \
-	  -d hg -p i18n -o hg.pot
-	$(PYTHON) i18n/posplit i18n/hg.pot
+	  -d hg -p i18n -o hg.pot~
+	$(PYTHON) i18n/posplit i18n/hg.pot~
+        # The target file is only created in the last step, so it is never
+        # left in an intermediate state.
+	mv -f i18n/hg.pot~ i18n/hg.pot
 
 %.po: i18n/hg.pot
-	msgmerge --no-location --update $@ $^
+        # work on a temporary copy so the target is never left half-completed
+	cp $@ $@.tmp
+	msgmerge --no-location --update $@.tmp $^
+	mv -f $@.tmp $@
 
 .PHONY: help all local build doc clean install install-bin install-doc \
 	install-home install-home-bin install-home-doc dist dist-notests tests \
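
The i18n rules above follow a write-to-a-temporary-file-then-rename pattern, so an interrupted run never leaves a half-written hg.pot or .po file behind. A minimal standalone sketch of the same idea, with a made-up helper and filename (not part of the build)::

    import os

    def atomicwrite(path, data):
        # write everything to a temporary sibling first ...
        tmp = path + '.tmp'
        f = open(tmp, 'wb')
        try:
            f.write(data)
        finally:
            f.close()
        # ... then move it into place in one step (atomic on POSIX)
        os.rename(tmp, path)

    atomicwrite('hg.pot', 'msgid ""\n')
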
--- a/contrib/check-code.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/contrib/check-code.py	Tue Nov 26 21:55:21 2013 -0800
@@ -141,17 +141,15 @@
     (r'^  saved backup bundle to \$TESTTMP.*\.hg$', winglobmsg),
     (r'^  changeset .* references (corrupted|missing) \$TESTTMP/.*[^)]$',
      winglobmsg),
-    (r'^  pulling from \$TESTTMP/.*[^)]$', winglobmsg, '\$TESTTMP/unix-repo$'),
-    (r'^  reverting .*/.*[^)]$', winglobmsg, '\$TESTTMP/unix-repo$'),
-    (r'^  cloning subrepo \S+/.*[^)]$', winglobmsg, '\$TESTTMP/unix-repo$'),
-    (r'^  pushing to \$TESTTMP/.*[^)]$', winglobmsg, '\$TESTTMP/unix-repo$'),
-    (r'^  pushing subrepo \S+/\S+ to.*[^)]$', winglobmsg,
-     '\$TESTTMP/unix-repo$'),
+    (r'^  pulling from \$TESTTMP/.*[^)]$', winglobmsg,
+     '\$TESTTMP/unix-repo$'), # in test-issue1802.t, skipped on Windows
+    (r'^  reverting .*/.*[^)]$', winglobmsg),
+    (r'^  cloning subrepo \S+/.*[^)]$', winglobmsg),
+    (r'^  pushing to \$TESTTMP/.*[^)]$', winglobmsg),
+    (r'^  pushing subrepo \S+/\S+ to.*[^)]$', winglobmsg),
     (r'^  moving \S+/.*[^)]$', winglobmsg),
-    (r'^  no changes made to subrepo since.*/.*[^)]$',
-     winglobmsg, '\$TESTTMP/unix-repo$'),
-    (r'^  .*: largefile \S+ not available from file:.*/.*[^)]$',
-     winglobmsg, '\$TESTTMP/unix-repo$'),
+    (r'^  no changes made to subrepo since.*/.*[^)]$', winglobmsg),
+    (r'^  .*: largefile \S+ not available from file:.*/.*[^)]$', winglobmsg),
   ],
   # warnings
   [
@@ -264,7 +262,7 @@
     (r'[\s\(](open|file)\([^)]*\)\.read\(',
      "use util.readfile() instead"),
     (r'[\s\(](open|file)\([^)]*\)\.write\(',
-     "use util.readfile() instead"),
+     "use util.writefile() instead"),
     (r'^[\s\(]*(open(er)?|file)\([^)]*\)',
      "always assign an opened file to a variable, and close it afterwards"),
     (r'[\s\(](open|file)\([^)]*\)\.',
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/editmerge	Tue Nov 26 21:55:21 2013 -0800
@@ -0,0 +1,58 @@
+#!/bin/bash
+# A simple script for opening merge conflicts in the editor.
+# Use the following Mercurial settings to enable it.
+#
+# [ui]
+# merge = editmerge
+#
+# [merge-tools]
+# editmerge.args=$output
+# editmerge.check=changed
+# editmerge.premerge=keep
+
+FILE=$1
+
+getlines() {
+  grep -n "<<<<<<" $FILE | cut -f1 -d:
+}
+
+# editor preference loosely based on http://mercurial.selenic.com/wiki/editor
+# hg showconfig is at the bottom though, since it's slow to run (0.15 seconds)
+ED=$HGEDITOR
+if [ "$ED" = "" ] ; then
+  ED=$VISUAL
+fi
+if [ "$ED" = "" ] ; then
+  ED=$EDITOR
+fi
+if [ "$ED" = "" ] ; then
+  ED=$(hg showconfig ui.editor)
+fi
+if [ "$ED" = "" ] ; then
+  echo "merge failed - unable to find editor"
+  exit 1
+fi
+
+if [ "$ED" = "emacs" ] || [ "$ED" = "nano" ] || [ "$ED" = "vim" ] ; then
+  FIRSTLINE=$(getlines | head -n 1)
+  PREVIOUSLINE=""
+
+  # open the editor to the first conflict until there are no more
+  # or the user stops editing the file
+  while [ ! "$FIRSTLINE" = "" ] && [ ! "$FIRSTLINE" = "$PREVIOUSLINE" ] ; do
+    $ED +$FIRSTLINE $FILE
+    PREVIOUSLINE=$FIRSTLINE
+    FIRSTLINE=$(getlines | head -n 1)
+  done
+else
+  $ED $FILE
+fi
+
+# get the line numbers of the remaining conflicts
+CONFLICTS=$(getlines | sed ':a;N;$!ba;s/\n/, /g')
+if [ ! "$CONFLICTS" = "" ] ; then
+  echo "merge failed - resolve the conflicts (line $CONFLICTS) then use 'hg resolve --mark'"
+  exit 1
+fi
+
+exit 0
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/import-checker.py	Tue Nov 26 21:55:21 2013 -0800
@@ -0,0 +1,221 @@
+import ast
+import os
+import sys
+
+def dotted_name_of_path(path):
+    """Given a relative path to a source file, return its dotted module name.
+
+
+    >>> dotted_name_of_path('mercurial/error.py')
+    'mercurial.error'
+    """
+    parts = path.split('/')
+    parts[-1] = parts[-1][:-3] # remove .py
+    return '.'.join(parts)
+
+
+def list_stdlib_modules():
+    """List the modules present in the stdlib.
+
+    >>> mods = set(list_stdlib_modules())
+    >>> 'BaseHTTPServer' in mods
+    True
+
+    os.path isn't really a module, so it's missing:
+
+    >>> 'os.path' in mods
+    False
+
+    sys requires special treatment, because it's baked into the
+    interpreter, but it should still appear:
+
+    >>> 'sys' in mods
+    True
+
+    >>> 'collections' in mods
+    True
+
+    >>> 'cStringIO' in mods
+    True
+    """
+    for m in sys.builtin_module_names:
+        yield m
+    # These modules only exist on windows, but we should always
+    # consider them stdlib.
+    for m in ['msvcrt', '_winreg']:
+        yield m
+    # These get missed too
+    for m in 'ctypes', 'email':
+        yield m
+    yield 'builtins' # python3 only
+    for libpath in sys.path:
+        # We want to walk everything in sys.path that starts with
+        # either sys.prefix or sys.exec_prefix.
+        if not (libpath.startswith(sys.prefix)
+                or libpath.startswith(sys.exec_prefix)):
+            continue
+        if 'site-packages' in libpath:
+            continue
+        for top, dirs, files in os.walk(libpath):
+            for name in files:
+                if name == '__init__.py':
+                    continue
+                if not (name.endswith('.py') or name.endswith('.so')):
+                    continue
+                full_path = os.path.join(top, name)
+                if 'site-packages' in full_path:
+                    continue
+                rel_path = full_path[len(libpath) + 1:]
+                mod = dotted_name_of_path(rel_path)
+                yield mod
+
+stdlib_modules = set(list_stdlib_modules())
+
+def imported_modules(source, ignore_nested=False):
+    """Given the source of a file as a string, yield the names
+    imported by that file.
+
+    Args:
+      source: The python source to examine as a string.
+      ignore_nested: If true, import statements that do not start in
+                     column zero will be ignored.
+
+    Returns:
+      A list of module names imported by the given source.
+
+    >>> sorted(imported_modules(
+    ...         'import foo ; from baz import bar; import foo.qux'))
+    ['baz.bar', 'foo', 'foo.qux']
+    >>> sorted(imported_modules(
+    ... '''import foo
+    ... def wat():
+    ...     import bar
+    ... ''', ignore_nested=True))
+    ['foo']
+    """
+    for node in ast.walk(ast.parse(source)):
+        if ignore_nested and getattr(node, 'col_offset', 0) > 0:
+            continue
+        if isinstance(node, ast.Import):
+            for n in node.names:
+                yield n.name
+        elif isinstance(node, ast.ImportFrom):
+            prefix = node.module + '.'
+            for n in node.names:
+                yield prefix + n.name
+
+def verify_stdlib_on_own_line(source):
+    """Given some python source, verify that stdlib imports are done
+    in separate statements from relative local module imports.
+
+    Observing this limitation is important as it works around an
+    annoying lib2to3 bug in relative import rewrites:
+    http://bugs.python.org/issue19510.
+
+    >>> list(verify_stdlib_on_own_line('import sys, foo'))
+    ['mixed stdlib and relative imports:\\n   foo, sys']
+    >>> list(verify_stdlib_on_own_line('import sys, os'))
+    []
+    >>> list(verify_stdlib_on_own_line('import foo, bar'))
+    []
+    """
+    for node in ast.walk(ast.parse(source)):
+        if isinstance(node, ast.Import):
+            from_stdlib = {}
+            for n in node.names:
+                from_stdlib[n.name] = n.name in stdlib_modules
+            num_std = len([x for x in from_stdlib.values() if x])
+            if num_std not in (len(from_stdlib.values()), 0):
+                yield ('mixed stdlib and relative imports:\n   %s' %
+                       ', '.join(sorted(from_stdlib.iterkeys())))
+
+class CircularImport(Exception):
+    pass
+
+
+def cyclekey(names):
+    return tuple(sorted(set(names)))
+
+def check_one_mod(mod, imports, path=None, ignore=None):
+    if path is None:
+        path = []
+    if ignore is None:
+        ignore = []
+    path = path + [mod]
+    for i in sorted(imports.get(mod, [])):
+        if i not in stdlib_modules:
+            i = mod.rsplit('.', 1)[0] + '.' + i
+        if i in path:
+            firstspot = path.index(i)
+            cycle = path[firstspot:] + [i]
+            if cyclekey(cycle) not in ignore:
+                raise CircularImport(cycle)
+            continue
+        check_one_mod(i, imports, path=path, ignore=ignore)
+
+def rotatecycle(cycle):
+    """arrange a cycle so that the lexicographically first module listed first
+
+    >>> rotatecycle(['foo', 'bar', 'foo'])
+    ['bar', 'foo', 'bar']
+    """
+    lowest = min(cycle)
+    idx = cycle.index(lowest)
+    return cycle[idx:] + cycle[1:idx] + [lowest]
+
+def find_cycles(imports):
+    """Find cycles in an already-loaded import graph.
+
+    >>> imports = {'top.foo': ['bar', 'os.path', 'qux'],
+    ...            'top.bar': ['baz', 'sys'],
+    ...            'top.baz': ['foo'],
+    ...            'top.qux': ['foo']}
+    >>> print '\\n'.join(sorted(find_cycles(imports)))
+    top.bar -> top.baz -> top.foo -> top.bar -> top.bar
+    top.foo -> top.qux -> top.foo -> top.foo
+    """
+    cycles = {}
+    for mod in sorted(imports.iterkeys()):
+        try:
+            check_one_mod(mod, imports, ignore=cycles)
+        except CircularImport, e:
+            cycle = e.args[0]
+            cycles[cyclekey(cycle)] = ' -> '.join(rotatecycle(cycle))
+    return cycles.values()
+
+def _cycle_sortkey(c):
+    return len(c), c
+
+def main(argv):
+    if len(argv) < 2:
+        print 'Usage: %s file [file] [file] ...' % argv[0]
+        return 1
+    used_imports = {}
+    any_errors = False
+    for source_path in argv[1:]:
+        f = open(source_path)
+        modname = dotted_name_of_path(source_path)
+        src = f.read()
+        used_imports[modname] = sorted(
+            imported_modules(src, ignore_nested=True))
+        for error in verify_stdlib_on_own_line(src):
+            any_errors = True
+            print source_path, error
+        f.close()
+    cycles = find_cycles(used_imports)
+    if cycles:
+        firstmods = set()
+        for c in sorted(cycles, key=_cycle_sortkey):
+            first = c.split()[0]
+            # As a rough cut, ignore any cycle that starts with the
+            # same module as some other cycle. Otherwise we see lots
+            # of cycles that are effectively duplicates.
+            if first in firstmods:
+                continue
+            print 'Import cycle:', c
+            firstmods.add(first)
+        any_errors = True
+    return not any_errors
+
+if __name__ == '__main__':
+    sys.exit(int(main(sys.argv)))
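
import-checker.py canonicalizes every detected cycle before recording it, so the same import loop entered from different modules is reported only once. A tiny standalone sketch of that canonicalization, mirroring cyclekey() above (the script itself is not importable, its filename contains a dash)::

    def cyclekey(names):
        return tuple(sorted(set(names)))

    # two traversals of the same import loop, entered at different modules
    c1 = ['top.foo', 'top.bar', 'top.foo']
    c2 = ['top.bar', 'top.foo', 'top.bar']
    print cyclekey(c1) == cyclekey(c2)   # True, so the second is suppressed
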
--- a/contrib/perf.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/contrib/perf.py	Tue Nov 26 21:55:21 2013 -0800
@@ -386,7 +386,7 @@
     allfilters = []
     while possiblefilters:
         for name in possiblefilters:
-            subset = repoview.subsettable.get(name)
+            subset = branchmap.subsettable.get(name)
             if subset not in possiblefilters:
                 break
         else:
--- a/doc/gendoc.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/doc/gendoc.py	Tue Nov 26 21:55:21 2013 -0800
@@ -40,11 +40,16 @@
             shortopt, longopt, default, desc, optlabel = opt
         else:
             shortopt, longopt, default, desc = opt
+            optlabel = _("VALUE")
         allopts = []
         if shortopt:
             allopts.append("-%s" % shortopt)
         if longopt:
             allopts.append("--%s" % longopt)
+        if isinstance(default, list):
+            allopts[-1] += " <%s[+]>" % optlabel
+        elif (default is not None) and not isinstance(default, bool):
+            allopts[-1] += " <%s>" % optlabel
         desc += default and _(" (default: %s)") % default or ""
         yield (", ".join(allopts), desc)
 
@@ -71,8 +76,14 @@
 def showdoc(ui):
     # print options
     ui.write(minirst.section(_("Options")))
+    multioccur = False
     for optstr, desc in get_opts(globalopts):
         ui.write("%s\n    %s\n\n" % (optstr, desc))
+        if optstr.endswith("[+]>"):
+            multioccur = True
+    if multioccur:
+        ui.write(_("\n[+] marked option can be specified multiple times\n"))
+        ui.write("\n")
 
     # print cmds
     ui.write(minirst.section(_("Commands")))
@@ -157,12 +168,18 @@
         if opt_output:
             opts_len = max([len(line[0]) for line in opt_output])
             ui.write(_("Options:\n\n"))
+            multioccur = False
             for optstr, desc in opt_output:
                 if desc:
                     s = "%-*s  %s" % (opts_len, optstr, desc)
                 else:
                     s = optstr
                 ui.write("%s\n" % s)
+                if optstr.endswith("[+]>"):
+                    multioccur = True
+            if multioccur:
+                ui.write(_("\n[+] marked option can be specified"
+                           " multiple times\n"))
             ui.write("\n")
         # aliases
         if d['aliases']:
--- a/hgext/convert/filemap.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/hgext/convert/filemap.py	Tue Nov 26 21:55:21 2013 -0800
@@ -10,12 +10,20 @@
 from mercurial import util, error
 from common import SKIPREV, converter_source
 
-def rpairs(name):
-    e = len(name)
-    while e != -1:
-        yield name[:e], name[e + 1:]
-        e = name.rfind('/', 0, e)
-    yield '.', name
+def rpairs(path):
+    '''Yield tuples with path split at '/', starting with the full path.
+    No leading, trailing or double '/', please.
+    >>> for x in rpairs('foo/bar/baz'): print x
+    ('foo/bar/baz', '')
+    ('foo/bar', 'baz')
+    ('foo', 'bar/baz')
+    ('.', 'foo/bar/baz')
+    '''
+    i = len(path)
+    while i != -1:
+        yield path[:i], path[i + 1:]
+        i = path.rfind('/', 0, i)
+    yield '.', path
 
 def normalize(path):
     ''' We use posixpath.normpath to support cross-platform path format.
--- a/hgext/keyword.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/hgext/keyword.py	Tue Nov 26 21:55:21 2013 -0800
@@ -84,7 +84,7 @@
 
 from mercurial import commands, context, cmdutil, dispatch, filelog, extensions
 from mercurial import localrepo, match, patch, templatefilters, templater, util
-from mercurial import scmutil
+from mercurial import scmutil, pathutil
 from mercurial.hgweb import webcommands
 from mercurial.i18n import _
 import os, re, shutil, tempfile
@@ -673,7 +673,7 @@
                 expansion. '''
                 source = repo.dirstate.copied(dest)
                 if 'l' in wctx.flags(source):
-                    source = scmutil.canonpath(repo.root, cwd,
+                    source = pathutil.canonpath(repo.root, cwd,
                                                os.path.realpath(source))
                 return kwt.match(source)
 
--- a/hgext/largefiles/overrides.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/hgext/largefiles/overrides.py	Tue Nov 26 21:55:21 2013 -0800
@@ -12,7 +12,7 @@
 import copy
 
 from mercurial import hg, commands, util, cmdutil, scmutil, match as match_, \
-    node, archival, error, merge, discovery
+    node, archival, error, merge, discovery, pathutil
 from mercurial.i18n import _
 from mercurial.node import hex
 from hgext import rebase
@@ -469,7 +469,7 @@
         return orig(ui, repo, pats, opts, rename)
 
     def makestandin(relpath):
-        path = scmutil.canonpath(repo.root, repo.getcwd(), relpath)
+        path = pathutil.canonpath(repo.root, repo.getcwd(), relpath)
         return os.path.join(repo.wjoin(lfutil.standin(path)))
 
     fullpats = scmutil.expandpats(pats)
--- a/hgext/mq.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/hgext/mq.py	Tue Nov 26 21:55:21 2013 -0800
@@ -2565,8 +2565,10 @@
         ph = patchheader(q.join(parent), q.plainmode)
         message, user = ph.message, ph.user
         for msg in messages:
-            message.append('* * *')
-            message.extend(msg)
+            if msg:
+                if message:
+                    message.append('* * *')
+                message.extend(msg)
         message = '\n'.join(message)
 
     if opts.get('edit'):
--- a/hgext/relink.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/hgext/relink.py	Tue Nov 26 21:55:21 2013 -0800
@@ -48,6 +48,10 @@
         ui.status(_('there is nothing to relink\n'))
         return
 
+    if not util.samedevice(src.store.path, repo.store.path):
+        # No point in continuing
+        raise util.Abort(_('source and destination are on different devices'))
+
     locallock = repo.lock()
     try:
         remotelock = src.lock()
--- a/hgext/strip.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/hgext/strip.py	Tue Nov 26 21:55:21 2013 -0800
@@ -182,37 +182,32 @@
 
         revs = sorted(rootnodes)
         if update and opts.get('keep'):
-            wlock = repo.wlock()
-            try:
-                urev, p2 = repo.changelog.parents(revs[0])
-                if (util.safehasattr(repo, 'mq') and p2 != nullid
-                    and p2 in [x.node for x in repo.mq.applied]):
-                    urev = p2
-                uctx = repo[urev]
+            urev, p2 = repo.changelog.parents(revs[0])
+            if (util.safehasattr(repo, 'mq') and p2 != nullid
+                and p2 in [x.node for x in repo.mq.applied]):
+                urev = p2
+            uctx = repo[urev]
 
-                # only reset the dirstate for files that would actually change
-                # between the working context and uctx
-                descendantrevs = repo.revs("%s::." % uctx.rev())
-                changedfiles = []
-                for rev in descendantrevs:
-                    # blindly reset the files, regardless of what actually
-                    # changed
-                    changedfiles.extend(repo[rev].files())
+            # only reset the dirstate for files that would actually change
+            # between the working context and uctx
+            descendantrevs = repo.revs("%s::." % uctx.rev())
+            changedfiles = []
+            for rev in descendantrevs:
+                # blindly reset the files, regardless of what actually changed
+                changedfiles.extend(repo[rev].files())
 
-                # reset files that only changed in the dirstate too
-                dirstate = repo.dirstate
-                dirchanges = [f for f in dirstate if dirstate[f] != 'n']
-                changedfiles.extend(dirchanges)
+            # reset files that only changed in the dirstate too
+            dirstate = repo.dirstate
+            dirchanges = [f for f in dirstate if dirstate[f] != 'n']
+            changedfiles.extend(dirchanges)
 
-                repo.dirstate.rebuild(urev, uctx.manifest(), changedfiles)
-                repo.dirstate.write()
-                update = False
-            finally:
-                wlock.release()
+            repo.dirstate.rebuild(urev, uctx.manifest(), changedfiles)
+            repo.dirstate.write()
+            update = False
 
         if opts.get('bookmark'):
             if mark == repo._bookmarkcurrent:
-                bookmarks.setcurrent(repo, None)
+                bookmarks.unsetcurrent(repo)
             del marks[mark]
             marks.write()
             ui.write(_("bookmark '%s' deleted\n") % mark)
--- a/hgext/transplant.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/hgext/transplant.py	Tue Nov 26 21:55:21 2013 -0800
@@ -154,7 +154,7 @@
                     # transplants before them fail.
                     domerge = True
                     if not hasnode(repo, node):
-                        repo.pull(source, heads=[node])
+                        repo.pull(source.peer(), heads=[node])
 
                 skipmerge = False
                 if parents[1] != revlog.nullid:
--- a/mercurial/ancestor.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/mercurial/ancestor.py	Tue Nov 26 21:55:21 2013 -0800
@@ -5,7 +5,8 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-import heapq, util
+import heapq
+import util
 from node import nullrev
 
 def ancestors(pfunc, *orignodes):
--- a/mercurial/bookmarks.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/mercurial/bookmarks.py	Tue Nov 26 21:55:21 2013 -0800
@@ -6,7 +6,7 @@
 # GNU General Public License version 2 or any later version.
 
 from mercurial.i18n import _
-from mercurial.node import hex
+from mercurial.node import hex, bin
 from mercurial import encoding, error, util, obsolete
 import errno
 
@@ -58,7 +58,7 @@
         '''
         repo = self._repo
         if repo._bookmarkcurrent not in self:
-            setcurrent(repo, None)
+            unsetcurrent(repo)
 
         wlock = repo.wlock()
         try:
@@ -106,13 +106,13 @@
     Set the name of the bookmark that we are on (hg update <bookmark>).
     The name is recorded in .hg/bookmarks.current
     '''
+    if mark not in repo._bookmarks:
+        raise AssertionError('bookmark %s does not exist!' % mark)
+
     current = repo._bookmarkcurrent
     if current == mark:
         return
 
-    if mark not in repo._bookmarks:
-        mark = ''
-
     wlock = repo.wlock()
     try:
         file = repo.opener('bookmarks.current', 'w', atomictemp=True)
@@ -239,49 +239,176 @@
     finally:
         w.release()
 
+def compare(repo, srcmarks, dstmarks,
+            srchex=None, dsthex=None, targets=None):
+    '''Compare bookmarks between srcmarks and dstmarks
+
+    This returns a tuple "(addsrc, adddst, advsrc, advdst, diverge,
+    differ, invalid)", where each element is a list of bookmarks:
+
+    :addsrc:  added on src side (removed on dst side, perhaps)
+    :adddst:  added on dst side (removed on src side, perhaps)
+    :advsrc:  advanced on src side
+    :advdst:  advanced on dst side
+    :diverge: diverge
+    :differ:  changed, but the changeset referred to on src is unknown on dst
+    :invalid: unknown on both sides
+
+    Each element of the lists in the result tuple is a tuple "(bookmark
+    name, changeset ID on source side, changeset ID on destination
+    side)". Each changeset ID is a 40-digit hexadecimal string or
+    None.
+
+    Changeset IDs of tuples in the "addsrc", "adddst", "differ" or
+    "invalid" lists may be unknown to the repo.
+
+    This function expects that "srcmarks" and "dstmarks" return the
+    changeset ID as a 40-digit hexadecimal string for a given
+    bookmark. If they do not (e.g. the bmstore "repo._bookmarks" returns
+    binary values), "srchex" or "dsthex" should be specified to convert
+    values into that form.
+
+    If "targets" is specified, only bookmarks listed in it are
+    examined.
+    '''
+    if not srchex:
+        srchex = lambda x: x
+    if not dsthex:
+        dsthex = lambda x: x
+
+    if targets:
+        bset = set(targets)
+    else:
+        srcmarkset = set(srcmarks)
+        dstmarkset = set(dstmarks)
+        bset = srcmarkset ^ dstmarkset
+        for b in srcmarkset & dstmarkset:
+            if srchex(srcmarks[b]) != dsthex(dstmarks[b]):
+                bset.add(b)
+
+    results = ([], [], [], [], [], [], [])
+    addsrc = results[0].append
+    adddst = results[1].append
+    advsrc = results[2].append
+    advdst = results[3].append
+    diverge = results[4].append
+    differ = results[5].append
+    invalid = results[6].append
+
+    for b in sorted(bset):
+        if b not in srcmarks:
+            if b in dstmarks:
+                adddst((b, None, dsthex(dstmarks[b])))
+            else:
+                invalid((b, None, None))
+        elif b not in dstmarks:
+            addsrc((b, srchex(srcmarks[b]), None))
+        else:
+            scid = srchex(srcmarks[b])
+            dcid = dsthex(dstmarks[b])
+            if scid in repo and dcid in repo:
+                sctx = repo[scid]
+                dctx = repo[dcid]
+                if sctx.rev() < dctx.rev():
+                    if validdest(repo, sctx, dctx):
+                        advdst((b, scid, dcid))
+                    else:
+                        diverge((b, scid, dcid))
+                else:
+                    if validdest(repo, dctx, sctx):
+                        advsrc((b, scid, dcid))
+                    else:
+                        diverge((b, scid, dcid))
+            else:
+                # it is too expensive to examine in detail, in this case
+                differ((b, scid, dcid))
+
+    return results
+
+def _diverge(ui, b, path, localmarks):
+    if b == '@':
+        b = ''
+    # find a unique @ suffix
+    for x in range(1, 100):
+        n = '%s@%d' % (b, x)
+        if n not in localmarks:
+            break
+    # try to use an @pathalias suffix
+    # if an @pathalias already exists, we overwrite (update) it
+    for p, u in ui.configitems("paths"):
+        if path == u:
+            n = '%s@%s' % (b, p)
+    return n
+
 def updatefromremote(ui, repo, remotemarks, path):
     ui.debug("checking for updated bookmarks\n")
-    changed = False
     localmarks = repo._bookmarks
-    for k in sorted(remotemarks):
-        if k in localmarks:
-            nr, nl = remotemarks[k], localmarks[k]
-            if nr in repo:
-                cr = repo[nr]
-                cl = repo[nl]
-                if cl.rev() >= cr.rev():
-                    continue
-                if validdest(repo, cl, cr):
-                    localmarks[k] = cr.node()
-                    changed = True
-                    ui.status(_("updating bookmark %s\n") % k)
-                else:
-                    if k == '@':
-                        kd = ''
-                    else:
-                        kd = k
-                    # find a unique @ suffix
-                    for x in range(1, 100):
-                        n = '%s@%d' % (kd, x)
-                        if n not in localmarks:
-                            break
-                    # try to use an @pathalias suffix
-                    # if an @pathalias already exists, we overwrite (update) it
-                    for p, u in ui.configitems("paths"):
-                        if path == u:
-                            n = '%s@%s' % (kd, p)
+    (addsrc, adddst, advsrc, advdst, diverge, differ, invalid
+     ) = compare(repo, remotemarks, localmarks, dsthex=hex)
+
+    changed = []
+    for b, scid, dcid in addsrc:
+        if scid in repo: # add remote bookmarks for changes we already have
+            changed.append((b, bin(scid), ui.status,
+                            _("adding remote bookmark %s\n") % (b)))
+    for b, scid, dcid in advsrc:
+        changed.append((b, bin(scid), ui.status,
+                        _("updating bookmark %s\n") % (b)))
+    for b, scid, dcid in diverge:
+        db = _diverge(ui, b, path, localmarks)
+        changed.append((db, bin(scid), ui.warn,
+                        _("divergent bookmark %s stored as %s\n") % (b, db)))
+    if changed:
+        for b, node, writer, msg in sorted(changed):
+            localmarks[b] = node
+            writer(msg)
+        localmarks.write()
+
+def updateremote(ui, repo, remote, revs):
+    ui.debug("checking for updated bookmarks\n")
+    revnums = map(repo.changelog.rev, revs or [])
+    ancestors = [a for a in repo.changelog.ancestors(revnums, inclusive=True)]
+    (addsrc, adddst, advsrc, advdst, diverge, differ, invalid
+     ) = compare(repo, repo._bookmarks, remote.listkeys('bookmarks'),
+                 srchex=hex)
 
-                    localmarks[n] = cr.node()
-                    changed = True
-                    ui.warn(_("divergent bookmark %s stored as %s\n") % (k, n))
-        elif remotemarks[k] in repo:
-            # add remote bookmarks for changes we already have
-            localmarks[k] = repo[remotemarks[k]].node()
-            changed = True
-            ui.status(_("adding remote bookmark %s\n") % k)
+    for b, scid, dcid in advsrc:
+        if ancestors and repo[scid].rev() not in ancestors:
+            continue
+        if remote.pushkey('bookmarks', b, dcid, scid):
+            ui.status(_("updating bookmark %s\n") % b)
+        else:
+            ui.warn(_('updating bookmark %s failed!\n') % b)
+
+def pushtoremote(ui, repo, remote, targets):
+    (addsrc, adddst, advsrc, advdst, diverge, differ, invalid
+     ) = compare(repo, repo._bookmarks, remote.listkeys('bookmarks'),
+                 srchex=hex, targets=targets)
+    if invalid:
+        b, scid, dcid = invalid[0]
+        ui.warn(_('bookmark %s does not exist on the local '
+                  'or remote repository!\n') % b)
+        return 2
 
-    if changed:
-        localmarks.write()
+    def push(b, old, new):
+        r = remote.pushkey('bookmarks', b, old, new)
+        if not r:
+            ui.warn(_('updating bookmark %s failed!\n') % b)
+            return 1
+        return 0
+    failed = 0
+    for b, scid, dcid in sorted(addsrc + advsrc + advdst + diverge + differ):
+        ui.status(_("exporting bookmark %s\n") % b)
+        if dcid is None:
+            dcid = ''
+        failed += push(b, dcid, scid)
+    for b, scid, dcid in adddst:
+        # treat as "deleted locally"
+        ui.status(_("deleting remote bookmark %s\n") % b)
+        failed += push(b, dcid, '')
+
+    if failed:
+        return 1
 
 def diff(ui, dst, src):
     ui.status(_("searching for changed bookmarks\n"))
--- a/mercurial/branchmap.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/mercurial/branchmap.py	Tue Nov 26 21:55:21 2013 -0800
@@ -7,7 +7,7 @@
 
 from node import bin, hex, nullid, nullrev
 import encoding
-import util, repoview
+import util
 
 def _filename(repo):
     """name of a branchcache file for a given repo or repoview"""
@@ -58,6 +58,17 @@
 
 
 
+### Nearest subset relation
+# Nearest subset of filter X is a filter Y so that:
+# * Y is included in X,
+# * X - Y is as small as possible.
+# This creates an ordering used for branchmap purposes.
+# The ordering may be partial.
+subsettable = {None: 'visible',
+               'visible': 'served',
+               'served': 'immutable',
+               'immutable': 'base'}
+
 def updatecache(repo):
     cl = repo.changelog
     filtername = repo.filtername
@@ -67,7 +78,7 @@
     if partial is None or not partial.validfor(repo):
         partial = read(repo)
         if partial is None:
-            subsetname = repoview.subsettable.get(filtername)
+            subsetname = subsettable.get(filtername)
             if subsetname is None:
                 partial = branchcache()
             else:
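
The subsettable moved here from repoview maps each repository filter to the nearest filter whose cached branchmap can be reused, which is what updatecache() walks when no valid cache exists. A short illustrative sketch of that chain::

    from mercurial import branchmap

    chain = [None]                  # None stands for the unfiltered view
    while chain[-1] in branchmap.subsettable:
        chain.append(branchmap.subsettable[chain[-1]])
    print ' -> '.join(str(f) for f in chain)
    # None -> visible -> served -> immutable -> base
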
--- a/mercurial/cmdutil.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/mercurial/cmdutil.py	Tue Nov 26 21:55:21 2013 -0800
@@ -10,7 +10,7 @@
 import os, sys, errno, re, tempfile
 import util, scmutil, templater, patch, error, templatekw, revlog, copies
 import match as matchmod
-import subrepo, context, repair, graphmod, revset, phases, obsolete
+import subrepo, context, repair, graphmod, revset, phases, obsolete, pathutil
 import changelog
 import bookmarks
 import lock as lockmod
@@ -274,7 +274,7 @@
     # relsrc: ossep
     # otarget: ossep
     def copyfile(abssrc, relsrc, otarget, exact):
-        abstarget = scmutil.canonpath(repo.root, cwd, otarget)
+        abstarget = pathutil.canonpath(repo.root, cwd, otarget)
         if '/' in abstarget:
             # We cannot normalize abstarget itself, this would prevent
             # case only renames, like a => A.
@@ -367,7 +367,7 @@
     # return: function that takes hgsep and returns ossep
     def targetpathfn(pat, dest, srcs):
         if os.path.isdir(pat):
-            abspfx = scmutil.canonpath(repo.root, cwd, pat)
+            abspfx = pathutil.canonpath(repo.root, cwd, pat)
             abspfx = util.localpath(abspfx)
             if destdirexists:
                 striplen = len(os.path.split(abspfx)[0])
@@ -393,7 +393,7 @@
             res = lambda p: os.path.join(dest,
                                          os.path.basename(util.localpath(p)))
         else:
-            abspfx = scmutil.canonpath(repo.root, cwd, pat)
+            abspfx = pathutil.canonpath(repo.root, cwd, pat)
             if len(abspfx) < len(srcs[0][0]):
                 # A directory. Either the target path contains the last
                 # component of the source path or it does not.
@@ -2063,7 +2063,7 @@
                 fc = ctx[f]
                 repo.wwrite(f, fc.data(), fc.flags())
 
-            audit_path = scmutil.pathauditor(repo.root)
+            audit_path = pathutil.pathauditor(repo.root)
             for f in remove[0]:
                 if repo.dirstate[f] == 'a':
                     repo.dirstate.drop(f)
--- a/mercurial/commands.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/mercurial/commands.py	Tue Nov 26 21:55:21 2013 -0800
@@ -17,7 +17,8 @@
 import merge as mergemod
 import minirst, revset, fileset
 import dagparser, context, simplemerge, graphmod
-import random, setdiscovery, treediscovery, dagutil, pvec, localrepo
+import random
+import setdiscovery, treediscovery, dagutil, pvec, localrepo
 import phases, obsolete
 
 table = {}
@@ -700,7 +701,7 @@
                 ui.status(_('changeset %d:%s: %s\n') % (ctx, ctx, transition))
                 check_state(state, interactive=False)
                 # bisect
-                nodes, changesets, good = hbisect.bisect(repo.changelog, state)
+                nodes, changesets, bgood = hbisect.bisect(repo.changelog, state)
                 # update to next check
                 node = nodes[0]
                 if not noupdate:
@@ -709,7 +710,7 @@
         finally:
             state['current'] = [node]
             hbisect.save_state(repo, state)
-        print_result(nodes, good)
+        print_result(nodes, bgood)
         return
 
     # update state
@@ -866,7 +867,7 @@
             if mark not in marks:
                 raise util.Abort(_("bookmark '%s' does not exist") % mark)
             if mark == repo._bookmarkcurrent:
-                bookmarks.setcurrent(repo, None)
+                bookmarks.unsetcurrent(repo)
             del marks[mark]
         marks.write()
 
@@ -892,7 +893,7 @@
             if newact is None:
                 newact = mark
             if inactive and mark == repo._bookmarkcurrent:
-                bookmarks.setcurrent(repo, None)
+                bookmarks.unsetcurrent(repo)
                 return
             tgt = cur
             if rev:
@@ -902,7 +903,7 @@
         if not inactive and cur == marks[newact] and not rev:
             bookmarks.setcurrent(repo, newact)
         elif cur != tgt and newact == repo._bookmarkcurrent:
-            bookmarks.setcurrent(repo, None)
+            bookmarks.unsetcurrent(repo)
         marks.write()
 
     # Same message whether trying to deactivate the current bookmark (-i
@@ -914,7 +915,7 @@
         if not repo._bookmarkcurrent:
             ui.status(_("no active bookmark\n"))
         else:
-            bookmarks.setcurrent(repo, None)
+            bookmarks.unsetcurrent(repo)
 
     else: # show bookmarks
         for bmark, n in sorted(marks.iteritems()):
@@ -3762,12 +3763,12 @@
                                         files, eolmode=None)
                     except patch.PatchError, e:
                         raise util.Abort(str(e))
-                    memctx = patch.makememctx(repo, (p1.node(), p2.node()),
-                                              message,
-                                              opts.get('user') or user,
-                                              opts.get('date') or date,
-                                              branch, files, store,
-                                              editor=cmdutil.commiteditor)
+                    memctx = context.makememctx(repo, (p1.node(), p2.node()),
+                                                message,
+                                                opts.get('user') or user,
+                                                opts.get('date') or date,
+                                                branch, files, store,
+                                                editor=cmdutil.commiteditor)
                     repo.savecommitmessage(memctx.description())
                     n = memctx.commit()
                 finally:
@@ -4713,25 +4714,11 @@
     result = not result
 
     if opts.get('bookmark'):
-        rb = other.listkeys('bookmarks')
-        for b in opts['bookmark']:
-            # explicit push overrides remote bookmark if any
-            if b in repo._bookmarks:
-                ui.status(_("exporting bookmark %s\n") % b)
-                new = repo[b].hex()
-            elif b in rb:
-                ui.status(_("deleting remote bookmark %s\n") % b)
-                new = '' # delete
-            else:
-                ui.warn(_('bookmark %s does not exist on the local '
-                          'or remote repository!\n') % b)
-                return 2
-            old = rb.get(b, '')
-            r = other.pushkey('bookmarks', b, old, new)
-            if not r:
-                ui.warn(_('updating bookmark %s failed!\n') % b)
-                if not result:
-                    result = 2
+        bresult = bookmarks.pushtoremote(ui, repo, other, opts['bookmark'])
+        if bresult == 2:
+            return 2
+        if not result and bresult:
+            result = 2
 
     return result
 
--- a/mercurial/commandserver.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/mercurial/commandserver.py	Tue Nov 26 21:55:21 2013 -0800
@@ -184,7 +184,10 @@
         # persist between requests
         copiedui = self.ui.copy()
         self.repo.baseui = copiedui
-        self.repo.ui = self.repo.dirstate._ui = self.repoui.copy()
+        # clone ui without using ui.copy because this is protected
+        repoui = self.repoui.__class__(self.repoui)
+        repoui.copy = copiedui.copy # redo copy protection
+        self.repo.ui = self.repo.dirstate._ui = repoui
         self.repo.invalidate()
         self.repo.invalidatedirstate()
 
--- a/mercurial/context.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/mercurial/context.py	Tue Nov 26 21:55:21 2013 -0800
@@ -195,6 +195,21 @@
     def dirty(self):
         return False
 
+def makememctx(repo, parents, text, user, date, branch, files, store,
+               editor=None):
+    def getfilectx(repo, memctx, path):
+        data, (islink, isexec), copied = store.getfile(path)
+        return memfilectx(path, data, islink=islink, isexec=isexec,
+                                  copied=copied)
+    extra = {}
+    if branch:
+        extra['branch'] = encoding.fromlocal(branch)
+    ctx =  memctx(repo, parents, text, files, getfilectx, user,
+                          date, extra)
+    if editor:
+        ctx._text = editor(repo, ctx, [])
+    return ctx
+
 class changectx(basectx):
     """A changecontext object makes access to data related to a particular
     changeset convenient. It represents a read-only context already present in
--- a/mercurial/dirstate.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/mercurial/dirstate.py	Tue Nov 26 21:55:21 2013 -0800
@@ -8,7 +8,7 @@
 
 from node import nullid
 from i18n import _
-import scmutil, util, ignore, osutil, parsers, encoding
+import scmutil, util, ignore, osutil, parsers, encoding, pathutil
 import os, stat, errno, gc
 
 propertycache = util.propertycache
@@ -736,7 +736,7 @@
                 # unknown == True means we walked the full directory tree above.
                 # So if a file is not seen it was either a) not matching matchfn
                 # b) ignored, c) missing, or d) under a symlink directory.
-                audit_path = scmutil.pathauditor(self._root)
+                audit_path = pathutil.pathauditor(self._root)
 
                 for nf in iter(visit):
                     # Report ignored items in the dmap as long as they are not
--- a/mercurial/discovery.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/mercurial/discovery.py	Tue Nov 26 21:55:21 2013 -0800
@@ -313,8 +313,8 @@
             if 1 < len(newhs):
                 dhs = list(newhs)
                 if error is None:
-                    error = (_("push creates multiple headed new branch '%s'")
-                             % (branch))
+                    error = (_("push creates new branch '%s' "
+                               "with multiple heads") % (branch))
                     hint = _("merge or"
                              " see \"hg help push\" for details about"
                              " pushing new heads")
@@ -337,10 +337,12 @@
                     hint = _("merge or"
                              " see \"hg help push\" for details about"
                              " pushing new heads")
-            if branch is not None:
-                repo.ui.note(_("new remote heads on branch '%s'\n") % branch)
+            if branch is None:
+                repo.ui.note(_("new remote heads:\n"))
+            else:
+                repo.ui.note(_("new remote heads on branch '%s':\n") % branch)
             for h in dhs:
-                repo.ui.note(_("new remote head %s\n") % short(h))
+                repo.ui.note((" %s\n") % short(h))
     if error:
         raise util.Abort(error, hint=hint)
 
--- a/mercurial/fancyopts.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/mercurial/fancyopts.py	Tue Nov 26 21:55:21 2013 -0800
@@ -5,7 +5,8 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-import getopt, util
+import getopt
+import util
 from i18n import _
 
 def gnugetopt(args, options, longoptions):
--- a/mercurial/fileset.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/mercurial/fileset.py	Tue Nov 26 21:55:21 2013 -0800
@@ -5,7 +5,8 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-import parser, error, util, merge, re
+import re
+import parser, error, util, merge
 from i18n import _
 
 elements = {
--- a/mercurial/hbisect.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/mercurial/hbisect.py	Tue Nov 26 21:55:21 2013 -0800
@@ -8,7 +8,8 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-import os, error
+import os
+import error
 from i18n import _
 from node import short, hex
 import util
--- a/mercurial/help.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/mercurial/help.py	Tue Nov 26 21:55:21 2013 -0800
@@ -6,7 +6,8 @@
 # GNU General Public License version 2 or any later version.
 
 from i18n import gettext, _
-import itertools, sys, os, error
+import itertools, sys, os
+import error
 import extensions, revset, fileset, templatekw, templatefilters, filemerge
 import encoding, util, minirst
 import cmdutil
--- a/mercurial/help/templates.txt	Tue Nov 26 16:14:22 2013 -0800
+++ b/mercurial/help/templates.txt	Tue Nov 26 21:55:21 2013 -0800
@@ -102,3 +102,7 @@
 - Invert the firstline filter, i.e. everything but the first line::
 
    $ hg log -r 0 --template "{sub(r'^.*\n?\n?', '', desc)}\n"
+
+- Display the contents of the 'extra' field, one per line::
+
+  $ hg log -r 0 --template "{join(extras, '\n')}\n"
--- a/mercurial/hg.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/mercurial/hg.py	Tue Nov 26 21:55:21 2013 -0800
@@ -202,19 +202,20 @@
         hardlink = None
         num = 0
         srcpublishing = srcrepo.ui.configbool('phases', 'publish', True)
+        srcvfs = scmutil.vfs(srcrepo.sharedpath)
+        dstvfs = scmutil.vfs(destpath)
         for f in srcrepo.store.copylist():
             if srcpublishing and f.endswith('phaseroots'):
                 continue
-            src = os.path.join(srcrepo.sharedpath, f)
-            dst = os.path.join(destpath, f)
-            dstbase = os.path.dirname(dst)
-            if dstbase and not os.path.exists(dstbase):
-                os.mkdir(dstbase)
-            if os.path.exists(src):
-                if dst.endswith('data'):
+            dstbase = os.path.dirname(f)
+            if dstbase and not dstvfs.exists(dstbase):
+                dstvfs.mkdir(dstbase)
+            if srcvfs.exists(f):
+                if f.endswith('data'):
                     # lock to avoid premature writing to the target
-                    destlock = lock.lock(os.path.join(dstbase, "lock"))
-                hardlink, n = util.copyfiles(src, dst, hardlink)
+                    destlock = lock.lock(dstvfs, dstbase + "/lock")
+                hardlink, n = util.copyfiles(srcvfs.join(f), dstvfs.join(f),
+                                             hardlink)
                 num += n
         if hardlink:
             ui.debug("linked %d files\n" % num)
--- a/mercurial/hgweb/hgweb_mod.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/mercurial/hgweb/hgweb_mod.py	Tue Nov 26 21:55:21 2013 -0800
@@ -6,7 +6,7 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-import os
+import os, re
 from mercurial import ui, hg, hook, error, encoding, templater, util, repoview
 from mercurial.templatefilters import websub
 from mercurial.i18n import _
@@ -14,7 +14,7 @@
 from common import HTTP_OK, HTTP_NOT_MODIFIED, HTTP_BAD_REQUEST
 from common import HTTP_NOT_FOUND, HTTP_SERVER_ERROR
 from request import wsgirequest
-import webcommands, protocol, webutil, re
+import webcommands, protocol, webutil
 
 perms = {
     'changegroup': 'pull',
--- a/mercurial/hgweb/webcommands.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/mercurial/hgweb/webcommands.py	Tue Nov 26 21:55:21 2013 -0800
@@ -845,15 +845,11 @@
     end = min(count, start + revcount) # last rev on this page
     parity = paritygen(web.stripecount, offset=start - end)
 
-    def entries(latestonly, **map):
+    def entries():
         l = []
 
         repo = web.repo
-        revs = repo.changelog.revs(start, end - 1)
-        if latestonly:
-            for r in revs:
-                pass
-            revs = (r,)
+        revs = fctx.filelog().revs(start, end - 1)
         for i in revs:
             iterfctx = fctx.filectx(i)
 
@@ -877,11 +873,14 @@
         for e in reversed(l):
             yield e
 
+    entries = list(entries())
+    latestentry = entries[:1]
+
     revnav = webutil.filerevnav(web.repo, fctx.path())
     nav = revnav.gen(end - 1, revcount, count)
     return tmpl("filelog", file=f, node=fctx.hex(), nav=nav,
-                entries=lambda **x: entries(latestonly=False, **x),
-                latestentry=lambda **x: entries(latestonly=True, **x),
+                entries=entries,
+                latestentry=latestentry,
                 revcount=revcount, morevars=morevars, lessvars=lessvars)
 
 def archive(web, req, tmpl):
--- a/mercurial/hgweb/webutil.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/mercurial/hgweb/webutil.py	Tue Nov 26 21:55:21 2013 -0800
@@ -7,7 +7,7 @@
 # GNU General Public License version 2 or any later version.
 
 import os, copy
-from mercurial import match, patch, scmutil, error, ui, util
+from mercurial import match, patch, error, ui, util, pathutil
 from mercurial.i18n import _
 from mercurial.node import hex, nullid
 from common import ErrorResponse
@@ -196,7 +196,7 @@
 
 def cleanpath(repo, path):
     path = path.lstrip('/')
-    return scmutil.canonpath(repo.root, '', path)
+    return pathutil.canonpath(repo.root, '', path)
 
 def changeidctx (repo, changeid):
     try:
--- a/mercurial/localrepo.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/mercurial/localrepo.py	Tue Nov 26 21:55:21 2013 -0800
@@ -8,14 +8,15 @@
 from i18n import _
 import peer, changegroup, subrepo, discovery, pushkey, obsolete, repoview
 import changelog, dirstate, filelog, manifest, context, bookmarks, phases
-import lock, transaction, store, encoding
+import lock as lockmod
+import transaction, store, encoding
 import scmutil, util, extensions, hook, error, revset
 import match as matchmod
 import merge as mergemod
 import tags as tagsmod
 from lock import release
 import weakref, errno, os, time, inspect
-import branchmap
+import branchmap, pathutil
 propertycache = util.propertycache
 filecache = scmutil.filecache
 
@@ -166,11 +167,12 @@
         self.root = self.wvfs.base
         self.path = self.wvfs.join(".hg")
         self.origroot = path
-        self.auditor = scmutil.pathauditor(self.root, self._checknested)
+        self.auditor = pathutil.pathauditor(self.root, self._checknested)
         self.vfs = scmutil.vfs(self.path)
         self.opener = self.vfs
         self.baseui = baseui
         self.ui = baseui.copy()
+        self.ui.copy = baseui.copy # prevent copying repo configuration
         # A list of callback to shape the phase if no data were found.
         # Callback are in the form: func(repo, roots) --> processed root.
         # This list it to be filled by extension during repo setup
@@ -832,7 +834,7 @@
         renames = [(vfs, x, undoname(x)) for vfs, x in self._journalfiles()]
         rp = report and report or self.ui.warn
         tr = transaction.transaction(rp, self.sopener,
-                                     self.sjoin("journal"),
+                                     "journal",
                                      aftertrans(renames),
                                      self.store.createmode)
         self._transref = weakref.ref(tr)
@@ -866,7 +868,7 @@
         try:
             if self.svfs.exists("journal"):
                 self.ui.status(_("rolling back interrupted transaction\n"))
-                transaction.rollback(self.sopener, self.sjoin("journal"),
+                transaction.rollback(self.sopener, "journal",
                                      self.ui.warn)
                 self.invalidate()
                 return True
@@ -922,7 +924,7 @@
 
         parents = self.dirstate.parents()
         self.destroying()
-        transaction.rollback(self.sopener, self.sjoin('undo'), ui.warn)
+        transaction.rollback(self.sopener, 'undo', ui.warn)
         if self.vfs.exists('undo.bookmarks'):
             self.vfs.rename('undo.bookmarks', 'bookmarks')
         if self.svfs.exists('undo.phaseroots'):
@@ -998,17 +1000,18 @@
                 pass
         self.invalidatecaches()
 
-    def _lock(self, lockname, wait, releasefn, acquirefn, desc):
+    def _lock(self, vfs, lockname, wait, releasefn, acquirefn, desc):
         try:
-            l = lock.lock(lockname, 0, releasefn, desc=desc)
+            l = lockmod.lock(vfs, lockname, 0, releasefn, desc=desc)
         except error.LockHeld, inst:
             if not wait:
                 raise
             self.ui.warn(_("waiting for lock on %s held by %r\n") %
                          (desc, inst.locker))
             # default to 600 seconds timeout
-            l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
-                          releasefn, desc=desc)
+            l = lockmod.lock(vfs, lockname,
+                             int(self.ui.config("ui", "timeout", "600")),
+                             releasefn, desc=desc)
         if acquirefn:
             acquirefn()
         return l
@@ -1041,7 +1044,7 @@
                     continue
                 ce.refresh()
 
-        l = self._lock(self.sjoin("lock"), wait, unlock,
+        l = self._lock(self.svfs, "lock", wait, unlock,
                        self.invalidate, _('repository %s') % self.origroot)
         self._lockref = weakref.ref(l)
         return l
@@ -1059,7 +1062,7 @@
             self.dirstate.write()
             self._filecache['dirstate'].refresh()
 
-        l = self._lock(self.join("wlock"), wait, unlock,
+        l = self._lock(self.vfs, "wlock", wait, unlock,
                        self.invalidatedirstate, _('working directory of %s') %
                        self.origroot)
         self._wlockref = weakref.ref(l)
@@ -1976,27 +1979,7 @@
             if locallock is not None:
                 locallock.release()
 
-        self.ui.debug("checking for updated bookmarks\n")
-        rb = remote.listkeys('bookmarks')
-        revnums = map(unfi.changelog.rev, revs or [])
-        ancestors = [
-            a for a in unfi.changelog.ancestors(revnums, inclusive=True)]
-        for k in rb.keys():
-            if k in unfi._bookmarks:
-                nr, nl = rb[k], hex(self._bookmarks[k])
-                if nr in unfi:
-                    cr = unfi[nr]
-                    cl = unfi[nl]
-                    if bookmarks.validdest(unfi, cr, cl):
-                        if ancestors and cl.rev() not in ancestors:
-                            continue
-                        r = remote.pushkey('bookmarks', k, nr, nl)
-                        if r:
-                            self.ui.status(_("updating bookmark %s\n") % k)
-                        else:
-                            self.ui.warn(_('updating bookmark %s'
-                                           ' failed!\n') % k)
-
+        bookmarks.updateremote(self.ui, unfi, remote, revs)
         return ret
 
     def changegroupinfo(self, nodes, source):
--- a/mercurial/lock.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/mercurial/lock.py	Tue Nov 26 21:55:21 2013 -0800
@@ -29,7 +29,8 @@
 
     _host = None
 
-    def __init__(self, file, timeout=-1, releasefn=None, desc=None):
+    def __init__(self, vfs, file, timeout=-1, releasefn=None, desc=None):
+        self.vfs = vfs
         self.f = file
         self.held = 0
         self.timeout = timeout
@@ -75,13 +76,14 @@
         lockname = '%s:%s' % (lock._host, self.pid)
         while not self.held:
             try:
-                util.makelock(lockname, self.f)
+                self.vfs.makelock(lockname, self.f)
                 self.held = 1
             except (OSError, IOError), why:
                 if why.errno == errno.EEXIST:
                     locker = self.testlock()
                     if locker is not None:
-                        raise error.LockHeld(errno.EAGAIN, self.f, self.desc,
+                        raise error.LockHeld(errno.EAGAIN,
+                                             self.vfs.join(self.f), self.desc,
                                              locker)
                 else:
                     raise error.LockUnavailable(why.errno, why.strerror,
@@ -99,7 +101,7 @@
 
         """
         try:
-            locker = util.readlock(self.f)
+            locker = self.vfs.readlock(self.f)
         except (OSError, IOError), why:
             if why.errno == errno.ENOENT:
                 return None
@@ -119,8 +121,8 @@
         # if locker dead, break lock.  must do this with another lock
         # held, or can race and break valid lock.
         try:
-            l = lock(self.f + '.break', timeout=0)
-            util.unlink(self.f)
+            l = lock(self.vfs, self.f + '.break', timeout=0)
+            self.vfs.unlink(self.f)
             l.release()
         except error.LockError:
             return locker
@@ -140,7 +142,7 @@
             if self.releasefn:
                 self.releasefn()
             try:
-                util.unlink(self.f)
+                self.vfs.unlink(self.f)
             except OSError:
                 pass
             for callback in self.postrelease:
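
lock.py now receives a vfs plus a relative lock name instead of a pre-joined
path, so every filesystem primitive (makelock, readlock, unlink, join) is
delegated to the vfs layer. A rough stand-alone sketch of that delegation
pattern; "samplevfs" is a made-up stand-in, not the real scmutil.vfs, and it
only models the calls the lock class uses:

    import os

    class samplevfs(object):
        def __init__(self, base):
            self.base = base
        def join(self, path):
            return os.path.join(self.base, path)
        def makelock(self, info, path):
            # fail with EEXIST if the lock file already exists
            fd = os.open(self.join(path), os.O_CREAT | os.O_EXCL | os.O_WRONLY)
            try:
                os.write(fd, info)
            finally:
                os.close(fd)
        def readlock(self, path):
            fp = open(self.join(path))
            try:
                return fp.read()
            finally:
                fp.close()
        def unlink(self, path):
            os.unlink(self.join(path))

    vfs = samplevfs('/tmp/example-repo/.hg')   # hypothetical base directory
    # lock(vfs, "lock", ...) can then create, inspect and remove the lock
    # file relative to that base; vfs.join("lock") is only needed where a
    # user-facing message wants an absolute path (see error.LockHeld above).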
--- a/mercurial/manifest.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/mercurial/manifest.py	Tue Nov 26 21:55:21 2013 -0800
@@ -30,8 +30,9 @@
 
 class manifest(revlog.revlog):
     def __init__(self, opener):
-        # we expect to deal with not more than three revs at a time in merge
-        self._mancache = util.lrucachedict(3)
+        # we expect to deal with not more than four revs at a time,
+        # during a commit --amend
+        self._mancache = util.lrucachedict(4)
         revlog.revlog.__init__(self, opener, "00manifest.i")
 
     def parse(self, lines):
--- a/mercurial/match.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/mercurial/match.py	Tue Nov 26 21:55:21 2013 -0800
@@ -6,7 +6,7 @@
 # GNU General Public License version 2 or any later version.
 
 import re
-import scmutil, util, fileset
+import util, fileset, pathutil
 from i18n import _
 
 def _rematcher(pat):
@@ -317,7 +317,7 @@
     pats = []
     for kind, name in [_patsplit(p, default) for p in names]:
         if kind in ('glob', 'relpath'):
-            name = scmutil.canonpath(root, cwd, name, auditor)
+            name = pathutil.canonpath(root, cwd, name, auditor)
         elif kind in ('relglob', 'path'):
             name = util.normpath(name)
         elif kind in ('listfile', 'listfile0'):
--- a/mercurial/mdiff.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/mercurial/mdiff.py	Tue Nov 26 21:55:21 2013 -0800
@@ -6,8 +6,8 @@
 # GNU General Public License version 2 or any later version.
 
 from i18n import _
-import bdiff, mpatch, util
-import re, struct, base85, zlib
+import bdiff, mpatch, util, base85
+import re, struct, zlib
 
 def splitnewlines(text):
     '''like str.splitlines, but only split on newlines.'''
--- a/mercurial/obsolete.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/mercurial/obsolete.py	Tue Nov 26 21:55:21 2013 -0800
@@ -196,6 +196,14 @@
         self._data = data
         self._decodedmeta = None
 
+    def __hash__(self):
+        return hash(self._data)
+
+    def __eq__(self, other):
+        if type(other) != type(self):
+            return False
+        return self._data == other._data
+
     def precnode(self):
         """Precursor changeset node identifier"""
         return self._data[0]
@@ -268,7 +276,11 @@
         if not _enabled:
             raise util.Abort('obsolete feature is not enabled on this repo')
         known = set(self._all)
-        new = [m for m in markers if m not in known]
+        new = []
+        for m in markers:
+            if m not in known:
+                known.add(m)
+                new.append(m)
         if new:
             f = self.sopener('obsstore', 'ab')
             try:
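
The new __hash__/__eq__ pair above makes marker objects usable in sets, and
the rewritten add() loop deduplicates inside the incoming batch as well as
against markers already in the store. A toy stand-alone illustration, with
plain tuples standing in for marker objects:

    known = set([('a',)])                    # markers already in the store
    markers = [('b',), ('b',), ('a',)]       # incoming batch with a duplicate

    # old behaviour: only filtered against 'known', duplicate kept twice
    old = [m for m in markers if m not in known]       # [('b',), ('b',)]

    # new behaviour: 'known' grows as we go, duplicate written only once
    new = []
    for m in markers:
        if m not in known:
            known.add(m)
            new.append(m)                               # [('b',)]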
--- a/mercurial/parsers.c	Tue Nov 26 16:14:22 2013 -0800
+++ b/mercurial/parsers.c	Tue Nov 26 21:55:21 2013 -0800
@@ -927,8 +927,13 @@
 static int nt_init(indexObject *self)
 {
 	if (self->nt == NULL) {
+		if (self->raw_length > INT_MAX) {
+			PyErr_SetString(PyExc_ValueError, "overflow in nt_init");
+			return -1;
+		}
 		self->ntcapacity = self->raw_length < 4
-			? 4 : self->raw_length / 2;
+			? 4 : (int)self->raw_length / 2;
+
 		self->nt = calloc(self->ntcapacity, sizeof(nodetree));
 		if (self->nt == NULL) {
 			PyErr_NoMemory();
--- a/mercurial/patch.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/mercurial/patch.py	Tue Nov 26 21:55:21 2013 -0800
@@ -16,7 +16,6 @@
 from i18n import _
 from node import hex, short
 import base85, mdiff, scmutil, util, diffhelpers, copies, encoding, error
-import context
 
 gitre = re.compile('diff --git a/(.*) b/(.*)')
 
@@ -1441,21 +1440,6 @@
     backend = repobackend(ui, repo, ctx, store)
     return patchbackend(ui, backend, patchobj, strip, files, eolmode)
 
-def makememctx(repo, parents, text, user, date, branch, files, store,
-               editor=None):
-    def getfilectx(repo, memctx, path):
-        data, (islink, isexec), copied = store.getfile(path)
-        return context.memfilectx(path, data, islink=islink, isexec=isexec,
-                                  copied=copied)
-    extra = {}
-    if branch:
-        extra['branch'] = encoding.fromlocal(branch)
-    ctx =  context.memctx(repo, parents, text, files, getfilectx, user,
-                          date, extra)
-    if editor:
-        ctx._text = editor(repo, ctx, [])
-    return ctx
-
 def patch(ui, repo, patchname, strip=1, files=None, eolmode='strict',
           similarity=0):
     """Apply <patchname> to the working directory.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/pathutil.py	Tue Nov 26 21:55:21 2013 -0800
@@ -0,0 +1,144 @@
+import os, errno, stat
+
+import util
+from i18n import _
+
+class pathauditor(object):
+    '''ensure that a filesystem path contains no banned components.
+    the following properties of a path are checked:
+
+    - ends with a directory separator
+    - under top-level .hg
+    - starts at the root of a windows drive
+    - contains ".."
+    - traverses a symlink (e.g. a/symlink_here/b)
+    - inside a nested repository (a callback can be used to approve
+      some nested repositories, e.g., subrepositories)
+    '''
+
+    def __init__(self, root, callback=None):
+        self.audited = set()
+        self.auditeddir = set()
+        self.root = root
+        self.callback = callback
+        if os.path.lexists(root) and not util.checkcase(root):
+            self.normcase = util.normcase
+        else:
+            self.normcase = lambda x: x
+
+    def __call__(self, path):
+        '''Check the relative path.
+        path may contain a pattern (e.g. foodir/**.txt)'''
+
+        path = util.localpath(path)
+        normpath = self.normcase(path)
+        if normpath in self.audited:
+            return
+        # AIX ignores "/" at end of path, others raise EISDIR.
+        if util.endswithsep(path):
+            raise util.Abort(_("path ends in directory separator: %s") % path)
+        parts = util.splitpath(path)
+        if (os.path.splitdrive(path)[0]
+            or parts[0].lower() in ('.hg', '.hg.', '')
+            or os.pardir in parts):
+            raise util.Abort(_("path contains illegal component: %s") % path)
+        if '.hg' in path.lower():
+            lparts = [p.lower() for p in parts]
+            for p in '.hg', '.hg.':
+                if p in lparts[1:]:
+                    pos = lparts.index(p)
+                    base = os.path.join(*parts[:pos])
+                    raise util.Abort(_("path '%s' is inside nested repo %r")
+                                     % (path, base))
+
+        normparts = util.splitpath(normpath)
+        assert len(parts) == len(normparts)
+
+        parts.pop()
+        normparts.pop()
+        prefixes = []
+        while parts:
+            prefix = os.sep.join(parts)
+            normprefix = os.sep.join(normparts)
+            if normprefix in self.auditeddir:
+                break
+            curpath = os.path.join(self.root, prefix)
+            try:
+                st = os.lstat(curpath)
+            except OSError, err:
+                # EINVAL can be raised as invalid path syntax under win32.
+                # Such errors must be ignored so patterns can still be checked.
+                if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
+                    raise
+            else:
+                if stat.S_ISLNK(st.st_mode):
+                    raise util.Abort(
+                        _('path %r traverses symbolic link %r')
+                        % (path, prefix))
+                elif (stat.S_ISDIR(st.st_mode) and
+                      os.path.isdir(os.path.join(curpath, '.hg'))):
+                    if not self.callback or not self.callback(curpath):
+                        raise util.Abort(_("path '%s' is inside nested "
+                                           "repo %r")
+                                         % (path, prefix))
+            prefixes.append(normprefix)
+            parts.pop()
+            normparts.pop()
+
+        self.audited.add(normpath)
+        # only add prefixes to the cache after checking everything: we don't
+        # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
+        self.auditeddir.update(prefixes)
+
+    def check(self, path):
+        try:
+            self(path)
+            return True
+        except (OSError, util.Abort):
+            return False
+
+def canonpath(root, cwd, myname, auditor=None):
+    '''return the canonical path of myname, given cwd and root'''
+    if util.endswithsep(root):
+        rootsep = root
+    else:
+        rootsep = root + os.sep
+    name = myname
+    if not os.path.isabs(name):
+        name = os.path.join(root, cwd, name)
+    name = os.path.normpath(name)
+    if auditor is None:
+        auditor = pathauditor(root)
+    if name != rootsep and name.startswith(rootsep):
+        name = name[len(rootsep):]
+        auditor(name)
+        return util.pconvert(name)
+    elif name == root:
+        return ''
+    else:
+        # Determine whether `name' is in the hierarchy at or beneath `root',
+        # by iterating name=dirname(name) until that causes no change (can't
+        # check name == '/', because that doesn't work on windows). The list
+        # `rel' holds the reversed list of components making up the relative
+        # file name we want.
+        rel = []
+        while True:
+            try:
+                s = util.samefile(name, root)
+            except OSError:
+                s = False
+            if s:
+                if not rel:
+                    # name was actually the same as root (maybe a symlink)
+                    return ''
+                rel.reverse()
+                name = os.path.join(*rel)
+                auditor(name)
+                return util.pconvert(name)
+            dirname, basename = util.split(name)
+            rel.append(basename)
+            if dirname == name:
+                break
+            name = dirname
+
+        raise util.Abort(_("%s not under root '%s'") % (myname, root))
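
The comment in canonpath above describes a dirname walk that decides whether
a path lies at or beneath the repository root. A stand-alone sketch of just
that walk; "relpath_under" is a hypothetical helper, and util.samefile is
replaced by plain string comparison, so symlinked roots are not handled here:

    import os

    def relpath_under(root, name):
        '''return name relative to root if it is at or under root, else None'''
        root = os.path.normpath(root)
        name = os.path.normpath(name)
        rel = []
        while True:
            if name == root:
                rel.reverse()
                return os.path.join(*rel) if rel else ''
            dirname, basename = os.path.split(name)
            rel.append(basename)
            if dirname == name:       # reached the filesystem root: not under
                return None
            name = dirname

    # relpath_under('/repo', '/repo/a/b')    -> 'a/b'
    # relpath_under('/repo', '/elsewhere/x') -> None (canonpath aborts instead)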
--- a/mercurial/repair.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/mercurial/repair.py	Tue Nov 26 21:55:21 2013 -0800
@@ -38,16 +38,8 @@
     """return the changesets which will be broken by the truncation"""
     s = set()
     def collectone(revlog):
-        linkgen = (revlog.linkrev(i) for i in revlog)
-        # find the truncation point of the revlog
-        for lrev in linkgen:
-            if lrev >= striprev:
-                break
-        # see if any revision after this point has a linkrev
-        # less than striprev (those will be broken by strip)
-        for lrev in linkgen:
-            if lrev < striprev:
-                s.add(lrev)
+        _, brokenset = revlog.getstrippoint(striprev)
+        s.update([revlog.linkrev(r) for r in brokenset])
 
     collectone(repo.manifest)
     for fname in files:
--- a/mercurial/repoview.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/mercurial/repoview.py	Tue Nov 26 21:55:21 2013 -0800
@@ -98,16 +98,6 @@
                'served': computeunserved,
                'immutable':  computemutable,
                'base':  computeimpactable}
-### Nearest subset relation
-# Nearest subset of filter X is a filter Y so that:
-# * Y is included in X,
-# * X - Y is as small as possible.
-# This create and ordering used for branchmap purpose.
-# the ordering may be partial
-subsettable = {None: 'visible',
-               'visible': 'served',
-               'served': 'immutable',
-               'immutable': 'base'}
 
 def filterrevs(repo, filtername):
     """returns set of filtered revision for this filter name"""
@@ -215,4 +205,3 @@
     @property
     def requirements(self):
         return self._unfilteredrepo.requirements
-
--- a/mercurial/revlog.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/mercurial/revlog.py	Tue Nov 26 21:55:21 2013 -0800
@@ -401,7 +401,29 @@
         heads = [self.rev(n) for n in heads]
 
         # we want the ancestors, but inclusive
-        has = set(self.ancestors(common))
+        class lazyset(object):
+            def __init__(self, lazyvalues):
+                self.addedvalues = set()
+                self.lazyvalues = lazyvalues
+
+            def __contains__(self, value):
+                return value in self.addedvalues or value in self.lazyvalues
+
+            def __iter__(self):
+                added = self.addedvalues
+                for r in added:
+                    yield r
+                for r in self.lazyvalues:
+                    if r not in added:
+                        yield r
+
+            def add(self, value):
+                self.addedvalues.add(value)
+
+            def update(self, values):
+                self.addedvalues.update(values)
+
+        has = lazyset(self.ancestors(common))
         has.add(nullrev)
         has.update(common)
 
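
A stand-alone sketch of the lazyset idea introduced above (illustrative only,
not the in-method class itself): membership tests and add() work without
first copying every ancestor rev into a plain set.

    class lazyset(object):
        def __init__(self, lazyvalues):
            self.addedvalues = set()
            self.lazyvalues = lazyvalues
        def __contains__(self, value):
            return value in self.addedvalues or value in self.lazyvalues
        def add(self, value):
            self.addedvalues.add(value)

    has = lazyset(range(10))    # stands in for self.ancestors(common)
    has.add(-1)                 # like has.add(nullrev)
    assert -1 in has and 7 in has and 42 not in has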
@@ -1263,6 +1285,46 @@
 
         return content
 
+    def getstrippoint(self, minlink):
+        """find the minimum rev that must be stripped to strip the linkrev
+
+        Returns a tuple containing the minimum rev and a set of all revs that
+        have linkrevs that will be broken by this strip.
+        """
+        brokenrevs = set()
+        strippoint = len(self)
+
+        heads = {}
+        futurelargelinkrevs = set()
+        for head in self.headrevs():
+            headlinkrev = self.linkrev(head)
+            heads[head] = headlinkrev
+            if headlinkrev >= minlink:
+                futurelargelinkrevs.add(headlinkrev)
+
+        # This algorithm involves walking down the rev graph, starting at the
+        # heads. Since the revs are topologically sorted according to linkrev,
+        # once all head linkrevs are below the minlink, we know there are
+        # no more revs that could have a linkrev greater than minlink.
+        # no more revs that could have a linkrev greater than or equal to
+        # minlink. So we can stop walking.
+            strippoint -= 1
+            linkrev = heads.pop(strippoint)
+
+            if linkrev < minlink:
+                brokenrevs.add(strippoint)
+            else:
+                futurelargelinkrevs.remove(linkrev)
+
+            for p in self.parentrevs(strippoint):
+                if p != nullrev:
+                    plinkrev = self.linkrev(p)
+                    heads[p] = plinkrev
+                    if plinkrev >= minlink:
+                        futurelargelinkrevs.add(plinkrev)
+
+        return strippoint, brokenrevs
+
     def strip(self, minlink, transaction):
         """truncate the revlog on the first revision with a linkrev >= minlink
 
@@ -1280,10 +1342,8 @@
         if len(self) == 0:
             return
 
-        for rev in self:
-            if self.index[rev][4] >= minlink:
-                break
-        else:
+        rev, _ = self.getstrippoint(minlink)
+        if rev == len(self):
             return
 
         # first truncate the files on disk
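
A toy stand-alone walkthrough of getstrippoint (hypothetical data, not
Mercurial code): a filelog with two branches, 0-1-3 and 0-2-4, whose linkrevs
are monotone along ancestry but interleaved in index order. With minlink 4,
rev 3 (linkrev 4) must go; rev 4 sits above it in the revlog, so it is
stripped too and its linkrev (changeset 3) is reported as broken.

    parentrevs = {0: [-1], 1: [0], 2: [0], 3: [1], 4: [2]}  # -1 plays nullrev
    linkrev = {0: 0, 1: 1, 2: 2, 3: 4, 4: 3}
    headrevs = [3, 4]
    minlink = 4                              # strip changeset 4 and later

    brokenrevs = set()
    strippoint = 5                           # len(revlog)
    heads = dict((h, linkrev[h]) for h in headrevs)
    futurelargelinkrevs = set(l for l in heads.values() if l >= minlink)
    while futurelargelinkrevs:
        strippoint -= 1
        lr = heads.pop(strippoint)
        if lr < minlink:
            brokenrevs.add(strippoint)
        else:
            futurelargelinkrevs.remove(lr)
        for p in parentrevs[strippoint]:
            if p != -1:
                heads[p] = linkrev[p]
                if linkrev[p] >= minlink:
                    futurelargelinkrevs.add(linkrev[p])

    print((strippoint, brokenrevs))          # strippoint 3, brokenrevs {4}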
--- a/mercurial/scmutil.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/mercurial/scmutil.py	Tue Nov 26 21:55:21 2013 -0800
@@ -8,8 +8,9 @@
 from i18n import _
 from mercurial.node import nullrev
 import util, error, osutil, revset, similar, encoding, phases, parsers
+import pathutil
 import match as matchmod
-import os, errno, re, stat, glob
+import os, errno, re, glob
 
 if os.name == 'nt':
     import scmwindows as scmplatform
@@ -97,9 +98,10 @@
         self._newfiles = set()
 
     def __call__(self, f):
+        if f in self._newfiles:
+            return
         fl = encoding.lower(f)
-        if (fl in self._loweredfiles and f not in self._dirstate and
-            f not in self._newfiles):
+        if fl in self._loweredfiles and f not in self._dirstate:
             msg = _('possible case-folding collision for %s') % f
             if self._abort:
                 raise util.Abort(msg)
@@ -107,100 +109,6 @@
         self._loweredfiles.add(fl)
         self._newfiles.add(f)
 
-class pathauditor(object):
-    '''ensure that a filesystem path contains no banned components.
-    the following properties of a path are checked:
-
-    - ends with a directory separator
-    - under top-level .hg
-    - starts at the root of a windows drive
-    - contains ".."
-    - traverses a symlink (e.g. a/symlink_here/b)
-    - inside a nested repository (a callback can be used to approve
-      some nested repositories, e.g., subrepositories)
-    '''
-
-    def __init__(self, root, callback=None):
-        self.audited = set()
-        self.auditeddir = set()
-        self.root = root
-        self.callback = callback
-        if os.path.lexists(root) and not util.checkcase(root):
-            self.normcase = util.normcase
-        else:
-            self.normcase = lambda x: x
-
-    def __call__(self, path):
-        '''Check the relative path.
-        path may contain a pattern (e.g. foodir/**.txt)'''
-
-        path = util.localpath(path)
-        normpath = self.normcase(path)
-        if normpath in self.audited:
-            return
-        # AIX ignores "/" at end of path, others raise EISDIR.
-        if util.endswithsep(path):
-            raise util.Abort(_("path ends in directory separator: %s") % path)
-        parts = util.splitpath(path)
-        if (os.path.splitdrive(path)[0]
-            or parts[0].lower() in ('.hg', '.hg.', '')
-            or os.pardir in parts):
-            raise util.Abort(_("path contains illegal component: %s") % path)
-        if '.hg' in path.lower():
-            lparts = [p.lower() for p in parts]
-            for p in '.hg', '.hg.':
-                if p in lparts[1:]:
-                    pos = lparts.index(p)
-                    base = os.path.join(*parts[:pos])
-                    raise util.Abort(_("path '%s' is inside nested repo %r")
-                                     % (path, base))
-
-        normparts = util.splitpath(normpath)
-        assert len(parts) == len(normparts)
-
-        parts.pop()
-        normparts.pop()
-        prefixes = []
-        while parts:
-            prefix = os.sep.join(parts)
-            normprefix = os.sep.join(normparts)
-            if normprefix in self.auditeddir:
-                break
-            curpath = os.path.join(self.root, prefix)
-            try:
-                st = os.lstat(curpath)
-            except OSError, err:
-                # EINVAL can be raised as invalid path syntax under win32.
-                # They must be ignored for patterns can be checked too.
-                if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
-                    raise
-            else:
-                if stat.S_ISLNK(st.st_mode):
-                    raise util.Abort(
-                        _('path %r traverses symbolic link %r')
-                        % (path, prefix))
-                elif (stat.S_ISDIR(st.st_mode) and
-                      os.path.isdir(os.path.join(curpath, '.hg'))):
-                    if not self.callback or not self.callback(curpath):
-                        raise util.Abort(_("path '%s' is inside nested "
-                                           "repo %r")
-                                         % (path, prefix))
-            prefixes.append(normprefix)
-            parts.pop()
-            normparts.pop()
-
-        self.audited.add(normpath)
-        # only add prefixes to the cache after checking everything: we don't
-        # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
-        self.auditeddir.update(prefixes)
-
-    def check(self, path):
-        try:
-            self(path)
-            return True
-        except (OSError, util.Abort):
-            return False
-
 class abstractvfs(object):
     """Abstract base class; cannot be instantiated"""
 
@@ -242,6 +150,9 @@
         finally:
             fp.close()
 
+    def chmod(self, path, mode):
+        return os.chmod(self.join(path), mode)
+
     def exists(self, path=None):
         return os.path.exists(self.join(path))
 
@@ -251,6 +162,9 @@
     def isdir(self, path=None):
         return os.path.isdir(self.join(path))
 
+    def isfile(self, path=None):
+        return os.path.isfile(self.join(path))
+
     def islink(self, path=None):
         return os.path.islink(self.join(path))
 
@@ -263,12 +177,18 @@
     def makedirs(self, path=None, mode=None):
         return util.makedirs(self.join(path), mode)
 
+    def makelock(self, info, path):
+        return util.makelock(info, self.join(path))
+
     def mkdir(self, path=None):
         return os.mkdir(self.join(path))
 
     def readdir(self, path=None, stat=None, skip=None):
         return osutil.listdir(self.join(path), stat, skip)
 
+    def readlock(self, path):
+        return util.readlock(self.join(path))
+
     def rename(self, src, dst):
         return util.rename(self.join(src), self.join(dst))
 
@@ -309,7 +229,7 @@
     def _setmustaudit(self, onoff):
         self._audit = onoff
         if onoff:
-            self.audit = pathauditor(self.base)
+            self.audit = pathutil.pathauditor(self.base)
         else:
             self.audit = util.always
 
@@ -444,52 +364,6 @@
         return self.vfs(path, mode, *args, **kw)
 
 
-def canonpath(root, cwd, myname, auditor=None):
-    '''return the canonical path of myname, given cwd and root'''
-    if util.endswithsep(root):
-        rootsep = root
-    else:
-        rootsep = root + os.sep
-    name = myname
-    if not os.path.isabs(name):
-        name = os.path.join(root, cwd, name)
-    name = os.path.normpath(name)
-    if auditor is None:
-        auditor = pathauditor(root)
-    if name != rootsep and name.startswith(rootsep):
-        name = name[len(rootsep):]
-        auditor(name)
-        return util.pconvert(name)
-    elif name == root:
-        return ''
-    else:
-        # Determine whether `name' is in the hierarchy at or beneath `root',
-        # by iterating name=dirname(name) until that causes no change (can't
-        # check name == '/', because that doesn't work on windows). The list
-        # `rel' holds the reversed list of components making up the relative
-        # file name we want.
-        rel = []
-        while True:
-            try:
-                s = util.samefile(name, root)
-            except OSError:
-                s = False
-            if s:
-                if not rel:
-                    # name was actually the same as root (maybe a symlink)
-                    return ''
-                rel.reverse()
-                name = os.path.join(*rel)
-                auditor(name)
-                return util.pconvert(name)
-            dirname, basename = util.split(name)
-            rel.append(basename)
-            if dirname == name:
-                break
-            name = dirname
-
-        raise util.Abort(_("%s not under root '%s'") % (myname, root))
-
 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
     '''yield every hg repository under path, always recursively.
     The recurse flag will only control recursion into repo working dirs'''
@@ -767,7 +641,7 @@
     This is different from dirstate.status because it doesn't care about
     whether files are modified or clean.'''
     added, unknown, deleted, removed = [], [], [], []
-    audit_path = pathauditor(repo.root)
+    audit_path = pathutil.pathauditor(repo.root)
 
     ctx = repo[None]
     dirstate = repo.dirstate
@@ -851,14 +725,14 @@
               "Mercurial)") % "', '".join(missings))
     return requirements
 
-class filecacheentry(object):
-    def __init__(self, path, stat=True):
+class filecachesubentry(object):
+    def __init__(self, path, stat):
         self.path = path
         self.cachestat = None
         self._cacheable = None
 
         if stat:
-            self.cachestat = filecacheentry.stat(self.path)
+            self.cachestat = filecachesubentry.stat(self.path)
 
             if self.cachestat:
                 self._cacheable = self.cachestat.cacheable()
@@ -868,7 +742,7 @@
 
     def refresh(self):
         if self.cacheable():
-            self.cachestat = filecacheentry.stat(self.path)
+            self.cachestat = filecachesubentry.stat(self.path)
 
     def cacheable(self):
         if self._cacheable is not None:
@@ -882,7 +756,7 @@
         if not self.cacheable():
             return True
 
-        newstat = filecacheentry.stat(self.path)
+        newstat = filecachesubentry.stat(self.path)
 
         # we may not know if it's cacheable yet, check again now
         if newstat and self._cacheable is None:
@@ -906,24 +780,44 @@
             if e.errno != errno.ENOENT:
                 raise
 
+class filecacheentry(object):
+    def __init__(self, paths, stat=True):
+        self._entries = []
+        for path in paths:
+            self._entries.append(filecachesubentry(path, stat))
+
+    def changed(self):
+        '''true if any entry has changed'''
+        for entry in self._entries:
+            if entry.changed():
+                return True
+        return False
+
+    def refresh(self):
+        for entry in self._entries:
+            entry.refresh()
+
 class filecache(object):
-    '''A property like decorator that tracks a file under .hg/ for updates.
+    '''A property-like decorator that tracks files under .hg/ for updates.
 
     Records stat info when called in _filecache.
 
-    On subsequent calls, compares old stat info with new info, and recreates
-    the object when needed, updating the new stat info in _filecache.
+    On subsequent calls, compares old stat info with new info, and recreates the
+    object when any of the files changes, updating the new stat info in
+    _filecache.
 
     Mercurial either atomically renames or appends to files under .hg,
     so to ensure the cache is reliable we need the filesystem to be able
     to tell us if a file has been replaced. If it can't, we fall back to
     recreating the object on every call (essentially the same behaviour as
-    propertycache).'''
-    def __init__(self, path):
-        self.path = path
+    propertycache).
+
+    '''
+    def __init__(self, *paths):
+        self.paths = paths
 
     def join(self, obj, fname):
-        """Used to compute the runtime path of the cached file.
+        """Used to compute the runtime path of a cached file.
 
         Users should subclass filecache and provide their own version of this
         function to call the appropriate join function on 'obj' (an instance
@@ -948,11 +842,11 @@
             if entry.changed():
                 entry.obj = self.func(obj)
         else:
-            path = self.join(obj, self.path)
+            paths = [self.join(obj, path) for path in self.paths]
 
             # We stat -before- creating the object so our cache doesn't lie if
             # a writer modified between the time we read and stat
-            entry = filecacheentry(path)
+            entry = filecacheentry(paths, True)
             entry.obj = self.func(obj)
 
             obj._filecache[self.name] = entry
@@ -964,7 +858,8 @@
         if self.name not in obj._filecache:
             # we add an entry for the missing value because X in __dict__
             # implies X in _filecache
-            ce = filecacheentry(self.join(obj, self.path), False)
+            paths = [self.join(obj, path) for path in self.paths]
+            ce = filecacheentry(paths, False)
             obj._filecache[self.name] = ce
         else:
             ce = obj._filecache[self.name]
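
filecache now accepts several relative paths, and filecacheentry simply
aggregates one filecachesubentry per path, so a cached property is recomputed
as soon as any of its files changes (test-filecache.py below exercises this
with @filecache('x', 'y')). A stand-alone sketch of that aggregation, with
made-up names and a simplified stat tuple instead of util.cachestat:

    import os

    class statentry(object):                # plays the role of filecachesubentry
        def __init__(self, path):
            self.path = path
            self.stat = self._stat()
        def _stat(self):
            try:
                st = os.stat(self.path)
                return (st.st_mtime, st.st_size, st.st_ino)
            except OSError:
                return None
        def changed(self):
            return self._stat() != self.stat

    class multientry(object):               # plays the role of filecacheentry
        def __init__(self, paths):
            self._entries = [statentry(p) for p in paths]
        def changed(self):
            return any(e.changed() for e in self._entries)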
--- a/mercurial/setdiscovery.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/mercurial/setdiscovery.py	Tue Nov 26 21:55:21 2013 -0800
@@ -8,7 +8,8 @@
 
 from node import nullid
 from i18n import _
-import random, util, dagutil
+import random
+import util, dagutil
 
 def _updatesample(dag, nodes, sample, always, quicksamplesize=0):
     # if nodes is empty we scan the entire graph
--- a/mercurial/statichttprepo.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/mercurial/statichttprepo.py	Tue Nov 26 21:55:21 2013 -0800
@@ -54,8 +54,10 @@
             data = data[:bytes]
         self.pos += len(data)
         return data
+    def readlines(self):
+        return self.read().splitlines(True)
     def __iter__(self):
-        return iter(self.read().splitlines(1))
+        return iter(self.readlines())
     def close(self):
         pass
 
--- a/mercurial/subrepo.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/mercurial/subrepo.py	Tue Nov 26 21:55:21 2013 -0800
@@ -9,7 +9,8 @@
 import xml.dom.minidom
 import stat, subprocess, tarfile
 from i18n import _
-import config, scmutil, util, node, error, cmdutil, bookmarks, match as matchmod
+import config, util, node, error, cmdutil, bookmarks, match as matchmod
+import pathutil
 hg = None
 propertycache = util.propertycache
 
@@ -344,7 +345,7 @@
     import hg as h
     hg = h
 
-    scmutil.pathauditor(ctx._repo.root)(path)
+    pathutil.pathauditor(ctx._repo.root)(path)
     state = ctx.substate[path]
     if state[2] not in types:
         raise util.Abort(_('unknown subrepo type %s') % state[2])
--- a/mercurial/templatekw.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/mercurial/templatekw.py	Tue Nov 26 21:55:21 2013 -0800
@@ -220,11 +220,10 @@
     return '%s: +%s/-%s' % (len(stats), adds, removes)
 
 def showextras(**args):
-    templ = args['templ']
-    for key, value in sorted(args['ctx'].extra().items()):
-        args = args.copy()
-        args.update(dict(key=key, value=value))
-        yield templ('extra', **args)
+    """:extras: List of dicts with key, value entries of the 'extras'
+    field of this changeset."""
+    yield showlist('extra', sorted(dict(key=a, value=b)
+                   for (a, b) in args['ctx'].extra().items()), **args)
 
 def showfileadds(**args):
     """:file_adds: List of strings. Files added by this changeset."""
@@ -392,6 +391,7 @@
     'parents': _showparents,
 }
 dockeywords.update(keywords)
+del dockeywords['branches']
 
 # tell hggettext to extract docstrings from these functions:
 i18nfunctions = dockeywords.values()
--- a/mercurial/transaction.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/mercurial/transaction.py	Tue Nov 26 21:55:21 2013 -0800
@@ -12,8 +12,8 @@
 # GNU General Public License version 2 or any later version.
 
 from i18n import _
-import os, errno
-import error, util
+import errno
+import error
 
 def active(func):
     def _active(self, *args, **kwds):
@@ -35,14 +35,11 @@
                 raise
         else:
             try:
-                fp = opener(f)
-                fn = fp.name
-                fp.close()
-                util.unlink(fn)
+                opener.unlink(f)
             except (IOError, OSError), inst:
                 if inst.errno != errno.ENOENT:
                     raise
-    util.unlink(journal)
+    opener.unlink(journal)
 
 class transaction(object):
     def __init__(self, report, opener, journal, after=None, createmode=None):
@@ -56,9 +53,9 @@
         self.journal = journal
         self._queue = []
 
-        self.file = util.posixfile(self.journal, "w")
+        self.file = opener.open(self.journal, "w")
         if createmode is not None:
-            os.chmod(self.journal, createmode & 0666)
+            opener.chmod(self.journal, createmode & 0666)
 
     def __del__(self):
         if self.journal:
@@ -136,8 +133,8 @@
         self.entries = []
         if self.after:
             self.after()
-        if os.path.isfile(self.journal):
-            util.unlink(self.journal)
+        if self.opener.isfile(self.journal):
+            self.opener.unlink(self.journal)
         self.journal = None
 
     @active
@@ -155,7 +152,7 @@
         try:
             if not self.entries:
                 if self.journal:
-                    util.unlink(self.journal)
+                    self.opener.unlink(self.journal)
                 return
 
             self.report(_("transaction abort!\n"))
@@ -173,7 +170,7 @@
 def rollback(opener, file, report):
     entries = []
 
-    fp = util.posixfile(file)
+    fp = opener.open(file)
     lines = fp.readlines()
     fp.close()
     for l in lines:
--- a/mercurial/util.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/mercurial/util.py	Tue Nov 26 21:55:21 2013 -0800
@@ -14,9 +14,9 @@
 """
 
 from i18n import _
-import error, osutil, encoding, collections
+import error, osutil, encoding
 import errno, re, shutil, sys, tempfile, traceback
-import os, time, datetime, calendar, textwrap, signal
+import os, time, datetime, calendar, textwrap, signal, collections
 import imp, socket, urllib
 
 if os.name == 'nt':
@@ -1033,9 +1033,10 @@
     if t < 0:
         t = 0   # time.gmtime(lt) fails on Windows for lt < -43200
         tz = 0
-    if "%1" in format or "%2" in format:
+    if "%1" in format or "%2" in format or "%z" in format:
         sign = (tz > 0) and "-" or "+"
         minutes = abs(tz) // 60
+        format = format.replace("%z", "%1%2")
         format = format.replace("%1", "%c%02d" % (sign, minutes // 60))
         format = format.replace("%2", "%02d" % (minutes % 60))
     try:
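
datestr now understands "%z" directly by rewriting it into the existing
"%1%2" placeholders before substituting the sign, hours and minutes of the
timezone offset. A stand-alone trace with toy values; tz follows Mercurial's
convention of seconds west of UTC, so -3600 means UTC+1, matching the
"+0100" lines added to test-command-template.t below:

    format = "%Y-%m-%d %z"
    tz = -3600
    sign = (tz > 0) and "-" or "+"
    minutes = abs(tz) // 60
    format = format.replace("%z", "%1%2")
    format = format.replace("%1", "%c%02d" % (sign, minutes // 60))
    format = format.replace("%2", "%02d" % (minutes % 60))
    print(format)                            # "%Y-%m-%d +0100"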
--- a/mercurial/worker.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/mercurial/worker.py	Tue Nov 26 21:55:21 2013 -0800
@@ -6,7 +6,8 @@
 # GNU General Public License version 2 or any later version.
 
 from i18n import _
-import errno, os, signal, sys, threading, util
+import errno, os, signal, sys, threading
+import util
 
 def countcpus():
     '''try to count the number of CPUs on the system'''
--- a/tests/hghave.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/tests/hghave.py	Tue Nov 26 21:55:21 2013 -0800
@@ -233,6 +233,9 @@
     finally:
         os.rmdir(d)
 
+def has_root():
+    return os.geteuid() == 0
+
 def has_pyflakes():
     return matchoutput("sh -c \"echo 'import re' 2>&1 | pyflakes\"",
                        r"<stdin>:1: 're' imported but unused",
@@ -312,6 +315,7 @@
     "p4": (has_p4, "Perforce server and client"),
     "pyflakes": (has_pyflakes, "Pyflakes python linter"),
     "pygments": (has_pygments, "Pygments source highlighting library"),
+    "root": (has_root, "root permissions"),
     "serve": (has_serve, "platform and python can manage 'hg serve -d'"),
     "ssl": (has_ssl, "python >= 2.6 ssl module and python OpenSSL"),
     "svn": (has_svn, "subversion client and admin tools"),
--- a/tests/run-tests.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/tests/run-tests.py	Tue Nov 26 21:55:21 2013 -0800
@@ -103,6 +103,7 @@
 
 requiredtools = [os.path.basename(sys.executable), "diff", "grep", "unzip",
                  "gunzip", "bunzip2", "sed"]
+createdfiles = []
 
 defaults = {
     'jobs': ('HGTEST_JOBS', 1),
@@ -420,6 +421,11 @@
     if not options.keep_tmpdir:
         vlog("# Cleaning up HGTMP", HGTMP)
         shutil.rmtree(HGTMP, True)
+        for f in createdfiles:
+            try:
+                os.remove(f)
+            except OSError:
+                pass
 
 def usecorrectpython():
     # some tests run python interpreter. they must use same
@@ -439,6 +445,7 @@
         if findprogram(pyexename) != sys.executable:
             try:
                 os.symlink(sys.executable, mypython)
+                createdfiles.append(mypython)
             except OSError, err:
                 # child processes may race, which is harmless
                 if err.errno != errno.EEXIST:
@@ -498,18 +505,6 @@
 
     usecorrectpython()
 
-    vlog("# Installing dummy diffstat")
-    f = open(os.path.join(BINDIR, 'diffstat'), 'w')
-    f.write('#!' + sys.executable + '\n'
-            'import sys\n'
-            'files = 0\n'
-            'for line in sys.stdin:\n'
-            '    if line.startswith("diff "):\n'
-            '        files += 1\n'
-            'sys.stdout.write("files patched: %d\\n" % files)\n')
-    f.close()
-    os.chmod(os.path.join(BINDIR, 'diffstat'), 0700)
-
     if options.py3k_warnings and not options.anycoverage:
         vlog("# Updating hg command to enable Py3k Warnings switch")
         f = open(os.path.join(BINDIR, 'hg'), 'r')
@@ -1139,6 +1134,8 @@
         _checkhglib("Tested")
         print "# Ran %d tests, %d skipped, %d failed." % (
             tested, skipped + ignored, failed)
+        if results['!']:
+            print 'python hash seed:', os.environ['PYTHONHASHSEED']
         if options.time:
             outputtimes(options)
 
@@ -1190,7 +1187,6 @@
         # use a random python hash seed all the time
         # we do the randomness ourself to know what seed is used
         os.environ['PYTHONHASHSEED'] = str(random.getrandbits(32))
-        print 'python hash seed:', os.environ['PYTHONHASHSEED']
 
     global TESTDIR, HGTMP, INST, BINDIR, PYTHONDIR, COVERAGE_FILE
     TESTDIR = os.environ["TESTDIR"] = os.getcwd()
--- a/tests/test-blackbox.t	Tue Nov 26 16:14:22 2013 -0800
+++ b/tests/test-blackbox.t	Tue Nov 26 21:55:21 2013 -0800
@@ -65,7 +65,7 @@
   $ hg rollback
   repository tip rolled back to revision 1 (undo pull)
 
-#if unix-permissions
+#if unix-permissions no-root
   $ chmod 000 .hg/blackbox.log
   $ hg --debug incoming
   warning: cannot write to blackbox.log: Permission denied
@@ -98,7 +98,7 @@
   (run 'hg update' to get a working copy)
 
 a failure reading from the log is fine
-#if unix-permissions
+#if unix-permissions no-root
   $ hg blackbox -l 3
   abort: Permission denied: $TESTTMP/blackboxtest2/.hg/blackbox.log
   [255]
--- a/tests/test-clone.t	Tue Nov 26 16:14:22 2013 -0800
+++ b/tests/test-clone.t	Tue Nov 26 21:55:21 2013 -0800
@@ -543,7 +543,7 @@
   $ rm -rf b # work around bug with http clone
 
 
-#if unix-permissions
+#if unix-permissions no-root
 
 Inaccessible source
 
@@ -596,7 +596,7 @@
   [255]
 
 
-#if unix-permissions
+#if unix-permissions no-root
 
 leave existing directory in place after clone failure
 
--- a/tests/test-command-template.t	Tue Nov 26 16:14:22 2013 -0800
+++ b/tests/test-command-template.t	Tue Nov 26 21:55:21 2013 -0800
@@ -447,7 +447,7 @@
 
 Error if style not readable:
 
-#if unix-permissions
+#if unix-permissions no-root
   $ touch q
   $ chmod 0 q
   $ hg log --style ./q
@@ -479,7 +479,7 @@
 Error if include fails:
 
   $ echo 'changeset = q' >> t
-#if unix-permissions
+#if unix-permissions no-root
   $ hg log --style ./t
   abort: template file ./q: Permission denied
   [255]
@@ -1445,7 +1445,7 @@
   $ hg ci -m h2e -d '4 0'
 
   $ hg merge -q
-  $ hg ci -m merge -d '5 0'
+  $ hg ci -m merge -d '5 -3600'
 
 No tag set:
 
@@ -1533,7 +1533,7 @@
   > EOF
 
   $ hg -R latesttag tip
-  test 10:dee8f28249af
+  test 10:9b4a630e5f5f
 
 Test recursive showlist template (issue1989):
 
@@ -1587,6 +1587,21 @@
   b
   a
 
+Test date format:
+
+  $ hg log -R latesttag --template 'date: {date(date, "%y %m %d %S %z")}\n'
+  date: 70 01 01 10 +0000
+  date: 70 01 01 09 +0000
+  date: 70 01 01 08 +0000
+  date: 70 01 01 07 +0000
+  date: 70 01 01 06 +0000
+  date: 70 01 01 05 +0100
+  date: 70 01 01 04 +0000
+  date: 70 01 01 03 +0000
+  date: 70 01 01 02 +0000
+  date: 70 01 01 01 +0000
+  date: 70 01 01 00 +0000
+
 Test string escaping:
 
   $ hg log -R latesttag -r 0 --template '>\n<>\\n<{if(rev, "[>\n<>\\n<]")}>\n<>\\n<\n'
--- a/tests/test-convert.t	Tue Nov 26 16:14:22 2013 -0800
+++ b/tests/test-convert.t	Tue Nov 26 21:55:21 2013 -0800
@@ -310,7 +310,7 @@
   abort: cannot create new bundle repository
   [255]
 
-#if unix-permissions
+#if unix-permissions no-root
 
 conversion to dir without permissions should fail
 
--- a/tests/test-doctest.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/tests/test-doctest.py	Tue Nov 26 21:55:21 2013 -0800
@@ -1,49 +1,29 @@
 # this is hack to make sure no escape characters are inserted into the output
-import os
+import os, sys
 if 'TERM' in os.environ:
     del os.environ['TERM']
 import doctest
 
-import mercurial.util
-doctest.testmod(mercurial.util)
-# Only run doctests for the current platform
-doctest.testmod(mercurial.util.platform)
-
-import mercurial.changelog
-doctest.testmod(mercurial.changelog)
-
-import mercurial.dagparser
-doctest.testmod(mercurial.dagparser, optionflags=doctest.NORMALIZE_WHITESPACE)
-
-import mercurial.match
-doctest.testmod(mercurial.match)
-
-import mercurial.store
-doctest.testmod(mercurial.store)
-
-import mercurial.ui
-doctest.testmod(mercurial.ui)
+def testmod(name, optionflags=0, testtarget=None):
+    __import__(name)
+    mod = sys.modules[name]
+    if testtarget is not None:
+        mod = getattr(mod, testtarget)
+    doctest.testmod(mod, optionflags=optionflags)
 
-import mercurial.url
-doctest.testmod(mercurial.url)
-
-import mercurial.dispatch
-doctest.testmod(mercurial.dispatch)
-
-import mercurial.encoding
-doctest.testmod(mercurial.encoding)
-
-import mercurial.hgweb.hgwebdir_mod
-doctest.testmod(mercurial.hgweb.hgwebdir_mod)
-
-import hgext.convert.cvsps
-doctest.testmod(hgext.convert.cvsps)
-
-import mercurial.revset
-doctest.testmod(mercurial.revset)
-
-import mercurial.minirst
-doctest.testmod(mercurial.minirst)
-
-import mercurial.templatefilters
-doctest.testmod(mercurial.templatefilters)
+testmod('mercurial.changelog')
+testmod('mercurial.dagparser', optionflags=doctest.NORMALIZE_WHITESPACE)
+testmod('mercurial.dispatch')
+testmod('mercurial.encoding')
+testmod('mercurial.hgweb.hgwebdir_mod')
+testmod('mercurial.match')
+testmod('mercurial.minirst')
+testmod('mercurial.revset')
+testmod('mercurial.store')
+testmod('mercurial.templatefilters')
+testmod('mercurial.ui')
+testmod('mercurial.url')
+testmod('mercurial.util')
+testmod('mercurial.util', testtarget='platform')
+testmod('hgext.convert.cvsps')
+testmod('hgext.convert.filemap')
--- a/tests/test-filecache.py	Tue Nov 26 16:14:22 2013 -0800
+++ b/tests/test-filecache.py	Tue Nov 26 21:55:21 2013 -0800
@@ -18,9 +18,10 @@
     def sjoin(self, p):
         return p
 
-    @filecache('x')
+    @filecache('x', 'y')
     def cached(self):
         print 'creating'
+        return 'string from function'
 
     def invalidate(self):
         for k in self._filecache:
@@ -30,17 +31,20 @@
                 pass
 
 def basic(repo):
-    # file doesn't exist, calls function
+    print "* neither file exists"
+    # calls function
     repo.cached
 
     repo.invalidate()
-    # file still doesn't exist, uses cache
+    print "* neither file still exists"
+    # uses cache
     repo.cached
 
     # create empty file
     f = open('x', 'w')
     f.close()
     repo.invalidate()
+    print "* empty file x created"
     # should recreate the object
     repo.cached
 
@@ -48,11 +52,13 @@
     f.write('a')
     f.close()
     repo.invalidate()
+    print "* file x changed size"
     # should recreate the object
     repo.cached
 
     repo.invalidate()
-    # stats file again, nothing changed, reuses object
+    print "* nothing changed with either file"
+    # stats file again, reuses object
     repo.cached
 
     # atomic replace file, size doesn't change
@@ -63,6 +69,42 @@
     f.close()
 
     repo.invalidate()
+    print "* file x changed inode"
+    repo.cached
+
+    # create empty file y
+    f = open('y', 'w')
+    f.close()
+    repo.invalidate()
+    print "* empty file y created"
+    # should recreate the object
+    repo.cached
+
+    f = open('y', 'w')
+    f.write('A')
+    f.close()
+    repo.invalidate()
+    print "* file y changed size"
+    # should recreate the object
+    repo.cached
+
+    f = scmutil.opener('.')('y', 'w', atomictemp=True)
+    f.write('B')
+    f.close()
+
+    repo.invalidate()
+    print "* file y changed inode"
+    repo.cached
+
+    f = scmutil.opener('.')('x', 'w', atomictemp=True)
+    f.write('c')
+    f.close()
+    f = scmutil.opener('.')('y', 'w', atomictemp=True)
+    f.write('C')
+    f.close()
+
+    repo.invalidate()
+    print "* both files changed inode"
     repo.cached
 
 def fakeuncacheable():
@@ -76,10 +118,11 @@
     origcacheable = extensions.wrapfunction(util.cachestat, 'cacheable',
                                             wrapcacheable)
 
-    try:
-        os.remove('x')
-    except OSError:
-        pass
+    for fn in ['x', 'y']:
+        try:
+            os.remove(fn)
+        except OSError:
+            pass
 
     basic(fakerepo())
 
@@ -103,13 +146,28 @@
 
 def setbeforeget(repo):
     os.remove('x')
-    repo.cached = 0
+    os.remove('y')
+    repo.cached = 'string set externally'
     repo.invalidate()
+    print "* neither file exists"
     print repo.cached
     repo.invalidate()
     f = open('x', 'w')
     f.write('a')
     f.close()
+    print "* file x created"
+    print repo.cached
+
+    repo.cached = 'string 2 set externally'
+    repo.invalidate()
+    print "* string set externally again"
+    print repo.cached
+
+    repo.invalidate()
+    f = open('y', 'w')
+    f.write('b')
+    f.close()
+    print "* file y created"
     print repo.cached
 
 print 'basic:'
--- a/tests/test-filecache.py.out	Tue Nov 26 16:14:22 2013 -0800
+++ b/tests/test-filecache.py.out	Tue Nov 26 21:55:21 2013 -0800
@@ -1,17 +1,45 @@
 basic:
 
+* neither file exists
 creating
+* neither file still exists
+* empty file x created
+creating
+* file x changed size
 creating
+* nothing changed with either file
+* file x changed inode
 creating
+* empty file y created
+creating
+* file y changed size
+creating
+* file y changed inode
+creating
+* both files changed inode
 creating
 
 fakeuncacheable:
 
+* neither file exists
 creating
+* neither file still exists
 creating
+* empty file x created
+creating
+* file x changed size
+creating
+* nothing changed with either file
 creating
+* file x changed inode
 creating
+* empty file y created
 creating
+* file y changed size
+creating
+* file y changed inode
+creating
+* both files changed inode
 creating
 repository tip rolled back to revision -1 (undo commit)
 working directory now based on revision -1
@@ -20,6 +48,13 @@
 
 setbeforeget:
 
-0
+* neither file exists
+string set externally
+* file x created
 creating
-None
+string from function
+* string set externally again
+string 2 set externally
+* file y created
+creating
+string from function
--- a/tests/test-journal-exists.t	Tue Nov 26 16:14:22 2013 -0800
+++ b/tests/test-journal-exists.t	Tue Nov 26 21:55:21 2013 -0800
@@ -22,7 +22,7 @@
 
 Check that zero-size journals are correctly aborted:
 
-#if unix-permissions
+#if unix-permissions no-root
   $ hg bundle -qa repo.hg
   $ chmod -w foo/.hg/store/00changelog.i
 
--- a/tests/test-lock-badness.t	Tue Nov 26 16:14:22 2013 -0800
+++ b/tests/test-lock-badness.t	Tue Nov 26 21:55:21 2013 -0800
@@ -1,5 +1,4 @@
-  $ "$TESTDIR/hghave" unix-permissions || exit 80
-
+#if unix-permissions no-root
   $ hg init a
   $ echo a > a/a
   $ hg -R a ci -A -m a
@@ -21,4 +20,4 @@
   [255]
 
   $ chmod 700 a/.hg/store
-
+#endif
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-module-imports.t	Tue Nov 26 21:55:21 2013 -0800
@@ -0,0 +1,39 @@
+This code uses the ast module, which was new in 2.6, so we'll skip
+this test on anything earlier.
+  $ python -c 'import sys ; assert sys.version_info >= (2, 6)' || exit 80
+
+  $ import_checker="$TESTDIR"/../contrib/import-checker.py
+Run the doctests from the import checker, and make sure
+it's working correctly.
+  $ TERM=dumb
+  $ export TERM
+  $ python -m doctest $import_checker
+
+  $ cd "$TESTDIR"/..
+  $ if hg identify -q > /dev/null 2>&1; then :
+  > else
+  >     echo "skipped: not a Mercurial working dir" >&2
+  >     exit 80
+  > fi
+
+There are a handful of cases here that require renaming a module so it
+doesn't overlap with a stdlib module name. There are also some cycles
+here that we should still endeavor to fix, and some cycles will be
+hidden by the deduplication algorithm in the cycle detector, so fixing
+these may expose other cycles.
+
+  $ hg locate 'mercurial/**.py' | xargs python "$import_checker"
+  mercurial/dispatch.py mixed stdlib and relative imports:
+     commands, error, extensions, fancyopts, hg, hook, util
+  mercurial/fileset.py mixed stdlib and relative imports:
+     error, merge, parser, util
+  mercurial/revset.py mixed stdlib and relative imports:
+     discovery, error, hbisect, parser, phases, util
+  mercurial/templater.py mixed stdlib and relative imports:
+     config, error, parser, templatefilters, util
+  mercurial/ui.py mixed stdlib and relative imports:
+     config, error, formatter, scmutil, util
+  Import cycle: mercurial.cmdutil -> mercurial.subrepo -> mercurial.cmdutil
+  Import cycle: mercurial.repoview -> mercurial.revset -> mercurial.repoview
+  Import cycle: mercurial.fileset -> mercurial.merge -> mercurial.subrepo -> mercurial.match -> mercurial.fileset
+  Import cycle: mercurial.filemerge -> mercurial.match -> mercurial.fileset -> mercurial.merge -> mercurial.filemerge
--- a/tests/test-obsolete-checkheads.t	Tue Nov 26 16:14:22 2013 -0800
+++ b/tests/test-obsolete-checkheads.t	Tue Nov 26 21:55:21 2013 -0800
@@ -261,9 +261,6 @@
 
 Push should not complain about new heads.
 
-It should not complain about "unsynced remote changes!" either but that's not
-handled yet.
-
   $ hg push --traceback
   pushing to $TESTTMP/remote (glob)
   searching for changes
--- a/tests/test-permissions.t	Tue Nov 26 16:14:22 2013 -0800
+++ b/tests/test-permissions.t	Tue Nov 26 21:55:21 2013 -0800
@@ -1,4 +1,4 @@
-  $ "$TESTDIR/hghave" unix-permissions || exit 80
+#if unix-permissions no-root
 
   $ hg init t
   $ cd t
@@ -70,3 +70,5 @@
   $ chmod +rx dir
 
   $ cd ..
+
+#endif
--- a/tests/test-phases-exchange.t	Tue Nov 26 16:14:22 2013 -0800
+++ b/tests/test-phases-exchange.t	Tue Nov 26 21:55:21 2013 -0800
@@ -1062,7 +1062,7 @@
   |
   o  0 public a-A - 054250a37db4
   
-#if unix-permissions
+#if unix-permissions no-root
 
 Pushing From an unlockable repo
 --------------------------------
--- a/tests/test-pull-permission.t	Tue Nov 26 16:14:22 2013 -0800
+++ b/tests/test-pull-permission.t	Tue Nov 26 21:55:21 2013 -0800
@@ -1,4 +1,4 @@
-  $ "$TESTDIR/hghave" unix-permissions || exit 80
+#if unix-permissions no-root
 
   $ hg init a
   $ cd a
@@ -30,3 +30,5 @@
   1 files, 1 changesets, 1 total revisions
 
   $ cd ..
+
+#endif
--- a/tests/test-push-warn.t	Tue Nov 26 16:14:22 2013 -0800
+++ b/tests/test-push-warn.t	Tue Nov 26 21:55:21 2013 -0800
@@ -38,8 +38,8 @@
   query 2; still undecided: 1, sample size is: 1
   2 total queries
   listing keys for "bookmarks"
-  new remote heads on branch 'default'
-  new remote head 1e108cc5548c
+  new remote heads on branch 'default':
+   1e108cc5548c
   abort: push creates new remote head 1e108cc5548c!
   (pull and merge or see "hg help push" for details about pushing new heads)
   [255]
@@ -129,9 +129,9 @@
   $ hg push -v -r 3 -r 4 ../c
   pushing to ../c
   searching for changes
-  new remote heads on branch 'default'
-  new remote head a5dda829a167
-  new remote head ee8fbc7a0295
+  new remote heads on branch 'default':
+   a5dda829a167
+   ee8fbc7a0295
   abort: push creates new remote head a5dda829a167!
   (merge or see "hg help push" for details about pushing new heads)
   [255]
@@ -367,7 +367,7 @@
   $ hg push --branch f --new-branch ../f
   pushing to ../f
   searching for changes
-  abort: push creates multiple headed new branch 'f'
+  abort: push creates new branch 'f' with multiple heads
   (merge or see "hg help push" for details about pushing new heads)
   [255]
   $ hg push --branch f --new-branch --force ../f
--- a/tests/test-repair-strip.t	Tue Nov 26 16:14:22 2013 -0800
+++ b/tests/test-repair-strip.t	Tue Nov 26 21:55:21 2013 -0800
@@ -1,4 +1,4 @@
-  $ "$TESTDIR/hghave" unix-permissions || exit 80
+#if unix-permissions no-root
 
   $ echo "[extensions]" >> $HGRCPATH
   $ echo "mq=">> $HGRCPATH
@@ -130,3 +130,5 @@
   2 files, 2 changesets, 2 total revisions
 
   $ cd ..
+
+#endif
--- a/tests/test-serve.t	Tue Nov 26 16:14:22 2013 -0800
+++ b/tests/test-serve.t	Tue Nov 26 21:55:21 2013 -0800
@@ -45,12 +45,14 @@
 
 With -v and -p daytime (should fail because low port)
 
+#if no-root
   $ KILLQUIETLY=Y
   $ hgserve -p daytime
   abort: cannot start server at 'localhost:13': Permission denied
   abort: child process failed to start
   % errors
   $ KILLQUIETLY=N
+#endif
 
 With --prefix foo
 
--- a/tests/test-transplant.t	Tue Nov 26 16:14:22 2013 -0800
+++ b/tests/test-transplant.t	Tue Nov 26 21:55:21 2013 -0800
@@ -430,6 +430,20 @@
   adding manifests
   adding file changes
   added 4 changesets with 4 changes to 4 files
+
+test "--merge" causing pull from source repository on local host
+
+  $ hg --config extensions.mq= -q strip 2
+  $ hg transplant -s ../t --merge tip
+  searching for changes
+  searching for changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 2 changesets with 2 changes to 2 files
+  applying a53251cdf717
+  4:a53251cdf717 merged at 4831f4dc831a
+
   $ cd ..